Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Rename DocTestRunner.skipped to skips
Document also DocTestRunner API for statistics, document its
attributes.
  • Loading branch information
vstinner committed Sep 2, 2023
commit e2fac2303fecf9ad5a462da3556a7d750ea9c5cd
26 changes: 23 additions & 3 deletions Doc/library/doctest.rst
Original file line number Diff line number Diff line change
Expand Up @@ -1427,8 +1427,7 @@ TestResults objects

Number of skipped tests.

.. versionchanged:: 3.13
Add :attr:`skipped` attribute.
.. versionadded:: 3.13


.. _doctest-doctestrunner:
Expand All @@ -1449,7 +1448,7 @@ DocTestRunner objects
passing a subclass of :class:`OutputChecker` to the constructor.

The test runner's display output can be controlled in two ways. First, an output
function can be passed to :meth:`TestRunner.run`; this function will be called
function can be passed to :meth:`run`; this function will be called
with strings that should be displayed. It defaults to ``sys.stdout.write``. If
capturing the output is not sufficient, then the display output can be also
customized by subclassing DocTestRunner, and overriding the methods
Expand All @@ -1470,6 +1469,10 @@ DocTestRunner objects
runner compares expected output to actual output, and how it displays failures.
For more information, see section :ref:`doctest-options`.

The test runner accumulates statistics. The aggregated number of attempted,
Comment thread
vstinner marked this conversation as resolved.
Outdated
failed and skipped examples is also available via the :attr:`tries`,
:attr:`failures` and :attr:`skips` attributes. The :meth:`run` and
:meth:`summarize` methods return a TestResults instance.
Comment thread
vstinner marked this conversation as resolved.
Outdated

:class:`DocTestRunner` defines the following methods:

Expand Down Expand Up @@ -1548,6 +1551,23 @@ DocTestRunner objects
verbosity is not specified, then the :class:`DocTestRunner`'s verbosity is
used.

:class:`DocTestRunner` has the following attributes:

.. attribute:: tries

Number of attempted examples.

.. attribute:: failures

Number of failed examples.

.. attribute:: skips

Number of skipped examples.

.. versionadded:: 3.13


.. _doctest-outputchecker:

OutputChecker objects
Expand Down
54 changes: 29 additions & 25 deletions Lib/doctest.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,8 @@ def __repr__(self):
f'attempted={self.attempted}, '
f'skipped={self.skipped})')
else:
# Leave the repr() unchanged for backward compatibility
# if skipped is zero
return super().__repr__()


Expand Down Expand Up @@ -1191,13 +1193,15 @@ class DocTestRunner:
Test passed.
TestResults(failed=0, attempted=7)

The aggregated number of tried examples and failed examples is
also available via the `tries` and `failures` attributes:
The aggregated number of tried examples and failed examples is also
available via the `tries`, `failures` and `skips` attributes:

>>> runner.tries
7
>>> runner.failures
0
>>> runner.skips
0

The comparison between expected outputs and actual outputs is done
by an `OutputChecker`. This comparison may be customized with a
Expand Down Expand Up @@ -1246,7 +1250,7 @@ def __init__(self, checker=None, verbose=None, optionflags=0):
# Keep track of the examples we've run.
self.tries = 0
self.failures = 0
self.skipped = 0
self.skips = 0
self._stats = {}

# Create a fake output target for capturing doctest output.
Expand Down Expand Up @@ -1319,8 +1323,8 @@ def __run(self, test, compileflags, out):
flags that should be used to execute examples. Return a TestResults
instance. The examples are run in the namespace `test.globs`.
"""
# Keep track of the number of failures, attempted and skipped.
failures = attempted = skipped = 0
# Keep track of the number of failed, attempted, skipped examples.
failures = attempted = skips = 0

# Save the option flags (since option directives can be used
# to modify them).
Expand Down Expand Up @@ -1350,7 +1354,7 @@ def __run(self, test, compileflags, out):

# If 'SKIP' is set, then skip this example.
if self.optionflags & SKIP:
skipped += 1
skips += 1
continue

# Record that we started this example.
Expand Down Expand Up @@ -1432,21 +1436,21 @@ def __run(self, test, compileflags, out):
self.optionflags = original_optionflags

# Record and return the number of failures and attempted.
self.__record_outcome(test, failures, attempted, skipped)
return TestResults(failures, attempted, skipped=skipped)
self.__record_outcome(test, failures, attempted, skips)
return TestResults(failures, attempted, skipped=skips)

def __record_outcome(self, test, failures, tries, skipped):
def __record_outcome(self, test, failures, tries, skips):
"""
Record the fact that the given DocTest (`test`) generated `failures`
failures out of `tries` tried examples.
"""
failures2, tries2, skipped2 = self._stats.get(test.name, (0, 0, 0))
failures2, tries2, skips2 = self._stats.get(test.name, (0, 0, 0))
self._stats[test.name] = (failures + failures2,
tries + tries2,
skipped + skipped2)
skips + skips2)
self.failures += failures
self.tries += tries
self.skipped += skipped
self.skips += skips

__LINECACHE_FILENAME_RE = re.compile(r'<doctest '
r'(?P<name>.+)'
Expand Down Expand Up @@ -1546,13 +1550,13 @@ def summarize(self, verbose=None):
notests = []
passed = []
failed = []
total_tries = total_failures = total_skipped = 0
total_tries = total_failures = total_skips = 0
for item in self._stats.items():
name, (failures, tries, skipped) = item
name, (failures, tries, skips) = item
assert failures <= tries
total_tries += tries
total_failures += failures
total_skipped += skipped
total_skips += skips
if tries == 0:
notests.append(name)
elif failures == 0:
Expand All @@ -1574,32 +1578,32 @@ def summarize(self, verbose=None):
print(self.DIVIDER)
print(f"{len(failed)} items had failures:")
failed.sort()
for name, (failures, tries, skipped) in failed:
for name, (failures, tries, skips) in failed:
print(f" {failures:3d} of {tries:3d} in {name}")
if verbose:
print(f"{total_tries} tests in {len(self._stats)} items.")
print(f"{total_tries - total_failures} passed and {total_failures} failed.")
if total_failures:
msg = f"***Test Failed*** {total_failures} failures"
if total_skipped:
msg = f"{msg} and {total_skipped} skipped tests"
if total_skips:
msg = f"{msg} and {total_skips} skipped tests"
print(f"{msg}.")
elif verbose:
print("Test passed.")
return TestResults(total_failures, total_tries, skipped=total_skipped)
return TestResults(total_failures, total_tries, skipped=total_skips)

#/////////////////////////////////////////////////////////////////
# Backward compatibility cruft to maintain doctest.master.
#/////////////////////////////////////////////////////////////////
def merge(self, other):
d = self._stats
for name, (failures, tries, skipped) in other._stats.items():
for name, (failures, tries, skips) in other._stats.items():
if name in d:
failures2, tries2, skipped2 = d[name]
failures2, tries2, skips2 = d[name]
failures = failures + failures2
tries = tries + tries2
skipped = skipped + skipped2
d[name] = (failures, tries, skipped)
skips = skips + skips2
d[name] = (failures, tries, skips)


class OutputChecker:
Expand Down Expand Up @@ -2000,7 +2004,7 @@ class doctest.Tester, then merges the results into (or creates)
else:
master.merge(runner)

return TestResults(runner.failures, runner.tries, skipped=runner.skipped)
return TestResults(runner.failures, runner.tries, skipped=runner.skips)


def testfile(filename, module_relative=True, name=None, package=None,
Expand Down Expand Up @@ -2124,7 +2128,7 @@ class doctest.Tester, then merges the results into (or creates)
else:
master.merge(runner)

return TestResults(runner.failures, runner.tries, skipped=runner.skipped)
return TestResults(runner.failures, runner.tries, skipped=runner.skips)


def run_docstring_examples(f, globs, verbose=False, name="NoName",
Expand Down