aboutsummaryrefslogtreecommitdiffstats
path: root/contrib/python/pytest/py3/_pytest/unittest.py
diff options
context:
space:
mode:
author: arcadia-devtools <arcadia-devtools@yandex-team.ru> 2022-02-09 12:00:52 +0300
committer: Daniil Cherednik <dcherednik@yandex-team.ru> 2022-02-10 15:58:17 +0300
commit: 8e1413fed79d1e8036e65228af6c93399ccf5502 (patch)
tree: 502c9df7b2614d20541c7a2d39d390e9a51877cc /contrib/python/pytest/py3/_pytest/unittest.py
parent: 6b813c17d56d1d05f92c61ddc347d0e4d358fe85 (diff)
download: ydb-8e1413fed79d1e8036e65228af6c93399ccf5502.tar.gz
intermediate changes
ref:614ed510ddd3cdf86a8c5dbf19afd113397e0172
Diffstat (limited to 'contrib/python/pytest/py3/_pytest/unittest.py')
-rw-r--r--contrib/python/pytest/py3/_pytest/unittest.py243
1 file changed, 170 insertions(+), 73 deletions(-)
diff --git a/contrib/python/pytest/py3/_pytest/unittest.py b/contrib/python/pytest/py3/_pytest/unittest.py
index 36158c62d2f..55f15efe4b7 100644
--- a/contrib/python/pytest/py3/_pytest/unittest.py
+++ b/contrib/python/pytest/py3/_pytest/unittest.py
@@ -1,40 +1,70 @@
-""" discovery and running of std-library "unittest" style tests. """
+"""Discover and run std-library "unittest" style tests."""
import sys
import traceback
+import types
+from typing import Any
+from typing import Callable
+from typing import Generator
+from typing import Iterable
+from typing import List
+from typing import Optional
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import Union
import _pytest._code
import pytest
from _pytest.compat import getimfunc
from _pytest.compat import is_async_function
from _pytest.config import hookimpl
+from _pytest.fixtures import FixtureRequest
+from _pytest.nodes import Collector
+from _pytest.nodes import Item
from _pytest.outcomes import exit
from _pytest.outcomes import fail
from _pytest.outcomes import skip
from _pytest.outcomes import xfail
from _pytest.python import Class
from _pytest.python import Function
+from _pytest.python import PyCollector
from _pytest.runner import CallInfo
from _pytest.skipping import skipped_by_mark_key
from _pytest.skipping import unexpectedsuccess_key
+if TYPE_CHECKING:
+ import unittest
-def pytest_pycollect_makeitem(collector, name, obj):
- # has unittest been imported and is obj a subclass of its TestCase?
+ from _pytest.fixtures import _Scope
+
+ _SysExcInfoType = Union[
+ Tuple[Type[BaseException], BaseException, types.TracebackType],
+ Tuple[None, None, None],
+ ]
+
+
+def pytest_pycollect_makeitem(
+ collector: PyCollector, name: str, obj: object
+) -> Optional["UnitTestCase"]:
+ # Has unittest been imported and is obj a subclass of its TestCase?
try:
- if not issubclass(obj, sys.modules["unittest"].TestCase):
- return
+ ut = sys.modules["unittest"]
+ # Type ignored because `ut` is an opaque module.
+ if not issubclass(obj, ut.TestCase): # type: ignore
+ return None
except Exception:
- return
- # yes, so let's collect it
- return UnitTestCase.from_parent(collector, name=name, obj=obj)
+ return None
+ # Yes, so let's collect it.
+ item: UnitTestCase = UnitTestCase.from_parent(collector, name=name, obj=obj)
+ return item
class UnitTestCase(Class):
- # marker for fixturemanger.getfixtureinfo()
- # to declare that our children do not support funcargs
+ # Marker for fixturemanger.getfixtureinfo()
+ # to declare that our children do not support funcargs.
nofuncargs = True
- def collect(self):
+ def collect(self) -> Iterable[Union[Item, Collector]]:
from unittest import TestLoader
cls = self.obj
@@ -61,82 +91,128 @@ class UnitTestCase(Class):
runtest = getattr(self.obj, "runTest", None)
if runtest is not None:
ut = sys.modules.get("twisted.trial.unittest", None)
- if ut is None or runtest != ut.TestCase.runTest:
- # TODO: callobj consistency
+ # Type ignored because `ut` is an opaque module.
+ if ut is None or runtest != ut.TestCase.runTest: # type: ignore
yield TestCaseFunction.from_parent(self, name="runTest")
- def _inject_setup_teardown_fixtures(self, cls):
+ def _inject_setup_teardown_fixtures(self, cls: type) -> None:
"""Injects a hidden auto-use fixture to invoke setUpClass/setup_method and corresponding
- teardown functions (#517)"""
+ teardown functions (#517)."""
class_fixture = _make_xunit_fixture(
- cls, "setUpClass", "tearDownClass", scope="class", pass_self=False
+ cls,
+ "setUpClass",
+ "tearDownClass",
+ "doClassCleanups",
+ scope="class",
+ pass_self=False,
)
if class_fixture:
- cls.__pytest_class_setup = class_fixture
+ cls.__pytest_class_setup = class_fixture # type: ignore[attr-defined]
method_fixture = _make_xunit_fixture(
- cls, "setup_method", "teardown_method", scope="function", pass_self=True
+ cls,
+ "setup_method",
+ "teardown_method",
+ None,
+ scope="function",
+ pass_self=True,
)
if method_fixture:
- cls.__pytest_method_setup = method_fixture
+ cls.__pytest_method_setup = method_fixture # type: ignore[attr-defined]
-def _make_xunit_fixture(obj, setup_name, teardown_name, scope, pass_self):
+def _make_xunit_fixture(
+ obj: type,
+ setup_name: str,
+ teardown_name: str,
+ cleanup_name: Optional[str],
+ scope: "_Scope",
+ pass_self: bool,
+):
setup = getattr(obj, setup_name, None)
teardown = getattr(obj, teardown_name, None)
if setup is None and teardown is None:
return None
- @pytest.fixture(scope=scope, autouse=True)
- def fixture(self, request):
+ if cleanup_name:
+ cleanup = getattr(obj, cleanup_name, lambda *args: None)
+ else:
+
+ def cleanup(*args):
+ pass
+
+ @pytest.fixture(
+ scope=scope,
+ autouse=True,
+ # Use a unique name to speed up lookup.
+ name=f"unittest_{setup_name}_fixture_{obj.__qualname__}",
+ )
+ def fixture(self, request: FixtureRequest) -> Generator[None, None, None]:
if _is_skipped(self):
reason = self.__unittest_skip_why__
pytest.skip(reason)
if setup is not None:
- if pass_self:
- setup(self, request.function)
- else:
- setup()
+ try:
+ if pass_self:
+ setup(self, request.function)
+ else:
+ setup()
+ # unittest does not call the cleanup function for every BaseException, so we
+ # follow this here.
+ except Exception:
+ if pass_self:
+ cleanup(self)
+ else:
+ cleanup()
+
+ raise
yield
- if teardown is not None:
+ try:
+ if teardown is not None:
+ if pass_self:
+ teardown(self, request.function)
+ else:
+ teardown()
+ finally:
if pass_self:
- teardown(self, request.function)
+ cleanup(self)
else:
- teardown()
+ cleanup()
return fixture
class TestCaseFunction(Function):
nofuncargs = True
- _excinfo = None
- _testcase = None
-
- def setup(self):
- # a bound method to be called during teardown() if set (see 'runtest()')
- self._explicit_tearDown = None
- self._testcase = self.parent.obj(self.name)
+ _excinfo: Optional[List[_pytest._code.ExceptionInfo[BaseException]]] = None
+ _testcase: Optional["unittest.TestCase"] = None
+
+ def setup(self) -> None:
+ # A bound method to be called during teardown() if set (see 'runtest()').
+ self._explicit_tearDown: Optional[Callable[[], None]] = None
+ assert self.parent is not None
+ self._testcase = self.parent.obj(self.name) # type: ignore[attr-defined]
self._obj = getattr(self._testcase, self.name)
if hasattr(self, "_request"):
self._request._fillfixtures()
- def teardown(self):
+ def teardown(self) -> None:
if self._explicit_tearDown is not None:
self._explicit_tearDown()
self._explicit_tearDown = None
self._testcase = None
self._obj = None
- def startTest(self, testcase):
+ def startTest(self, testcase: "unittest.TestCase") -> None:
pass
- def _addexcinfo(self, rawexcinfo):
- # unwrap potential exception info (see twisted trial support below)
+ def _addexcinfo(self, rawexcinfo: "_SysExcInfoType") -> None:
+ # Unwrap potential exception info (see twisted trial support below).
rawexcinfo = getattr(rawexcinfo, "_rawexcinfo", rawexcinfo)
try:
- excinfo = _pytest._code.ExceptionInfo(rawexcinfo)
- # invoke the attributes to trigger storing the traceback
- # trial causes some issue there
+ excinfo = _pytest._code.ExceptionInfo(rawexcinfo) # type: ignore[arg-type]
+ # Invoke the attributes to trigger storing the traceback
+ # trial causes some issue there.
excinfo.value
excinfo.traceback
except TypeError:
@@ -151,7 +227,7 @@ class TestCaseFunction(Function):
fail("".join(values), pytrace=False)
except (fail.Exception, KeyboardInterrupt):
raise
- except: # noqa
+ except BaseException:
fail(
"ERROR: Unknown Incompatible Exception "
"representation:\n%r" % (rawexcinfo,),
@@ -163,7 +239,9 @@ class TestCaseFunction(Function):
excinfo = _pytest._code.ExceptionInfo.from_current()
self.__dict__.setdefault("_excinfo", []).append(excinfo)
- def addError(self, testcase, rawexcinfo):
+ def addError(
+ self, testcase: "unittest.TestCase", rawexcinfo: "_SysExcInfoType"
+ ) -> None:
try:
if isinstance(rawexcinfo[1], exit.Exception):
exit(rawexcinfo[1].msg)
@@ -171,68 +249,82 @@ class TestCaseFunction(Function):
pass
self._addexcinfo(rawexcinfo)
- def addFailure(self, testcase, rawexcinfo):
+ def addFailure(
+ self, testcase: "unittest.TestCase", rawexcinfo: "_SysExcInfoType"
+ ) -> None:
self._addexcinfo(rawexcinfo)
- def addSkip(self, testcase, reason):
+ def addSkip(self, testcase: "unittest.TestCase", reason: str) -> None:
try:
skip(reason)
except skip.Exception:
self._store[skipped_by_mark_key] = True
self._addexcinfo(sys.exc_info())
- def addExpectedFailure(self, testcase, rawexcinfo, reason=""):
+ def addExpectedFailure(
+ self,
+ testcase: "unittest.TestCase",
+ rawexcinfo: "_SysExcInfoType",
+ reason: str = "",
+ ) -> None:
try:
xfail(str(reason))
except xfail.Exception:
self._addexcinfo(sys.exc_info())
- def addUnexpectedSuccess(self, testcase, reason=""):
+ def addUnexpectedSuccess(
+ self, testcase: "unittest.TestCase", reason: str = ""
+ ) -> None:
self._store[unexpectedsuccess_key] = reason
- def addSuccess(self, testcase):
+ def addSuccess(self, testcase: "unittest.TestCase") -> None:
pass
- def stopTest(self, testcase):
+ def stopTest(self, testcase: "unittest.TestCase") -> None:
pass
def _expecting_failure(self, test_method) -> bool:
"""Return True if the given unittest method (or the entire class) is marked
- with @expectedFailure"""
+ with @expectedFailure."""
expecting_failure_method = getattr(
test_method, "__unittest_expecting_failure__", False
)
expecting_failure_class = getattr(self, "__unittest_expecting_failure__", False)
return bool(expecting_failure_class or expecting_failure_method)
- def runtest(self):
+ def runtest(self) -> None:
from _pytest.debugging import maybe_wrap_pytest_function_for_tracing
+ assert self._testcase is not None
+
maybe_wrap_pytest_function_for_tracing(self)
- # let the unittest framework handle async functions
+ # Let the unittest framework handle async functions.
if is_async_function(self.obj):
- self._testcase(self)
+ # Type ignored because self acts as the TestResult, but is not actually one.
+ self._testcase(result=self) # type: ignore[arg-type]
else:
- # when --pdb is given, we want to postpone calling tearDown() otherwise
+ # When --pdb is given, we want to postpone calling tearDown() otherwise
# when entering the pdb prompt, tearDown() would have probably cleaned up
- # instance variables, which makes it difficult to debug
- # arguably we could always postpone tearDown(), but this changes the moment where the
+ # instance variables, which makes it difficult to debug.
+ # Arguably we could always postpone tearDown(), but this changes the moment where the
# TestCase instance interacts with the results object, so better to only do it
- # when absolutely needed
+ # when absolutely needed.
if self.config.getoption("usepdb") and not _is_skipped(self.obj):
self._explicit_tearDown = self._testcase.tearDown
setattr(self._testcase, "tearDown", lambda *args: None)
- # we need to update the actual bound method with self.obj, because
- # wrap_pytest_function_for_tracing replaces self.obj by a wrapper
+ # We need to update the actual bound method with self.obj, because
+ # wrap_pytest_function_for_tracing replaces self.obj by a wrapper.
setattr(self._testcase, self.name, self.obj)
try:
- self._testcase(result=self)
+ self._testcase(result=self) # type: ignore[arg-type]
finally:
delattr(self._testcase, self.name)
- def _prunetraceback(self, excinfo):
+ def _prunetraceback(
+ self, excinfo: _pytest._code.ExceptionInfo[BaseException]
+ ) -> None:
Function._prunetraceback(self, excinfo)
traceback = excinfo.traceback.filter(
lambda x: not x.frame.f_globals.get("__unittest")
@@ -242,7 +334,7 @@ class TestCaseFunction(Function):
@hookimpl(tryfirst=True)
-def pytest_runtest_makereport(item, call):
+def pytest_runtest_makereport(item: Item, call: CallInfo[None]) -> None:
if isinstance(item, TestCaseFunction):
if item._excinfo:
call.excinfo = item._excinfo.pop(0)
@@ -252,21 +344,26 @@ def pytest_runtest_makereport(item, call):
pass
unittest = sys.modules.get("unittest")
- if unittest and call.excinfo and call.excinfo.errisinstance(unittest.SkipTest):
- # let's substitute the excinfo with a pytest.skip one
- call2 = CallInfo.from_call(
- lambda: pytest.skip(str(call.excinfo.value)), call.when
+ if (
+ unittest
+ and call.excinfo
+ and isinstance(call.excinfo.value, unittest.SkipTest) # type: ignore[attr-defined]
+ ):
+ excinfo = call.excinfo
+ # Let's substitute the excinfo with a pytest.skip one.
+ call2 = CallInfo[None].from_call(
+ lambda: pytest.skip(str(excinfo.value)), call.when
)
call.excinfo = call2.excinfo
-# twisted trial support
+# Twisted trial support.
@hookimpl(hookwrapper=True)
-def pytest_runtest_protocol(item):
+def pytest_runtest_protocol(item: Item) -> Generator[None, None, None]:
if isinstance(item, TestCaseFunction) and "twisted.trial.unittest" in sys.modules:
- ut = sys.modules["twisted.python.failure"]
+ ut: Any = sys.modules["twisted.python.failure"]
Failure__init__ = ut.Failure.__init__
check_testcase_implements_trial_reporter()
@@ -293,7 +390,7 @@ def pytest_runtest_protocol(item):
yield
-def check_testcase_implements_trial_reporter(done=[]):
+def check_testcase_implements_trial_reporter(done: List[int] = []) -> None:
if done:
return
from zope.interface import classImplements
@@ -304,5 +401,5 @@ def check_testcase_implements_trial_reporter(done=[]):
def _is_skipped(obj) -> bool:
- """Return True if the given object has been marked with @unittest.skip"""
+ """Return True if the given object has been marked with @unittest.skip."""
return bool(getattr(obj, "__unittest_skip__", False))