Best Python code snippet using pytest
test_skipping.py
Source: test_skipping.py
...
    def test_skipif_markeval_namespace(self, pytester: Pytester) -> None:
        pytester.makeconftest(
            """
            import pytest
            def pytest_markeval_namespace():
                return {"color": "green"}
            """
        )
        p = pytester.makepyfile(
            """
            import pytest
            @pytest.mark.skipif("color == 'green'")
            def test_1():
                assert True
            @pytest.mark.skipif("color == 'red'")
            def test_2():
                assert True
            """
        )
        res = pytester.runpytest(p)
        assert res.ret == 0
        res.stdout.fnmatch_lines(["*1 skipped*"])
        res.stdout.fnmatch_lines(["*1 passed*"])

    def test_skipif_markeval_namespace_multiple(self, pytester: Pytester) -> None:
        """Keys defined by ``pytest_markeval_namespace()`` in nested plugins override top-level ones."""
        root = pytester.mkdir("root")
        root.joinpath("__init__.py").touch()
        root.joinpath("conftest.py").write_text(
            textwrap.dedent(
                """\
                import pytest
                def pytest_markeval_namespace():
                    return {"arg": "root"}
                """
            )
        )
        root.joinpath("test_root.py").write_text(
            textwrap.dedent(
                """\
                import pytest
                @pytest.mark.skipif("arg == 'root'")
                def test_root():
                    assert False
                """
            )
        )
        foo = root.joinpath("foo")
        foo.mkdir()
        foo.joinpath("__init__.py").touch()
        foo.joinpath("conftest.py").write_text(
            textwrap.dedent(
                """\
                import pytest
                def pytest_markeval_namespace():
                    return {"arg": "foo"}
                """
            )
        )
        foo.joinpath("test_foo.py").write_text(
            textwrap.dedent(
                """\
                import pytest
                @pytest.mark.skipif("arg == 'foo'")
                def test_foo():
                    assert False
                """
            )
        )
        bar = root.joinpath("bar")
        bar.mkdir()
        bar.joinpath("__init__.py").touch()
        bar.joinpath("conftest.py").write_text(
            textwrap.dedent(
                """\
                import pytest
                def pytest_markeval_namespace():
                    return {"arg": "bar"}
                """
            )
        )
        bar.joinpath("test_bar.py").write_text(
            textwrap.dedent(
                """\
                import pytest
                @pytest.mark.skipif("arg == 'bar'")
                def test_bar():
                    assert False
                """
            )
        )
        reprec = pytester.inline_run("-vs", "--capture=no")
        reprec.assertoutcome(skipped=3)

    def test_skipif_markeval_namespace_ValueError(self, pytester: Pytester) -> None:
        pytester.makeconftest(
            """
            import pytest
            def pytest_markeval_namespace():
                return True
            """
        )
        p = pytester.makepyfile(
            """
            import pytest
            @pytest.mark.skipif("color == 'green'")
            def test_1():
                assert True
            """
        )
        res = pytester.runpytest(p)
        assert res.ret == 1
        res.stdout.fnmatch_lines(
            [
                "*ValueError: pytest_markeval_namespace() needs to return a dict, got True*"
            ]
        )


class TestXFail:
    @pytest.mark.parametrize("strict", [True, False])
    def test_xfail_simple(self, pytester: Pytester, strict: bool) -> None:
        item = pytester.getitem(
            """
            import pytest
            @pytest.mark.xfail(strict=%s)
            def test_func():
                assert 0
            """
            % strict
        )
        reports = runtestprotocol(item, log=False)
        assert len(reports) == 3
        callreport = reports[1]
        assert callreport.skipped
        assert callreport.wasxfail == ""

    def test_xfail_xpassed(self, pytester: Pytester) -> None:
        item = pytester.getitem(
            """
            import pytest
            @pytest.mark.xfail(reason="this is an xfail")
            def test_func():
                assert 1
            """
        )
        reports = runtestprotocol(item, log=False)
        assert len(reports) == 3
        callreport = reports[1]
        assert callreport.passed
        assert callreport.wasxfail == "this is an xfail"

    def test_xfail_using_platform(self, pytester: Pytester) -> None:
        """Verify that platform can be used with xfail statements."""
        item = pytester.getitem(
            """
            import pytest
            @pytest.mark.xfail("platform.platform() == platform.platform()")
            def test_func():
                assert 0
            """
        )
        reports = runtestprotocol(item, log=False)
        assert len(reports) == 3
        callreport = reports[1]
        assert callreport.wasxfail

    def test_xfail_xpassed_strict(self, pytester: Pytester) -> None:
        item = pytester.getitem(
            """
            import pytest
            @pytest.mark.xfail(strict=True, reason="nope")
            def test_func():
                assert 1
            """
        )
        reports = runtestprotocol(item, log=False)
        assert len(reports) == 3
        callreport = reports[1]
        assert callreport.failed
        assert str(callreport.longrepr) == "[XPASS(strict)] nope"
        assert not hasattr(callreport, "wasxfail")

    def test_xfail_run_anyway(self, pytester: Pytester) -> None:
        pytester.makepyfile(
            """
            import pytest
            @pytest.mark.xfail
            def test_func():
                assert 0
            def test_func2():
                pytest.xfail("hello")
            """
        )
        result = pytester.runpytest("--runxfail")
        result.stdout.fnmatch_lines(
            ["*def test_func():*", "*assert 0*", "*1 failed*1 pass*"]
        )

    @pytest.mark.parametrize(
        "test_input,expected",
        [
            (
                ["-rs"],
                ["SKIPPED [1] test_sample.py:2: unconditional skip", "*1 skipped*"],
            ),
            (
                ["-rs", "--runxfail"],
                ["SKIPPED [1] test_sample.py:2: unconditional skip", "*1 skipped*"],
            ),
        ],
    )
    def test_xfail_run_with_skip_mark(
        self, pytester: Pytester, test_input, expected
    ) -> None:
        pytester.makepyfile(
            test_sample="""
            import pytest
            @pytest.mark.skip
            def test_skip_location() -> None:
                assert 0
            """
        )
        result = pytester.runpytest(*test_input)
        result.stdout.fnmatch_lines(expected)

    def test_xfail_evalfalse_but_fails(self, pytester: Pytester) -> None:
        item = pytester.getitem(
            """
            import pytest
            @pytest.mark.xfail('False')
            def test_func():
                assert 0
            """
        )
        reports = runtestprotocol(item, log=False)
        callreport = reports[1]
        assert callreport.failed
        assert not hasattr(callreport, "wasxfail")
        assert "xfail" in callreport.keywords

    def test_xfail_not_report_default(self, pytester: Pytester) -> None:
        p = pytester.makepyfile(
            test_one="""
            import pytest
            @pytest.mark.xfail
            def test_this():
                assert 0
            """
        )
        pytester.runpytest(p, "-v")
        # result.stdout.fnmatch_lines([
        #     "*HINT*use*-r*"
        # ])

    def test_xfail_not_run_xfail_reporting(self, pytester: Pytester) -> None:
        p = pytester.makepyfile(
            test_one="""
            import pytest
            @pytest.mark.xfail(run=False, reason="noway")
            def test_this():
                assert 0
            @pytest.mark.xfail("True", run=False)
            def test_this_true():
                assert 0
            @pytest.mark.xfail("False", run=False, reason="huh")
            def test_this_false():
                assert 1
            """
        )
        result = pytester.runpytest(p, "-rx")
        result.stdout.fnmatch_lines(
            [
                "*test_one*test_this*",
                "*NOTRUN*noway",
                "*test_one*test_this_true*",
                "*NOTRUN*condition:*True*",
                "*1 passed*",
            ]
        )

    def test_xfail_not_run_no_setup_run(self, pytester: Pytester) -> None:
        p = pytester.makepyfile(
            test_one="""
            import pytest
            @pytest.mark.xfail(run=False, reason="hello")
            def test_this():
                assert 0
            def setup_module(mod):
                raise ValueError(42)
            """
        )
        result = pytester.runpytest(p, "-rx")
        result.stdout.fnmatch_lines(
            ["*test_one*test_this*", "*NOTRUN*hello", "*1 xfailed*"]
        )

    def test_xfail_xpass(self, pytester: Pytester) -> None:
        p = pytester.makepyfile(
            test_one="""
            import pytest
            @pytest.mark.xfail
            def test_that():
                assert 1
            """
        )
        result = pytester.runpytest(p, "-rX")
        result.stdout.fnmatch_lines(["*XPASS*test_that*", "*1 xpassed*"])
        assert result.ret == 0

    def test_xfail_imperative(self, pytester: Pytester) -> None:
        p = pytester.makepyfile(
            """
            import pytest
            def test_this():
                pytest.xfail("hello")
            """
        )
        result = pytester.runpytest(p)
        result.stdout.fnmatch_lines(["*1 xfailed*"])
        result = pytester.runpytest(p, "-rx")
        result.stdout.fnmatch_lines(["*XFAIL*test_this*", "*reason:*hello*"])
        result = pytester.runpytest(p, "--runxfail")
        result.stdout.fnmatch_lines(["*1 pass*"])

    def test_xfail_imperative_in_setup_function(self, pytester: Pytester) -> None:
        p = pytester.makepyfile(
            """
            import pytest
            def setup_function(function):
                pytest.xfail("hello")
            def test_this():
                assert 0
            """
        )
        result = pytester.runpytest(p)
        result.stdout.fnmatch_lines(["*1 xfailed*"])
        result = pytester.runpytest(p, "-rx")
        result.stdout.fnmatch_lines(["*XFAIL*test_this*", "*reason:*hello*"])
        result = pytester.runpytest(p, "--runxfail")
        result.stdout.fnmatch_lines(
            """
            *def test_this*
            *1 fail*
            """
        )

    def xtest_dynamic_xfail_set_during_setup(self, pytester: Pytester) -> None:
        p = pytester.makepyfile(
            """
            import pytest
            def setup_function(function):
                pytest.mark.xfail(function)
            def test_this():
                assert 0
            def test_that():
                assert 1
            """
        )
        result = pytester.runpytest(p, "-rxX")
        result.stdout.fnmatch_lines(["*XFAIL*test_this*", "*XPASS*test_that*"])

    def test_dynamic_xfail_no_run(self, pytester: Pytester) -> None:
        p = pytester.makepyfile(
            """
            import pytest
            @pytest.fixture
            def arg(request):
                request.applymarker(pytest.mark.xfail(run=False))
            def test_this(arg):
                assert 0
            """
        )
        result = pytester.runpytest(p, "-rxX")
        result.stdout.fnmatch_lines(["*XFAIL*test_this*", "*NOTRUN*"])

    def test_dynamic_xfail_set_during_funcarg_setup(self, pytester: Pytester) -> None:
        p = pytester.makepyfile(
            """
            import pytest
            @pytest.fixture
            def arg(request):
                request.applymarker(pytest.mark.xfail)
            def test_this2(arg):
                assert 0
            """
        )
        result = pytester.runpytest(p)
        result.stdout.fnmatch_lines(["*1 xfailed*"])

    def test_dynamic_xfail_set_during_runtest_failed(self, pytester: Pytester) -> None:
        # Issue #7486.
        p = pytester.makepyfile(
            """
            import pytest
            def test_this(request):
                request.node.add_marker(pytest.mark.xfail(reason="xfail"))
                assert 0
            """
        )
        result = pytester.runpytest(p)
        result.assert_outcomes(xfailed=1)

    def test_dynamic_xfail_set_during_runtest_passed_strict(
        self, pytester: Pytester
    ) -> None:
        # Issue #7486.
        p = pytester.makepyfile(
            """
            import pytest
            def test_this(request):
                request.node.add_marker(pytest.mark.xfail(reason="xfail", strict=True))
            """
        )
        result = pytester.runpytest(p)
        result.assert_outcomes(failed=1)

    @pytest.mark.parametrize(
        "expected, actual, matchline",
        [
            ("TypeError", "TypeError", "*1 xfailed*"),
            ("(AttributeError, TypeError)", "TypeError", "*1 xfailed*"),
            ("TypeError", "IndexError", "*1 failed*"),
            ("(AttributeError, TypeError)", "IndexError", "*1 failed*"),
        ],
    )
    def test_xfail_raises(
        self, expected, actual, matchline, pytester: Pytester
    ) -> None:
        p = pytester.makepyfile(
            """
            import pytest
            @pytest.mark.xfail(raises=%s)
            def test_raises():
                raise %s()
            """
            % (expected, actual)
        )
        result = pytester.runpytest(p)
        result.stdout.fnmatch_lines([matchline])

    def test_strict_sanity(self, pytester: Pytester) -> None:
        """Sanity check for xfail(strict=True): a failing test should behave
        exactly like a normal xfail."""
        p = pytester.makepyfile(
            """
            import pytest
            @pytest.mark.xfail(reason='unsupported feature', strict=True)
            def test_foo():
                assert 0
            """
        )
        result = pytester.runpytest(p, "-rxX")
        result.stdout.fnmatch_lines(["*XFAIL*", "*unsupported feature*"])
        assert result.ret == 0

    @pytest.mark.parametrize("strict", [True, False])
    def test_strict_xfail(self, pytester: Pytester, strict: bool) -> None:
        p = pytester.makepyfile(
            """
            import pytest
            @pytest.mark.xfail(reason='unsupported feature', strict=%s)
            def test_foo():
                with open('foo_executed', 'w'): pass  # make sure test executes
            """
            % strict
        )
        result = pytester.runpytest(p, "-rxX")
        if strict:
            result.stdout.fnmatch_lines(
                ["*test_foo*", "*XPASS(strict)*unsupported feature*"]
            )
        else:
            result.stdout.fnmatch_lines(
                [
                    "*test_strict_xfail*",
                    "XPASS test_strict_xfail.py::test_foo unsupported feature",
                ]
            )
        assert result.ret == (1 if strict else 0)
        assert pytester.path.joinpath("foo_executed").exists()

    @pytest.mark.parametrize("strict", [True, False])
    def test_strict_xfail_condition(self, pytester: Pytester, strict: bool) -> None:
        p = pytester.makepyfile(
            """
            import pytest
            @pytest.mark.xfail(False, reason='unsupported feature', strict=%s)
            def test_foo():
                pass
            """
            % strict
        )
        result = pytester.runpytest(p, "-rxX")
        result.stdout.fnmatch_lines(["*1 passed*"])
        assert result.ret == 0

    @pytest.mark.parametrize("strict", [True, False])
    def test_xfail_condition_keyword(self, pytester: Pytester, strict: bool) -> None:
        p = pytester.makepyfile(
            """
            import pytest
            @pytest.mark.xfail(condition=False, reason='unsupported feature', strict=%s)
            def test_foo():
                pass
            """
            % strict
        )
        result = pytester.runpytest(p, "-rxX")
        result.stdout.fnmatch_lines(["*1 passed*"])
        assert result.ret == 0

    @pytest.mark.parametrize("strict_val", ["true", "false"])
    def test_strict_xfail_default_from_file(
        self, pytester: Pytester, strict_val
    ) -> None:
        pytester.makeini(
            """
            [pytest]
            xfail_strict = %s
            """
            % strict_val
        )
        p = pytester.makepyfile(
            """
            import pytest
            @pytest.mark.xfail(reason='unsupported feature')
            def test_foo():
                pass
            """
        )
        result = pytester.runpytest(p, "-rxX")
        strict = strict_val == "true"
        result.stdout.fnmatch_lines(["*1 failed*" if strict else "*1 xpassed*"])
        assert result.ret == (1 if strict else 0)

    def test_xfail_markeval_namespace(self, pytester: Pytester) -> None:
        pytester.makeconftest(
            """
            import pytest
            def pytest_markeval_namespace():
                return {"color": "green"}
            """
        )
        p = pytester.makepyfile(
            """
            import pytest
            @pytest.mark.xfail("color == 'green'")
            def test_1():
                assert False
            @pytest.mark.xfail("color == 'red'")
            def test_2():
                assert False
            """
        )
...
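The Pytester-based tests above exercise the pytest_markeval_namespace hook indirectly, through temporary test projects. Outside pytest's own test suite, the hook is simply a function defined in a conftest.py. The sketch below is illustrative (the file names and the "color" key are assumptions, not taken from the snippet) and shows how a dict returned by the hook becomes available to string conditions in skipif and xfail marks:

# conftest.py -- illustrative sketch of the hook used throughout the tests above
def pytest_markeval_namespace():
    # Each dict returned by this hook is merged into the namespace that
    # pytest uses to evaluate string conditions in skipif/xfail marks.
    return {"color": "green"}

# test_colors.py -- illustrative file name
import pytest

@pytest.mark.skipif("color == 'green'", reason="skipped on green builds")
def test_skipped_on_green():
    assert True

@pytest.mark.xfail("color == 'green'", reason="expected to fail on green builds")
def test_xfails_on_green():
    assert False

Running pytest against these two files should report one skipped and one xfailed test, mirroring the outcomes the tests above check with fnmatch_lines and assert_outcomes.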
skipping.py
Source: skipping.py
...84 "platform": platform,85 "config": item.config,86 }87 for dictionary in reversed(88 item.ihook.pytest_markeval_namespace(config=item.config)89 ):90 if not isinstance(dictionary, Mapping):91 raise ValueError(92 "pytest_markeval_namespace() needs to return a dict, got {!r}".format(93 dictionary94 )95 )96 globals_.update(dictionary)97 if hasattr(item, "obj"):98 globals_.update(item.obj.__globals__) # type: ignore[attr-defined]99 try:100 filename = f"<{mark.name} condition>"101 condition_code = compile(condition, filename, "eval")102 result = eval(condition_code, globals_)103 except SyntaxError as exc:104 msglines = [105 "Error evaluating %r condition" % mark.name,106 " " + condition,...
Looking for an in-depth tutorial on pytest? LambdaTest's detailed pytest tutorial covers everything from setting up the pytest framework to automation testing. Dig deeper into pytest by exploring advanced use cases such as parallel testing, pytest fixtures, parameterization, and running multiple test cases from a single file.
Skim the pytest tutorial playlist below to get started with automation testing using the pytest framework.
https://www.youtube.com/playlist?list=PLZMWkkQEwOPlcGgDmHl8KkXKeLF83XlrP