# Newer implementation (from pytest's _pytest/skipping.py): skip marks are
# looked up with Item.iter_markers(). MarkEvaluator, skip, and
# check_xfail_no_run are module-level names defined or imported elsewhere
# in the same file.
def pytest_runtest_setup(item):
    # Check if skip or skipif are specified as pytest marks
    item._skipped_by_mark = False
    eval_skipif = MarkEvaluator(item, "skipif")
    if eval_skipif.istrue():
        item._skipped_by_mark = True
        skip(eval_skipif.getexplanation())

    for skip_info in item.iter_markers(name="skip"):
        item._skipped_by_mark = True
        if "reason" in skip_info.kwargs:
            skip(skip_info.kwargs["reason"])
        elif skip_info.args:
            skip(skip_info.args[0])
        else:
            skip("unconditional skip")

    item._evalxfail = MarkEvaluator(item, "xfail")
    check_xfail_no_run(item)
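# Usage sketch for the hook above (illustration only, not part of
# _pytest/skipping.py): both marks below end up in pytest_runtest_setup --
# @pytest.mark.skip is handled unconditionally by the iter_markers() loop,
# @pytest.mark.skipif conditionally through MarkEvaluator.

import sys

import pytest


@pytest.mark.skip(reason="not implemented yet")
def test_always_skipped():
    raise AssertionError("never executed")


@pytest.mark.skipif(sys.version_info < (3, 8), reason="requires Python 3.8+")
def test_conditionally_skipped():
    assert True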
# Older implementation of the same hook: marks were looked up through
# item.keywords and type-checked against MarkInfo/MarkDecorator, which
# yields at most one mark object per name instead of iterating all of them.
def pytest_runtest_setup(item):
    # Check if skip or skipif are specified as pytest marks
    item._skipped_by_mark = False
    skipif_info = item.keywords.get('skipif')
    if isinstance(skipif_info, (MarkInfo, MarkDecorator)):
        eval_skipif = MarkEvaluator(item, 'skipif')
        if eval_skipif.istrue():
            item._skipped_by_mark = True
            skip(eval_skipif.getexplanation())

    skip_info = item.keywords.get('skip')
    if isinstance(skip_info, (MarkInfo, MarkDecorator)):
        item._skipped_by_mark = True
        if 'reason' in skip_info.kwargs:
            skip(skip_info.kwargs['reason'])
        elif skip_info.args:
            skip(skip_info.args[0])
        else:
            skip("unconditional skip")

    item._evalxfail = MarkEvaluator(item, 'xfail')
    check_xfail_no_run(item)
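# A related sketch for conftest.py/plugin authors (assumption: a modern pytest
# where Node.iter_markers()/get_closest_marker() replaced the keywords-based
# lookup shown in the older version above; the "wip" mark name is hypothetical):

import pytest


def pytest_runtest_setup(item):
    # get_closest_marker returns the nearest application of the named mark,
    # or None when the item is not marked at all.
    marker = item.get_closest_marker("wip")
    if marker is not None:
        pytest.skip("work in progress: " + marker.kwargs.get("reason", "unspecified"))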