Example #1
    def __get_value(self, key, default):
        """
        Generic method to resolve value to be used during runtime for a particular setting/property.
        1. Attempt to retrieve value from the explicitly passed in kwargs during Runner initiation
        2. If value is still __undefined__, attempt to retrieve value from the config __IF__ config was provided to
           the Runner during the initiation
        3. If value is still __undefined__, will use default values
        :param key: STRING, property key aka features, owners, test_multithreading_limit etc
        :param default: DATA VALUE, value to default to aka None, False, True, 1 etc
        :return: DATA VALUE
        """
        value = Undefined  # we start with __undefined__, because None is a valid value
        source = "DEFAULTS"

        # if we have kwargs, attempt to retrieve value for the key
        if self.kwargs is not None:
            value = self.kwargs.get(key, Undefined)
            source = "KWARGS"

        # if value is still __undefined__ and config provided, will check the config for a value to use
        if value is Undefined and self.config is not None:
            source = "DEFAULTS"
            if key in self.config.config.options("runtime"):
                value = self.config.get_value(key)
                if value is not Undefined:
                    try:
                        value = ast.literal_eval(value)
                    except (ValueError, SyntaxError):
                        # value is not a Python literal (e.g. a plain string) - keep it as-is
                        pass
                    source = "CONFIG @ {}".format(self.config.path)

        LogJunkie.debug("Setting: {setting} Source: {source}".format(
            setting=key, source=source))
        # if value is still __undefined__, will return default value
        return value if value is not Undefined else default

    def get_copy(value):
        try:
            return copy.deepcopy(value)
        except:
            LogJunkie.error("Failed to deepcopy: {}. Metrics may be missing in the HTML report.".format(value))
            LogJunkie.error(traceback.format_exc())
            return None
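
The `__get_value` method above resolves each runtime setting by precedence: explicit kwargs first, then the config file, then a hard default, using a sentinel so that None stays a legal value. A minimal standalone sketch of that precedence idea, with a hypothetical `_UNDEFINED` sentinel and plain dicts standing in for the Runner kwargs and config object:

_UNDEFINED = object()  # hypothetical sentinel; test_junkie uses its own Undefined marker


def resolve_setting(key, default, kwargs=None, config=None):
    value = _UNDEFINED  # start undefined, because None is a valid value
    if kwargs is not None:
        value = kwargs.get(key, _UNDEFINED)            # 1. explicit kwargs win
    if value is _UNDEFINED and config is not None:
        value = config.get(key, _UNDEFINED)            # 2. then the config
    return default if value is _UNDEFINED else value   # 3. then the default


# explicit kwargs win, the config fills gaps, the default is the last resort
assert resolve_setting("test_multithreading_limit", 1, kwargs={"test_multithreading_limit": 4}) == 4
assert resolve_setting("owners", None, kwargs={}, config={"owners": ["me"]}) == ["me"]
assert resolve_setting("features", None, kwargs={}) is None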
Example #3
    def suite_limit_reached(self):
        active = 0
        for suite, data in ParallelProcessor.__PARALLELS.items():
            if data["thread"].isAlive():
                active += 1
        if active >= self.__suite_limit:
            LogJunkie.debug("Suite limit: {}/{}".format(
                active, self.__suite_limit))
            return True
        return False

    def test_limit_reached(self, parallels):
        active = 0
        for parallel in parallels:
            if parallel.isAlive():
                active += 1
        if active >= self.__test_limit:
            LogJunkie.debug("Test limit: {}/{}".format(active,
                                                       self.__test_limit))
            return True
        return False
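
Both limit checks above count worker threads that are still running via `isAlive()`, a method name that was removed in Python 3.9 in favor of `is_alive()`. A small self-contained sketch of the same counting pattern with the modern name; the `threading.Thread` workers here are stand-ins for the threads the processor tracks:

import threading
import time


def limit_reached(threads, limit):
    # count workers that are still running, same idea as the limit checks above
    active = sum(1 for t in threads if t.is_alive())  # isAlive() was removed in Python 3.9
    return active >= limit


workers = [threading.Thread(target=time.sleep, args=(0.2,)) for _ in range(3)]
for worker in workers:
    worker.start()
print(limit_reached(workers, limit=2))  # True while at least two workers are still sleeping
for worker in workers:
    worker.join()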
Example #5
    def run(self):
        parser = argparse.ArgumentParser(
            description='Run tests from command line',
            usage="tj run [OPTIONS]")

        parser.add_argument("-x",
                            "--suites",
                            nargs="+",
                            default=None,
                            help="Test Junkie will only run suites provided, "
                            "given that they are found in the SOURCE")

        parser.add_argument(
            "-v",
            "--verbose",
            action="store_true",
            default=False,
            help="Enables Test Junkie's logs for debugging purposes")

        parser.add_argument(
            "--config",
            type=str,
            default=Undefined,
            help=
            "Provide your own config FILE with settings for test execution.")

        CliUtils.add_standard_tj_args(parser)

        args = parser.parse_args(sys.argv[2:])

        if args.verbose:
            from test_junkie.debugger import LogJunkie
            LogJunkie.enable_logging(10)

        from test_junkie.cli.cli_runner import CliRunner
        try:
            tj = CliRunner(sources=args.sources,
                           ignore=[".git"],
                           suites=args.suites,
                           code_cov=args.code_cov,
                           cov_rcfile=args.cov_rcfile,
                           guess_root=args.guess_root,
                           config=args.config)
            tj.scan()
        except BadCliParameters as error:
            print("[{status}] {error}".format(
                status=CliUtils.format_color_string("ERROR", "red"),
                error=error))
            return
        tj.run_suites(args)
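
The command parses `sys.argv[2:]` because the first two entries (`tj` and `run`) are consumed by the top-level dispatcher before this method runs. A hedged sketch of that pattern with a simplified parser (the real CLI adds more arguments via `CliUtils.add_standard_tj_args`):

import argparse


def parse_run_args(argv):
    # parse only what follows the sub-command, like parser.parse_args(sys.argv[2:]) above
    parser = argparse.ArgumentParser(usage="tj run [OPTIONS]")
    parser.add_argument("-x", "--suites", nargs="+", default=None)
    parser.add_argument("-v", "--verbose", action="store_true", default=False)
    parser.add_argument("--config", type=str, default=None)
    return parser.parse_args(argv)


# simulates `tj run -v -x LoginSuite`; in the real CLI the list would be sys.argv[2:]
args = parse_run_args(["-v", "-x", "LoginSuite"])
print(args.verbose, args.suites)  # True ['LoginSuite']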
Example #6
    def test_limit_reached(self):
        active = 0
        for suite, info in list(ParallelProcessor.__PARALLELS.items()):
            for test in list(info["tests"]):
                if test["thread"].isAlive():
                    active += 1
                else:
                    # so we don't accumulate a large amount of stale data we don't need
                    ParallelProcessor.__PARALLELS[suite]["tests"].remove(test)
        if active >= self.__test_limit:
            LogJunkie.debug("Test limit: {}/{}".format(active,
                                                       self.__test_limit))
            return True
        return False

    def _passes_restriction():
        """
        If the current suite does not have any active restrictions, we can run it
        :return: BOOLEAN
        """
        if ParallelProcessor.__PARALLELS.get(restriction, None) is not None:
            if ParallelProcessor.__PARALLELS[restriction]["thread"].isAlive():
                LogJunkie.debug(
                    "Suite: {} can't run while: {} is running.".format(
                        suite.get_class_object(), restriction))
                return False
        return True

    def _build_reverse_restriction():
        """
        A bidirectional parallel restriction will be added automatically:
        if suite `A` is restricted from running while suite `B` is running, suite `B` will automatically be
        restricted from running while suite `A` is running
        :return: None
        """
        if restriction not in ParallelProcessor.__REVERSE_PARALLEL_RESTRICTIONS:
            ParallelProcessor.__REVERSE_PARALLEL_RESTRICTIONS.update(
                {restriction: [test.get_function_object()]})
        elif test.get_function_object() not in \
                ParallelProcessor.__REVERSE_PARALLEL_RESTRICTIONS[restriction]:
            ParallelProcessor.__REVERSE_PARALLEL_RESTRICTIONS[
                restriction].append(test.get_function_object())
        else:
            return  # nothing to add - return to avoid logging
        LogJunkie.debug(
            "Added reverse test restriction! {} will not be processed while test: {} is running"
            .format(restriction, test.get_function_object()))

    def _passes_reverse_restriction():
        """
        If the current suite is part of a parallel restriction in another suite which is currently active,
        we can't run it.
        :return: BOOLEAN
        """
        if suite.get_class_object() in ParallelProcessor.__REVERSE_PARALLEL_RESTRICTIONS:
            reverse_suites = ParallelProcessor.__REVERSE_PARALLEL_RESTRICTIONS[
                suite.get_class_object()]
            for reverse_suite in reverse_suites:
                if ParallelProcessor.__PARALLELS.get(reverse_suite, None) is not None:
                    if ParallelProcessor.__PARALLELS[reverse_suite]["thread"].isAlive():
                        LogJunkie.debug(
                            "Suite: {} can't run while: {} is running due to a reverse restriction."
                            .format(suite.get_class_object(), reverse_suite))
                        return False
        return True
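
The `_build_reverse_restriction` helpers above make parallel restrictions bidirectional: if A declares it cannot run alongside B, then B is automatically barred from running while A is active. A simplified standalone sketch of that bookkeeping, using plain string names and dicts instead of the real class/function objects and thread records:

RESTRICTIONS = {"SuiteA": ["SuiteB"]}  # declared: A cannot run while B is running
REVERSE_RESTRICTIONS = {}              # derived: B cannot run while A is running


def build_reverse_restrictions():
    for suite, restricted_by in RESTRICTIONS.items():
        for restriction in restricted_by:
            REVERSE_RESTRICTIONS.setdefault(restriction, [])
            if suite not in REVERSE_RESTRICTIONS[restriction]:
                REVERSE_RESTRICTIONS[restriction].append(suite)


def can_run(suite, running):
    # blocked either by its own restrictions or by a reverse restriction
    if any(other in running for other in RESTRICTIONS.get(suite, [])):
        return False
    if any(other in running for other in REVERSE_RESTRICTIONS.get(suite, [])):
        return False
    return True


build_reverse_restrictions()
assert can_run("SuiteA", running={"SuiteB"}) is False  # direct restriction
assert can_run("SuiteB", running={"SuiteA"}) is False  # reverse restriction
assert can_run("SuiteB", running=set()) is True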
Example #10
    def __run_suite(self, suite):
        def before_group_rule_failed():
            for group, _result in self.__before_group_failure_records.items():
                if suite.get_class_object() in _result["definition"]["suites"]:
                    return _result["trace"]

        suite_start_time = time.time()
        unsuccessful_tests = None
        exception = Runner.__validate_suite_parameters(suite)

        if not exception:
            exception = before_group_rule_failed()
            if not exception:
                result = self.__group_rules.run_before_group(
                    suite, DecoratorType.BEFORE_GROUP)
                if result is not None:
                    self.__before_group_failure_records.update(result)
                    exception = result[list(result.keys())[0]]["trace"]

        if not suite.can_skip(
                self.__settings) and not self.__cancel and not exception:
            Runner.__process_event(event=Event.ON_CLASS_IN_PROGRESS,
                                   suite=suite)
            for suite_retry_attempt in range(1, suite.get_retry_limit() + 1):
                if suite_retry_attempt == 1 or suite.get_status(
                ) in SuiteCategory.ALL_UN_SUCCESSFUL:

                    for class_param in suite.get_parameters(
                            process_functions=True):
                        LogJunkie.debug("Running suite: {}".format(
                            suite.get_class_object()))
                        LogJunkie.debug(
                            "Suite Retry {}/{} with Param: {}".format(
                                suite_retry_attempt, suite.get_retry_limit(),
                                class_param))

                        before_class_error = Runner.__run_before_class(
                            suite, class_param)

                        if suite_retry_attempt > 1:
                            unsuccessful_tests = suite.get_unsuccessful_tests()
                            LogJunkie.debug(
                                "There are {} unsuccessful tests that need to be retried"
                                .format(len(unsuccessful_tests)))
                            if not unsuccessful_tests:
                                break
                            tests = unsuccessful_tests
                        else:
                            tests = list(suite.get_test_objects())

                        while tests:
                            for test in list(tests):

                                test_start_time = time.time(
                                )  # will be used in case of a failure within this loop

                                if not self.__positive_skip_condition(test=test) and \
                                        Runner.__runnable_tags(test=test, tag_config=self.__settings.tags):

                                    if not test.is_parallelized():
                                        LogJunkie.debug(
                                            "Cant run test: {} in parallel with any other tests"
                                            .format(
                                                test.get_function_object()))
                                        ParallelProcessor.wait_currently_active_tests_to_finish(
                                        )

                                    bad_params = Runner.__validate_test_parameters(
                                        test)
                                    if bad_params is not None:
                                        tests.remove(test)
                                        test.metrics.update_metrics(
                                            status=TestCategory.IGNORE,
                                            start_time=test_start_time,
                                            exception=bad_params["exception"],
                                            formatted_traceback=bad_params[
                                                "trace"])
                                        Runner.__process_event(
                                            event=Event.ON_IGNORE,
                                            suite=suite,
                                            test=test,
                                            class_param=class_param,
                                            error=bad_params)
                                        continue

                                    while not self.__processor.test_qualifies(
                                            test):
                                        time.sleep(0.2)
                                        if test.get_priority() is None:
                                            continue

                                    for param in test.get_parameters(
                                            process_functions=True):
                                        if unsuccessful_tests is not None and \
                                                not test.is_qualified_for_retry(param, class_param=class_param):
                                            # if it does not qualify with the current parameter, move on to the next one
                                            continue
                                        if ((self.__processor.
                                             test_multithreading()
                                             and param is None) or
                                            (self.__processor.
                                             test_multithreading() and
                                             test.parallelized_parameters()
                                             and param is not None)):

                                            while self.__processor.test_limit_reached(
                                            ):
                                                time.sleep(0.2)
                                            time.sleep(
                                                Limiter.get_test_throttling())
                                            self.__processor.run_test_in_a_thread(
                                                Runner.__run_test, suite, test,
                                                param, class_param,
                                                before_class_error,
                                                self.__cancel)
                                        else:
                                            Runner.__run_test(
                                                suite=suite,
                                                test=test,
                                                parameter=param,
                                                class_parameter=class_param,
                                                before_class_error=
                                                before_class_error,
                                                cancel=self.__cancel)
                                    tests.remove(test)

                                else:
                                    tests.remove(test)
                                    test.metrics.update_metrics(
                                        status=TestCategory.SKIP,
                                        start_time=test_start_time)
                                    Runner.__process_event(
                                        event=Event.ON_SKIP,
                                        suite=suite,
                                        test=test,
                                        class_param=class_param)
                        ParallelProcessor.wait_currently_active_tests_to_finish(
                        )
                        Runner.__run_after_class(suite, class_param)
                    suite.metrics.update_suite_metrics(
                        status=SuiteCategory.FAIL
                        if suite.has_unsuccessful_tests() else
                        SuiteCategory.SUCCESS,
                        start_time=suite_start_time)
            Runner.__process_event(event=Event.ON_CLASS_COMPLETE, suite=suite)
            after_group_failed = self.__group_rules.run_after_group(suite)
            if after_group_failed:
                event = Event.ON_AFTER_GROUP_FAIL if isinstance(
                    after_group_failed["exception"],
                    AssertionError) else Event.ON_AFTER_GROUP_ERROR
                Runner.__process_event(
                    event=event,
                    suite=suite,
                    error=after_group_failed["exception"],
                    formatted_traceback=after_group_failed["trace"])
        elif self.__cancel:
            suite.metrics.update_suite_metrics(status=SuiteCategory.CANCEL,
                                               start_time=suite_start_time)
            Runner.__process_event(event=Event.ON_CLASS_CANCEL, suite=suite)
        elif exception or before_group_rule_failed():
            suite.metrics.update_suite_metrics(status=SuiteCategory.IGNORE,
                                               start_time=suite_start_time,
                                               initiation_error=exception)
            Runner.__process_event(event=Event.ON_CLASS_IGNORE, suite=suite)
        else:
            suite.metrics.update_suite_metrics(status=SuiteCategory.SKIP,
                                               start_time=suite_start_time)
            Runner.__process_event(event=Event.ON_CLASS_SKIP, suite=suite)
Example #11
    def run(self, **kwargs):
        """
        Initiates the execution process that runs tests
        :return: None
        """
        self.__settings = Settings(runner_kwargs=self.__kwargs,
                                   run_kwargs=kwargs)
        initial_start_time = time.time()
        resource_monitor = None
        try:
            if self.__settings.monitor_resources:
                resource_monitor = ResourceMonitor()
                resource_monitor.start()
            self.__processor = ParallelProcessor(self.__settings)

            with suppressed_stdout(self.__settings.quiet):
                while self.__suites:
                    for suite in list(self.__suites):
                        suite_object = Builder.get_execution_roster().get(
                            suite, None)
                        if suite_object is not None:
                            if self.__processor.suite_multithreading(
                            ) and suite_object.is_parallelized():
                                while True:
                                    if self.__processor.suite_qualifies(
                                            suite_object):
                                        time.sleep(
                                            Limiter.get_suite_throttling())
                                        self.__executed_suites.append(
                                            suite_object)
                                        ParallelProcessor.run_suite_in_a_thread(
                                            self.__run_suite, suite_object)
                                        self.__suites.remove(suite)
                                        break
                                    elif suite_object.get_priority() is None:
                                        break
                                    else:
                                        time.sleep(1)
                            else:
                                if not suite_object.is_parallelized():
                                    LogJunkie.debug(
                                        "Cant run suite: {} in parallel with any other suites. Waiting for "
                                        "parallel suites to finish so I can run it by itself."
                                        .format(
                                            suite_object.get_class_object()))
                                    ParallelProcessor.wait_currently_active_suites_to_finish(
                                    )
                                self.__executed_suites.append(suite_object)
                                self.__run_suite(suite_object)
                                self.__suites.remove(suite)
                        else:
                            LogJunkie.warn(
                                "Suite: {} not found! Make sure that your input is correct. "
                                "If it is, make sure the use of Test Junkie's decorators "
                                "is correct.".format(suite))
                            self.__suites.remove(suite)
                    LogJunkie.debug("{} Suite(s) left in queue.".format(
                        len(self.__suites)))
                    time.sleep(0.2)

                ParallelProcessor.wait_currently_active_suites_to_finish()
        finally:
            if self.__settings.monitor_resources:
                resource_monitor.shutdown()

        runtime = time.time() - initial_start_time
        print("========== Test Junkie finished in {:0.2f} seconds ==========".
              format(runtime))
        aggregator = Aggregator(self.get_executed_suites())
        Aggregator.present_console_output(aggregator)
        if self.__settings.html_report:
            reporter = Reporter(
                monitoring_file=resource_monitor.get_file_path()
                if resource_monitor is not None else None,
                runtime=runtime,
                aggregator=aggregator,
                multi_threading_enabled=self.__processor.test_multithreading()
                or self.__processor.suite_multithreading())
            reporter.generate_html_report(self.__settings.html_report)
        XmlReporter.create_xml_report(write_file=self.__settings.xml_report,
                                      suites=self.get_executed_suites())
        if self.__settings.monitor_resources:
            resource_monitor.cleanup()
        return aggregator
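
`run()` wraps suite execution in `suppressed_stdout(self.__settings.quiet)` so that console output can be silenced when the quiet setting is on. The real helper ships with test_junkie; a minimal sketch of how such a context manager could be built from the standard library, assuming the same quiet-flag semantics:

import contextlib
import os
import sys


@contextlib.contextmanager
def suppressed_stdout(quiet):
    # when quiet is falsy do nothing, otherwise route stdout to os.devnull
    if not quiet:
        yield
        return
    with open(os.devnull, "w") as devnull, contextlib.redirect_stdout(devnull):
        yield


with suppressed_stdout(quiet=True):
    print("this line is swallowed")
print("this line is visible", file=sys.stdout)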
    def create_xml_report(write_file, suites):

        def __update_tag_stats(tag, status):

            tag.set("tests", str(int(suite.get("tests")) + 1))
            if status == TestCategory.SUCCESS:
                tag.set("passed", str(int(tag.get("passed")) + 1))
            else:
                tag.set("failures", str(int(tag.get("failures")) + 1))
            return tag

        if write_file is not None:
            try:
                import os
                from xml.etree.ElementTree import ElementTree, Element, SubElement
                import xml
                if not os.path.exists(write_file):
                    request = Element("root")
                    ElementTree(request).write(write_file)

                xml_file = xml.etree.ElementTree.parse(write_file)
                root = xml_file.getroot()

                for suite_object in suites:

                    test_suite = suite_object.get_class_name()
                    tests = suite_object.get_test_objects()

                    for test_object in tests:

                        test_name = test_object.get_function_name()
                        test_metrics = test_object.metrics.get_metrics()

                        for class_param, class_param_data in test_metrics.items():
                            for param, param_data in class_param_data.items():

                                test_status = param_data["status"]
                                if test_status != TestCategory.SUCCESS:
                                    test_status = "failure"
                                suite_found = False

                                for suite in root.iter("testsuite"):
                                    suite_found = suite.attrib["name"] == test_suite
                                    if suite_found:
                                        __update_tag_stats(suite, test_status)
                                        test = Element("testcase", name=str(test_name), status=str(test_status))
                                        if test_status == "failure":
                                            failure = Element("failure", type="failure")
                                            test.append(failure)
                                        suite.append(test)
                                        ElementTree(root).write(write_file)
                                        break

                                if not suite_found:
                                    suite = SubElement(root, "testsuite", name=test_suite,
                                                       tests="0", passed="0", failures="0")
                                    __update_tag_stats(suite, test_status)
                                    test = SubElement(suite, "testcase", name=str(test_name), status=str(test_status))
                                    if test_status == "failure":
                                        SubElement(test, "failure", type="failure")
                                    ElementTree(root).write(write_file)
            except:
                LogJunkie.error(traceback.format_exc())
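
`create_xml_report` appends one `testsuite` element per suite and one `testcase` element per test parameterization, keeping `tests`/`passed`/`failures` counters on the suite tag. A short sketch of the resulting layout, built directly with `ElementTree` under the same attribute names the code above uses (the suite and test names and status strings here are illustrative):

from xml.etree.ElementTree import Element, SubElement, tostring

root = Element("root")
suite = SubElement(root, "testsuite", name="LoginSuite",
                   tests="2", passed="1", failures="1")
SubElement(suite, "testcase", name="positive_login", status="success")
failed = SubElement(suite, "testcase", name="negative_login", status="failure")
SubElement(failed, "failure", type="failure")

# roughly the layout a report written by the code above ends up with on disk
print(tostring(root).decode())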
    def test_qualifies(self, suite, test):
        def _build_reverse_restriction():
            """
            A bidirectional parallel restriction will be added automatically:
            if suite `A` is restricted from running while suite `B` is running, suite `B` will automatically be
            restricted from running while suite `A` is running
            :return: None
            """
            if restriction not in ParallelProcessor.__REVERSE_PARALLEL_RESTRICTIONS:
                ParallelProcessor.__REVERSE_PARALLEL_RESTRICTIONS.update(
                    {restriction: [test.get_function_object()]})
            elif test.get_function_object(
            ) not in ParallelProcessor.__REVERSE_PARALLEL_RESTRICTIONS[
                    restriction]:
                ParallelProcessor.__REVERSE_PARALLEL_RESTRICTIONS[
                    restriction].append(test.get_function_object())
            else:
                return  # if nothing to add, return - to avoid logging
            LogJunkie.debug(
                "Added reverse test restriction! {} will not be processed while test: {} is running"
                .format(restriction, test.get_function_object()))

        def _passes_restriction():
            """
            If none of the tests that this test is restricted against are currently running, we can run it
            :return: BOOLEAN
            """
            for class_object, suite_mapping in list(
                    ParallelProcessor.__PARALLELS.items()):
                for test_mapping in suite_mapping["tests"]:
                    if test_mapping["test"].get_function_object(
                    ) in test.get_parallel_restrictions():
                        if test_mapping["thread"].isAlive():
                            return False
            return True

        def _passes_reverse_restriction():
            """
            If this test is listed as a parallel restriction by another test which is currently running, we can't run it.
            :return: BOOLEAN
            """
            if test.get_function_object(
            ) in ParallelProcessor.__REVERSE_PARALLEL_RESTRICTIONS:
                reverse_tests = ParallelProcessor.__REVERSE_PARALLEL_RESTRICTIONS[
                    test.get_function_object()]
                for class_object, suite_mapping in list(
                        ParallelProcessor.__PARALLELS.items()):
                    for test_mapping in suite_mapping["tests"]:
                        if test_mapping["test"].get_function_object(
                        ) in reverse_tests:
                            if test_mapping["thread"].isAlive():
                                return False
            return True

        if test.get_parallel_restrictions():
            for restriction in test.get_parallel_restrictions():
                if not inspect.isfunction(
                        restriction) and not inspect.ismethod(restriction):
                    raise Exception(
                        "Parallel test restrictions must be function objects. "
                        "Instead test: {} was restricted by: {}".format(
                            test.get_function_name(), restriction))
                _build_reverse_restriction()
                if not _passes_restriction():
                    return False
        if not _passes_reverse_restriction():
            return False

        if self.__test_limit is not None:
            while ParallelProcessor.get_active_test_parallels_count(
                    suite) >= self.__test_limit:
                LogJunkie.debug(
                    "Test level Thread limit reached! Active tests: {}/{}".
                    format(
                        ParallelProcessor.get_active_test_parallels_count(
                            suite), self.__test_limit))
                time.sleep(5)
        return True
Example #14
import pprint

from test_junkie.debugger import LogJunkie
from tests.QualityManager import QualityManager

LogJunkie.enable_logging(10)
from test_junkie.runner import Runner
from tests.junkie_suites.Retry import Retries

runner = Runner([Retries])
runner.run()
results = runner.get_executed_suites()
tests = results[0].get_test_objects()
pprint.pprint(results[0].metrics.get_metrics())
for test in results[0].get_test_objects():
    print(test.get_function_name())
    pprint.pprint(test.metrics.get_metrics())


def test_class_metrics():

    metrics = results[0].metrics.get_metrics()
    QualityManager.check_class_metrics(metrics,
                                       expected_status="fail",
                                       expected_retry_count=3)


def test_retry_on_exception():

    tested = False
    for test in tests:
    def __init__(self, settings):

        # self.__test_limit = int(kwargs.get("test_multithreading_limit", 1))
        self.__test_limit = settings.test_thread_limit
        if self.__test_limit == 0 or self.__test_limit is None:
            LogJunkie.warn("Thread limit for tests cannot be 0 or None, "
                           "falling back to limit of 1 thread per test case.")
            self.__test_limit = 1

        # self.__suite_limit = int(kwargs.get("suite_multithreading_limit", 1))
        self.__suite_limit = settings.suite_thread_limit
        if self.__suite_limit == 0 or self.__suite_limit is None:
            LogJunkie.warn("Thread limit for suites cannot be 0 or None, "
                           "falling back to limit of 1 thread per test suite.")
            self.__suite_limit = 1

        LogJunkie.debug(
            "=======================Parallel Processor Settings============================="
        )
        LogJunkie.debug(">> Suite level multi-threading enabled: {}".format(
            self.suite_multithreading()))
        LogJunkie.debug(">> Suite level multi-threading limit: {}".format(
            self.__suite_limit))
        LogJunkie.debug(">> Test level multi-threading enabled: {}".format(
            self.test_multithreading()))
        LogJunkie.debug(">> Test level multi-threading limit: {}".format(
            self.__test_limit))
        LogJunkie.debug(
            "==============================================================================="
        )
Example #16
    def audit(self):
        parser = argparse.ArgumentParser(
            description=
            'Scan and display aggregated and/or filtered test information',
            usage="""tj audit [COMMAND] [OPTIONS]

Aggregate, pivot, and display data about your tests.

Commands:
suites\t\t Pivot test information from suite's perspective
features\t Pivot test information from feature's perspective
components\t Pivot test information from component's perspective
tags\t\t Pivot test information from tag's perspective
owners\t\t Pivot test information from owner's perspective

usage: tj audit [COMMAND] [OPTIONS]
""")
        parser.add_argument('command', help='command to run')

        parser.add_argument(
            "--by-components",
            action="store_true",
            default=False,
            help="Present aggregated data broken down by components")

        parser.add_argument(
            "--by-features",
            action="store_true",
            default=False,
            help="Present aggregated data broken down by features")

        parser.add_argument(
            "--no-rules",
            action="store_true",
            default=False,
            help="Aggregate data only for suites that do not have any rules set"
        )

        parser.add_argument(
            "--no-listeners",
            action="store_true",
            default=False,
            help=
            "Aggregate data only for suites that do not have any event listeners set"
        )

        parser.add_argument(
            "--no-suite-retries",
            action="store_true",
            default=False,
            help="Aggregate data only for suites that do not have retries set")

        parser.add_argument(
            "--no-test-retries",
            action="store_true",
            default=False,
            help="Aggregate data only for tests that do not have retries set")

        parser.add_argument(
            "--no-suite-meta",
            action="store_true",
            default=False,
            help=
            "Aggregate data only for suites that do not have any meta information set"
        )

        parser.add_argument(
            "--no-test-meta",
            action="store_true",
            default=False,
            help=
            "Aggregate data only for tests that do not have any meta information set"
        )

        parser.add_argument(
            "--no-owners",
            action="store_true",
            default=False,
            help=
            "Aggregate data only for tests that do not have any owners defined"
        )

        parser.add_argument(
            "--no-features",
            action="store_true",
            default=False,
            help=
            "Aggregate data only for suites that do not have features defined")

        parser.add_argument(
            "--no-components",
            action="store_true",
            default=False,
            help=
            "Aggregate data only for tests that do not have any components defined"
        )

        parser.add_argument(
            "--no-tags",
            action="store_true",
            default=False,
            help="Aggregate data only for tests that do not have tags defined")

        parser.add_argument("-x",
                            "--suites",
                            nargs="+",
                            default=None,
                            help="Test Junkie will only run suites provided, "
                            "given that they are found in the SOURCE")

        parser.add_argument(
            "-v",
            "--verbose",
            action="store_true",
            default=False,
            help="Enables Test Junkie's logs for debugging purposes")

        CliUtils.add_standard_tj_args(parser, audit=True)

        if len(sys.argv) >= 3:
            args = parser.parse_args(sys.argv[2:])
            command = args.command
            if command not in [
                    "suites", "features", "components", "tags", "owners"
            ]:
                print(
                    "[{status}]\t\'{command}\' is not a test-junkie command\n".
                    format(status=CliUtils.format_color_string(value="ERROR",
                                                               color="red"),
                           command=command))
                parser.print_help()
                exit(120)
            else:
                if args.verbose:
                    from test_junkie.debugger import LogJunkie
                    LogJunkie.enable_logging(10)

                from test_junkie.cli.cli_runner import CliRunner
                try:
                    tj = CliRunner(sources=args.sources,
                                   ignore=[".git"],
                                   suites=args.suites,
                                   guess_root=args.guess_root)
                    tj.scan()
                except BadCliParameters as error:
                    print("[{status}] {error}".format(
                        status=CliUtils.format_color_string("ERROR", "red"),
                        error=error))
                    return
                aggregator = CliAudit(suites=tj.suites, args=args)
                aggregator.aggregate()
                aggregator.print_results()
                return
        else:
            print("[{status}]\tDude, what do you want to audit?".format(
                status=CliUtils.format_color_string(value="ERROR",
                                                    color="red")))
        parser.print_help()
Example #17
    def __print_settings(self):

        LogJunkie.debug("============= Runtime Settings =============")
        LogJunkie.debug("Test Thread Limit: {value}:({type})".format(
            value=self.test_thread_limit, type=type(self.test_thread_limit)))
        LogJunkie.debug("Suite Thread Limit: {value}".format(
            value=self.suite_thread_limit))
        LogJunkie.debug("Features: {value}".format(value=self.features))
        LogJunkie.debug("Components: {value}".format(value=self.components))
        LogJunkie.debug("Owners: {value}".format(value=self.owners))
        LogJunkie.debug("Tests: {value}".format(value=self.tests))
        LogJunkie.debug("Tags: {value}".format(value=self.tags))
        LogJunkie.debug(
            "Monitor Resources: {value}".format(value=self.monitor_resources))
        LogJunkie.debug("HTML Report: {value}:({type})".format(
            value=self.html_report, type=type(self.html_report)))
        LogJunkie.debug("XML Report: {value}:({type})".format(
            value=self.xml_report, type=type(self.xml_report)))
        LogJunkie.debug("Quiet: {value}:({type})".format(value=self.quiet,
                                                         type=type(
                                                             self.quiet)))
        LogJunkie.debug("============================================")
from test_junkie.debugger import LogJunkie
from test_junkie.errors import BadParameters
from test_junkie.runner import Runner

LogJunkie.enable_logging(10)

LogJunkie.debug("1")
LogJunkie.info("2")
LogJunkie.warn("3")
LogJunkie.error("4")
LogJunkie.disable_logging()


def test_bad_runner_initiation1():
    try:
        Runner(suites=None)
        raise AssertionError(
            "Must have raised exception because bad args were passed in")
    except Exception as error:
        assert isinstance(error,
                          BadParameters), "Type of exception is incorrect"
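
`enable_logging(10)` takes a numeric level; 10 is `logging.DEBUG` on the standard library scale, which the value strongly suggests LogJunkie follows. Assuming that, the named constant reads better than the magic number:

import logging

from test_junkie.debugger import LogJunkie

# 10 == logging.DEBUG, 20 == logging.INFO, 30 == logging.WARNING, 40 == logging.ERROR
LogJunkie.enable_logging(logging.DEBUG)  # equivalent to enable_logging(10)
LogJunkie.debug("debug message")
LogJunkie.disable_logging()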
Example #19
    def cleanup(self):
        try:
            os.remove(self.file_path)
        except:
            LogJunkie.error(traceback.format_exc())
Example #20
    def __run_test(suite,
                   test,
                   parameter=None,
                   class_parameter=None,
                   before_class_error=None,
                   cancel=False):
        def run_before_test():
            try:
                if not test.skip_before_test_rule():
                    suite.get_rules().before_test(test=copy.deepcopy(test))
                if not test.skip_before_test():
                    before_test_error = Runner.__process_decorator(
                        decorator_type=DecoratorType.BEFORE_TEST,
                        suite=suite,
                        class_parameter=class_parameter,
                        test=test,
                        parameter=parameter)
                    if before_test_error is not None:  # updating **test** metrics (no decorator passed in)
                        process_failure(before_test_error, pre_processed=True)
                        return False
                return True
            except Exception as before_test_error:
                process_failure(before_test_error,
                                decorator=DecoratorType.BEFORE_TEST)
                process_failure(
                    before_test_error
                )  # updating **test** metrics (no decorator passed in)
                return False

        def process_failure(error, pre_processed=False, decorator=None):
            _runtime = time.time(
            ) - start_time  # start time defined in the outside scope before each decorated func
            if not isinstance(error, TestJunkieExecutionError):
                if pre_processed:
                    trace = error.message if sys.version_info[0] < 3 else str(
                        error)
                else:
                    trace = traceback.format_exc()
                __category, __event = TestCategory.ERROR, Event.ON_ERROR
                if isinstance(error, AssertionError):
                    __category, __event = TestCategory.FAIL, Event.ON_FAILURE
                test.metrics.update_metrics(status=__category,
                                            start_time=test_start_time,
                                            param=parameter,
                                            class_param=class_parameter,
                                            exception=error,
                                            formatted_traceback=trace,
                                            runtime=_runtime,
                                            decorator=decorator)
                if decorator is None:
                    Runner.__process_event(event=__event,
                                           suite=suite,
                                           test=test,
                                           error=error,
                                           class_param=class_parameter,
                                           param=parameter,
                                           formatted_traceback=trace)
                else:
                    suite.metrics.update_decorator_metrics(
                        decorator, start_time, error, trace)
            else:
                raise error

        def run_after_test(_record_test_failure=True):
            try:
                # Running after test functions
                if not test.skip_after_test():
                    after_test_error = Runner.__process_decorator(
                        decorator_type=DecoratorType.AFTER_TEST,
                        suite=suite,
                        class_parameter=class_parameter,
                        test=test,
                        parameter=parameter)
                    if after_test_error is not None:  # updating **test** metrics (no decorator passed in)
                        process_failure(after_test_error, pre_processed=True)
                        return False
                if not test.skip_after_test_rule():
                    suite.get_rules().after_test(test=copy.deepcopy(test))
                return True
            except Exception as after_test_error:
                if _record_test_failure:
                    process_failure(after_test_error,
                                    decorator=DecoratorType.AFTER_TEST)
                    process_failure(
                        after_test_error
                    )  # updating **test** metrics (no decorator passed in)
                return False

        if not test.accepts_suite_parameters():
            # for reporting purposes, so reports are properly nested
            class_parameter = None

        test_start_time = time.time()
        if before_class_error is not None or cancel:
            if not test.accepts_suite_parameters():
                if None in test.suite.metrics.get_metrics()[DecoratorType.BEFORE_CLASS]["exceptions"] \
                        and test.get_status(parameter, class_parameter) is not None:
                    return  # fixes ticket: #19
                elif test.get_number_of_actual_retries(
                        parameter,
                        class_parameter) >= test.suite.get_retry_limit():
                    return  # fixes ticket: #27
            _status = TestCategory.IGNORE if not cancel else TestCategory.CANCEL
            _event = Event.ON_IGNORE if not cancel else Event.ON_CANCEL
            test.metrics.update_metrics(
                status=_status,
                start_time=test_start_time,
                param=parameter,
                class_param=class_parameter,
                exception=before_class_error["exception"],
                formatted_traceback=before_class_error["traceback"])
            Runner.__process_event(
                event=_event,
                error=before_class_error["exception"],
                suite=suite,
                test=test,
                class_param=class_parameter,
                param=parameter,
                formatted_traceback=before_class_error["traceback"])
            return

        status = test.get_status(parameter, class_parameter)
        if not test.accepts_suite_parameters() and status is not None:
            """
            making sure that we do not run tests with suite parameters if suite parameters are not accepted in the
            test signature. But we still want to run all other parameters and tests without any params.
            Also we want to make sure we honor the suite level retries
            """
            if status != TestCategory.SUCCESS:  # test already ran and was unsuccessful
                # if it did not reach its max retry limit, will rerun it again
                max_retry_allowed = suite.get_retry_limit(
                ) * test.get_retry_limit()
                actual_retries = test.get_number_of_actual_retries(
                    parameter, class_parameter)
                if actual_retries >= max_retry_allowed:
                    return
            else:
                return

        Runner.__process_event(event=Event.ON_IN_PROGRESS,
                               suite=suite,
                               test=test,
                               class_param=class_parameter,
                               param=parameter)
        try:
            for retry_attempt in range(1, test.get_retry_limit() + 1):
                if test.is_qualified_for_retry(parameter,
                                               class_param=class_parameter):
                    LogJunkie.debug(
                        "\n===============Running test==================\n"
                        "Test Case: {}\n"
                        "Test Suite: {}\n"
                        "Test Parameter: {}\n"
                        "Class Parameter: {}\n"
                        "Retry Attempt: {}/{}\n"
                        "=============================================".format(
                            test.get_function_name(), suite.get_class_name(),
                            parameter, class_parameter, retry_attempt,
                            test.get_retry_limit()))
                    record_test_failure = True
                    try:
                        start_time = time.time()  # before test start time
                        if run_before_test(
                        ) is False:  # if before test failed, moving on without running the test
                            continue  # everything recorded at this point in the metrics and flow is solid
                        # Running actual test
                        start_time = time.time()  # test start time
                        Runner.__process_decorator(
                            decorator_type=DecoratorType.TEST_CASE,
                            suite=suite,
                            test=test,
                            parameter=parameter,
                            class_parameter=class_parameter)
                        runtime = time.time() - start_time
                    except Exception as test_error:
                        runtime = time.time() - start_time
                        process_failure(test_error)
                        record_test_failure = False  # already recorded the failure just above this
                    start_time = time.time()  # after test start time
                    if run_after_test(record_test_failure
                                      ) is True:  # if did not fail, test is OK
                        if record_test_failure:  # only mark SUCCESS if no failure was recorded for the test body
                            test.metrics.update_metrics(
                                status=TestCategory.SUCCESS,
                                start_time=None,
                                param=parameter,
                                class_param=class_parameter,
                                runtime=runtime)
                            Runner.__process_event(event=Event.ON_SUCCESS,
                                                   suite=suite,
                                                   test=test,
                                                   class_param=class_parameter,
                                                   param=parameter)
                            return
        finally:
            Runner.__process_event(event=Event.ON_COMPLETE,
                                   suite=suite,
                                   test=test,
                                   class_param=class_parameter,
                                   param=parameter)
    def suite_qualifies(self, suite):
        def _build_reverse_restriction():
            """
            A bidirectional parallel restriction will be added automatically:
            if suite `A` is restricted from running while suite `B` is running, suite `B` will automatically be
            restricted from running while suite `A` is running
            :return: None
            """
            if restriction not in ParallelProcessor.__REVERSE_PARALLEL_RESTRICTIONS:
                ParallelProcessor.__REVERSE_PARALLEL_RESTRICTIONS.update(
                    {restriction: [suite.get_class_object()]})
            elif suite.get_class_object(
            ) not in ParallelProcessor.__REVERSE_PARALLEL_RESTRICTIONS[
                    restriction]:
                ParallelProcessor.__REVERSE_PARALLEL_RESTRICTIONS[
                    restriction].append(suite.get_class_object())
            else:
                return  # if nothing to add, return - to avoid logging
            LogJunkie.debug(
                "Added reverse restriction! {} will not be processed while {} is running"
                .format(restriction, suite.get_class_object()))

        def _passes_restriction():
            """
            If none of the suites that this suite is restricted against are currently running, we can run it
            :return: BOOLEAN
            """
            if ParallelProcessor.__PARALLELS.get(restriction,
                                                 None) is not None:
                if ParallelProcessor.__PARALLELS[restriction][
                        "thread"].isAlive():
                    LogJunkie.debug(
                        "Suite: {} can't run while: {} is running.".format(
                            suite.get_class_object(), restriction))
                    return False
            return True

        def _passes_reverse_restriction():
            """
            If this suite is listed as a parallel restriction by another suite which is currently running, we can't run it.
            :return: BOOLEAN
            """
            if suite.get_class_object(
            ) in ParallelProcessor.__REVERSE_PARALLEL_RESTRICTIONS:
                reverse_suites = ParallelProcessor.__REVERSE_PARALLEL_RESTRICTIONS[
                    suite.get_class_object()]
                for reverse_suite in reverse_suites:
                    if ParallelProcessor.__PARALLELS.get(reverse_suite,
                                                         None) is not None:
                        if ParallelProcessor.__PARALLELS[reverse_suite][
                                "thread"].isAlive():
                            LogJunkie.debug(
                                "Suite: {} can't run while: {} is running due to reverse restriction."
                                .format(suite.get_class_object(),
                                        reverse_suite))
                            return False
            return True

        if suite.get_parallel_restrictions():
            for restriction in suite.get_parallel_restrictions():
                if not inspect.isclass(restriction):
                    raise Exception(
                        "Parallel suite restrictions must be class objects. "
                        "Instead suite: {} was restricted by a function: {}".
                        format(suite.get_class_object(), restriction))
                _build_reverse_restriction()
                if not _passes_restriction():
                    return False
        if not _passes_reverse_restriction():
            return False

        if self.__suite_limit is not None:
            while ParallelProcessor.get_active_parallels_count(
            ) >= self.__suite_limit:
                LogJunkie.debug(
                    "Suite level Thread limit reached! Active suites: {}/{}".
                    format(ParallelProcessor.get_active_parallels_count(),
                           self.__suite_limit))
                time.sleep(5)
        return True
Example #22
    def build_suite_definitions(decorated_function, decorator_kwargs,
                                decorator_type):
        from test_junkie.decorators import DecoratorType
        from test_junkie.debugger import LogJunkie
        from test_junkie.objects import SuiteObject

        if decorator_type == DecoratorType.TEST_CASE:
            Builder.__TEST_ID += 1
        elif decorator_type == DecoratorType.TEST_SUITE:
            Builder.__SUITE_ID += 1

        _function_name = None
        _class_name = None
        if inspect.isfunction(decorated_function):
            decorator_kwargs.update({
                "testjunkie_test_id": Builder.__TEST_ID,
                "testjunkie_suite_id": Builder.__SUITE_ID
            })
            Builder.__validate_test_kwargs(decorator_kwargs,
                                           decorated_function)
            _function_name = decorated_function.__name__
            if Builder.__FILE_CONTROL is not None \
                    and Builder.__FILE_CONTROL != inspect.getsourcefile(decorated_function):
                Builder.__set_current_suite_object_defaults()
            Builder.__FILE_CONTROL = inspect.getsourcefile(decorated_function)
        else:
            if decorated_function is not None:
                Builder.__validate_suite_kwargs(decorator_kwargs)
                _class_name = decorated_function.__name__
                Builder.__CURRENT_SUITE_OBJECT[
                    "class_object"] = decorated_function
                Builder.__CURRENT_SUITE_OBJECT[
                    "class_retry"] = decorator_kwargs.get("retry", 1)
                Builder.__CURRENT_SUITE_OBJECT[
                    "class_skip"] = decorator_kwargs.get("skip", False)
                Builder.__CURRENT_SUITE_OBJECT[
                    "class_meta"] = decorator_kwargs.get("meta", {})
                Builder.__CURRENT_SUITE_OBJECT[
                    "test_listener"] = decorator_kwargs.get(
                        "listener", Listener)
                Builder.__CURRENT_SUITE_OBJECT[
                    "test_rules"] = decorator_kwargs.get("rules", Rules)
                Builder.__CURRENT_SUITE_OBJECT[
                    "class_parameters"] = decorator_kwargs.get(
                        "parameters", [None])
                Builder.__CURRENT_SUITE_OBJECT[
                    "parallelized"] = decorator_kwargs.get(
                        "parallelized", True)
                decorator_kwargs.update(
                    {"testjunkie_suite_id": Builder.__SUITE_ID})
                Builder.__CURRENT_SUITE_OBJECT[
                    "decorator_kwargs"] = decorator_kwargs

        if Builder.__CURRENT_SUITE_OBJECT is None:
            Builder.__set_current_suite_object_defaults()
        elif Builder.__CURRENT_SUITE_OBJECT.get("class_name", None) is None:
            Builder.__CURRENT_SUITE_OBJECT["class_name"] = _class_name

        if _function_name is not None:
            Builder.__CURRENT_SUITE_OBJECT["suite_definition"][
                decorator_type].append({
                    "decorated_function": decorated_function,
                    "decorator_kwargs": decorator_kwargs
                })
            LogJunkie.debug(
                "=======================Suite Definition Updated============================="
            )
            LogJunkie.debug("Function: {}".format(_function_name))
            LogJunkie.debug("Decorator Type: {}".format(decorator_type))
            LogJunkie.debug("Decorator Arguments: {}".format(decorator_kwargs))
            LogJunkie.debug("Function object: {}".format(decorated_function))
            LogJunkie.debug(
                "============================================================================"
            )
        else:
            LogJunkie.debug(
                "=======================Suite Definition Finished============================="
            )
            LogJunkie.debug("Suite: {}".format(_class_name))
            LogJunkie.debug("Suite Definition: {}".format(
                Builder.__CURRENT_SUITE_OBJECT))
            Builder.__EXECUTION_ROSTER.update({
                decorated_function:
                SuiteObject(Builder.__CURRENT_SUITE_OBJECT)
            })
            Builder.__set_current_suite_object_defaults()
            LogJunkie.debug(">> Definition reset for next suite <<")
            LogJunkie.debug(
                "=============================================================================\n\n"
            )
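
`build_suite_definitions` is driven by the library's suite and test decorators, which hand each decorated class or function plus its keyword arguments to the Builder so it can assemble the execution roster. A stripped-down sketch of that registration pattern with a hypothetical `register` decorator and `ROSTER` dict (the real Builder tracks far more state, as shown above):

ROSTER = {}  # hypothetical registry; not part of test_junkie's API


def register(**decorator_kwargs):
    def wrapper(decorated):
        # remember the decorated object and its keyword arguments, then hand it back untouched
        ROSTER.setdefault(decorated, {}).update(decorator_kwargs)
        return decorated
    return wrapper


@register(retry=2, parameters=[1, 2, 3])
class LoginSuite:

    @register(owner="qa", tags=["smoke"])
    def positive_login(self):
        pass


print(ROSTER[LoginSuite])                 # {'retry': 2, 'parameters': [1, 2, 3]}
print(ROSTER[LoginSuite.positive_login])  # {'owner': 'qa', 'tags': ['smoke']}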