Esempio n. 1
0
 def __init__(self, report_name, output_dir, logs_dir, naarad_dir):
   """Build the report generator's state.

   :param report_name: title shown on the front-end
   :param output_dir: destination directory for the generated report
   :param logs_dir: directory into which the logs are collected
   :param naarad_dir: directory holding the naarad reports
   """
   # Jinja2 environment that loads the HTML pages from the web resource dir
   self.env = Environment(loader=FileSystemLoader(constants.WEB_RESOURCE_DIR))
   self.name = report_name
   self.report_info = _ReportInfo(output_dir, logs_dir, naarad_dir)
   self.data_source = runtime.get_collector()
Esempio n. 2
0
 def __init__(self, report_name, output_dir, logs_dir, naarad_dir):
   """Initialize the reporter.

   :param report_name: name used in the title of the front-end
   :param output_dir: directory the report is written to
   :param logs_dir: directory where the logs will be collected
   :param naarad_dir: directory containing the naarad reports
   """
   self.name = report_name
   self.data_source = runtime.get_collector()
   # template loader used by Jinja2 to render the HTML pages
   template_loader = FileSystemLoader(constants.WEB_RESOURCE_DIR)
   self.env = Environment(loader=template_loader)
   self.report_info = _ReportInfo(output_dir, logs_dir, naarad_dir)
Esempio n. 3
0
    def run(self):
        """Run the full test suite: set up, execute every configuration,
        collect results, and generate the final report.

        For each configuration this starts naarad monitoring (unless the
        "no_perf" option is set), runs the deployment module's optional
        setup_suite/teardown_suite hooks around the tests, and records
        results via the runtime collector. Once too many suite setup or
        teardown failures accumulate, the remaining configurations are
        skipped.
        """
        self._setup()
        failure_handler = FailureHandler(
            self.master_config.mapping.get("max_suite_failures_before_abort"))

        naarad_obj = Naarad()
        for config in self.configs:
            self._reset_tests()
            if not failure_handler.get_abort_status():
                # abort threshold reached: mark the config skipped without
                # running any of its tests
                config.result = constants.SKIPPED
                config.message += error_messages.CONFIG_ABORT
                self._skip_all_tests()
                logger.debug(
                    "Skipping " + config.name +
                    " due to too many setup_suite/teardown_suite failures")
            else:
                runtime.set_active_config(config)
                setup_fail = False
                if not self.master_config.mapping.get("no_perf", False):
                    config.naarad_id = naarad_obj.signal_start(
                        self.dynamic_config_module.naarad_config(
                            config.mapping))
                config.start_time = time.time()

                logger.info("Setting up configuration: " + config.name)
                try:
                    if hasattr(self.deployment_module, 'setup_suite'):
                        self.deployment_module.setup_suite()
                except BaseException:
                    # setup failed: skip this config's tests and count a
                    # suite failure toward the abort threshold
                    config.result = constants.SKIPPED
                    config.message += error_messages.SETUP_SUITE_FAILED + traceback.format_exc(
                    )
                    self._skip_all_tests()
                    setup_fail = True
                    failure_handler.notify_failure()
                    logger.error(
                        "Aborting {0} due to setup_suite failure:\n{1}".format(
                            config.name, traceback.format_exc()))
                else:
                    logger.debug("Running tests for configuration: " +
                                 config.name)
                    self._execute_run(config, naarad_obj)
                    self._copy_logs()
                    if not self.master_config.mapping.get("no_perf", False):
                        naarad_obj.signal_stop(config.naarad_id)
                        self._execute_performance(naarad_obj)
                    self._execute_verification()

                logger.debug("Tearing down configuration: " + config.name)
                try:
                    if hasattr(self.deployment_module, 'teardown_suite'):
                        self.deployment_module.teardown_suite()
                    if not setup_fail:
                        failure_handler.notify_success()
                except BaseException:
                    config.message += error_messages.TEARDOWN_SUITE_FAILED + traceback.format_exc(
                    )
                    if not setup_fail:
                        failure_handler.notify_failure()
                    logger.error("{0} failed teardown_suite(). {1}".format(
                        config.name, traceback.format_exc()))
                config.end_time = time.time()
                logger.info("Execution of configuration: {0} complete".format(
                    config.name))

            # flatten: self.tests holds both single tests and lists of tests
            tests = [test for test in self.tests if not isinstance(test, list)] +\
                  [individual_test for test in self.tests if isinstance(test, list) for individual_test in test]
            runtime.get_collector().collect(config, tests)

        self.reporter.generate()

        if not self.master_config.mapping.get("no-display", False):
            self._display_results()
Esempio n. 4
0
  def run(self):
    """Run the full test suite: set up, execute every configuration, collect
    results, and generate the final report.

    For each configuration this starts naarad monitoring (unless "no_perf"
    is set), runs the deployment module's optional setup_suite/teardown_suite
    hooks around the tests, kills orphaned deployer processes afterwards, and
    records results via the runtime collector. Once too many suite setup or
    teardown failures accumulate, the remaining configurations are skipped.
    """
    self._setup()
    failure_handler = FailureHandler(self.master_config.mapping.get("max_suite_failures_before_abort"))

    naarad_obj = Naarad()
    for config in self.configs:
      self._reset_tests()
      if not failure_handler.get_abort_status():
        # abort threshold reached: mark the config skipped without running it
        config.result = constants.SKIPPED
        config.message += error_messages.CONFIG_ABORT
        self._skip_all_tests()
        logger.debug("Skipping " + config.name + " due to too many setup_suite/teardown_suite failures")
      else:
        runtime.set_active_config(config)
        setup_fail = False
        if not self.master_config.mapping.get("no_perf", False):
          try:
            naarad_config_file = self.dynamic_config_module.naarad_config()
          except TypeError:  # Support backwards compatibility
            naarad_config_file = self.dynamic_config_module.naarad_config(config.mapping)
          config.naarad_id = naarad_obj.signal_start(naarad_config_file)
        config.start_time = time.time()

        logger.info("Setting up configuration: " + config.name)
        try:
          if hasattr(self.deployment_module, 'setup_suite'):
            self.deployment_module.setup_suite()
        except BaseException:
          # setup failed: skip this config's tests and count a suite failure
          config.result = constants.SKIPPED
          config.message += error_messages.SETUP_SUITE_FAILED + traceback.format_exc()
          self._skip_all_tests()
          setup_fail = True
          failure_handler.notify_failure()
          logger.error("Aborting {0} due to setup_suite failure:\n{1}".format(config.name, traceback.format_exc()))
        else:
          try:
            logger.debug("Running tests for configuration: " + config.name)
            self._execute_run(config, naarad_obj)
            logger.debug("Tearing down configuration: " + config.name)
          finally:
            # teardown always runs, even if _execute_run raised
            try:
              if hasattr(self.deployment_module, 'teardown_suite'):
                self.deployment_module.teardown_suite()
              if not setup_fail:
                failure_handler.notify_success()
            except BaseException:
              config.message += error_messages.TEARDOWN_SUITE_FAILED + traceback.format_exc()
              if not setup_fail:
                failure_handler.notify_failure()
              logger.error("{0} failed teardown_suite(). {1}".format(config.name, traceback.format_exc()))
        finally:
          # kill all orphaned processes left behind by the deployers
          for deployer in runtime.get_deployers():
            deployer.kill_all_process()

        config.end_time = time.time()
        logger.info("Execution of configuration: {0} complete".format(config.name))

      # flatten: self.tests holds both single tests and lists of tests
      tests = [test for test in self.tests if not isinstance(test, list)] +\
            [individual_test for test in self.tests if isinstance(test, list) for individual_test in test]
      runtime.get_collector().collect(config, tests)
      # log results of tests so that it can be used easily via command-line
      self._log_results(tests)

    self.reporter.data_source.end_time = time.time()
    self.reporter.generate()
    if not self.master_config.mapping.get("no-display", False):
      self._display_results()
Esempio n. 5
0
  def run(self):
    """Run the full test suite: set up, execute every configuration, collect
    results, and generate the final report.

    For each configuration this starts naarad monitoring (unless "no_perf"
    is set), runs the deployment module's optional setup_suite/teardown_suite
    hooks around the tests, kills orphaned deployer processes afterwards, and
    records results via the runtime collector. Once too many suite setup or
    teardown failures accumulate, the remaining configurations are skipped.
    """
    self._setup()
    failure_handler = FailureHandler(self.master_config.mapping.get("max_suite_failures_before_abort"))

    naarad_obj = Naarad()
    for config in self.configs:
      self._reset_tests()
      if not failure_handler.get_abort_status():
        # abort threshold reached: mark the config skipped without running it
        config.result = constants.SKIPPED
        config.message += error_messages.CONFIG_ABORT
        self._skip_all_tests()
        logger.debug("Skipping " + config.name + " due to too many setup_suite/teardown_suite failures")
      else:
        runtime.set_active_config(config)
        setup_fail = False
        if not self.master_config.mapping.get("no_perf", False):
          try:
            naarad_config_file = self.dynamic_config_module.naarad_config()
          except TypeError:  # Support backwards compatibility
            naarad_config_file = self.dynamic_config_module.naarad_config(config.mapping)
          config.naarad_id = naarad_obj.signal_start(naarad_config_file)
        config.start_time = time.time()

        logger.info("Setting up configuration: " + config.name)
        try:
          if hasattr(self.deployment_module, 'setup_suite'):
            self.deployment_module.setup_suite()
        except BaseException:
          # setup failed: skip this config's tests and count a suite failure
          config.result = constants.SKIPPED
          config.message += error_messages.SETUP_SUITE_FAILED + traceback.format_exc()
          self._skip_all_tests()
          setup_fail = True
          failure_handler.notify_failure()
          logger.error("Aborting {0} due to setup_suite failure:\n{1}".format(config.name, traceback.format_exc()))
        else:
          try:
            logger.debug("Running tests for configuration: " + config.name)
            self._execute_run(config, naarad_obj)
            logger.debug("Tearing down configuration: " + config.name)
          finally:
            # teardown always runs, even if _execute_run raised
            try:
              if hasattr(self.deployment_module, 'teardown_suite'):
                self.deployment_module.teardown_suite()
              if not setup_fail:
                failure_handler.notify_success()
            except BaseException:
              config.message += error_messages.TEARDOWN_SUITE_FAILED + traceback.format_exc()
              if not setup_fail:
                failure_handler.notify_failure()
              logger.error("{0} failed teardown_suite(). {1}".format(config.name, traceback.format_exc()))
        finally:
          # kill all orphaned processes left behind by the deployers
          for deployer in runtime.get_deployers():
            deployer.kill_all_process()

        config.end_time = time.time()
        logger.info("Execution of configuration: {0} complete".format(config.name))

      # flatten: self.tests holds both single tests and lists of tests
      tests = [test for test in self.tests if not isinstance(test, list)] +\
            [individual_test for test in self.tests if isinstance(test, list) for individual_test in test]
      runtime.get_collector().collect(config, tests)
      # log results of tests so that it can be used easily via command-line
      self._log_results(tests)

    self.reporter.data_source.end_time = time.time()
    self.reporter.generate()
    # only display when explicitly requested and not producing JUnit output
    if self.master_config.mapping.get("display", False) and not self.master_config.mapping.get("junit_reporter", False):
      self._display_results()
Esempio n. 6
0
  def run(self):
    """Run the full test suite: set up, execute every configuration, collect
    results, and generate the final report.

    For each configuration this starts naarad monitoring (unless "no_perf"
    is set), runs the deployment module's optional setup_suite/teardown_suite
    hooks around the tests, and records results via the runtime collector.
    Once too many suite setup or teardown failures accumulate, the remaining
    configurations are skipped.
    """
    self._setup()
    failure_handler = FailureHandler(self.master_config.mapping.get("max_suite_failures_before_abort"))

    naarad_obj = Naarad()
    for config in self.configs:
      self._reset_tests()
      if not failure_handler.get_abort_status():
        # abort threshold reached: mark the config skipped without running it
        config.result = constants.SKIPPED
        config.message += error_messages.CONFIG_ABORT
        self._skip_all_tests()
        logger.debug("Skipping " + config.name + " due to too many setup_suite/teardown_suite failures")
      else:
        runtime.set_active_config(config)
        setup_fail = False
        if not self.master_config.mapping.get("no_perf", False):
          config.naarad_id = naarad_obj.signal_start(self.dynamic_config_module.naarad_config(config.mapping))
        config.start_time = time.time()

        logger.info("Setting up configuration: " + config.name)
        try:
          if hasattr(self.deployment_module, 'setup_suite'):
            self.deployment_module.setup_suite()
        except BaseException:
          # setup failed: skip this config's tests and count a suite failure
          config.result = constants.SKIPPED
          config.message += error_messages.SETUP_SUITE_FAILED + traceback.format_exc()
          self._skip_all_tests()
          setup_fail = True
          failure_handler.notify_failure()
          logger.error("Aborting {0} due to setup_suite failure:\n{1}".format(config.name, traceback.format_exc()))
        else:
          logger.debug("Running tests for configuration: " + config.name)
          self._execute_run(config, naarad_obj)
          self._copy_logs()
          if not self.master_config.mapping.get("no_perf", False):
            naarad_obj.signal_stop(config.naarad_id)
            self._execute_performance(naarad_obj)
          self._execute_verification()

        logger.debug("Tearing down configuration: " + config.name)
        try:
          if hasattr(self.deployment_module, 'teardown_suite'):
            self.deployment_module.teardown_suite()
          if not setup_fail:
            failure_handler.notify_success()
        except BaseException:
          config.message += error_messages.TEARDOWN_SUITE_FAILED + traceback.format_exc()
          if not setup_fail:
            failure_handler.notify_failure()
          logger.error("{0} failed teardown_suite(). {1}".format(config.name, traceback.format_exc()))
        config.end_time = time.time()
        logger.info("Execution of configuration: {0} complete".format(config.name))

      # flatten: self.tests holds both single tests and lists of tests
      tests = [test for test in self.tests if not isinstance(test, list)] +\
            [individual_test for test in self.tests if isinstance(test, list) for individual_test in test]
      runtime.get_collector().collect(config, tests)

    self.reporter.generate()

    if not self.master_config.mapping.get("no-display", False):
      self._display_results()