Example #1
    def flush_result_data(self):
        file_path = self.conf.get('file_path', consts.DEFAULT_OUTPUT_FILE)

        res = utils.read_json_from_file(file_path).get('result')
        res.extend(self.result)

        data = {'status': 0, 'result': res}
        utils.write_json_to_file(file_path, data)
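
All of these snippets lean on two small JSON helpers, utils.read_json_from_file and utils.write_json_to_file, whose implementation is not part of this listing. A minimal sketch of what they are assumed to do (load and dump a single JSON document at a given path):

import json


def read_json_from_file(path):
    # Assumed behaviour: parse the whole file as one JSON document.
    with open(path) as f:
        return json.load(f)


def write_json_to_file(path, data):
    # Assumed behaviour: overwrite the file with the serialized document.
    with open(path, 'w') as f:
        json.dump(data, f)
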
Example #2
    def pre_start(self, args, **kwargs):
        atexit.register(self.atexit_handler)
        task_id = getattr(args, 'task_id')
        self.task_id = task_id if task_id else str(uuid.uuid4())
        self._set_log()
        try:
            output_config = utils.parse_ini_file(config_file)
        except Exception:
            # all errors are ignored; the default value is {}
            output_config = {}

        self._init_output_config(output_config)
        self._set_output_config(output_config, args.output_file)
        LOG.debug('Output configuration is: %s', output_config)

        self._set_dispatchers(output_config)

        # update dispatcher list
        if 'file' in output_config['DEFAULT']['dispatcher']:
            result = {'status': 0, 'result': {}}
            utils.write_json_to_file(args.output_file, result)

        total_start_time = time.time()
        parser = TaskParser(args.inputfile[0])
        if args.suite:
            # 1.parse suite, return suite_params info
            task_files, task_args, task_args_fnames = \
                parser.parse_suite()
        else:
            task_files = [parser.path]
            task_args = [args.task_args]
            task_args_fnames = [args.task_args_file]

            LOG.debug("task_files:%s, task_args:%s, task_args_fnames:%s",
                      task_files, task_args, task_args_fnames)

        if args.parse_only:
            sys.exit(0)

        return task_files, task_args, task_args_fnames, parser
Example #3
    def flush_result_data(self, data):
        utils.write_json_to_file(self.target, data)
Example #4
    def start(self, args, **kwargs):
        """Start a benchmark scenario."""

        atexit.register(self.atexit_handler)

        task_id = getattr(args, 'task_id')
        self.task_id = task_id if task_id else str(uuid.uuid4())

        self._set_log()

        try:
            output_config = utils.parse_ini_file(config_file)
        except Exception:
            # all errors are ignored; the default value is {}
            output_config = {}

        self._init_output_config(output_config)
        self._set_output_config(output_config, args.output_file)
        LOG.debug('Output configuration is: %s', output_config)

        self._set_dispatchers(output_config)

        # update dispatcher list
        if 'file' in output_config['DEFAULT']['dispatcher']:
            result = {'status': 0, 'result': {}}
            utils.write_json_to_file(args.output_file, result)

        total_start_time = time.time()
        parser = TaskParser(args.inputfile[0])

        if args.suite:
            # 1.parse suite, return suite_params info
            task_files, task_args, task_args_fnames = \
                parser.parse_suite()
        else:
            task_files = [parser.path]
            task_args = [args.task_args]
            task_args_fnames = [args.task_args_file]

        LOG.info("\ntask_files:%s, \ntask_args:%s, \ntask_args_fnames:%s",
                 task_files, task_args, task_args_fnames)

        if args.parse_only:
            sys.exit(0)

        testcases = {}
        # parse task_files
        for i in range(0, len(task_files)):
            one_task_start_time = time.time()
            parser.path = task_files[i]
            scenarios, run_in_parallel, meet_precondition, contexts = \
                parser.parse_task(self.task_id, task_args[i],
                                  task_args_fnames[i])

            self.contexts.extend(contexts)

            if not meet_precondition:
                LOG.info("meet_precondition is %s, please check envrionment",
                         meet_precondition)
                continue

            case_name = os.path.splitext(os.path.basename(task_files[i]))[0]
            try:
                data = self._run(scenarios, run_in_parallel, args.output_file)
            except KeyboardInterrupt:
                raise
            except Exception:
                LOG.exception("Running test case %s failed!", case_name)
                testcases[case_name] = {'criteria': 'FAIL', 'tc_data': []}
            else:
                testcases[case_name] = {'criteria': 'PASS', 'tc_data': data}

            if args.keep_deploy:
                # keep deployment, forget about stack
                # (hide it for exit handler)
                self.contexts = []
            else:
                for context in self.contexts[::-1]:
                    context.undeploy()
                self.contexts = []
            one_task_end_time = time.time()
            LOG.info("task %s finished in %d secs", task_files[i],
                     one_task_end_time - one_task_start_time)

        result = self._get_format_result(testcases)

        self._do_output(output_config, result)
        self._generate_reporting(result)

        total_end_time = time.time()
        LOG.info("total finished in %d secs",
                 total_end_time - total_start_time)

        scenario = scenarios[0]
        print("To generate report execute => yardstick report generate ",
              scenario['task_id'], scenario['tc'])

        print("Done, exiting")
        return result
Example #5
    def _write_error_data(self, error):
        data = {'status': 2, 'result': str(error)}
        write_json_to_file(self.output_file, data)
Example #6
    def start(self, args, **kwargs):  # pylint: disable=unused-argument
        """Start a benchmark scenario."""

        atexit.register(self.atexit_handler)

        task_id = getattr(args, 'task_id')
        self.task_id = task_id if task_id else str(uuid.uuid4())

        self._set_log()

        try:
            output_config = utils.parse_ini_file(CONF_FILE)
        except Exception:  # pylint: disable=broad-except
            # all errors are ignored; the default value is {}
            output_config = {}

        self._init_output_config(output_config)
        self._set_output_config(output_config, args.output_file)
        LOG.debug('Output configuration is: %s', output_config)

        self._set_dispatchers(output_config)

        # update dispatcher list
        if 'file' in output_config['DEFAULT']['dispatcher']:
            result = {'status': 0, 'result': {}}
            utils.write_json_to_file(args.output_file, result)

        total_start_time = time.time()
        parser = TaskParser(args.inputfile[0])

        if args.suite:
            # 1.parse suite, return suite_params info
            task_files, task_args, task_args_fnames = parser.parse_suite()
        else:
            task_files = [parser.path]
            task_args = [args.task_args]
            task_args_fnames = [args.task_args_file]

        LOG.debug("task_files:%s, task_args:%s, task_args_fnames:%s",
                  task_files, task_args, task_args_fnames)

        if args.parse_only:
            sys.exit(0)

        testcases = {}
        tasks = self._parse_tasks(parser, task_files, args, task_args,
                                  task_args_fnames)

        # Execute task files.
        for i, _ in enumerate(task_files):
            one_task_start_time = time.time()
            self.contexts.extend(tasks[i]['contexts'])
            if not tasks[i]['meet_precondition']:
                LOG.info('"meet_precondition" is %s, please check environment',
                         tasks[i]['meet_precondition'])
                continue

            try:
                success, data = self._run(tasks[i]['scenarios'],
                                          tasks[i]['run_in_parallel'],
                                          output_config)
            except KeyboardInterrupt:
                raise
            except Exception:  # pylint: disable=broad-except
                LOG.error('Testcase: "%s" FAILED!!!',
                          tasks[i]['case_name'],
                          exc_info=True)
                testcases[tasks[i]['case_name']] = {
                    'criteria': 'FAIL',
                    'tc_data': []
                }
            else:
                if success:
                    LOG.info('Testcase: "%s" SUCCESS!!!',
                             tasks[i]['case_name'])
                    testcases[tasks[i]['case_name']] = {
                        'criteria': 'PASS',
                        'tc_data': data
                    }
                else:
                    LOG.error('Testcase: "%s" FAILED!!!',
                              tasks[i]['case_name'],
                              exc_info=True)
                    testcases[tasks[i]['case_name']] = {
                        'criteria': 'FAIL',
                        'tc_data': data
                    }

            if args.keep_deploy:
                # keep deployment, forget about stack
                # (hide it for exit handler)
                self.contexts = []
            else:
                for context in self.contexts[::-1]:
                    context.undeploy()
                self.contexts = []
            one_task_end_time = time.time()
            LOG.info("Task %s finished in %d secs", task_files[i],
                     one_task_end_time - one_task_start_time)

        result = self._get_format_result(testcases)

        self._do_output(output_config, result)
        self._generate_reporting(result)

        total_end_time = time.time()
        LOG.info("Total finished in %d secs",
                 total_end_time - total_start_time)

        LOG.info(
            'To generate report, execute command "yardstick report '
            'generate %s <YAML_NAME>"', self.task_id)
        LOG.info("Task ALL DONE, exiting")
        return result
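
The args object passed to start() comes from the Yardstick command-line parser. To see which attributes the method actually reads, it can be stubbed with an argparse.Namespace; the attribute names below are taken from the snippet itself, while the values and the task instance are purely illustrative:

from argparse import Namespace

# Hypothetical stub; the values are placeholders, not real Yardstick defaults.
args = Namespace(
    task_id=None,                      # a UUID is generated when this is empty
    inputfile=['samples/ping.yaml'],   # only the first element is used
    task_args=None,
    task_args_file=None,
    suite=False,                       # True would call parser.parse_suite()
    parse_only=False,                  # True exits right after parsing
    keep_deploy=False,                 # True skips context undeployment
    output_file='/tmp/yardstick.out',
)

# result = task.start(args)  # 'task' stands for an instance of the enclosing class
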
Example #7
    def _finish(self):
        result = read_json_from_file(self.output_file).get('result')
        data = {'status': 1, 'result': result}
        write_json_to_file(self.output_file, data)
Example #8
    def _init_result_file(self):
        data = {'status': 0, 'result': []}
        write_json_to_file(self.output_file, data)
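
Taken together, examples 5, 7 and 8 suggest a simple status convention for the result file: 0 while a run is in progress, 1 once it has finished, and 2 when it failed. A minimal sketch of that lifecycle, reusing the helper names assumed above and an illustrative output path:

output_file = '/tmp/yardstick.out'  # illustrative path, not a project default

# Start of a run (example 8): empty result list, status 0.
write_json_to_file(output_file, {'status': 0, 'result': []})

# Normal completion (example 7): keep the accumulated results, flip status to 1.
result = read_json_from_file(output_file).get('result')
write_json_to_file(output_file, {'status': 1, 'result': result})

# Failure path (example 5): store the error message and set status to 2.
try:
    raise RuntimeError('scenario failed')  # stand-in for a real failure
except RuntimeError as error:
    write_json_to_file(output_file, {'status': 2, 'result': str(error)})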