Example #1
0
    def get_load(self):
        """
        Helper method to read load specification from the execution config.

        Resolves throughput/concurrency per provisioning type, reads iterations,
        ramp-up, steps and hold-for, and derives the total duration.

        :return: LoadSpec namedtuple
        """
        prov_type = self.engine.config.get(Provisioning.PROV, None)

        # throughput/concurrency may be plain values or dicts keyed by provisioning type
        ensure_is_dict(self.execution, ScenarioExecutor.THRPT, prov_type)
        throughput = self.execution[ScenarioExecutor.THRPT].get(prov_type, 0)

        ensure_is_dict(self.execution, ScenarioExecutor.CONCURR, prov_type)
        concurrency = self.execution[ScenarioExecutor.CONCURR].get(prov_type, 0)

        iterations = self.execution.get("iterations", None)

        ramp_up = self.execution.get(ScenarioExecutor.RAMP_UP, None)
        steps = self.execution.get(ScenarioExecutor.STEPS, None)
        hold = dehumanize_time(self.execution.get(ScenarioExecutor.HOLD_FOR, 0))
        if ramp_up is None:
            # FIX: removed redundant `ramp_up = None` self-assignment
            duration = hold
        else:
            ramp_up = dehumanize_time(ramp_up)
            duration = hold + ramp_up

        if duration and not iterations:
            iterations = 0  # which means infinite

        res = namedtuple("LoadSpec",
                         ('concurrency', "throughput", 'ramp_up', 'hold', 'iterations', 'duration', 'steps'))
        return res(concurrency=concurrency, ramp_up=ramp_up,
                   throughput=throughput, hold=hold, iterations=iterations,
                   duration=duration, steps=steps)
Example #2
0
    def startup(self):
        """
        Build the siege command line from the load spec and launch the tool.
        """
        load = self.get_load()
        args = [self.tool.tool_path]

        # iterations take precedence over hold-for; one of them is mandatory
        if load.iterations:
            args.extend(['--reps', str(load.iterations)])
        elif load.hold:
            args.extend(['--time', '%sS' % ceil(dehumanize_time(load.hold))])
        else:
            raise TaurusConfigError("Siege: You must specify either 'hold-for' or 'iterations'")

        think_time = self.scenario.get_think_time()
        if think_time:
            args.extend(['--delay', str(dehumanize_time(think_time))])
        else:
            args.append('--benchmark')

        concurrency = load.concurrency
        self.reader.concurrency = concurrency
        args.extend(['--concurrent', str(concurrency)])

        args.extend(['--file', self.__url_name])

        for name, value in iteritems(self.scenario.get_headers()):
            args.extend(['--header', "%s: %s" % (name, value)])

        self.env.set({"SIEGERC": self.__rc_name})
        self.process = self._execute(args)
Example #3
0
    def __gen_task(self):
        """
        Generate the body of the Locust task method from scenario requests.

        :return: generated method definition (list of statements)
        :raises TaurusConfigError: on unsupported HTTP method
        """
        task = self.gen_method_definition("generated_task", ['self'])

        think_time = dehumanize_time(self.scenario.get_think_time())
        global_headers = self.scenario.get_headers()
        if not self.scenario.get("keepalive", True):
            global_headers['Connection'] = 'close'

        for req in self.scenario.get_requests():
            if not isinstance(req, HTTPRequest):
                msg = "Locust script generator doesn't support '%s' blocks, skipping"
                self.log.warning(msg, req.NAME)
                continue

            method = req.method.lower()
            # FIX: 'patch' (HTTP verb) was misspelled as 'path'
            if method not in ('get', 'delete', 'head', 'options', 'patch', 'put', 'post'):
                raise TaurusConfigError("Wrong Locust request type: %s" % method)

            timeout = req.priority_option('timeout', default='30s')

            self.__gen_check(method, req, task, dehumanize_time(timeout), global_headers)

            # per-request think-time overrides the scenario-level one
            if req.get_think_time():
                task.append(self.gen_statement("sleep(%s)" % dehumanize_time(req.get_think_time())))
            elif think_time:
                task.append(self.gen_statement("sleep(%s)" % think_time))
            task.append(self.gen_new_line())
        return task
Example #4
0
    def prepare(self):
        """
        Read options for uploading, check that they're sane
        """
        super(BlazeMeterUploader, self).prepare()
        # connection settings; defaults come from the client instance
        self.client.address = self.settings.get("address", self.client.address)
        self.client.data_address = self.settings.get("data-address", self.client.data_address)
        self.client.timeout = dehumanize_time(self.settings.get("timeout", self.client.timeout))
        self.send_interval = dehumanize_time(self.settings.get("send-interval", self.send_interval))
        self.browser_open = self.settings.get("browser-open", self.browser_open)
        token = self.settings.get("token", "")
        if not token:
            self.log.warning("No BlazeMeter API key provided, will upload anonymously")
        self.client.token = token

        # identifiers may be supplied externally through parameters; default to None
        self.client.active_session_id = self.parameters.get("session-id", None)
        self.client.test_id = self.parameters.get("test-id", None)
        self.client.user_id = self.parameters.get("user-id", None)
        self.client.data_signature = self.parameters.get("signature", None)

        if not self.client.test_id:
            try:
                self.client.ping()  # to check connectivity and auth
            except HTTPError:
                self.log.error("Cannot reach online results storage, maybe the address/token is wrong")
                raise

            self.__get_test_id(token)

        # parameters take precedence over settings for the report name
        self.sess_name = self.parameters.get("report-name", self.settings.get("report-name", self.sess_name))
        if self.sess_name == 'ask' and sys.stdin.isatty():
            self.sess_name = r_input("Please enter report-name: ")
Example #5
0
    def prepare(self):
        """
        Read options for uploading, check that they're sane
        """
        super(BlazeMeterUploader, self).prepare()
        # connection settings; defaults come from the client instance
        self.client.address = self.settings.get("address", self.client.address)
        self.client.data_address = self.settings.get("data-address", self.client.data_address)
        self.client.timeout = dehumanize_time(self.settings.get("timeout", self.client.timeout))
        self.send_interval = dehumanize_time(self.settings.get("send-interval", self.send_interval))
        self.browser_open = self.settings.get("browser-open", self.browser_open)
        token = self.settings.get("token", "")
        if not token:
            self.log.warning("No BlazeMeter API key provided, will upload anonymously")
        self.client.token = token

        # identifiers may be supplied externally through parameters; default to None
        self.client.active_session_id = self.parameters.get("session-id", None)
        self.client.test_id = self.parameters.get("test-id", None)
        self.client.user_id = self.parameters.get("user-id", None)
        self.client.data_signature = self.parameters.get("signature", None)

        if not self.client.test_id:
            test_name = self.parameters.get("test", "Taurus Test")  # TODO: provide a way to put datetime into test name
            try:
                self.client.ping()  # to check connectivity and auth
                if token:
                    # resolve/create test by name only when authenticated
                    self.test_id = self.client.test_by_name(test_name, {"type": "external"}, self.engine.config, [])
            except HTTPError:
                self.log.error("Cannot reach online results storage, maybe the address/token is wrong")
                raise
Example #6
0
    def startup(self):
        """
        Compose the siege command line, prepare its environment and start the process.
        """
        load = self.get_load()
        args = [self.tool_path]

        # iterations take precedence over hold-for; one of them is mandatory
        if load.iterations:
            args.extend(['--reps', str(load.iterations)])
        elif load.hold:
            args.extend(['--time', '%sS' % ceil(dehumanize_time(load.hold))])
        else:
            raise ValueError("You must specify either 'hold-for' or 'iterations' for siege")

        think_time = self.scenario.get('think-time')
        if think_time:
            args.extend(['--delay', str(dehumanize_time(think_time))])
        else:
            args.append('--benchmark')

        concurrency = load.concurrency
        self.reader.concurrency = concurrency
        args.extend(['--concurrent', str(concurrency)])

        args.extend(['--file', self.__url_name])

        for name, value in iteritems(self.scenario.get_headers()):
            args.extend(['--header', "%s: %s" % (name, value)])

        # inherit current environment and point siege at our rc file
        env = BetterDict()
        env.merge(dict(environ))
        env.merge({"SIEGERC": self.__rc_name})

        self.start_time = time.time()
        self.process = shell_exec(args, stdout=self.__out, stderr=self.__err, env=env)
Example #7
0
    def startup(self):
        """
        Assemble the newman command line and start the collection run.
        """
        script_dir = get_full_path(self.script, step_up=1)
        script_file = os.path.basename(self.script)
        cmdline = self.get_launch_cmdline(
            "run",
            script_file,
            "--reporters", "taurus",
            "--reporter-taurus-filename", self.report_file,
            "--suppress-exit-code", "--insecure",
        )

        scenario = self.get_scenario()

        # newman expects milliseconds for both timeouts and delays
        timeout = scenario.get('timeout', None)
        if timeout is not None:
            millis = int(dehumanize_time(timeout) * 1000)
            cmdline += ["--timeout-request", str(millis)]

        think = scenario.get_think_time()
        if think is not None:
            millis = int(dehumanize_time(think) * 1000)
            cmdline += ["--delay-request", str(millis)]

        for section in ("globals", "environment"):
            cmdline += self._dump_vars(section)

        load = self.get_load()
        if load.iterations:
            cmdline += ['--iteration-count', str(load.iterations)]

        # TODO: allow running several collections like directory, see https://github.com/postmanlabs/newman/issues/871
        # TODO: support hold-for, probably by having own runner
        # if load.hold:
        #    cmdline += ['--hold-for', str(load.hold)]

        self.process = self._execute(cmdline, cwd=script_dir)
Example #8
0
    def __gen_task(self):
        """
        Generate the body of the Locust task method from scenario requests.

        :return: generated method definition (list of statements)
        :raises RuntimeError: on unsupported HTTP method
        """
        task = self.gen_method_definition("generated_task", ['self'])

        think_time = dehumanize_time(self.scenario.get('think-time', None))
        timeout = dehumanize_time(self.scenario.get("timeout", 30))
        global_headers = self.scenario.get("headers", None)

        for req in self.scenario.get_requests():
            method = req.method.lower()
            # FIX: 'patch' (HTTP verb) was misspelled as 'path'
            if method not in ('get', 'delete', 'head', 'options', 'patch', 'put', 'post'):
                raise RuntimeError("Wrong Locust request type: %s" % method)

            # per-request timeout overrides the scenario-level one
            if req.timeout:
                local_timeout = dehumanize_time(req.timeout)
            else:
                local_timeout = timeout
            self.__gen_check(method, req, task, local_timeout, global_headers)

            # per-request think-time overrides the scenario-level one
            if req.think_time:
                task.append(self.gen_statement("sleep(%s)" % dehumanize_time(req.think_time)))
            elif think_time:
                task.append(self.gen_statement("sleep(%s)" % think_time))
            task.append(self.gen_new_line())
        return task
Example #9
0
    def startup(self):
        """
        Build the molotov command line from the load spec and launch the tool.
        """
        load = self.get_load()

        cmdline = [self.molotov.tool_path]

        if load.concurrency is not None:
            cmdline += ['--workers', str(load.concurrency)]

        if 'processes' in self.execution:
            cmdline += ['--processes', str(self.execution['processes'])]

        # TODO: autosizing as `concurrency: auto`?

        # total duration passed to molotov is ramp-up + hold
        duration = 0
        if load.ramp_up:
            # BUGFIX: ramp-up was computed from load.hold (copy-paste); use load.ramp_up
            ramp_up = int(ceil(dehumanize_time(load.ramp_up)))
            duration += ramp_up
            cmdline += ['--ramp-up', str(ramp_up)]
        if load.hold:
            hold = int(ceil(dehumanize_time(load.hold)))
            duration += hold
        cmdline += ['--duration', str(duration)]

        cmdline += ['--use-extension=bzt.resources.molotov_ext']

        cmdline += [self.get_script_path(required=True)]

        # report file for the molotov extension; make our resources importable
        self.env.set({"MOLOTOV_TAURUS_REPORT": self.report_file_name})
        self.env.add_path({"PYTHONPATH": get_full_path(__file__, step_up=3)})

        self.process = self._execute(cmdline)
Example #10
0
    def get_load(self):
        """
        Helper method to read load specification.

        :return: LOAD_FMT namedtuple with normalized numeric fields
        :raises TaurusConfigError: when any load field is non-numeric
        """

        def eval_int(value):
            # best-effort conversion; leave value untouched if not convertible
            try:
                return int(value)
            except (ValueError, TypeError):
                return value

        def eval_float(value):
            # BUGFIX: used int(value), which truncated fractional throughput
            try:
                return float(value)
            except (ValueError, TypeError):
                return value

        raw_load = self.get_raw_load()

        iterations = eval_int(raw_load.iterations)
        ramp_up = raw_load.ramp_up

        throughput = eval_float(raw_load.throughput or 0)
        concurrency = eval_int(raw_load.concurrency or 0)

        steps = eval_int(raw_load.steps)
        hold = dehumanize_time(raw_load.hold or 0)

        if ramp_up is None:
            duration = hold
        else:
            ramp_up = dehumanize_time(raw_load.ramp_up)
            duration = hold + ramp_up

        if not iterations:
            if duration:
                iterations = 0  # infinite
            else:
                iterations = 1

        # collect all validation problems into a single error
        msg = ''
        if not isinstance(concurrency, numeric_types + (type(None),)):
            msg += "Invalid concurrency value[%s]: %s " % (type(concurrency).__name__, concurrency)
        if not isinstance(throughput, numeric_types + (type(None),)):
            msg += "Invalid throughput value[%s]: %s " % (type(throughput).__name__, throughput)
        if not isinstance(steps, numeric_types + (type(None),)):
            # BUGFIX: message wrongly said 'throughput'
            msg += "Invalid steps value[%s]: %s " % (type(steps).__name__, steps)
        if not isinstance(iterations, numeric_types + (type(None),)):
            # BUGFIX: message wrongly said 'throughput'
            msg += "Invalid iterations value[%s]: %s " % (type(iterations).__name__, iterations)

        if msg:
            raise TaurusConfigError(msg)

        return self.LOAD_FMT(concurrency=concurrency, ramp_up=ramp_up, throughput=throughput, hold=hold,
                             iterations=iterations, duration=duration, steps=steps)
Example #11
0
    def get_load(self):
        """
        Helper method to read load specification

        :return: LoadSpec namedtuple
        :raises ValueError: when any load field is non-numeric
        """
        prov_type = self.engine.config.get(Provisioning.PROV, None)

        # throughput/concurrency may be plain values or dicts keyed by provisioning type
        ensure_is_dict(self.execution, ScenarioExecutor.THRPT, prov_type)
        throughput = self.execution[ScenarioExecutor.THRPT].get(prov_type, 0)

        ensure_is_dict(self.execution, ScenarioExecutor.CONCURR, prov_type)
        concurrency = self.execution[ScenarioExecutor.CONCURR].get(prov_type, 0)

        iterations = self.execution.get("iterations", None)

        ramp_up = self.execution.get(ScenarioExecutor.RAMP_UP, None)
        steps = self.execution.get(ScenarioExecutor.STEPS, None)
        hold = dehumanize_time(self.execution.get(ScenarioExecutor.HOLD_FOR, 0))
        if ramp_up is None:
            # FIX: removed redundant `ramp_up = None` self-assignment
            duration = hold
        else:
            ramp_up = dehumanize_time(ramp_up)
            duration = hold + ramp_up

        if duration and not iterations:
            iterations = 0  # which means infinite

        if not isinstance(concurrency, numeric_types + (type(None),)):
            raise ValueError("Invalid concurrency value[%s]: %s" % (type(concurrency).__name__, concurrency))

        if not isinstance(throughput, numeric_types + (type(None),)):
            raise ValueError("Invalid throughput value[%s]: %s" % (type(throughput).__name__, throughput))

        if not isinstance(steps, numeric_types + (type(None),)):
            # BUGFIX: message wrongly said 'throughput'
            raise ValueError("Invalid steps value[%s]: %s" % (type(steps).__name__, steps))

        if not isinstance(iterations, numeric_types + (type(None),)):
            # BUGFIX: message wrongly said 'throughput'
            raise ValueError("Invalid iterations value[%s]: %s" % (type(iterations).__name__, iterations))

        res = namedtuple(
            "LoadSpec", ("concurrency", "throughput", "ramp_up", "hold", "iterations", "duration", "steps")
        )

        return res(
            concurrency=concurrency,
            ramp_up=ramp_up,
            throughput=throughput,
            hold=hold,
            iterations=iterations,
            duration=duration,
            steps=steps,
        )
Example #12
0
    def get_load(self):
        """
        Helper method to read load specification

        :return: LOAD_FMT namedtuple with normalized numeric fields
        :raises TaurusConfigError: when any load field is non-numeric
        """

        def eval_int(value):
            # best-effort conversion; leave value untouched if not convertible
            try:
                return int(value)
            except (ValueError, TypeError):
                return value

        def eval_float(value):
            # BUGFIX: used int(value), which truncated fractional throughput
            try:
                return float(value)
            except (ValueError, TypeError):
                return value

        prov_type = self.engine.config.get(Provisioning.PROV)

        # throughput/concurrency may be plain values or dicts keyed by provisioning type
        ensure_is_dict(self.execution, ScenarioExecutor.THRPT, prov_type)
        throughput = eval_float(self.execution[ScenarioExecutor.THRPT].get(prov_type, 0))

        ensure_is_dict(self.execution, ScenarioExecutor.CONCURR, prov_type)
        concurrency = eval_int(self.execution[ScenarioExecutor.CONCURR].get(prov_type, 0))

        iterations = eval_int(self.execution.get("iterations", None))

        ramp_up = self.execution.get(ScenarioExecutor.RAMP_UP, None)
        steps = eval_int(self.execution.get(ScenarioExecutor.STEPS, None))
        hold = dehumanize_time(self.execution.get(ScenarioExecutor.HOLD_FOR, 0))
        if ramp_up is None:
            duration = hold
        else:
            ramp_up = dehumanize_time(ramp_up)
            duration = hold + ramp_up

        if duration and not iterations:
            iterations = 0  # which means infinite

        # collect all validation problems into a single error
        msg = ''
        if not isinstance(concurrency, numeric_types + (type(None),)):
            msg += "Invalid concurrency value[%s]: %s " % (type(concurrency).__name__, concurrency)
        if not isinstance(throughput, numeric_types + (type(None),)):
            msg += "Invalid throughput value[%s]: %s " % (type(throughput).__name__, throughput)
        if not isinstance(steps, numeric_types + (type(None),)):
            # BUGFIX: message wrongly said 'throughput'
            msg += "Invalid steps value[%s]: %s " % (type(steps).__name__, steps)
        if not isinstance(iterations, numeric_types + (type(None),)):
            # BUGFIX: message wrongly said 'throughput'
            msg += "Invalid iterations value[%s]: %s " % (type(iterations).__name__, iterations)

        if msg:
            raise TaurusConfigError(msg)

        return self.LOAD_FMT(concurrency=concurrency, ramp_up=ramp_up, throughput=throughput, hold=hold,
                             iterations=iterations, duration=duration, steps=steps)
Example #13
0
    def _get_url(self):
        """
        Build the Graphite render URL for the configured metrics.

        :return: full URL string
        """
        metrics = self.config.get('metrics', ValueError("Metrics list required"))
        params = [('target', field) for field in metrics]
        from_t = int(dehumanize_time(self.config.get('from', self.interval * 1000)))
        until_t = int(dehumanize_time(self.config.get('until', 0)))
        params.append(('from', '-%ss' % from_t))
        params.append(('until', '-%ss' % until_t))
        params.append(('format', 'json'))

        url = self.address + '/render?' + urlencode(params)
        if not url.startswith('http'):
            url = 'http://' + url
        return url
Example #14
0
    def _get_url(self):
        """
        Compose the Graphite /render URL from the client configuration.

        :return: full URL string
        """
        exc = TaurusConfigError('Graphite client requires metrics list')
        fields = self.config.get('metrics', exc)
        window_start = int(dehumanize_time(self.config.get('from', self.interval * 1000)))
        window_end = int(dehumanize_time(self.config.get('until', 0)))

        params = [('target', field) for field in fields]
        params += [('from', '-%ss' % window_start),
                   ('until', '-%ss' % window_end),
                   ('format', 'json')]

        url = self.address + '/render?' + urlencode(params)
        if not url.startswith('http'):
            url = 'http://' + url
        return url
Example #15
0
    def __init__(self, parent_log, label, config, engine):
        """
        Graphite data source client: reads settings from config and precomputes the query URL.
        """
        super(GraphiteClient, self).__init__(parent_log, engine)
        self.config = config
        missing_address = TaurusConfigError('Graphite client requires address parameter')
        self.address = self.config.get("address", missing_address)
        self.timeout = int(dehumanize_time(self.config.get("timeout", "5s")))
        self.interval = int(dehumanize_time(self.config.get("interval", "5s")))  # interval for client
        self._cached_data = None
        self.url = self._get_url()  # uses self.address and self.interval set above
        self.host_label = label if label else self.address
Example #16
0
    def __init__(self, parent_log, label, config, engine):
        """
        Graphite data source client.

        :param parent_log: parent logger passed to the base class
        :param label: display label; falls back to address when empty
        :param config: client configuration (address, timeout, interval)
        :param engine: engine instance passed to the base class
        """
        super(GraphiteClient, self).__init__(parent_log, engine)
        self.config = config
        # NOTE(review): default is an exception instance — presumably config.get raises it when 'address' is absent; confirm
        exc = TaurusConfigError('Graphite client requires address parameter')
        self.address = self.config.get("address", exc)
        self.timeout = int(dehumanize_time(self.config.get("timeout", "5s")))
        self.interval = int(dehumanize_time(self.config.get("interval", "5s")))     # interval for client
        self._cached_data = None
        self.url = self._get_url()  # depends on self.address and self.interval set above

        if label:
            self.host_label = label
        else:
            self.host_label = self.address
Example #17
0
 def __init__(self, parent_logger, label, config):
     """
     Graphite client: reads polling settings from config and builds the query URL.

     :param parent_logger: parent logger to derive a child logger from
     :param label: display label; falls back to address when empty
     :param config: client configuration (address, interval, timeout)
     """
     super(GraphiteClient, self).__init__()
     self.log = parent_logger.getChild(self.__class__.__name__)
     self.config = config
     # NOTE(review): default is an exception instance — presumably config.get raises it when 'address' is absent; confirm
     self.address = self.config.get("address", ValueError("Address parameter required"))
     self.interval = int(dehumanize_time(self.config.get('interval', '5s')))
     self.url = self._get_url()  # depends on self.address and self.interval set above
     if label:
         self.host_label = label
     else:
         self.host_label = self.address
     self.start_time = None
     self.check_time = None
     self.timeout = int(dehumanize_time(self.config.get('timeout', '5s')))
Example #18
0
    def prepare(self):
        """
        Read aggregation options
        """
        super(ConsolidatingAggregator, self).prepare()

        # deduplicate and sort tracked percentiles
        percentiles = sorted(set(self.settings.get("percentiles", self.track_percentiles)))
        self.track_percentiles = percentiles
        self.settings['percentiles'] = percentiles

        self.ignored_labels = self.settings.get("ignore-labels", self.ignored_labels)
        self.generalize_labels = self.settings.get("generalize-labels", self.generalize_labels)

        self.min_buffer_len = dehumanize_time(self.settings.get("min-buffer-len", self.min_buffer_len))

        max_buffer_len = self.settings.get("max-buffer-len", self.max_buffer_len)
        try:
            self.max_buffer_len = dehumanize_time(max_buffer_len)
        except ValueError as verr:
            # max-buffer-len may legitimately be float('inf'), which dehumanize_time rejects
            if 'inf' in str(verr):
                self.max_buffer_len = max_buffer_len
            else:
                raise

        self.buffer_multiplier = self.settings.get("buffer-multiplier", self.buffer_multiplier)

        # pick the tracked percentile closest to the configured scale choice
        percentile = self.settings.get("buffer-scale-choice", 0.5)
        count = len(self.track_percentiles)
        if count == 1:
            self.buffer_scale_idx = str(float(self.track_percentiles[0]))
        if count > 1:
            positions = [i / (count - 1.0) for i in range(count)]
            distances = [abs(percentile - pos) for pos in positions]
            closest = distances.index(min(distances))
            self.buffer_scale_idx = str(float(self.track_percentiles[closest]))

        self.log.debug('Buffer scaling setup: percentile %s from %s selected',
                       self.buffer_scale_idx, self.track_percentiles)
Example #19
0
 def __init__(self, parent_logger, label, config):
     """
     Graphite client: reads polling settings from config and builds the query URL.

     :param parent_logger: parent logger to derive a child logger from
     :param label: display label; falls back to address when empty
     :param config: client configuration (address, interval, timeout)
     """
     super(GraphiteClient, self).__init__()
     self.log = parent_logger.getChild(self.__class__.__name__)
     self.config = config
     # NOTE(review): default is an exception instance — presumably config.get raises it when 'address' is absent; confirm
     self.address = self.config.get("address", ValueError("Address parameter required"))
     self.interval = int(dehumanize_time(self.config.get('interval', '5s')))
     self.url = self._get_url()  # depends on self.address and self.interval set above
     if label:
         self.host_label = label
     else:
         self.host_label = self.address
     self.start_time = None
     self.check_time = None
     self.timeout = int(dehumanize_time(self.config.get('timeout', '5s')))
Example #20
0
    def prepare(self):
        """
        Read options for uploading, check that they're sane
        """
        super(BlazeMeterUploader, self).prepare()
        # connection settings; defaults come from the client instance
        self.client.logger_limit = self.settings.get("request-logging-limit",
                                                     self.client.logger_limit)
        self.client.address = self.settings.get("address", self.client.address)
        self.client.data_address = self.settings.get("data-address",
                                                     self.client.data_address)
        self.client.timeout = dehumanize_time(
            self.settings.get("timeout", self.client.timeout))
        self.send_interval = dehumanize_time(
            self.settings.get("send-interval", self.send_interval))
        self.browser_open = self.settings.get("browser-open",
                                              self.browser_open)
        token = self.settings.get("token", "")
        if not token:
            self.log.warning(
                "No BlazeMeter API key provided, will upload anonymously")
        self.client.token = token

        # identifiers may be supplied externally through parameters; default to None
        self.client.active_session_id = self.parameters.get("session-id", None)
        self.client.test_id = self.parameters.get("test-id", None)
        self.client.user_id = self.parameters.get("user-id", None)
        self.client.data_signature = self.parameters.get("signature", None)

        if not self.client.test_id:
            try:
                self.client.ping()  # to check connectivity and auth
            except HTTPError:
                self.log.error(
                    "Cannot reach online results storage, maybe the address/token is wrong"
                )
                raise

            if token:
                # resolve test id via project finder only when authenticated
                finder = ProjectFinder(self.parameters, self.settings,
                                       self.client, self.engine)
                self.test_id = finder.resolve_test_id({"type": "external"},
                                                      self.engine.config, [])

        # parameters take precedence over settings for the report name
        self.sess_name = self.parameters.get(
            "report-name", self.settings.get("report-name", self.sess_name))
        if self.sess_name == 'ask' and sys.stdin.isatty():
            self.sess_name = r_input("Please enter report-name: ")

        # subscribe to aggregated results if the engine provides them
        if isinstance(self.engine.aggregator, ResultsProvider):
            self.engine.aggregator.add_listener(self)
Example #21
0
    def startup(self):
        """
        Build the newman command line, set NODE_PATH, and start the subprocess.
        """
        script_dir = get_full_path(self.script, step_up=1)
        script_file = os.path.basename(self.script)
        cmdline = [
            self.node_tool.executable,
            self.newman_tool.entrypoint,
            "run",
            script_file,
            "--reporters", "taurus",
            "--reporter-taurus-filename", self.report_file,
            "--suppress-exit-code",
            "--insecure",
        ]

        scenario = self.get_scenario()

        # newman expects milliseconds for both timeouts and delays
        timeout = scenario.get('timeout', None)
        if timeout is not None:
            cmdline += ["--timeout-request", str(int(dehumanize_time(timeout) * 1000))]

        think = scenario.get('think-time', None)
        if think is not None:
            cmdline += ["--delay-request", str(int(dehumanize_time(think) * 1000))]

        for section in ("globals", "environment"):
            cmdline += self._dump_vars(section)

        load = self.get_load()
        if load.iterations:
            cmdline += ['--iteration-count', str(load.iterations)]

        # TODO: allow running several collections like directory, see https://github.com/postmanlabs/newman/issues/871
        # TODO: support hold-for, probably by having own runner
        # if load.hold:
        #    cmdline += ['--hold-for', str(load.hold)]

        # make our taurus reporter resolvable by node
        resources = os.path.join(os.path.dirname(__file__), "..", "resources")
        self.env["NODE_PATH"] = self.newman_tool.get_node_path_envvar() + os.pathsep + resources

        self._start_subprocess(cmdline, cwd=script_dir)
Example #22
0
    def _get_exec(self):
        """
        Generate the Gatling exec() chain (Scala source) for scenario requests.

        :return: generated Scala code as a string
        """
        exec_str = ''
        for req in self.scenario.get_requests():
            if len(exec_str) > 0:
                exec_str += '.'

            # with a default-address configured, request URLs are used as-is
            if len(self.scenario.get('default-address')) > 0:
                url = req.url
            else:
                url = self.fixed_addr(req.url)

            exec_template = 'exec(\n\t\t\thttp("%(req_label)s").%(method)s("%(url)s")\n'
            exec_str += exec_template % {'req_label': req.label, 'method': req.method.lower(), 'url': url}

            for key in req.headers:
                exec_str += '\t\t\t\t.header("%(key)s", "%(val)s")\n' % {'key': key, 'val': req.headers[key]}

            if req.body is not None:
                if isinstance(req.body, str):
                    # BUGFIX: template had four opening quotes (""""), which injected a stray
                    # quote character into the generated Scala triple-quoted string
                    exec_str += '\t\t\t\t.body(%(method)s("""%(body)s"""))\n'
                    exec_str = exec_str % {'method': 'StringBody', 'body': req.body}
                else:
                    self.log.warning('Only string and file are supported body content, "%s" ignored' % str(req.body))

            exec_str += self.__get_assertions(req.config.get('assert', []))

            if req.think_time is None:
                think_time = 0
            else:
                think_time = int(dehumanize_time(req.think_time))
            exec_str += '\t\t).pause(%(think_time)s)' % {'think_time': think_time}

        return exec_str
Example #23
0
 def startup(self):
     """
     Record provisioning start time and compute per-executor launch delay.
     """
     self.start_time = time.time()
     for executor in self.executors:
         shift = self._get_start_shift(executor.execution.get('start-at', ''))
         pause = dehumanize_time(executor.execution.get('delay', 0))
         executor.delay = pause + shift
         self.log.debug("Delay setup: %s(start-at) + %s(delay) = %s", shift, pause, executor.delay)
Example #24
0
    def __gen_sessions(self, scenario):
        """
        Build the Tsung <sessions> XML element from scenario requests.

        :return: etree element
        """
        sessions = etree.Element("sessions")
        session = etree.Element("session", name="taurus_requests", probability="100", type="ts_http")

        for request in scenario.get_requests():
            http_elem = etree.Element("http", url=request.url, method=request.method, version="1.1")
            if request.body:
                http_elem.set('contents', request.body)

            # scenario-level headers first, request-level headers override
            headers = {}
            headers.update(scenario.data.get('headers', {}))
            headers.update(request.headers)
            for header_name, header_value in iteritems(headers):
                http_elem.append(etree.Element("http_header", name=header_name, value=header_value))

            request_elem = etree.Element("request")
            request_elem.append(http_elem)
            session.append(request_elem)

            if request.think_time is not None:
                pause = int(dehumanize_time(request.think_time))
                session.append(etree.Element("thinktime", value=str(pause), random="false"))

        sessions.append(session)
        return sessions
Example #25
0
    def __gen_sessions(self, scenario):
        """
        Build the Tsung <sessions> XML tree for the scenario.

        :return: etree element
        """
        sessions = etree.Element("sessions")
        session = etree.Element("session", name="taurus_requests", probability="100", type="ts_http")
        for request in scenario.get_requests():
            if not isinstance(request, HTTPRequest):
                msg = "Tsung config generator doesn't support '%s' blocks, skipping"
                self.log.warning(msg, request.NAME)
                continue

            http_elem = etree.Element("http", url=request.url, method=request.method, version="1.1")
            if request.body:
                http_elem.set('contents', request.body)

            # merge scenario headers with request headers (request wins); deep-copied to avoid mutation
            headers = copy.deepcopy(scenario.get_headers())
            headers.update(copy.deepcopy(request.headers))
            for header_name, header_value in iteritems(headers):
                http_elem.append(etree.Element("http_header", name=header_name, value=header_value))

            request_elem = etree.Element("request")
            request_elem.append(http_elem)
            session.append(request_elem)

            if request.think_time is not None:
                pause = int(dehumanize_time(request.think_time))
                session.append(etree.Element("thinktime", value=str(pause), random="false"))
        sessions.append(session)
        return sessions
Example #26
0
    def _parse_request(self, req):
        """Convert one raw request config dict into a typed request object.

        Dispatches on marker keys ('if', 'once', 'loop', 'while', 'foreach',
        transaction, 'include-scenario', 'action', 'set-variables'); any
        dict without a marker key is treated as a plain HTTP request.

        NOTE(review): `req.get(key, TaurusConfigError(...))` appears to rely
        on a project-specific dict whose get() raises an exception-valued
        default when the key is missing — confirm against the config class.

        :param req: request configuration dict
        :return: one of the Block/Request objects built below
        :raises TaurusConfigError: on malformed 'foreach' or 'action' specs
        """
        if 'if' in req:
            condition = req.get("if")

            # TODO: apply some checks to `condition`?
            then_clause = req.get("then", TaurusConfigError("'then' clause is mandatory for 'if' blocks"))
            then_requests = self._parse_requests(then_clause)
            else_clause = req.get("else", [])
            else_requests = self._parse_requests(else_clause)
            return IfBlock(condition, then_requests, else_requests, req)
        elif 'once' in req:
            do_block = req.get("once", TaurusConfigError("operation list is mandatory for 'once' blocks"))
            do_requests = self._parse_requests(do_block)
            return OnceBlock(do_requests, req)
        elif 'loop' in req:
            loops = req.get("loop")
            do_block = req.get("do", TaurusConfigError("'do' option is mandatory for 'loop' blocks"))
            do_requests = self._parse_requests(do_block)
            return LoopBlock(loops, do_requests, req)
        elif 'while' in req:
            condition = req.get("while")
            do_block = req.get("do", TaurusConfigError("'do' option is mandatory for 'while' blocks"))
            do_requests = self._parse_requests(do_block)
            return WhileBlock(condition, do_requests, req)
        elif 'foreach' in req:
            # expected format: "<elementName> in <collection>"
            iteration_str = req.get("foreach")
            match = re.match(r'(.+) in (.+)', iteration_str)
            if not match:
                msg = "'foreach' value should be in format '<elementName> in <collection>' but '%s' found"
                raise TaurusConfigError(msg % iteration_str)
            loop_var, input_var = match.groups()
            do_block = req.get("do", TaurusConfigError("'do' field is mandatory for 'foreach' blocks"))
            do_requests = self._parse_requests(do_block)
            return ForEachBlock(input_var, loop_var, do_requests, req)
        elif TransactionBlock.NAME in req:
            name = req.get(TransactionBlock.NAME)
            do_block = req.get('do', TaurusConfigError("'do' field is mandatory for transaction blocks"))
            do_requests = self._parse_requests(do_block)
            include_timers = req.get('include-timers')
            return TransactionBlock(name, do_requests, include_timers, req, self.scenario)
        elif 'include-scenario' in req:
            name = req.get('include-scenario')
            return IncludeScenarioBlock(name, req)
        elif 'action' in req:
            # validate the closed set of action/target values before building
            action = req.get('action')
            if action not in ('pause', 'stop', 'stop-now', 'continue'):
                raise TaurusConfigError("Action should be either 'pause', 'stop', 'stop-now' or 'continue'")
            target = req.get('target', 'current-thread')
            if target not in ('current-thread', 'all-threads'):
                msg = "Target for action should be either 'current-thread' or 'all-threads' but '%s' found"
                raise TaurusConfigError(msg % target)
            duration = req.get('pause-duration', None)
            if duration is not None:
                duration = dehumanize_time(duration)
            return ActionBlock(action, target, duration, req)
        elif 'set-variables' in req:
            mapping = req.get('set-variables')
            return SetVariables(mapping, req)
        else:
            return HierarchicHTTPRequest(req, self.scenario, self.engine)
Example #27
0
    def startup(self):
        """Build the locust wrapper command line from the load spec and
        spawn the process, capturing its stdout/stderr into an artifact.
        """
        self.start_time = time.time()
        load = self.get_load()
        concurrency = load.concurrency or 1
        if load.ramp_up:
            # hatch rate so that full concurrency is reached by end of ramp-up
            hatch = concurrency / float(load.ramp_up)
        else:
            hatch = concurrency

        wrapper = os.path.join(get_full_path(__file__, step_up=2), "resources", "locustio-taurus-wrapper.py")

        # wrapper script must see engine artifacts and the user's cwd on PYTHONPATH
        self.env.add_path({"PYTHONPATH": self.engine.artifacts_dir})
        self.env.add_path({"PYTHONPATH": os.getcwd()})
        self.env.set({"LOCUST_DURATION": dehumanize_time(load.duration)})

        self.log_file = self.engine.create_artifact("locust", ".log")
        args = [sys.executable, wrapper, '-f', self.script]
        args += ['--logfile=%s' % self.log_file]
        args += ["--no-web", "--only-summary", ]
        args += ["--clients=%d" % concurrency, "--hatch-rate=%f" % hatch]
        if load.iterations:
            args.append("--num-request=%d" % load.iterations)

        if self.is_master:
            args.extend(["--master", '--expect-slaves=%s' % self.expected_slaves])

        host = self.get_scenario().get("default-address")
        if host:
            args.append('--host=%s' % host)

        self.__out = open(self.engine.create_artifact("locust", ".out"), 'w')
        self.process = self.execute(args, stderr=STDOUT, stdout=self.__out)
Example #28
0
    def prepare(self):
        """Set up cloud provisioning: configure the BlazeMeter client from
        settings, validate and weight requested locations per executor,
        then register the test on the server and hook up results reading.

        :raises ValueError: when no API token is configured or an unknown
            location is requested
        """
        super(CloudProvisioning, self).prepare()
        self.browser_open = self.settings.get("browser-open", self.browser_open)

        # TODO: go to "blazemeter" section for these settings by default?
        self.client.address = self.settings.get("address", self.client.address)
        self.client.token = self.settings.get("token", self.client.token)
        self.client.timeout = dehumanize_time(self.settings.get("timeout", self.client.timeout))

        if not self.client.token:
            raise ValueError("You must provide API token to use cloud provisioning")

        user_info = self.client.get_user_info()
        available_locations = {str(x['id']): x for x in user_info['locations']}

        for executor in self.executors:
            locations = self._get_locations(available_locations, executor)

            # every requested location must exist in the user's account
            for location in locations.keys():
                if location not in available_locations:
                    self.log.warning("List of supported locations for you is: %s", sorted(available_locations.keys()))
                    raise ValueError("Invalid location requested: %s" % location)

            if executor.parameters.get("locations-weighted", True):
                self.weight_locations(locations, executor.get_load(), available_locations)

        config = self.__get_config_for_cloud()
        rfiles = self.__get_rfiles()
        bza_plugin = self.__get_bza_test_config()
        test_name = self.settings.get("test-name", "Taurus Test")
        self.test_id = self.client.test_by_name(test_name, bza_plugin, config, rfiles, None)  # FIXME: set project id

        if isinstance(self.engine.aggregator, ConsolidatingAggregator):
            self.results_reader = ResultsFromBZA(self.client)
            self.engine.aggregator.add_underling(self.results_reader)
Example #29
0
    def prepare(self):
        """Configure the BlazeMeter client from settings, then create the
        cloud test from the engine config plus executors' resource files,
        storing the resulting test id.

        :raises ValueError: when no API token is configured
        """
        super(CloudProvisioning, self).prepare()

        # TODO: go to "blazemeter" section for these settings by default
        self.client.address = self.settings.get("address", self.client.address)
        self.client.token = self.settings.get("token", self.client.token)
        timeout_setting = self.settings.get("timeout", self.client.timeout)
        self.client.timeout = dehumanize_time(timeout_setting)

        if not self.client.token:
            raise ValueError("You must provide API token to use cloud provisioning")

        # ship the whole config minus the provisioning section itself
        cloud_config = copy.deepcopy(self.engine.config)
        cloud_config.pop(Provisioning.PROV)

        bza_plugin = {
            "type": "taurus",
            "plugins": {
                "taurus": {
                    "filename": ""  # without this line it does not work
                }
            }
        }

        test_name = self.settings.get("test-name", "Taurus Test")
        resource_files = [rfile
                          for executor in self.executors
                          for rfile in executor.get_resource_files()]

        self.test_id = self.client.test_by_name(test_name, bza_plugin, cloud_config,
                                                resource_files, None)  # FIXME: set project id
Example #30
0
File: ab.py Project: vijayj/taurus
    def startup(self):
        """Assemble the ApacheBench (ab) command line from load spec and
        scenario, validate the single GET request, and spawn the process.

        :raises TaurusConfigError: when no requests are defined, no HTTP
            request is present, or the request method is not GET
        """
        args = [self.tool_path]
        load = self.get_load()
        # ab needs concrete numbers; default both to 1 when unspecified
        load_iterations = load.iterations if load.iterations is not None else 1
        load_concurrency = load.concurrency if load.concurrency is not None else 1

        if load.hold:
            hold = int(ceil(dehumanize_time(load.hold)))
            args += ['-t', str(hold)]
        else:
            args += ['-n', str(load_iterations * load_concurrency)
                     ]  # ab waits for total number of iterations

        args += ['-c', str(load_concurrency)]
        args += [
            '-d'
        ]  # do not print 'Processed *00 requests' every 100 requests or so
        args += ['-r']  # do not crash on socket level errors
        args += ['-l']  # accept variable-len responses
        args += ['-g', str(self._tsv_file)]  # dump stats to TSV file

        # add global scenario headers
        for key, val in iteritems(self.scenario.get_headers()):
            args += ['-H', "%s: %s" % (key, val)]

        requests = self.scenario.get_requests()
        if not requests:
            raise TaurusConfigError(
                "You must specify at least one request for ab")
        if len(requests) > 1:
            self.log.warning(
                "ab doesn't support multiple requests. Only first one will be used."
            )
        request = self.__first_http_request()
        if request is None:
            raise TaurusConfigError(
                "ab supports only HTTP requests, while scenario doesn't have any"
            )

        # add request-specific headers
        # NOTE(review): this double loop implies request.headers is a list of
        # dicts here, while elsewhere in this file request.headers is iterated
        # as a flat dict — verify which shape this executor's requests use
        for header in request.headers:
            for key, val in iteritems(header):
                args += ['-H', "%s: %s" % (key, val)]

        if request.method != 'GET':
            raise TaurusConfigError(
                "ab supports only GET requests, but '%s' is found" %
                request.method)

        if request.priority_option('keepalive', default=True):
            args += ['-k']

        args += [request.url]

        self.reader.setup(load_concurrency, request.label)

        self.start_time = time.time()
        self.process = self.execute(args,
                                    stdout=self.stdout_file,
                                    stderr=self.stderr_file)
Example #31
0
    def __parse_request(self, req):
        """Convert one raw request config dict into a typed request object.

        Dispatches on marker keys ('if', 'once', 'loop', 'while', 'foreach',
        'transaction', 'include-scenario', 'action', 'set-variables'); any
        dict without a marker key is treated as a plain HTTP request.

        NOTE(review): `req.get(key, TaurusConfigError(...))` appears to rely
        on a project-specific dict whose get() raises an exception-valued
        default when the key is missing — confirm against the config class.

        :param req: request configuration dict
        :return: one of the Block/Request objects built below
        :raises TaurusConfigError: on malformed 'foreach' or 'action' specs
        """
        if 'if' in req:
            condition = req.get("if")

            # TODO: apply some checks to `condition`?
            then_clause = req.get("then", TaurusConfigError("'then' clause is mandatory for 'if' blocks"))
            then_requests = self.__parse_requests(then_clause)
            else_clause = req.get("else", [])
            else_requests = self.__parse_requests(else_clause)
            return IfBlock(condition, then_requests, else_requests, req)
        elif 'once' in req:
            do_block = req.get("once", TaurusConfigError("operation list is mandatory for 'once' blocks"))
            do_requests = self.__parse_requests(do_block)
            return OnceBlock(do_requests, req)
        elif 'loop' in req:
            loops = req.get("loop")
            do_block = req.get("do", TaurusConfigError("'do' option is mandatory for 'loop' blocks"))
            do_requests = self.__parse_requests(do_block)
            return LoopBlock(loops, do_requests, req)
        elif 'while' in req:
            condition = req.get("while")
            do_block = req.get("do", TaurusConfigError("'do' option is mandatory for 'while' blocks"))
            do_requests = self.__parse_requests(do_block)
            return WhileBlock(condition, do_requests, req)
        elif 'foreach' in req:
            # expected format: "<elementName> in <collection>"
            iteration_str = req.get("foreach")
            match = re.match(r'(.+) in (.+)', iteration_str)
            if not match:
                msg = "'foreach' value should be in format '<elementName> in <collection>' but '%s' found"
                raise TaurusConfigError(msg % iteration_str)
            loop_var, input_var = match.groups()
            do_block = req.get("do", TaurusConfigError("'do' field is mandatory for 'foreach' blocks"))
            do_requests = self.__parse_requests(do_block)
            return ForEachBlock(input_var, loop_var, do_requests, req)
        elif 'transaction' in req:
            name = req.get('transaction')
            do_block = req.get('do', TaurusConfigError("'do' field is mandatory for transaction blocks"))
            do_requests = self.__parse_requests(do_block)
            include_timers = req.get('include-timers')
            return TransactionBlock(name, do_requests, include_timers, req, self.scenario)
        elif 'include-scenario' in req:
            name = req.get('include-scenario')
            return IncludeScenarioBlock(name, req)
        elif 'action' in req:
            # validate the closed set of action/target values before building
            action = req.get('action')
            if action not in ('pause', 'stop', 'stop-now', 'continue'):
                raise TaurusConfigError("Action should be either 'pause', 'stop', 'stop-now' or 'continue'")
            target = req.get('target', 'current-thread')
            if target not in ('current-thread', 'all-threads'):
                msg = "Target for action should be either 'current-thread' or 'all-threads' but '%s' found"
                raise TaurusConfigError(msg % target)
            duration = req.get('pause-duration', None)
            if duration is not None:
                duration = dehumanize_time(duration)
            return ActionBlock(action, target, duration, req)
        elif 'set-variables' in req:
            mapping = req.get('set-variables')
            return SetVariables(mapping, req)
        else:
            return HierarchicHTTPRequest(req, self.scenario, self.engine)
Example #32
0
    def startup(self):
        """Assemble the molotov command line from load spec, execution
        options and settings, then launch the tool process."""
        load = self.get_load()
        args = [self.molotov.tool_path]

        if load.concurrency:
            args.append('--workers')
            args.append(str(load.concurrency))

        if 'processes' in self.execution:
            args.append('--processes')
            args.append(str(self.execution['processes']))

        if load.ramp_up:
            args.extend(['--ramp-up', str(int(ceil(load.ramp_up)))])

        args.extend(['--duration', str(int(ceil(load.hold)))])

        delay = self.get_scenario().get("think-time", None)
        if delay:
            args.extend(['--delay', str(dehumanize_time(delay))])

        # user-supplied extra flags are appended verbatim
        extra_cmdline = self.settings.get("cmdline")
        if extra_cmdline:
            args.extend(extra_cmdline.split(" "))

        args.append('--use-extension=bzt.resources.molotov_ext')
        args.append(self.get_script_path(required=True))

        self.env.set({"MOLOTOV_TAURUS_REPORT": self.report_file_name})
        self.env.add_path({"PYTHONPATH": get_full_path(__file__, step_up=3)})

        self.process = self._execute(args)
Example #33
0
    def __get_params_for_scala(self):
        """Collect Gatling JVM system properties from settings, scenario
        and load spec, rendered as a ' -Dkey=value' command-line string."""
        props = self.settings.get('properties')
        load = self.get_load()
        scenario = self.get_scenario()

        timeout = scenario.get('timeout', None)
        if timeout is not None:
            props['gatling.http.ahc.requestTimeout'] = int(dehumanize_time(timeout) * 1000)

        # keep-alive knobs: gatling <= 2.2.0 reads the allowPooling* pair,
        # newer versions read the single keepAlive flag — set all three
        keepalive = 'true' if scenario.get('keepalive', True) else 'false'
        props['gatling.http.ahc.allowPoolingConnections'] = keepalive
        props['gatling.http.ahc.allowPoolingSslConnections'] = keepalive
        props['gatling.http.ahc.keepAlive'] = keepalive

        if load.concurrency is not None:
            props['concurrency'] = load.concurrency
        if load.ramp_up is not None:
            props['ramp-up'] = int(load.ramp_up)
        if load.hold is not None:
            props['hold-for'] = int(load.hold)
        if load.iterations is not None and load.iterations != 0:
            props['iterations'] = int(load.iterations)
        if load.throughput:
            if load.duration:
                props['throughput'] = load.throughput
            else:
                self.log.warning("You should set up 'ramp-up' and/or 'hold-for' for usage of 'throughput'")

        return ''.join(" -D%s=%s" % (key, props[key]) for key in props)
Example #34
0
    def _get_exec(self):
        """Generate the chained Gatling 'exec(http(...))' Scala expression
        for all scenario requests.

        Each request contributes its method/URL, headers, an optional
        StringBody, assertions, and a trailing .pause(think_time).

        NOTE(review): applying '%' to the whole accumulated string means a
        literal '%' in earlier URLs/headers/bodies would break formatting —
        pre-existing fragility, left as-is.
        """
        exec_str = ''
        for req in self.scenario.get_requests():
            if len(exec_str) > 0:
                exec_str += '.'

            if len(self.scenario.get('default-address')) > 0:
                url = req.url
            else:
                url = self.fixed_addr(req.url)

            exec_template = 'exec(\n\t\t\thttp("%(req_label)s").%(method)s("%(url)s")\n'
            exec_str += exec_template % {'req_label': req.label, 'method': req.method.lower(), 'url': url}

            for key in req.headers:
                exec_str += '\t\t\t\t.header("%(key)s", "%(val)s")\n' % {'key': key, 'val': req.headers[key]}

            if req.body is not None:
                if isinstance(req.body, str):
                    # Scala triple-quoted string for the body; the previous
                    # template had four opening quotes ('""""'), which injected
                    # a stray '"' before the body in the generated script
                    exec_str += '\t\t\t\t.body(%(method)s("""%(body)s"""))\n'
                    exec_str = exec_str % {'method': 'StringBody', 'body': req.body}
                else:
                    self.log.warning('Only string and file are supported body content, "%s" ignored' % str(req.body))

            exec_str += self.__get_assertions(req.config.get('assert', []))

            if req.think_time is None:
                think_time = 0
            else:
                think_time = int(dehumanize_time(req.think_time))
            exec_str += '\t\t).pause(%(think_time)s)' % {'think_time': think_time}

        return exec_str
Example #35
0
    def prepare(self):
        """Configure the BlazeMeter client from settings, then create the
        cloud test (engine config plus executors' resource files) and
        remember its id.

        :raises ValueError: when no API token is configured
        """
        super(CloudProvisioning, self).prepare()

        # TODO: go to "blazemeter" section for these settings by default
        self.client.address = self.settings.get("address", self.client.address)
        self.client.token = self.settings.get("token", self.client.token)
        self.client.timeout = dehumanize_time(
            self.settings.get("timeout", self.client.timeout))

        if not self.client.token:
            raise ValueError(
                "You must provide API token to use cloud provisioning")

        # ship the whole config minus the provisioning section itself
        config = copy.deepcopy(self.engine.config)
        config.pop(Provisioning.PROV)

        bza_plugin = {
            "type": "taurus",
            "plugins": {
                "taurus": {
                    "filename": ""  # without this line it does not work
                }
            }
        }

        test_name = self.settings.get("test-name", "Taurus Test")

        rfiles = []
        for executor in self.executors:
            rfiles += executor.get_resource_files()

        self.test_id = self.client.test_by_name(test_name, bza_plugin, config,
                                                rfiles,
                                                None)  # FIXME: set project id
Example #36
0
    def __gen_options(self, scenario):
        """Build the <options> element of the Tsung config: global
        thinktime, connect timeout (in ms) and max_retries."""
        opts = etree.Element("options")

        think_time_setting = scenario.get_think_time()
        if think_time_setting:
            pause = str(int(dehumanize_time(think_time_setting)))
            opts.append(etree.Element("option", name="thinktime", value=pause, random="false"))

        timeout_setting = scenario.get("timeout")
        if timeout_setting:
            # Tsung expects connect_timeout in milliseconds
            millis = str(int(dehumanize_time(timeout_setting) * 1000))
            opts.append(etree.Element("option", name="connect_timeout", value=millis))

        retries = scenario.get("max-retries", 1)
        opts.append(etree.Element("option", name="max_retries", value=str(retries)))
        return opts
Example #37
0
    def __gen_sessions(self, scenario):
        """Build the <sessions> XML tree for the Tsung config.

        Creates one <session> named "taurus_requests" (probability 100,
        type ts_http); each HTTP request from the scenario becomes a
        <request>/<http> pair with merged headers, optionally followed by
        a fixed <thinktime> element. Non-HTTP blocks are skipped with a
        warning.
        """
        sessions = etree.Element("sessions")
        session = etree.Element("session", name="taurus_requests", probability="100", type="ts_http")
        for request in scenario.get_requests():
            if not isinstance(request, HTTPRequest):
                msg = "Tsung config generator doesn't support '%s' blocks, skipping"
                self.log.warning(msg, request.NAME)
                continue

            request_elem = etree.Element("request")
            http_elem = etree.Element("http", url=request.url, method=request.method, version="1.1")
            if request.body:
                http_elem.set('contents', request.body)

            # request-level headers take precedence over scenario-level ones
            headers = copy.deepcopy(scenario.get_headers())
            headers.update(copy.deepcopy(request.headers))
            for header_name, header_value in iteritems(headers):
                http_elem.append(etree.Element("http_header", name=header_name, value=header_value))

            request_elem.append(http_elem)
            session.append(request_elem)
            if request.get_think_time():
                think_time = int(dehumanize_time(request.get_think_time()))
                session.append(etree.Element("thinktime", value=str(think_time), random="false"))
        sessions.append(session)
        return sessions
Example #38
0
    def gen_runner_class(self):
        """Generate the Grinder TestRunner class definition.

        Emits one request.<METHOD>() statement per scenario request plus an
        optional grinder.sleep() for think-time; a request-level think-time
        wins over the scenario-level 'think-time' value.

        :return: generated class definition node
        """
        runner_classdef = self.gen_class_definition("TestRunner", ["object"],
                                                    indent=0)
        main_method = self.gen_method_definition("__call__", ["self"],
                                                 indent=4)

        global_think_time = self.scenario.get('think-time', None)

        for req in self.scenario.get_requests():
            method = req.method.upper()
            url = req.url
            # NOTE(review): assumes dehumanize_time yields a falsy value when
            # neither think-time is set — confirm its handling of None
            think_time = dehumanize_time(req.think_time or global_think_time)
            local_headers = req.config.get("headers", {})

            params = "[]"
            headers = self.__list_to_nvpair_list(iteritems(local_headers))

            main_method.append(
                self.gen_statement("request.%s(%r, %s, %s)" %
                                   (method, url, params, headers),
                                   indent=8))

            if think_time:
                # grinder.sleep() takes milliseconds
                main_method.append(
                    self.gen_statement("grinder.sleep(%s)" %
                                       int(think_time * 1000),
                                       indent=8))

        runner_classdef.append(main_method)

        return runner_classdef
Example #39
0
    def startup(self):
        """Build the locust wrapper command line from the load spec and
        spawn the process, capturing its output into an artifact."""
        self.start_time = time.time()
        load = self.get_load()
        clients = load.concurrency or 1
        # spawn rate so that full concurrency is reached by end of ramp-up
        hatch_rate = clients / float(load.ramp_up) if load.ramp_up else clients

        wrapper = os.path.join(get_full_path(__file__, step_up=2), "resources", "locustio-taurus-wrapper.py")

        self.env.add_path({"PYTHONPATH": self.engine.artifacts_dir})
        self.env.add_path({"PYTHONPATH": os.getcwd()})
        self.env.set({"LOCUST_DURATION": dehumanize_time(load.duration)})

        self.log_file = self.engine.create_artifact("locust", ".log")
        cmdline = [sys.executable, wrapper, '-f', self.script]
        cmdline.append('--logfile=%s' % self.log_file)
        cmdline.extend(["--no-web", "--only-summary"])
        cmdline.append("--clients=%d" % clients)
        cmdline.append("--hatch-rate=%f" % hatch_rate)
        if load.iterations:
            cmdline.append("--num-request=%d" % load.iterations)

        if self.is_master:
            cmdline.append("--master")
            cmdline.append('--expect-slaves=%s' % self.expected_slaves)

        target_host = self.get_scenario().get("default-address")
        if target_host:
            cmdline.append('--host=%s' % target_host)

        self.__out = open(self.engine.create_artifact("locust", ".out"), 'w')
        self.process = self.execute(cmdline, stderr=STDOUT, stdout=self.__out)
Example #40
0
    def build_source_code(self):
        """Generate the Grinder Python script: imports, HTTPRequest/Test
        setup, connection defaults (headers, timeout, cookies), and the
        TestRunner class."""
        self.log.debug("Generating Python script for Grinder")
        self.root.append(self.gen_comment("This script was generated by Taurus", indent=0))
        self.root.append(self.add_imports())

        self.root.append(self.gen_new_line(indent=0))

        # bind the HTTPRequest to the scenario's default address, if any
        default_address = self.scenario.get("default-address", "")
        url_arg = "url=%r" % default_address if default_address else ""
        self.root.append(self.gen_statement('request = HTTPRequest(%s)' % url_arg, indent=0))
        self.root.append(self.gen_statement('test = Test(1, "BZT Requests")', indent=0))
        self.root.append(self.gen_statement('test.record(request)', indent=0))

        self.root.append(self.gen_new_line(indent=0))

        self.root.append(self.gen_statement("defaults = HTTPPluginControl.getConnectionDefaults()", indent=0))
        self.root.append(self.gen_statement("utilities = HTTPPluginControl.getHTTPUtilities()", indent=0))

        # scenario-level headers become Grinder connection defaults
        headers = self.scenario.get_headers()
        if headers:
            self.root.append(self.gen_statement("defaults.setDefaultHeaders([", indent=0))
            for header, value in iteritems(headers):
                self.root.append(self.gen_statement("NVPair(%r, %r)," % (header, value), indent=4))
            self.root.append(self.gen_statement("])", indent=0))

        # Grinder's setTimeout() takes milliseconds
        global_timeout = dehumanize_time(self.scenario.get("timeout", None))
        if global_timeout:
            self.root.append(self.gen_statement("defaults.setTimeout(%s)" % int(global_timeout * 1000), indent=0))

        cookie_flag = int(self.scenario.get("store-cookie", True))
        self.root.append(self.gen_statement("defaults.setUseCookies(%s)" % cookie_flag, indent=0))

        self.root.append(self.gen_new_line(indent=0))

        self.root.append(self.gen_runner_class())
Example #41
0
    def smart_time(any_time):  # FIXME: bad name for the function, does not reflect what it does
        """Convert a human-readable duration to integer milliseconds; if it
        cannot be parsed, pass the original value through unchanged."""
        try:
            return int(1000 * dehumanize_time(any_time))
        except TaurusInternalException:
            return any_time
Example #42
0
    def test_locust_fractional_hatch_rate(self):
        """Verify that a fractional hatch rate (concurrency / ramp-up) is
        passed to locust with full float precision, not truncated."""
        if six.PY3:
            # only logs; the test still proceeds on py3
            logging.warning("No locust available for python 3")

        test_concurrency, test_ramp_up = 4, "60s"
        expected_hatch_rate = test_concurrency / dehumanize_time(test_ramp_up)

        self.obj.execution.merge({
            "concurrency": test_concurrency,
            "ramp-up": test_ramp_up,
            "iterations": 10,
            "hold-for": 30,
            "scenario": {
                "default-address": "http://blazedemo.com",
                "script": RESOURCES_DIR + "locust/simple.py"
            }
        })

        self.obj.prepare()
        with mock.patch('bzt.modules.locustio.LocustIOExecutor.execute') as m:
            self.obj.startup()
            # Extract the hatch-rate cmdline arg that bzt passed to locust.
            hatch = [
                x.split('=')[1] for x in m.call_args[0][0]
                if x.startswith("--hatch-rate")
            ]
            self.assertEqual(hatch[0], "%f" % expected_hatch_rate)
Example #43
0
    def __init__(self, parent_log, label, config, engine):
        """
        ServerAgent monitoring client: parses "host[:port]" address and the
        metrics list from config, prepares the polling command and socket.

        :type parent_log: logging.Logger
        :type config: dict
        """
        super(ServerAgentClient, self).__init__(parent_log, engine)
        self.host_label = label
        exc = TaurusConfigError('ServerAgent client requires address parameter')
        self.address = config.get("address", exc)
        if ':' in self.address:
            # split optional ":port" suffix off the address
            self.port = int(self.address[self.address.index(":") + 1:])
            self.address = self.address[:self.address.index(":")]
        else:
            self.port = 4444  # ServerAgent default port

        self._partial_buffer = ""
        exc = TaurusConfigError('ServerAgent client requires metrics list')
        metrics = config.get('metrics', exc)

        # TODO: handle more complex metric specifications and labeling
        self._result_fields = list(metrics)  # idiomatic copy instead of [x for x in metrics]

        self._metrics_command = "\t".join(metrics)
        self.socket = socket.socket()
        self.select = select.select

        # interval for server (ServerAgent)
        self.interval = int(dehumanize_time(config.get("interval", "1s")))
Example #44
0
    def smart_time(any_time):  # FIXME: bad name for the function, does not reflect what it does
        """Convert a human-readable duration to integer milliseconds; if it
        cannot be parsed, return the input unchanged."""
        try:
            smart_time = int(1000 * dehumanize_time(any_time))
        except TaurusInternalException:
            smart_time = any_time

        return smart_time
Example #45
0
    def __init__(self, parent_log, label, config, engine):
        """
        ServerAgent monitoring client: parses "host[:port]" address and the
        metrics list from config, prepares the polling command and socket.

        :type parent_log: logging.Logger
        :type config: dict
        """
        super(ServerAgentClient, self).__init__(parent_log, engine)
        self.host_label = label
        exc = TaurusConfigError('ServerAgent client requires address parameter')
        self.address = config.get("address", exc)
        if ':' in self.address:
            # split optional ":port" suffix off the address
            self.port = int(self.address[self.address.index(":") + 1:])
            self.address = self.address[:self.address.index(":")]
        else:
            self.port = 4444  # ServerAgent default port

        self._partial_buffer = ""
        exc = TaurusConfigError('ServerAgent client requires metrics list')
        metrics = config.get('metrics', exc)

        # TODO: handle more complex metric specifications and labeling
        self._result_fields = [x for x in metrics]

        self._metrics_command = "\t".join([x for x in metrics])
        self.socket = socket.socket()
        self.select = select.select

        # interval for server (ServerAgent)
        self.interval = int(dehumanize_time(config.get("interval", "1s")))
Example #46
0
 def gen_setup(self, test_method):
     """Append implicit-wait setup statements to *test_method*.

     Falls back to a 30s timeout when the scenario does not define one.
     """
     raw_timeout = self.scenario.get("timeout", None)
     if raw_timeout is None:
         raw_timeout = '30s'
     test_method.append(self.gen_impl_wait(dehumanize_time(raw_timeout)))
     test_method.append(self.gen_new_line(indent=0))
Example #47
0
    def generate_config(self, scenario, load, hostaliases):
        """Render the pbench config file from the bundled template.

        Creates the kpi/stats/config artifacts, resolves the target host
        address (honoring hostaliases), and substitutes all parameters into
        the 'pbench.conf' template.

        :param scenario: scenario settings (timeout etc.)
        :param load: load spec (concurrency used as instance count)
        :param hostaliases: mapping of hostname aliases to addresses
        """
        self.kpi_file = self.engine.create_artifact("pbench-kpi", ".txt")
        self.stats_file = self.engine.create_artifact("pbench-additional",
                                                      ".ldjson")
        self.config_file = self.engine.create_artifact('pbench', '.conf')

        # template shipped with bzt resources
        conf_path = os.path.join(
            os.path.abspath(os.path.dirname(resources.__file__)),
            'pbench.conf')
        with open(conf_path) as _fhd:
            tpl = _fhd.read()

        instances = load.concurrency if load.concurrency else 1

        # pbench expects the timeout in milliseconds
        timeout = int(dehumanize_time(scenario.get("timeout", "10s")) * 1000)

        # leave one CPU for the rest of the system, unless overridden
        threads = 1 if psutil.cpu_count() < 2 else (psutil.cpu_count() - 1)
        threads = int(self.execution.get("worker-threads", threads))

        if self.hostname in hostaliases:
            address = hostaliases[self.hostname]
        else:
            address = socket.gethostbyname(self.hostname)
        params = {
            "modules_path":
            self.modules_path,
            "threads":
            threads,
            "log":
            self.engine.create_artifact("pbench", ".log"),
            "kpi_file":
            self.kpi_file,
            "full_log":
            self.engine.create_artifact("pbench-request-response", ".txt"),
            "full_log_level":
            self.execution.get("log-responses",
                               "proto_warning"),  # proto_error, all
            "source":
            self._get_source(load),
            "ssl":
            self.SSL_STR if self.use_ssl else "",
            "reply_limits":
            "",  # TODO
            "address":
            address,
            "port":
            self.port,
            "timeout":
            timeout,
            "instances":
            instances,
            "stat_log":
            self.stats_file,
            "additional_modules":
            self._get_additional_modules()
        }

        with open(self.config_file, 'w') as _fhd:
            substituter = string.Template(tpl)
            _fhd.write(substituter.substitute(params))
Example #48
0
    def _extract_named_args(self, req):
        """
        Build the keyword-argument dict for the generated HTTP call of *req*.

        :raises TaurusConfigError: if the request body type cannot be handled
        """
        named_args = OrderedDict()

        if req.timeout is not None:
            named_args['timeout'] = dehumanize_time(req.timeout)
        if req.follow_redirects is not None:
            named_args['allow_redirects'] = req.priority_option('follow-redirects', default=True)

        # scenario-level headers first; request-level headers override them
        headers = dict(self.scenario.get("headers", None) or {})
        if req.headers:
            headers.update(req.headers)
        if headers:
            named_args['headers'] = self.gen_expr(headers)

        lowered_headers = {key.lower(): value for key, value in iteritems(headers)}
        content_type = lowered_headers.get('content-type', None)

        body = req.body
        if content_type == 'application/json' and isinstance(body, (dict, list)):
            named_args['json'] = self.gen_expr(body)  # json request body
        elif req.method.lower() == "get" and isinstance(body, dict):
            named_args['params'] = self.gen_expr(body)  # request URL params (?a=b&c=d)
        elif isinstance(body, dict):
            named_args['data'] = self.gen_expr(list(iteritems(body)))  # form data
        elif isinstance(body, string_types):
            named_args['data'] = self.gen_expr(body)
        elif body:
            msg = "Cannot handle 'body' option of type %s: %s"
            raise TaurusConfigError(msg % (type(body), body))

        return named_args
Example #49
0
    def gen_runner_class(self):
        """Generate the Grinder TestRunner class with one request statement per scenario entry."""
        class_def = self.gen_class_definition("TestRunner", ["object"], indent=0)
        call_method = self.gen_method_definition("__call__", ["self"], indent=4)

        for request in self.scenario.get_requests():
            if not isinstance(request, HTTPRequest):
                self.log.warning(
                    "Grinder script generator doesn't support '%s' blocks, skipping",
                    request.NAME)
                continue

            headers_expr = self.__list_to_nvpair_list(iteritems(request.headers))
            stmt = "request.%s(%r, %s, %s)" % (
                request.method.upper(), request.url, "[]", headers_expr)
            call_method.append(self.gen_statement(stmt, indent=8))

            pause = dehumanize_time(request.priority_option('think-time'))
            if pause:
                # grinder.sleep() takes milliseconds
                call_method.append(self.gen_statement(
                    "grinder.sleep(%s)" % int(pause * 1000), indent=8))

        class_def.append(call_method)
        return class_def
Example #50
0
    def __gen_options(self, scenario):
        """Build the <options> XML element from scenario-level settings."""
        options_elem = etree.Element("options")

        think_time_spec = scenario.data.get('think-time', None)
        if think_time_spec:
            think_time = int(dehumanize_time(think_time_spec))
            options_elem.append(etree.Element(
                "option", name="thinktime", value=str(think_time), random="false"))

        timeout_spec = scenario.data.get('timeout')
        if timeout_spec:
            # tool expects connect timeout in milliseconds
            timeout_ms = int(dehumanize_time(timeout_spec)) * 1000
            options_elem.append(etree.Element(
                "option", name="connect_timeout", value=str(timeout_ms)))

        max_retries = scenario.data.get('max-retries', 1)
        options_elem.append(etree.Element(
            "option", name="max_retries", value=str(max_retries)))
        return options_elem
Example #51
0
    def test_locust_fractional_hatch_rate(self):
        """A hatch rate below 1.0 must be passed to locust as a float, not truncated."""
        test_concurrency, test_ramp_up = 4, "60s"
        expected_hatch_rate = test_concurrency / dehumanize_time(test_ramp_up)

        self.configure({"execution": {
            "concurrency": test_concurrency,
            "ramp-up": test_ramp_up,
            "iterations": 10,
            "hold-for": 30,
            "scenario": {
                "default-address": "http://blazedemo.com",
                "script": RESOURCES_DIR + "locust/simple.py"
            }
        }})

        self.obj.prepare()
        with mock.patch('bzt.modules.locustio.LocustIOExecutor._execute') as m:
            self.obj.startup()
            # Extract the hatch-rate cmdline arg that bzt passed to locust.
            cmdline = m.call_args[0][0]
            hatch = [arg.split('=')[1] for arg in cmdline
                     if arg.startswith("--hatch-rate")]
            self.assertEqual(hatch[0], "%f" % expected_hatch_rate)
            self.obj.shutdown()
        self.obj.post_process()
Example #52
0
    def connect(self):
        """
        Validate configured metric names and set up the local monitor.

        :raises TaurusConfigError: if no metrics are configured, or none of the
            configured names is in AVAILABLE_METRICS
        """
        exc = TaurusConfigError(
            'Metric is required in Local monitoring client')
        metric_names = self.config.get('metrics', exc)

        bad_list = set(metric_names) - set(self.AVAILABLE_METRICS)
        if bad_list:
            self.log.warning('Wrong metrics found: %s', bad_list)

        good_list = set(metric_names) & set(self.AVAILABLE_METRICS)
        if not good_list:
            raise exc

        # good_list is already a set, no need to re-wrap it in set()
        self.metrics = list(good_list)

        self.monitor = LocalMonitor(self.log, self.metrics, self.engine)
        self.interval = dehumanize_time(
            self.config.get("interval", self.engine.check_interval))

        if self.config.get("logging", False):
            if not PY3:
                self.log.warning("Logging option doesn't work on python2.")
            else:
                self.logs_file = self.engine.create_artifact(
                    "local_monitoring_logs", ".csv")
                with open(self.logs_file, "a", newline='') as mon_logs:
                    logs_writer = csv.writer(mon_logs, delimiter=',')
                    # header row: timestamp column, then metric names in stable order
                    logs_writer.writerow(['ts'] + sorted(good_list))
Example #53
0
    def safe_time(any_time):
        """Convert *any_time* to integer milliseconds; return it unchanged when unparseable."""
        try:
            return int(1000 * dehumanize_time(any_time))
        except TaurusInternalException:
            return any_time
Example #54
0
    def safe_time(any_time):
        """
        Best-effort conversion of a time spec into integer milliseconds.

        Falls back to the raw value when dehumanize_time() cannot parse it.
        """
        try:
            millis = dehumanize_time(any_time) * 1000
            result = int(millis)
        except TaurusInternalException:
            result = any_time
        return result
Example #55
0
    def _get_exec(self):
        """
        Build the chained Gatling exec(...) expression for all scenario requests.

        :return: Scala source fragment as a string
        """
        exec_str = ''
        for req in self.scenario.get_requests():
            if not isinstance(req, HTTPRequest):
                msg = "Gatling simulation generator doesn't support '%s' blocks, skipping"
                self.log.warning(msg, req.NAME)
                continue

            if len(exec_str) > 0:
                exec_str += '.'

            default_address = self.scenario.get("default-address")
            if default_address:
                url = req.url
            else:
                url = self.fixed_addr(req.url)

            exec_template = 'exec(\n' + self.indent(
                'http("%(req_label)s").%(method)s("%(url)s")', level=2) + '\n'
            exec_str += exec_template % {
                'req_label': req.label,
                'method': req.method.lower(),
                'url': url
            }

            for key in req.headers:
                exec_str += self.indent('.header("%(key)s", "%(val)s")\n' % {
                    'key': key,
                    'val': req.headers[key]
                },
                                        level=3)

            if req.body is not None:
                if isinstance(req.body, string_types):
                    # Substitute into the body template ONLY. The old code applied
                    # '%' to the whole accumulated exec_str, which crashes on any
                    # literal '%' already present in previous URLs/headers/bodies.
                    body_tpl = self.indent(
                        '.body(%(method)s("""%(body)s"""))\n', level=3)
                    exec_str += body_tpl % {
                        'method': 'StringBody',
                        'body': req.body
                    }
                else:
                    # lazy %-args for logging instead of eager interpolation
                    self.log.warning(
                        'Only string and file are supported body content, "%s" ignored',
                        str(req.body))

            exec_str += self.__get_assertions(req.config.get('assert', []))

            if not req.priority_option('follow-redirects', default=True):
                exec_str += self.indent('.disableFollowRedirect\n', level=3)

            exec_str += self.indent(')', level=1)

            think_time = int(dehumanize_time(
                req.priority_option('think-time')))
            if think_time:
                exec_str += '.pause(%(think_time)s)' % {
                    'think_time': think_time
                }

        return exec_str
Example #56
0
    def gen_action(self, action_config):
        """
        Translate a single scenario action entry into a Selenium statement.

        :raises TaurusConfigError: when a 'wait' action has no scenario timeout
        :raises TaurusInternalException: for unrecognized action types
        """
        aby, atype, param, selector = self._parse_action(action_config)

        by_names = {
            'byxpath': "XPATH",
            'bycss': "CSS_SELECTOR",
            'byname': "NAME",
            'byid': "ID",
            'bylinktext': "LINK_TEXT"
        }

        if atype == 'click' or atype == 'keys':
            call = "click()" if atype == 'click' else "send_keys(%r)" % param
            stmt = "self.driver.find_element(By.%s, %r).%s" % (by_names[aby], selector, call)
            return self.gen_statement(stmt)

        if atype == 'wait':
            mode = "visibility" if param == 'visible' else 'presence'
            exc = TaurusConfigError(
                "wait action requires timeout in scenario: \n%s" % self.scenario)
            timeout = dehumanize_time(self.scenario.get("timeout", exc))
            errmsg = "Element %r failed to appear within %ss" % (selector, timeout)
            tpl = "WebDriverWait(self.driver, %s).until(econd.%s_of_element_located((By.%s, %r)), %r)"
            return self.gen_statement(tpl % (timeout, mode, by_names[aby], selector, errmsg))

        raise TaurusInternalException("Could not build code for action: %s" %
                                      action_config)
Example #57
0
    def gen_runner_class(self):
        """Generate the Grinder TestRunner class, including its ramp-up sleeper method."""
        class_def = self.gen_class_definition("TestRunner", ["object"], indent=0)

        # rampUpSleeper: delay each thread proportionally to its number on first run
        sleeper = self.gen_method_definition("rampUpSleeper", ["self"], indent=4)
        sleeper_lines = [
            "if grinder.runNumber != 0: return",
            "tprops = grinder.properties.getPropertySubset('taurus.')",
            "inc = tprops.getDouble('ramp_up', 0)/tprops.getInt('concurrency', 1)",
            "sleep_time = int(1000 * grinder.threadNumber * inc)",
            "grinder.sleep(sleep_time, 0)",
            "if sleep_time: grinder.logger.info('slept for %sms' % sleep_time)",
            "else: grinder.logger.info('No sleep needed')",
        ]
        for line in sleeper_lines:
            sleeper.append(self.gen_statement(line))
        sleeper.append(self.gen_new_line(indent=0))
        class_def.append(sleeper)

        call_method = self.gen_method_definition("__call__", ["self"], indent=4)
        call_method.append(self.gen_statement("self.rampUpSleeper()"))

        for request in self.scenario.get_requests():
            if not isinstance(request, HTTPRequest):
                self.log.warning(
                    "Grinder script generator doesn't support '%s' blocks, skipping",
                    request.NAME)
                continue

            headers_expr = self.__list_to_nvpair_list(iteritems(request.headers))
            stmt = "request.%s(%r, %s, %s)" % (
                request.method.upper(), request.url, "[]", headers_expr)
            call_method.append(self.gen_statement(stmt, indent=8))

            pause = dehumanize_time(request.priority_option('think-time'))
            if pause:
                # grinder.sleep() takes milliseconds
                call_method.append(self.gen_statement(
                    "grinder.sleep(%s)" % int(pause * 1000), indent=8))

        class_def.append(call_method)
        return class_def
Example #58
0
    def prepare(self):
        """Read loader options from execution/settings and try to create the reader."""
        self.data_file = self.execution.get("data-file", self.data_file)
        self._data_file_pattern = self.execution.get("data-file-pattern", self._data_file_pattern)
        assert self._data_file_pattern or self.data_file, "Option is required: data-file or data-file-pattern"
        self.label = self.data_file

        self.errors_file = self.execution.get("errors-jtl", None)
        if self.errors_file:
            self.errors_file = self.engine.find_file(self.errors_file)

        # execution-level options win over module-level settings
        wait_spec = self.execution.get("wait-for-file", self.settings.get("wait-for-file", self._file_exists_wait))
        self._file_exists_wait = dehumanize_time(wait_spec)

        timeout_spec = self.execution.get("results-timeout", self.settings.get("results-timeout", self._result_timeout))
        self._result_timeout = dehumanize_time(timeout_spec)

        self._file_check_ts = time.time()
        self._try_make_reader()
Example #59
0
    def prepare(self):
        """Read loader options (superclass does the parsing) and try to create the reader."""
        super(ExternalResultsLoader, self).prepare()
        self._read_options()
        assert self._data_file_pattern or self.data_file, "Option is required: data-file or data-file-pattern"
        self.label = self.data_file
        if self.errors_file:
            self.errors_file = self.engine.find_file(self.errors_file)

        wait_spec = self.execution.get("wait-for-file", self.settings.get("wait-for-file", self._file_exists_wait))
        self._file_exists_wait = dehumanize_time(wait_spec)

        # default results timeout scales with the engine's check interval
        default_timeout = self.engine.check_interval * 10
        timeout_spec = self.execution.get("results-timeout", self.settings.get("results-timeout", default_timeout))
        self._result_timeout = dehumanize_time(timeout_spec)

        self._file_check_ts = time.time()
        self._try_make_reader()