Example #1
    def _extract_rest_request(self, test_step):
        label = test_step.get('name')
        config = test_step.find('./con:config', namespaces=self.NAMESPACES)
        method = config.get('method')

        method_name = config.get('methodName')
        method_obj = self.interface.find('.//con:method[@name="%s"]' % method_name, namespaces=self.NAMESPACES)
        params = BetterDict()
        if method_obj is not None:
            parent = method_obj.getparent()
            while parent.tag.endswith('resource'):
                for param in parent.findall('./con:parameters/con:parameter', namespaces=self.NAMESPACES):
                    param_name = param.findtext('./con:name', namespaces=self.NAMESPACES)
                    param_value = param.findtext('./con:value', namespaces=self.NAMESPACES)
                    def_value = param.findtext('./con:default', namespaces=self.NAMESPACES)
                    if param_value:
                        params[param_name] = param_value
                    elif def_value:
                        params[param_name] = def_value

                parent = parent.getparent()

        url = self._calc_base_address(test_step) + config.get('resourcePath')
        headers = self._extract_headers(config)
        assertions = self._extract_assertions(config)

        params.merge({
            entry.get("key"): entry.get("value")
            for entry in config.findall('./con:restRequest/con:parameters/con:entry', namespaces=self.NAMESPACES)
        })

        for param_name in list(params.keys()):  # snapshot keys: params is mutated in the loop
            template = "{" + param_name + "}"
            if template in url:
                param_value = params.pop(param_name)
                url = url.replace(template, param_value)

        request = {"url": url, "label": label}

        if method is not None and method != "GET":
            request["method"] = method

        if headers:
            request["headers"] = headers

        if assertions:
            request["assert"] = assertions

        body = dict(params)

        if body:
            request["body"] = body

        return request
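
To make the template-substitution step near the end concrete: any parameter whose name matches a `{name}` placeholder in the resource path is spliced into the URL and removed from `params`, and whatever remains becomes the request body. A standalone sketch with a plain dict (the URL and parameter names below are made up for illustration):

params = {"petId": "42", "verbose": "true"}
url = "http://example.com/pet/{petId}"

for param_name in list(params):  # snapshot keys: params is mutated in the loop
    template = "{" + param_name + "}"
    if template in url:
        url = url.replace(template, params.pop(param_name))

print(url)     # http://example.com/pet/42
print(params)  # {'verbose': 'true'} -- leftovers end up in the request body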
Example #2
class ConsolidatingAggregator(EngineModule, ResultsProvider):
    """

    :type underlings: list[bzt.modules.aggregator.ResultsProvider]
    """
    # FIXME: it was oscillating with remote test of 100 servers
    def __init__(self):
        EngineModule.__init__(self)
        ResultsProvider.__init__(self)
        self.generalize_labels = True
        self.ignored_labels = []
        self.underlings = []
        self.buffer = BetterDict()
        self.buffer_len = 2

    def prepare(self):
        """
        Read aggregation options
        """
        super(ConsolidatingAggregator, self).prepare()
        self.track_percentiles = self.settings.get("percentiles", self.track_percentiles)
        self.buffer_len = self.settings.get("buffer-seconds", self.buffer_len)
        self.ignored_labels = self.settings.get("ignore-labels", self.ignored_labels)
        self.generalize_labels = self.settings.get("generalize-labels", self.generalize_labels)

    def add_underling(self, underling):
        """
        Add source for aggregating

        :type underling: ResultsProvider
        """
        underling.track_percentiles = self.track_percentiles
        if isinstance(underling, ResultsReader):
            underling.ignored_labels = self.ignored_labels
            underling.generalize_labels = self.generalize_labels
            # underling.buffer_len = self.buffer_len  # NOTE: is it ok for underling to have the same buffer len?
        self.underlings.append(underling)

    def check(self):
        """
        Check whether there is more aggregate data to process

        :rtype: bool
        """
        for point in self.datapoints():
            self.log.debug("Processed datapoint: %s/%s", point[DataPoint.TIMESTAMP], point[DataPoint.SOURCE_ID])
        return super(ConsolidatingAggregator, self).check()

    def post_process(self):
        """
        Process all remaining aggregate data
        """
        super(ConsolidatingAggregator, self).post_process()
        for point in self.datapoints(True):
            self.log.debug("Processed datapoint: %s/%s", point[DataPoint.TIMESTAMP], point[DataPoint.SOURCE_ID])

    def _process_underlings(self, final_pass):
        for underling in self.underlings:
            for data in underling.datapoints(final_pass):
                tstamp = data[DataPoint.TIMESTAMP]
                if self.buffer:
                    mints = min(self.buffer.keys())
                    if tstamp < mints:
                        self.log.warning("Putting datapoint %s into %s", tstamp, mints)
                        data[DataPoint.TIMESTAMP] = mints
                        tstamp = mints
                self.buffer.get(tstamp, []).append(data)

    def _calculate_datapoints(self, final_pass=False):
        """
        Override ResultsProvider._calculate_datapoints

        """
        self._process_underlings(final_pass)

        self.log.debug("Consolidator buffer[%s]: %s", len(self.buffer), self.buffer.keys())
        if not self.buffer:
            return

        timestamps = sorted(self.buffer.keys())
        while timestamps and (final_pass or (timestamps[-1] >= timestamps[0] + self.buffer_len)):
            tstamp = timestamps.pop(0)
            self.log.debug("Merging into %s", tstamp)
            points_to_consolidate = self.buffer.pop(tstamp)
            point = DataPoint(tstamp, self.track_percentiles)
            for subresult in points_to_consolidate:
                self.log.debug("Merging %s", subresult[DataPoint.TIMESTAMP])
                point.merge_point(subresult)
            point.recalculate()
            yield point
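
The interplay of `_process_underlings` and `_calculate_datapoints` above can be modelled without any bzt machinery: datapoints are bucketed by timestamp, anything arriving earlier than the oldest buffered second is clamped forward onto it, and a bucket is flushed only once the newest buffered timestamp is at least `buffer_len` seconds ahead (or unconditionally on the final pass). A simplified, self-contained sketch with plain dicts and strings standing in for `BetterDict` and `DataPoint`:

buffer = {}
buffer_len = 2

def feed(tstamp, data):
    # clamp datapoints older than anything buffered, like _process_underlings
    if buffer and tstamp < min(buffer):
        tstamp = min(buffer)
    buffer.setdefault(tstamp, []).append(data)

def flush(final_pass=False):
    timestamps = sorted(buffer)
    while timestamps and (final_pass or timestamps[-1] >= timestamps[0] + buffer_len):
        tstamp = timestamps.pop(0)
        yield tstamp, buffer.pop(tstamp)  # the real code merges these into one DataPoint

for second, sample in [(10, "a"), (10, "b"), (11, "c"), (12, "d"), (9, "late")]:
    feed(second, sample)

print(list(flush()))                 # [(10, ['a', 'b', 'late'])] -- 12 >= 10 + 2
print(list(flush(final_pass=True)))  # [(11, ['c']), (12, ['d'])] flushed at shutdown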
Example #3
class ConsolidatingAggregator(Aggregator, ResultsProvider):
    """

    :type underlings: list[bzt.modules.aggregator.ResultsProvider]
    """

    # TODO: switch to underling-count-based completeness criteria
    def __init__(self):
        Aggregator.__init__(self, is_functional=False)
        ResultsProvider.__init__(self)
        self.generalize_labels = False
        self.ignored_labels = ["ignore"]
        self.underlings = []
        self.buffer = BetterDict()
        self.rtimes_len = 1000

    def prepare(self):
        """
        Read aggregation options
        """
        super(ConsolidatingAggregator, self).prepare()

        # make unique & sort
        self.track_percentiles = self.settings.get("percentiles",
                                                   self.track_percentiles)
        self.track_percentiles = list(set(self.track_percentiles))
        self.track_percentiles.sort()
        self.settings["percentiles"] = self.track_percentiles

        self.ignored_labels = self.settings.get("ignore-labels",
                                                self.ignored_labels)
        self.generalize_labels = self.settings.get("generalize-labels",
                                                   self.generalize_labels)

        self.min_buffer_len = dehumanize_time(
            self.settings.get("min-buffer-len", self.min_buffer_len))

        max_buffer_len = self.settings.get("max-buffer-len",
                                           self.max_buffer_len)
        try:
            self.max_buffer_len = dehumanize_time(max_buffer_len)
        except TaurusInternalException as exc:
            self.log.debug("Exception in dehumanize_time(%s): %s",
                           max_buffer_len, exc)
            raise TaurusConfigError("Wrong 'max-buffer-len' value: %s" %
                                    max_buffer_len)

        self.buffer_multiplier = self.settings.get("buffer-multiplier",
                                                   self.buffer_multiplier)

        count = len(self.track_percentiles)
        if count == 1:
            self.buffer_scale_idx = str(float(self.track_percentiles[0]))
        if count > 1:
            percentile = self.settings.get("buffer-scale-choice", 0.5)
            percentiles = [i / (count - 1.0) for i in range(count)]
            distances = [
                abs(percentile - percentiles[i]) for i in range(count)
            ]
            index_position = distances.index(min(distances))
            self.buffer_scale_idx = str(
                float(self.track_percentiles[index_position]))

        debug_str = 'Buffer scaling setup: percentile %s from %s selected'
        self.log.debug(debug_str, self.buffer_scale_idx,
                       self.track_percentiles)
        self.rtimes_len = self.settings.get("rtimes-len", self.rtimes_len)

    def add_underling(self, underling):
        """
        Add source for aggregating

        :type underling: ResultsProvider
        """
        underling.track_percentiles = self.track_percentiles
        if isinstance(underling, ResultsReader):
            underling.ignored_labels = self.ignored_labels
            underling.generalize_labels = self.generalize_labels
            underling.min_buffer_len = self.min_buffer_len
            underling.max_buffer_len = self.max_buffer_len
            underling.buffer_multiplier = self.buffer_multiplier
            underling.buffer_scale_idx = self.buffer_scale_idx
            underling.rtimes_len = self.rtimes_len

        self.underlings.append(underling)

    def check(self):
        """
        Check whether there is more aggregate data to process

        :rtype: bool
        """
        for point in self.datapoints():
            self.log.debug("Processed datapoint: %s/%s",
                           point[DataPoint.TIMESTAMP],
                           point[DataPoint.SOURCE_ID])
        return super(ConsolidatingAggregator, self).check()

    def post_process(self):
        """
        Process all remaining aggregate data
        """
        super(ConsolidatingAggregator, self).post_process()
        for point in self.datapoints(True):
            self.log.debug("Processed datapoint: %s/%s",
                           point[DataPoint.TIMESTAMP],
                           point[DataPoint.SOURCE_ID])

    def _process_underlings(self, final_pass):
        for underling in self.underlings:
            for data in underling.datapoints(final_pass):
                tstamp = data[DataPoint.TIMESTAMP]
                if self.buffer:
                    mints = min(self.buffer.keys())
                    if tstamp < mints:
                        self.log.debug("Putting datapoint %s into %s", tstamp,
                                       mints)
                        data[DataPoint.TIMESTAMP] = mints
                        tstamp = mints
                self.buffer.get(tstamp, [], force_set=True).append(data)

    def _calculate_datapoints(self, final_pass=False):
        """
        Override ResultsProvider._calculate_datapoints

        """
        self._process_underlings(final_pass)

        self.log.debug("Consolidator buffer[%s]: %s", len(self.buffer),
                       self.buffer.keys())
        if not self.buffer:
            return

        timestamps = sorted(self.buffer.keys())
        while timestamps and (final_pass or
                              timestamps[-1] >= timestamps[0] + self.buffer_len):
            tstamp = timestamps.pop(0)
            self.log.debug("Merging into %s", tstamp)
            points_to_consolidate = self.buffer.pop(tstamp)
            point = DataPoint(tstamp, self.track_percentiles)
            for subresult in points_to_consolidate:
                self.log.debug("Merging %s", subresult[DataPoint.TIMESTAMP])
                point.merge_point(subresult)
            point.recalculate()
            yield point
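
One non-obvious piece above is `buffer-scale-choice`: it is a position in `[0, 1]` along the sorted list of tracked percentiles, not a percentile itself, and `prepare()` selects the tracked percentile whose normalized index lies closest to that position. A standalone sketch with illustrative values:

track_percentiles = [0.0, 50.0, 90.0, 95.0, 99.0]
choice = 0.5  # the "buffer-scale-choice" setting, default 0.5

count = len(track_percentiles)
positions = [i / (count - 1.0) for i in range(count)]  # [0.0, 0.25, 0.5, 0.75, 1.0]
distances = [abs(choice - pos) for pos in positions]
index = distances.index(min(distances))

print(track_percentiles[index])  # 90.0 -- the middle entry of the tracked list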
Example #4
class ConsolidatingAggregator(EngineModule, ResultsProvider):
    """

    :type underlings: list[bzt.modules.aggregator.ResultsProvider]
    """

    # TODO: switch to underling-count-based completeness criteria
    def __init__(self):
        EngineModule.__init__(self)
        ResultsProvider.__init__(self)
        self.generalize_labels = False
        self.ignored_labels = []
        self.underlings = []
        self.buffer = BetterDict()

    def prepare(self):
        """
        Read aggregation options
        """
        super(ConsolidatingAggregator, self).prepare()

        # make unique & sort
        percentiles = self.settings.get("percentiles", self.track_percentiles)
        percentiles = list(set(percentiles))
        percentiles.sort()
        self.track_percentiles = percentiles
        self.settings['percentiles'] = percentiles

        self.ignored_labels = self.settings.get("ignore-labels", self.ignored_labels)
        self.generalize_labels = self.settings.get("generalize-labels", self.generalize_labels)

        self.min_buffer_len = dehumanize_time(self.settings.get("min-buffer-len", self.min_buffer_len))

        max_buffer_len = self.settings.get("max-buffer-len", self.max_buffer_len)
        try:  # for max_buffer_len == float('inf')
            self.max_buffer_len = dehumanize_time(max_buffer_len)
        except ValueError as verr:
            if 'inf' in str(verr):
                self.max_buffer_len = max_buffer_len
            else:
                raise

        self.buffer_multiplier = self.settings.get("buffer-multiplier", self.buffer_multiplier)

        percentile = self.settings.get("buffer-scale-choice", 0.5)
        count = len(self.track_percentiles)
        if count == 1:
            self.buffer_scale_idx = str(float(self.track_percentiles[0]))
        if count > 1:
            percentiles = [i / (count - 1.0) for i in range(count)]
            distances = [abs(percentile - percentiles[i]) for i in range(count)]
            index_position = distances.index(min(distances))
            self.buffer_scale_idx = str(float(self.track_percentiles[index_position]))

        debug_str = 'Buffer scaling setup: percentile %s from %s selected'
        self.log.debug(debug_str, self.buffer_scale_idx, self.track_percentiles)

    def add_underling(self, underling):
        """
        Add source for aggregating

        :type underling: ResultsProvider
        """
        underling.track_percentiles = self.track_percentiles
        if isinstance(underling, ResultsReader):
            underling.ignored_labels = self.ignored_labels
            underling.generalize_labels = self.generalize_labels
            underling.min_buffer_len = self.min_buffer_len
            underling.max_buffer_len = self.max_buffer_len
            underling.buffer_multiplier = self.buffer_multiplier
            underling.buffer_scale_idx = self.buffer_scale_idx

        self.underlings.append(underling)

    def check(self):
        """
        Check whether there is more aggregate data to process

        :rtype: bool
        """
        for point in self.datapoints():
            self.log.debug("Processed datapoint: %s/%s", point[DataPoint.TIMESTAMP], point[DataPoint.SOURCE_ID])
        return super(ConsolidatingAggregator, self).check()

    def post_process(self):
        """
        Process all remaining aggregate data
        """
        super(ConsolidatingAggregator, self).post_process()
        for point in self.datapoints(True):
            self.log.debug("Processed datapoint: %s/%s", point[DataPoint.TIMESTAMP], point[DataPoint.SOURCE_ID])

    def _process_underlings(self, final_pass):
        for underling in self.underlings:
            for data in underling.datapoints(final_pass):
                tstamp = data[DataPoint.TIMESTAMP]
                if self.buffer:
                    mints = min(self.buffer.keys())
                    if tstamp < mints:
                        self.log.warning("Putting datapoint %s into %s", tstamp, mints)
                        data[DataPoint.TIMESTAMP] = mints
                        tstamp = mints
                self.buffer.get(tstamp, []).append(data)

    def _calculate_datapoints(self, final_pass=False):
        """
        Override ResultsProvider._calculate_datapoints

        """
        self._process_underlings(final_pass)

        self.log.debug("Consolidator buffer[%s]: %s", len(self.buffer), self.buffer.keys())
        if not self.buffer:
            return

        timestamps = sorted(self.buffer.keys())
        while timestamps and (final_pass or (timestamps[-1] >= timestamps[0] + self.buffer_len)):
            tstamp = timestamps.pop(0)
            self.log.debug("Merging into %s", tstamp)
            points_to_consolidate = self.buffer.pop(tstamp)
            point = DataPoint(tstamp, self.track_percentiles)
            for subresult in points_to_consolidate:
                self.log.debug("Merging %s", subresult[DataPoint.TIMESTAMP])
                point.merge_point(subresult)
            point.recalculate()
            yield point
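
A detail worth noting in this version's `prepare()`: `max-buffer-len` may legitimately be `float('inf')` (no upper bound), which `dehumanize_time` cannot parse, so the `except` clause inspects the error message and keeps the raw value in that one case. A self-contained sketch of the pattern, with a hypothetical `parse_duration` standing in for bzt's `dehumanize_time`:

import re

def parse_duration(value):
    # hypothetical stand-in: accepts plain seconds only, and like
    # dehumanize_time raises a ValueError that names the bad value
    if not re.fullmatch(r"\d+(\.\d+)?", str(value)):
        raise ValueError("Cannot parse duration: %s" % value)
    return float(value)

for raw in ("30", float("inf")):
    try:
        max_buffer_len = parse_duration(raw)
    except ValueError as verr:
        if 'inf' in str(verr):  # float('inf') means "no limit"
            max_buffer_len = raw
        else:
            raise
    print(max_buffer_len)  # 30.0, then inf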
Example #5
class ConsolidatingAggregator(EngineModule, ResultsProvider):
    """

    :type underlings: list[bzt.modules.aggregator.ResultsProvider]
    """

    # TODO: switch to underling-count-based completeness criteria
    def __init__(self):
        EngineModule.__init__(self)
        ResultsProvider.__init__(self)
        self.generalize_labels = False
        self.ignored_labels = []
        self.underlings = []
        self.buffer = BetterDict()
        self.buffer_len = 2

    def prepare(self):
        """
        Read aggregation options
        """
        super(ConsolidatingAggregator, self).prepare()
        self.track_percentiles = self.settings.get("percentiles",
                                                   self.track_percentiles)
        self.buffer_len = self.settings.get("buffer-seconds", self.buffer_len)
        self.ignored_labels = self.settings.get("ignore-labels",
                                                self.ignored_labels)
        self.generalize_labels = self.settings.get("generalize-labels",
                                                   self.generalize_labels)

    def add_underling(self, underling):
        """
        Add source for aggregating

        :type underling: ResultsProvider
        """
        underling.track_percentiles = self.track_percentiles
        if isinstance(underling, ResultsReader):
            underling.ignored_labels = self.ignored_labels
            underling.generalize_labels = self.generalize_labels
            # underling.buffer_len = self.buffer_len  # NOTE: is it ok for underling to have the same buffer len?
        self.underlings.append(underling)

    def check(self):
        """
        Check whether there is more aggregate data to process

        :rtype: bool
        """
        for point in self.datapoints():
            self.log.debug("Processed datapoint: %s/%s",
                           point[DataPoint.TIMESTAMP],
                           point[DataPoint.SOURCE_ID])
        return super(ConsolidatingAggregator, self).check()

    def post_process(self):
        """
        Process all remaining aggregate data
        """
        super(ConsolidatingAggregator, self).post_process()
        for point in self.datapoints(True):
            self.log.debug("Processed datapoint: %s/%s",
                           point[DataPoint.TIMESTAMP],
                           point[DataPoint.SOURCE_ID])

    def _process_underlings(self, final_pass):
        for underling in self.underlings:
            for data in underling.datapoints(final_pass):
                tstamp = data[DataPoint.TIMESTAMP]
                if self.buffer:
                    mints = min(self.buffer.keys())
                    if tstamp < mints:
                        self.log.warning("Putting datapoint %s into %s",
                                         tstamp, mints)
                        data[DataPoint.TIMESTAMP] = mints
                        tstamp = mints
                self.buffer.get(tstamp, []).append(data)

    def _calculate_datapoints(self, final_pass=False):
        """
        Override ResultsProvider._calculate_datapoints

        """
        self._process_underlings(final_pass)

        self.log.debug("Consolidator buffer[%s]: %s", len(self.buffer),
                       self.buffer.keys())
        if not self.buffer:
            return

        timestamps = sorted(self.buffer.keys())
        while timestamps and (final_pass or
                              timestamps[-1] >= timestamps[0] + self.buffer_len):
            tstamp = timestamps.pop(0)
            self.log.debug("Merging into %s", tstamp)
            points_to_consolidate = self.buffer.pop(tstamp)
            point = DataPoint(tstamp, self.track_percentiles)
            for subresult in points_to_consolidate:
                self.log.debug("Merging %s", subresult[DataPoint.TIMESTAMP])
                point.merge_point(subresult)
            point.recalculate()
            yield point