Example #1
    def process_reply(self, message):
        result = super(Iperf3Executor, self).process_reply(message)

        if not result['stdout']:
            raise base.ExecutorException(result, 'Empty result from iperf')

        data = json.loads(result['stdout'])

        # store verbose data in result
        result['verbose'] = yaml.safe_dump(dict(start=data['start'],
                                                end=data['end']),
                                           indent=2,
                                           default_flow_style=False)

        if 'error' in data:
            raise base.ExecutorException(result, data['error'])

        if self.test_definition.get('udp'):
            sampler = lambda p: [round(p['end'], 2), p['packets']]
            meta = [['time', 's'], ['packets', 'pps']]
        else:
            sampler = lambda p: [
                round(p['end'], 2), p['bits_per_second'], p['retransmits']
            ]
            meta = [['time', 's'], ['bandwidth', 'bit/s'], ['retransmits', '']]

        samples = []
        for point in data['intervals']:
            samples.append(sampler(point['sum']))

        result['samples'] = samples
        result['meta'] = meta
        return result
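For context, a minimal self-contained sketch of what the TCP branch above does with interval data. The input dict is fabricated in the general shape of iperf3's -J JSON output; the numbers are illustrative only.

    data = {
        'intervals': [
            {'sum': {'end': 1.000045, 'bits_per_second': 9.4e9, 'retransmits': 0}},
            {'sum': {'end': 2.000032, 'bits_per_second': 9.6e9, 'retransmits': 2}},
        ],
    }

    # Same shape as the TCP sampler above: [time, bandwidth, retransmits].
    sampler = lambda p: [round(p['end'], 2), p['bits_per_second'], p['retransmits']]
    samples = [sampler(point['sum']) for point in data['intervals']]
    print(samples)  # [[1.0, 9400000000.0, 0], [2.0, 9600000000.0, 2]]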
Example #2
    def process_reply(self, message):
        result = super(Iperf3Executor, self).process_reply(message)

        if not result['stdout']:
            raise base.ExecutorException(result, 'Empty result from iperf')

        data = json.loads(result['stdout'])

        # store verbose data in result
        result['verbose'] = yaml.safe_dump(dict(start=data['start'],
                                                end=data['end']),
                                           indent=2,
                                           default_flow_style=False)

        if 'error' in data:
            raise base.ExecutorException(result, data['error'])

        has_retransmits = False
        if (len(data['intervals']) > 0
                and 'retransmits' in data['intervals'][0]['sum']):
            has_retransmits = True

        if self.test_definition.get('udp'):
            sampler = lambda p: [round(p['end'], 2), p['packets']]
            meta = [['time', 's'], ['packets', 'pps']]
        elif has_retransmits:
            sampler = lambda p: [
                round(p['end'], 2), p['bits_per_second'], p['retransmits']
            ]
            meta = [['time', 's'], ['bandwidth', 'bit/s'], ['retransmits', '']]
        else:
            sampler = lambda p: [round(p['end'], 2), p['bits_per_second']]
            meta = [['time', 's'], ['bandwidth', 'bit/s']]

        samples = []
        for point in data['intervals']:
            samples.append(sampler(point['sum']))

        result['samples'] = samples
        result['meta'] = meta

        stats = result['stats'] = {}
        if utils.copy_value_by_path(data, 'end.sum.jitter_ms', stats,
                                    'jitter.avg'):
            utils.set_value_by_path(stats, 'jitter.unit', 'ms')

        if utils.copy_value_by_path(data, 'end.sum.lost_percent', stats,
                                    'loss.avg'):
            utils.set_value_by_path(stats, 'loss.unit', '%')

        return result
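The utils.copy_value_by_path and utils.set_value_by_path helpers are not shown here. Judging only from how they are called, a hypothetical stand-in with the same call shape could look like the sketch below; this is an assumption for illustration, not the project's actual implementation.

    def set_value_by_path(data, path, value):
        # Walk (and create) nested dicts along a dotted path, then set the leaf.
        keys = path.split('.')
        for key in keys[:-1]:
            data = data.setdefault(key, {})
        data[keys[-1]] = value

    def copy_value_by_path(src, src_path, dst, dst_path):
        # Copy the value at src_path into dst at dst_path; report whether it existed.
        node = src
        for key in src_path.split('.'):
            if not isinstance(node, dict) or key not in node:
                return False
            node = node[key]
        set_value_by_path(dst, dst_path, node)
        return True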
Example #3
    def process_reply(self, message):
        result = super(IperfGraphExecutor, self).process_reply(message)

        if not result['stdout']:
            raise base.ExecutorException(result, 'Empty result from iperf')

        samples = []
        threads_count = self.test_definition.get('threads') or 1

        for row in csv.reader(result['stdout'].split('\n')):
            if row and len(row) > 8:
                thread = row[5]
                if threads_count > 1 and thread != '-1':
                    # ignore individual per-thread data
                    continue

                start, end = row[6].split('-')
                samples.append([float(end), int(row[8])])

        if samples:
            samples.pop()  # the last line is summary, remove it

        result['samples'] = samples
        result['meta'] = [['time', 's'], ['bandwidth', 'bit/s']]
        return result
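The row indices used above follow iperf's CSV report layout (the -y C option): column 5 is the transfer/thread id, column 6 the interval and column 8 the bandwidth in bits per second. A quick illustration with a fabricated report line (addresses and numbers are made up):

    import csv

    # timestamp, local ip, local port, remote ip, remote port,
    # thread id, interval, transferred bytes, bits per second
    stdout = '20240101120000,10.0.0.1,5001,10.0.0.2,43210,3,0.0-1.0,1250000,10000000\n'

    for row in csv.reader(stdout.split('\n')):
        if row and len(row) > 8:
            start, end = row[6].split('-')
            print([float(end), int(row[8])])  # [1.0, 10000000]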
Example #4
    def process_reply(self, message):
        result = super(FlentExecutor, self).process_reply(message)

        stdout = result['stdout']
        if not stdout:
            raise base.ExecutorException(
                result,
                'Flent returned no data, stderr: %s' % result['stderr'])

        data = json.loads(stdout)

        series_meta = data['metadata']['SERIES_META']
        columns = sorted(series_meta.keys())
        meta = ([['time', 's']] + [[utils.strict(k), series_meta[k]['UNITS']]
                                   for k in columns])
        samples = []

        for i in range(len(data['x_values'])):
            line = [data['x_values'][i]]
            for el in columns:
                line.append(data['results'][el][i])
            samples.append(line)

        result['meta'] = meta
        result['samples'] = samples

        return result
Example #5
    def process_reply(self, message):
        result = super(FlentExecutor, self).process_reply(message)

        stdout = result['stdout']
        if not stdout:
            raise base.ExecutorException(result, 'Empty result from flent')

        data = json.loads(stdout)

        series_meta = data['metadata']['SERIES_META']
        columns = sorted(series_meta.keys())
        meta = ([['time', 's']] + [[k, series_meta[k]['UNITS']]
                                   for k in columns])
        samples = []

        for i in range(int(data['metadata']['TOTAL_LENGTH'])):
            line = [data['x_values'][i]]
            for el in columns:
                line.append(data['results'][el][i])
            samples.append(line)

        result['meta'] = meta
        result['samples'] = samples

        return result
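Both Flent examples walk the same structure: metadata.SERIES_META describes the columns, x_values holds the timestamps, and results holds one value list per series. A minimal sketch with a fabricated data set (series names and values are illustrative):

    data = {
        'metadata': {
            'TOTAL_LENGTH': 2,
            'SERIES_META': {'Ping ICMP': {'UNITS': 'ms'},
                            'TCP download': {'UNITS': 'Mbits/s'}},
        },
        'x_values': [0.0, 0.2],
        'results': {'Ping ICMP': [0.42, 0.40],
                    'TCP download': [931.0, 940.0]},
    }

    columns = sorted(data['metadata']['SERIES_META'].keys())
    samples = [[data['x_values'][i]] + [data['results'][c][i] for c in columns]
               for i in range(int(data['metadata']['TOTAL_LENGTH']))]
    print(samples)  # [[0.0, 0.42, 931.0], [0.2, 0.4, 940.0]]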
Example #6
    def process_reply(self, message):

        stdout = message.get('stdout')

        if not stdout:
            raise base.ExecutorException(
                message,
                'fio returned no data, stderr: %s' % message['stderr'])

        data = json.loads(stdout)
        jobs = data.get('jobs')

        formalized_results = []
        for job in jobs:
            job_name = job.get('jobname')
            read_bw = job.get('read', {}).get('bw', 0)
            read_iops = job.get('read', {}).get('iops', 0)
            write_bw = job.get('write', {}).get('bw', 0)
            write_iops = job.get('write', {}).get('iops', 0)
            formalized_results.append({
                job_name: {
                    "read_bw": read_bw,
                    "read_iops": read_iops,
                    "write_bw": write_bw,
                    "write_iops": write_iops
                }
            })

        result = dict()
        result['samples'] = formalized_results

        return result
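Unlike the previous examples, this one parses fio's JSON report: a jobs list in which every job carries per-direction bw and iops counters. A minimal sketch with a fabricated report (job name and numbers are illustrative):

    import json

    stdout = json.dumps({
        'jobs': [
            {'jobname': 'randread',
             'read': {'bw': 125000, 'iops': 31250.0},
             'write': {'bw': 0, 'iops': 0.0}},
        ],
    })

    data = json.loads(stdout)
    for job in data.get('jobs'):
        print(job.get('jobname'),
              job.get('read', {}).get('iops', 0),
              job.get('write', {}).get('iops', 0))  # randread 31250.0 0.0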
Example #7
    def test_execute_operation_process_reply_with_error(self):
        executor = mock.MagicMock()
        executor.process_reply = mock.Mock(
            side_effect=base.ExecutorException({'stderr': 'sad'}, 'Error!'))

        agent_id = 'the-agent'
        ex = quorum.ExecuteOperation({agent_id: executor})

        message = {
            'stdout': 'foo',
            'stderr': '',
        }
        reply = ex.process_reply(agent_id, message)

        expected = {
            'status': 'error',
            'stderr': 'sad',
            'info': 'Error!'
        }
        executor.process_reply.assert_called_once_with(message)
        self.assertDictContainsSubset(expected, reply)