示例#1
0
def main(
        job_instance_ids, bins, statistics_names, stats_with_suffixes, labels,
        titles, use_legend, legend, pickle, cumulative, filenames):
    """Fetch statistics from the default collector and store one histogram
    figure per group of statistic names.

    Figures are saved as .pickle or .png depending on the `pickle` flag and
    handed over to the collect agent from a temporary directory.
    """
    extension = 'pickle' if pickle else 'png'
    draw = _Plot.plot_cumulative_histogram if cumulative else _Plot.plot_histogram
    # Single iterator shared across iterations: each fetch consumes its share
    legends = iter(legend)

    statistics = Statistics.from_default_collector()
    with tempfile.TemporaryDirectory(prefix='openbach-histogram-') as root:
        combos = itertools.zip_longest(statistics_names, labels, titles, filenames)
        for fields, label, title, filename in combos:
            figure, axis = plt.subplots()
            data = statistics.fetch_all(
                    job_instances=job_instance_ids,
                    suffix=None if stats_with_suffixes else '',
                    fields=fields, columns=legends)
            draw(data, axis, label, bins, use_legend)
            if title is not None:
                axis.set_title(title)
            # Pick the figure base name: explicit filename wins, then a
            # statistic-derived name, then a generic fallback
            if filename is not None:
                base = filename
            elif fields is None:
                base = 'histogram'
            else:
                base = 'histogram_{}'.format('_'.join(fields))
            filepath = os.path.join(root, '{}.{}'.format(base, extension))
            save(figure, filepath, pickle)
            collect_agent.store_files(now(), figure=filepath)
示例#2
0
def main(src_ip, dst_ip, src_port, dst_port, proto, interface, capture_file,
         duration):
    """Capture packets on a live network interface. Only consider packets matching the specified fields."""
    capture_filter = build_capture_filter(src_ip, dst_ip, src_port, dst_port,
                                          proto)
    copy = False
    if capture_file == "":
        capture_file = "/tmp/tcpdump_capture.pcap"
        copy = True
    # On SIGINT/SIGTERM, ship the (possibly partial) capture to the collector
    signal_handler_partial = partial(save_pcap, capture_file, copy,
                                     os.getpid())
    original_sigint_handler = signal.getsignal(signal.SIGINT)
    original_sigterm_handler = signal.getsignal(signal.SIGTERM)
    signal.signal(signal.SIGTERM, signal_handler_partial)
    signal.signal(signal.SIGINT, signal_handler_partial)
    try:
        capture_path = pathlib.Path(capture_file)
        capture_path.parent.mkdir(parents=True, exist_ok=True)
        # Drop any stale capture file; no need to spawn an external `rm`
        try:
            capture_path.unlink()
        except FileNotFoundError:
            pass
        cmd = [
            'tcpdump', '-i', interface, capture_filter, '-w', capture_file,
            '-Z', 'root'
        ]
        if duration:
            # Rotate once after `duration` seconds, keeping a single file:
            # tcpdump then exits on its own
            cmd += ['-G', str(duration), '-W', str(1)]
        # Capture stderr so a failure can actually be reported: without
        # PIPE, p.stderr is None and the error message was useless
        p = subprocess.run(cmd, stderr=subprocess.PIPE)
        if p.returncode != 0:
            message = 'ERROR when launching tcpdump: {}'.format(
                    p.stderr.decode(errors='replace'))
            collect_agent.send_log(syslog.LOG_ERR, message)
            sys.exit(message)

    except Exception as ex:
        message = 'ERROR when capturing: {}'.format(ex)
        collect_agent.send_log(syslog.LOG_ERR, message)
        sys.exit(message)
    collect_agent.store_files(int(time.time() * 1000),
                              pcap_file=capture_file,
                              copy=copy)
    # Restore each signal to its own original handler (they were swapped:
    # SIGTERM got the old SIGINT handler and vice versa)
    signal.signal(signal.SIGTERM, original_sigterm_handler)
    signal.signal(signal.SIGINT, original_sigint_handler)
示例#3
0
def main(job_instance_ids, statistics_names, stats_with_suffixes, ylabels, titles, xlabel_job_instance_names, pickle):
    """Fetch statistics from the default collector and store one comparison
    figure per group of statistic names.

    Figures are saved as .pickle or .png depending on the `pickle` flag and
    handed over to the collect agent from a temporary directory.
    """
    file_ext = 'pickle' if pickle else 'png'
    # Single iterator shared across iterations: each fetch consumes its share
    xlabel_instance_names = iter(xlabel_job_instance_names)

    statistics = Statistics.from_default_collector()
    with tempfile.TemporaryDirectory(prefix='openbach-comparison-') as root:
        for fields, ylabel, title in itertools.zip_longest(statistics_names, ylabels, titles):
            figure, axis = plt.subplots()
            data = statistics.fetch_all(
                    job_instances=job_instance_ids,
                    suffix=None if stats_with_suffixes else '',
                    fields=fields, columns=xlabel_instance_names)
            data.plot_comparison(axis, ylabel, False)
            if title is not None:
                axis.set_title(title)
            # BUG FIX: test the per-iteration `fields` (None when zip_longest
            # exhausts statistics_names), not the statistics_names list itself;
            # the old check made '_'.join(None) raise TypeError whenever
            # ylabels/titles were longer than statistics_names.
            if fields is None:
                filename = 'comparison.{}'.format(file_ext)
            else:
                filename = 'comparison_{}.{}'.format('_'.join(fields), file_ext)
            filepath = os.path.join(root, filename)
            save(figure, filepath, pickle)
            collect_agent.store_files(now(), figure=filepath)
示例#4
0
def save_pcap(capture_file, copy, parent_pid, signum, frame):
    """Signal handler: ship the capture file to the collector.

    `signum` and `frame` are required by the signal-handler protocol and are
    unused; `parent_pid` is also unused here — presumably kept for the
    partial() binding in the caller (TODO confirm).
    """
    timestamp_ms = int(time.time() * 1000)
    collect_agent.store_files(timestamp_ms, pcap_file=capture_file, copy=copy)
def main(job_instance_ids, statistics_names, aggregations_periods, bins_sizes,
         offset, maximum, stats_with_suffixes, axis_labels, figures_titles,
         legends_titles, use_legend, add_global, pickle):
    """Fetch statistics per group of job instances and store one
    temporal-binning histogram figure per requested statistic.

    Each positional list (statistics_names, aggregations_periods, ...) is
    aligned with job_instance_ids; missing per-job entries are padded with
    empty lists by zip_longest. Figures are saved as .pickle or .png
    depending on the `pickle` flag.
    """
    file_ext = 'pickle' if pickle else 'png'
    statistics = Statistics.from_default_collector()
    # Plot timestamps relative to 0 instead of absolute collector time
    statistics.origin = 0
    with tempfile.TemporaryDirectory(
            prefix='openbach-temporal-binning-histogram-') as root:
        # One iteration per group of job instances; shorter option lists are
        # padded with [] so the inner zip_longest below still works
        for job, fields, aggregations, bin_sizes, labels, titles, legend_titles in itertools.zip_longest(
                job_instance_ids,
                statistics_names,
                aggregations_periods,
                bins_sizes,
                axis_labels,
                figures_titles,
                legends_titles,
                fillvalue=[]):
            data_collection = statistics.fetch(
                job_instances=job,
                suffix=None if stats_with_suffixes else '',
                fields=fields)

            # Drop multi-index columns to easily concatenate dataframes from their statistic names
            # NOTE(review): set_axis's `inplace` keyword was removed in
            # pandas 2.0 — this requires an older pandas; confirm the
            # deployed version.
            df = pd.concat([
                plot.dataframe.set_axis(
                    plot.dataframe.columns.get_level_values('statistic'),
                    axis=1,
                    inplace=False) for plot in data_collection
            ])
            # Recreate a multi-indexed columns so the plot can function properly
            df.columns = pd.MultiIndex.from_tuples(
                [('', '', '', '', stat) for stat in df.columns],
                names=['job', 'scenario', 'agent', 'suffix', 'statistic'])
            plot = _Plot(df)

            # No explicit statistics requested: plot every fetched statistic
            if not fields:
                fields = list(df.columns.get_level_values('statistic'))

            # Align per-statistic options; missing entries come out as None
            # and are defaulted (with a warning) below
            metadata = itertools.zip_longest(fields, labels, bin_sizes,
                                             aggregations, legend_titles,
                                             titles)
            for field, label, bin_size, aggregation, legend, title in metadata:
                # Skip statistics the job instances never produced
                if field not in df.columns.get_level_values('statistic'):
                    message = 'job instances {} did not produce the statistic {}'.format(
                        job, field)
                    collect_agent.send_log(syslog.LOG_WARNING, message)
                    print(message)
                    continue

                # Default: empty y-axis label
                if label is None:
                    collect_agent.send_log(
                        syslog.LOG_WARNING,
                        'no y-axis label provided for the {} statistic of job '
                        'instances {}: using the empty string instead'.format(
                            field, job))
                    label = ''

                # Default: aggregate per hour
                if aggregation is None:
                    collect_agent.send_log(
                        syslog.LOG_WARNING,
                        'invalid aggregation value of {} for the {} '
                        'statistic of job instances {}: choose from {}, using '
                        '"hour" instead'.format(aggregation, field, job,
                                                TIME_OPTIONS))
                    aggregation = 'hour'

                # Default: empty legend title (only matters when a legend is drawn)
                if legend is None and use_legend:
                    collect_agent.send_log(
                        syslog.LOG_WARNING,
                        'no legend title provided for the {} statistic of job '
                        'instances {}: using the empty string instead'.format(
                            field, job))
                    legend = ''

                # Default: bins of size 100
                if bin_size is None:
                    collect_agent.send_log(
                        syslog.LOG_WARNING,
                        'no bin size provided for the {} statistic of job '
                        'instances {}: using the default value 100 instead'.
                        format(field, job))
                    bin_size = 100

                figure, axis = plt.subplots()
                axis = plot.plot_temporal_binning_histogram(
                    axis, label, field, None, bin_size, offset, maximum,
                    aggregation, add_global, use_legend, legend)
                if title is not None:
                    axis.set_title(title)
                filepath = os.path.join(
                    root,
                    'temporal_binning_histogram_{}.{}'.format(field, file_ext))
                save(figure, filepath, pickle, False)
                collect_agent.store_files(now(), figure=filepath)