コード例 #1
0
def main(args):
    """Plot bandwidth CDFs for one or more packet-trace csv files.

    For every (file, label) pair given via --input-file and every
    (window size, label suffix) pair given via --window-size:

      figure 1: incoming bandwidth CDF, linear x-axis
      figure 2: incoming bandwidth CDF (zeroes removed), log x-axis
      figure 3: outgoing bandwidth CDF, linear x-axis
      figure 4: outgoing bandwidth CDF (zeroes removed), log x-axis

    "Incoming" is traffic addressed to --server; "outgoing" is traffic
    sent from --server.  A 10000-point percentile table is also dumped to
    '<file><suffix>.in_percentiles' / '<file><suffix>.out_percentiles'.
    The four figures are saved as .eps files named after --output-name.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--input-file',
        dest='input_files',
        nargs=2,
        action='append',
        required=True,
        help="csv file to plot.  Needs a label as a second argument.")
    # Each --window-size is a (window, label suffix) pair; the suffix is
    # appended to the plot label and the percentile-dump filenames.
    parser.add_argument('--window-size',
                        nargs=2,
                        action='append',
                        dest='window_size',
                        help="How long to average over.  In ps.",
                        required=True)
    parser.add_argument('--keep-temps',
                        dest='keep_temps',
                        default=False,
                        action='store_true',
                        help="Keep temp files")
    parser.add_argument(
        '--server',
        dest='server_ip',
        required=True,
        help="IP of the machine that the card is directory connected to")
    parser.add_argument('--output-name', dest='output_name', required=True)
    parser.add_argument('--title', dest='title')
    # This is to avoid issues with tcpdump hanging.
    parser.add_argument('--packets',
                        type=int,
                        required=False,
                        default=None,
                        dest='packets',
                        help="Number of packets to process from a pcap file")

    args = parser.parse_args(args)

    # Start from clean figures; all four are drawn into across the loop
    # below and only saved at the end.
    plt.figure(1)
    plt.clf()
    plt.figure(2)
    plt.clf()
    plt.figure(3)
    plt.clf()
    plt.figure(4)
    plt.clf()

    for pcap_file, label in args.input_files:
        for window_size, label_suffix in args.window_size:
            # NOTE(review): if the input does not end in '.csv' the
            # bandwidth variables are never assigned and the code below
            # raises a NameError — presumably only csv input is supported.
            if pcap_file.endswith('.csv'):
                incoming_x_values, incoming_bandwidths = \
                        process_csv.extract_bandwidths(pcap_file, window_size,
                                                       to_ip=args.server_ip, count=args.packets)
                outgoing_x_values, outgoing_bandwidths = \
                    process_csv.extract_bandwidths(pcap_file, window_size,
                                                   from_ip=args.server_ip, count=args.packets)

            # Convert to plain floats so numpy/matplotlib can handle them.
            for i in range(len(incoming_bandwidths)):
                incoming_bandwidths[i] = float(incoming_bandwidths[i])

            # Dump the percentiles into a file.
            percentiles = np.linspace(0.0, 1.0, 10000)
            # Zero bandwidths are excluded from the percentile tables.
            sorted_bandwidths = [
                x for x in sorted(incoming_bandwidths) if x > 0.0
            ]
            sorted_out_bandwidths = [
                x for x in sorted(outgoing_bandwidths) if x > 0.0
            ]
            with open(pcap_file + label_suffix + '.in_percentiles',
                      'w') as in_file:
                with open(pcap_file + label_suffix + '.out_percentiles',
                          'w') as out_file:
                    contents_in = []
                    contents_out = []
                    count_in = len(sorted_bandwidths) - 1
                    count_out = len(sorted_out_bandwidths) - 1
                    # Each output line is "<percentile> <value>"; the index
                    # is the percentile scaled to the sorted list's length.
                    for percentile in percentiles:
                        fraction_through_in = int(percentile * count_in)
                        fraction_through_out = int(percentile * count_out)

                        contents_in.append(
                            str(percentile) + ' ' + '{0:.15f}'.format(
                                sorted_bandwidths[fraction_through_in]) + '\n')
                        contents_out.append(
                            str(percentile) + ' ' + '{0:.15f}'.format(
                                sorted_out_bandwidths[fraction_through_out]) +
                            '\n')
                    in_file.writelines(contents_in)
                    out_file.writelines(contents_out)

            # Pad the top of the range slightly and append an np.inf bin so
            # the cumulative histogram line does not drop back to zero at
            # the right-hand edge.
            min_lim = min(incoming_bandwidths)
            max_lim = max(incoming_bandwidths)
            small_diff = (min_lim + max_lim) / 10000.0
            bins = np.append(np.linspace(min_lim, max_lim + small_diff, 1000),
                             np.inf)
            # NOTE(review): looks like leftover debug output — consider
            # removing.
            print bins
            plt.figure(1)
            plt.hist(incoming_bandwidths,
                     cumulative=True,
                     bins=bins,
                     histtype='step',
                     normed=True,
                     label=label + ' ' + label_suffix)

            # Log-scale version: zeroes cannot be shown on a log axis, so
            # they are stripped first.
            no_zero_incoming_bandwidths = graph_utils.no_zeroes(
                incoming_bandwidths)
            if len(no_zero_incoming_bandwidths) > 0:
                min_lim = min(no_zero_incoming_bandwidths)
                max_lim = max(no_zero_incoming_bandwidths)
                logspace_bins = graph_utils.get_logspace(min_lim, max_lim)
                plt.figure(2)
                plt.hist(no_zero_incoming_bandwidths,
                         cumulative=True,
                         bins=logspace_bins,
                         histtype='step',
                         normed=True,
                         label=label + ' ' + label_suffix)
            else:
                print "Error: No non-zero bandwidths found"

            # Same plots again for the outgoing direction (figures 3/4).
            for i in range(len(outgoing_bandwidths)):
                outgoing_bandwidths[i] = float(outgoing_bandwidths[i])
            min_lim = min(outgoing_bandwidths)
            max_lim = max(outgoing_bandwidths)
            small_diff = (min_lim + max_lim) / 10000.0
            bins = np.append(np.linspace(min_lim, max_lim + small_diff, 1000),
                             np.inf)
            plt.figure(3)
            plt.hist(outgoing_bandwidths,
                     cumulative=True,
                     bins=bins,
                     histtype='step',
                     normed=True,
                     label=label + ' ' + label_suffix)

            no_zero_outgoing_bandwidths = graph_utils.no_zeroes(
                outgoing_bandwidths)
            if len(no_zero_outgoing_bandwidths) > 0:
                min_lim = min(no_zero_outgoing_bandwidths)
                max_lim = max(no_zero_outgoing_bandwidths)
                logspace_bins = graph_utils.get_logspace(min_lim, max_lim)
                plt.figure(4)
                plt.hist(no_zero_outgoing_bandwidths,
                         cumulative=True,
                         bins=logspace_bins,
                         histtype='step',
                         normed=True,
                         label=label + ' ' + label_suffix)
            else:
                print "Error: No non-zero bandwidths found!"

    if args.title:
        plt.figure(1)
        plt.title('Client Traffic: ' + args.title)
        plt.figure(2)
        plt.title('Client Traffic: ' + args.title)
        plt.figure(3)
        plt.title('Server Traffic: ' + args.title)
        plt.figure(4)
        plt.title('Server Traffic: ' + args.title)

    # Two legend labels per row below the plot (Python 2 integer division).
    label_count = len(args.input_files) * len(args.window_size)
    graph_utils.latexify(bottom_label_rows=label_count / 2)

    plt.figure(1)
    plt.xlabel("Bandwidth (Mbps)")
    plt.ylabel("CDF")
    graph_utils.set_legend_below()
    graph_utils.set_yax_max_one()
    graph_utils.set_non_negative_axes()
    graph_utils.set_ticks()
    filename = args.output_name + '_incoming_bandwidth_cdf_window.eps'
    plt.savefig(filename)
    print "Done! File is in ", filename

    plt.figure(2)
    plt.ylabel("CDF")
    plt.xlabel("Bandwidth (Mbps)")
    graph_utils.set_log_x()
    graph_utils.set_legend_below()
    graph_utils.set_yax_max_one()
    graph_utils.set_non_negative_axes()
    graph_utils.set_ticks()
    filename = args.output_name + '_incoming_bandwidth_cdf_window_log.eps'
    plt.savefig(filename)
    print "Done! File is in ", filename

    plt.figure(3)
    plt.xlabel("Bandwidth (Mbps)")
    plt.ylabel("CDF")
    graph_utils.set_legend_below()
    graph_utils.set_yax_max_one()
    graph_utils.set_non_negative_axes()
    graph_utils.set_ticks()
    filename = args.output_name + '_outgoing_bandwidth_cdf_window.eps'
    plt.savefig(filename)
    print "Done! File is in ", filename

    plt.figure(4)
    plt.xlabel("Bandwidth (Mbps)")
    plt.ylabel("CDF")
    graph_utils.set_legend_below()
    graph_utils.set_log_x()
    graph_utils.set_yax_max_one()
    graph_utils.set_non_negative_axes()
    graph_utils.set_ticks()
    filename = args.output_name + '_outgoing_bandwidth_cdf_window_log.eps'
    plt.savefig(filename)
    print "Done! File is in ", filename
コード例 #2
0
def main(args):
    """Plot CDFs of TCP flow completion times from csv trace files.

    For each (file, label) pair given via --input-file:
      figure 1: flow completion time CDF, linear x-axis
      figure 2: flow completion time CDF (zeroes removed), log x-axis
    Both figures are saved as .eps files named after --output-name.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--input-file',
        dest='input_files',
        nargs=2,
        action='append',
        required=True,
        help="csv file to plot.  Needs a label as a second argument.")
    parser.add_argument('--keep-temps',
                        dest='keep_temps',
                        default=False,
                        action='store_true',
                        help="Keep temp files")
    parser.add_argument('--output-name', dest='output_name', required=True)
    parser.add_argument('--title', dest='title')
    # This is to avoid issues with tcpdump hanging.
    parser.add_argument('--packets',
                        type=int,
                        required=False,
                        default=None,
                        dest='packets',
                        help="Number of packets to process from a pcap file")

    args = parser.parse_args(args)
    # Start from clean figures.
    plt.figure(1)
    plt.clf()
    plt.figure(2)
    plt.clf()

    for pcap_file, label in args.input_files:
        # NOTE(review): a non-'.csv' input leaves flow_lengths unassigned
        # (NameError below) — presumably only csv input is supported.
        if pcap_file.endswith('.csv'):
            flow_lengths = \
                process_csv.extract_flow_lengths(pcap_file)

        if len(flow_lengths) == 0:
            print "There were no TCP connections detected in ", pcap_file
            continue

        # Convert to plain floats so numpy/matplotlib can handle them.
        for i in range(len(flow_lengths)):
            flow_lengths[i] = float(flow_lengths[i])
        # Pad the top of the range slightly and append an np.inf bin so the
        # cumulative histogram line does not drop back to zero at the
        # right-hand edge.
        min_lim = min(flow_lengths)
        max_lim = max(flow_lengths)
        small_diff = (min_lim + max_lim) / 1000.0
        bins = np.append(np.linspace(min_lim, max_lim + small_diff, 1000),
                         np.inf)
        plt.figure(1)
        plt.hist(flow_lengths,
                 cumulative=True,
                 bins=bins,
                 histtype='step',
                 normed=True,
                 label=label)

        # Log-scale version: zero-length flows cannot be shown on a log
        # axis, so strip them first.
        no_zero_values = graph_utils.no_zeroes(flow_lengths)
        if len(no_zero_values) > 0:
            min_lim = min(no_zero_values)
            max_lim = max(no_zero_values)
            logspace_bins = graph_utils.get_logspace(min_lim, max_lim)

            plt.figure(2)
            plt.hist(no_zero_values,
                     cumulative=True,
                     bins=logspace_bins,
                     histtype='step',
                     normed=True,
                     label=label)
        else:
            print "Hard Warning!: Found no non-zero flow sizes"

    if args.title:
        plt.figure(1)
        plt.title(args.title)
        plt.figure(2)
        plt.title(args.title)

    # Two legend labels per row below the plot (Python 2 integer division).
    label_count = len(args.input_files)
    graph_utils.latexify(bottom_label_rows=label_count / 2)

    plt.figure(1)
    plt.xlabel("Flow Completion Time (s)")
    plt.ylabel("CDF")
    graph_utils.set_legend_below()
    graph_utils.set_yax_max_one()
    graph_utils.set_non_negative_axes()
    graph_utils.set_ticks()
    filename = args.output_name + '_flow_lengths.eps'
    plt.savefig(filename)
    print "Done! File is in ", filename

    plt.figure(2)
    plt.xlabel("Flow Completion Time (s)")
    plt.ylabel("CDF")
    graph_utils.set_log_x()
    graph_utils.set_legend_below()
    graph_utils.set_yax_max_one()
    graph_utils.set_non_negative_axes()
    graph_utils.set_ticks()
    filename = args.output_name + '_flow_lengths_log.eps'
    plt.savefig(filename)
    print "Done! File is in ", filename
コード例 #3
0
def main(args):
    parser = argparse.ArgumentParser()
    parser.add_argument('--server',
                        dest='server_ip',
                        help="The IP address of the server",
                        required=False,
                        default=None)
    parser.add_argument(
        '--input-file',
        dest='input_files',
        nargs=2,
        action='append',
        required=True,
        help="csv file to plot.  Needs a label as a second argument.")
    parser.add_argument('--keep-temps',
                        dest='keep_temps',
                        default=False,
                        action='store_true',
                        help="Keep temp files")
    parser.add_argument('--output-name', dest='output_name', required=True)
    parser.add_argument('--title', dest='title', required=False, default=None)
    # This is to avoid issues with tcpdump hanging.
    parser.add_argument('--packets',
                        type=int,
                        required=False,
                        default=None,
                        dest='packets',
                        help="Number of packets to process from a pcap file")

    args = parser.parse_args(args)
    plt.figure(1)
    plt.clf()
    plt.figure(2)
    plt.clf()
    plt.figure(3)
    plt.clf()
    plt.figure(4)
    plt.clf()

    for pcap_file, label in args.input_files:
        if pcap_file.endswith('.csv'):
            timestamp_deltas_incoming = \
                process_csv.extract_deltas(pcap_file, to_ip=args.server_ip)
            timestamp_deltas_outgoing = \
                process_csv.extract_deltas(pcap_file, from_ip=args.server_ip)

        # Convert to ns before starting:
        for i in xrange(len(timestamp_deltas_incoming)):
            timestamp_deltas_incoming[i] = float(
                Decimal(1000000000.0) * timestamp_deltas_incoming[i])
        for i in xrange(len(timestamp_deltas_outgoing)):
            timestamp_deltas_outgoing[i] = float(
                Decimal(1000000000.0) * timestamp_deltas_outgoing[i])

        # Do the outgoing packets.
        range = [
            min(timestamp_deltas_outgoing),
            max(timestamp_deltas_outgoing)
        ]
        print "Range is ", range
        print "Median is ", np.median(timestamp_deltas_outgoing)
        print "Deviation is ", np.std(timestamp_deltas_outgoing)
        timestamp_deltas_outgoing = \
            np.asarray(timestamp_deltas_outgoing, dtype='float')

        plt.figure(1)
        min_lim = range[0]
        max_lim = range[1]
        small_diff = (min_lim + max_lim) / 10000.0
        bins = np.append(np.linspace(min_lim, max_lim + small_diff, 1000),
                         np.inf)
        plt.hist(timestamp_deltas_outgoing,
                 bins=bins,
                 cumulative=True,
                 histtype='step',
                 normed=True,
                 label=label)

        timestamp_deltas_outgoing_no_zero = graph_utils.no_zeroes(
            timestamp_deltas_outgoing)
        if len(timestamp_deltas_outgoing_no_zero) > 0:
            min_lim = min(timestamp_deltas_outgoing_no_zero)
            max_lim = max(timestamp_deltas_outgoing_no_zero)
            logspace_bins = graph_utils.get_logspace(min_lim, max_lim)
            plt.figure(2)
            plt.hist(timestamp_deltas_outgoing_no_zero,
                     bins=logspace_bins,
                     cumulative=True,
                     histtype='step',
                     normed=True,
                     label=label)

        # Do the incoming.
        range = [
            min(timestamp_deltas_incoming),
            max(timestamp_deltas_incoming)
        ]
        print "Incoming Range is ", range
        print "Incoming Median is ", np.median(timestamp_deltas_incoming)
        print "Incoming Deviation is ", np.std(timestamp_deltas_incoming)
        timestamp_deltas_incoming = \
            np.asarray(timestamp_deltas_incoming, dtype='float')

        min_lim = range[0]
        max_lim = range[1]
        small_diff = (min_lim + max_lim) / 10000.0
        bins = np.append(np.linspace(min_lim, max_lim + small_diff, 1000),
                         np.inf)

        plt.figure(3)
        plt.hist(timestamp_deltas_incoming,
                 bins=bins,
                 cumulative=True,
                 histtype='step',
                 normed=True,
                 label=label)

        timestamp_deltas_incoming_no_zero = graph_utils.no_zeroes(
            timestamp_deltas_incoming)
        if len(timestamp_deltas_incoming_no_zero) > 0:
            min_lim = min(timestamp_deltas_incoming_no_zero)
            max_lim = max(timestamp_deltas_incoming_no_zero)

            plt.figure(4)
            plt.hist(timestamp_deltas_incoming,
                     bins=logspace_bins,
                     cumulative=True,
                     histtype='step',
                     normed=True,
                     label=label)
        else:
            print "Error: found no incoming timestamp deltas with nonzero inter-arrival times"

    if args.title:
        plt.figure(1)
        plt.title("Server Traffic: " + args.title)
        plt.figure(2)
        plt.title("Server Traffic: " + args.title)
        plt.figure(3)
        plt.title("Client Traffic: " + args.title)
        plt.figure(4)
        plt.title("Client Traffic: " + args.title)

    label_count = len(args.input_files)
    graph_utils.latexify(bottom_label_rows=label_count / 2)

    plt.figure(1)
    plt.ylabel("CDF")
    plt.xlabel("Inter-arrival time (ns)")
    graph_utils.set_legend_below()
    graph_utils.set_yax_max_one()
    graph_utils.set_non_negative_axes()
    graph_utils.set_ticks()
    filename = args.output_name + '_outgoing_interarrival.eps'
    plt.savefig(filename)
    print "Done! File is in ", args.output_name + '_outgoing_interarrival'

    plt.figure(2)
    plt.ylabel("CDF")
    plt.xlabel("Inter-arrival time (ns)")
    graph_utils.set_legend_below()
    graph_utils.set_log_x()
    graph_utils.set_yax_max_one()
    graph_utils.set_non_negative_axes()
    graph_utils.set_ticks()
    filename = args.output_name + '_outgoing_interarrival_log.eps'
    plt.savefig(filename)
    print "Done! File is in ", args.output_name + '_outgoing_interarrival'

    # Do the incoming packets.
    plt.figure(3)
    plt.ylabel("CDF")
    graph_utils.set_legend_below()
    graph_utils.set_yax_max_one()
    graph_utils.set_non_negative_axes()
    graph_utils.set_ticks()
    plt.xlabel("Inter-arrival time (ns)")
    filename = args.output_name + '_incoming_interarrival.eps'
    plt.savefig(filename)
    print "Done! File is in ", args.output_name + '_incoming_interarrival'

    # Do the incoming packets.
    plt.figure(4)
    plt.ylabel("CDF")
    graph_utils.set_legend_below()
    graph_utils.set_log_x()
    graph_utils.set_yax_max_one()
    graph_utils.set_non_negative_axes()
    graph_utils.set_ticks()
    plt.xlabel("Inter-arrival time (ns)")
    filename = args.output_name + '_incoming_interarrival_log.eps'
    plt.savefig(filename)
    print "Done! File is in ", args.output_name + '_incoming_interarrival'
コード例 #4
0
def main(args):
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--input-file',
        dest='input_files',
        nargs=2,
        action='append',
        required=True,
        help="csv file to plot.  Needs a label as a second argument.")
    parser.add_argument('--keep-temps',
                        dest='keep_temps',
                        default=False,
                        action='store_true',
                        help="Keep temp files")
    parser.add_argument('--server', dest='server_ip', required=True)
    parser.add_argument('--output-name', dest='output_name', required=True)
    parser.add_argument('--title', dest='title', required=False, default=None)
    # This is to avoid issues with tcpdump hanging.
    parser.add_argument('--packets',
                        type=int,
                        required=False,
                        default=None,
                        dest='packets',
                        help="Number of packets to process from a pcap file")

    args = parser.parse_args(args)

    plt.figure(1)
    plt.clf()
    plt.figure(2)
    plt.clf()
    plt.figure(3)
    plt.clf()
    plt.figure(4)
    plt.clf()

    pcap_files = args.input_files
    output_label = args.output_name

    for (pcap_file, label) in pcap_files:
        if pcap_file.endswith('.csv'):
            incoming_ipg_gaps = \
                process_csv.extract_ipgs(pcap_file, to_ip=args.server_ip)
            outgoing_ipg_gaps = \
                process_csv.extract_ipgs(pcap_file, from_ip=args.server_ip)

        range = [min(incoming_ipg_gaps), max(incoming_ipg_gaps)]
        print "Dealing with incoming IPG gaps"
        print "Range is ", range
        print "Median is ", np.median(incoming_ipg_gaps)
        print "Deviation is ", np.std(incoming_ipg_gaps)

        # Before we plot these, they need to be converted to normal
        # floats.  To do this, multiply by 10**9
        for i in xrange(len(incoming_ipg_gaps)):
            incoming_ipg_gaps[i] = float(
                Decimal(1000000000.0) * incoming_ipg_gaps[i])
        for i in xrange(len(outgoing_ipg_gaps)):
            outgoing_ipg_gaps[i] = float(
                Decimal(1000000000.0) * outgoing_ipg_gaps[i])

        # Remove anything greater than the 99th percentile to stop
        # if affecting the bins.
        i = 0
        nintyninth_percentile = np.percentile(incoming_ipg_gaps, 99)
        while i < len(incoming_ipg_gaps):
            if incoming_ipg_gaps[i] > nintyninth_percentile:
                del incoming_ipg_gaps[i]
            else:
                i += 1

        print nintyninth_percentile

        # Avoid issues witht the CDF line decreasing to zero after the data is
        # plotted.
        min_lim = min(incoming_ipg_gaps)
        max_lim = max(incoming_ipg_gaps)
        small_diff = (min_lim + max_lim) / 10000.0
        bins = np.linspace(min_lim, max_lim + small_diff, 1000)
        bins = np.append(bins, np.inf)

        plt.figure(1)
        plt.hist(incoming_ipg_gaps,
                 bins=bins,
                 cumulative=True,
                 histtype='step',
                 normed=True,
                 label=label)

        # Now plot a log space version, with all times included.
        incoming_ipg_gas_no_zeroes = graph_utils.no_zeroes(incoming_ipg_gaps)
        if len(incoming_ipg_gas_no_zeroes) > 0:
            lim_min = min(incoming_ipg_gas_no_zeroes)
            lim_max = max(incoming_ipg_gas_no_zeroes)

            bins = graph_utils.get_logspace(lim_min, lim_max)
            plt.figure(2)
            plt.hist(incoming_ipg_gas_no_zeroes,
                     bins=bins,
                     cumulative=True,
                     histtype='step',
                     normed=True,
                     label=label)
        else:
            print "Error:: found only zero times on the incoming IPG gaps"

        # Now do the outgoing.
        # Remove anything greater than the 99th percentile to stop
        # if affecting the bins.
        i = 0
        nintyninth_percentile = np.percentile(outgoing_ipg_gaps, 99)
        while i < len(outgoing_ipg_gaps):
            if outgoing_ipg_gaps[i] > nintyninth_percentile:
                del outgoing_ipg_gaps[i]
            else:
                i += 1

        print nintyninth_percentile

        # Avoid issues witht the CDF line decreasing to zero after the data
        # is plotted.
        min_lim = min(outgoing_ipg_gaps)
        max_lim = max(outgoing_ipg_gaps)
        small_diff = (min_lim + max_lim) / 10000.0
        bins = np.linspace(min_lim, max_lim + small_diff, 1000)
        bins = np.append(bins, np.inf)

        plt.figure(3)
        plt.hist(outgoing_ipg_gaps,
                 bins=bins,
                 cumulative=True,
                 histtype='step',
                 normed=True,
                 label=label)

        # Now plot the logspace version.
        outgoing_ipg_gaps_no_zeroes = graph_utils.no_zeroes(outgoing_ipg_gaps)
        if len(outgoing_ipg_gaps_no_zeroes) > 0:
            min_lim = min(outgoing_ipg_gaps_no_zeroes)
            max_lim = max(outgoing_ipg_gaps_no_zeroes)

            bins = graph_utils.get_logspace(min_lim, max_lim)
            plt.figure(4)
            plt.hist(outgoing_ipg_gaps_no_zeroes,
                     bins=bins,
                     cumulative=True,
                     histtype='step',
                     normed=True,
                     label=label)
        else:
            print "Error: No non-zero IPGs found in outgoing data"

    if args.title:
        plt.figure(1)
        plt.title('Client Traffic: ' + args.title)
        plt.figure(2)
        plt.title('Client Traffic: ' + args.title)
        plt.figure(3)
        plt.title('Server Traffic: ' + args.title)
        plt.figure(4)
        plt.title('Server Traffic: ' + args.title)

    label_count = len(args.input_files)
    graph_utils.latexify(bottom_label_rows=label_count / 2)

    plt.figure(1)
    plt.xlim([min(outgoing_ipg_gaps), nintyninth_percentile])
    plt.ylabel("CDF")
    plt.xlabel("Inter-Arrival Time (ns)")
    graph_utils.set_legend_below()
    graph_utils.set_non_negative_axes()
    graph_utils.set_yax_max_one()
    graph_utils.set_ticks()
    filename = output_label + '_ipg_gaps_clients.eps'
    plt.savefig(filename)
    print "Done! File is in ", filename

    plt.figure(2)
    plt.ylabel("CDF")
    plt.xlabel("Inter-Arrival Time (ns)")
    graph_utils.set_legend_below()
    graph_utils.set_log_x()
    graph_utils.set_non_negative_axes()
    graph_utils.set_yax_max_one()
    graph_utils.set_ticks()
    filename = output_label + '_ipg_gaps_clients_log.eps'
    plt.savefig(filename)
    print "Done! File is in ", filename

    plt.figure(3)
    plt.xlim([min(outgoing_ipg_gaps), nintyninth_percentile])
    plt.ylabel("CDF")
    plt.xlabel("Inter-Arrival Time (ns)")
    graph_utils.set_legend_below()
    graph_utils.set_non_negative_axes()
    graph_utils.set_yax_max_one()
    graph_utils.set_ticks()
    filename = output_label + '_ipg_gaps_server.eps'
    plt.savefig(filename)

    print "Done! File is in ", filename

    plt.figure(4)
    plt.ylabel("CDF")
    plt.xlabel("Inter-Arrival Time (ns)")
    graph_utils.set_legend_below()
    graph_utils.set_log_x()
    graph_utils.set_non_negative_axes()
    graph_utils.set_yax_max_one()
    graph_utils.set_ticks()
    filename = output_label + '_ipg_gaps_server_log.eps'
    plt.savefig(filename)

    print "Done! File is in ", filename
コード例 #5
0
def microburst_analyze(bursts, identifier, pcap_file, label, id_base):
    global max_xlims_burst_lengths
    print identifier, " Number of bursts", len(bursts)
    bins = 1000
    if len(bursts) == 0:
        return

    # Print a CDF of the microburst length distribution:
    lengths = [len(x) for x in bursts]
    min_lim = min(lengths)
    max_lim = max(lengths)
    max_xlims_burst_lengths.append(max_lim)
    small_diff = (min_lim + max_lim) / 10000.0
    bins = np.append(np.linspace(min_lim, max_lim + small_diff, 1000), np.inf)
    plt.figure(1 + id_base)
    plt.hist(lengths,
             bins=bins,
             cumulative=True,
             histtype='step',
             normed=True,
             label=label)

    # Do the same, but with a log scale:
    non_zero_lengths = graph_utils.no_zeroes(lengths)
    if len(non_zero_lengths) > 0:
        plt.figure(2 + id_base)
        min_lim = min(non_zero_lengths)
        max_lim = max(non_zero_lengths)

        bins = graph_utils.get_logspace(min_lim, max_lim)
        plt.hist(non_zero_lengths,
                 bins=bins,
                 cumulative=True,
                 histtype='step',
                 normed=True,
                 label=label)

    # Plot a CDF of the bandwidth achieved in each microburst.
    bandwidths = []
    for burst in bursts:
        start_time = burst[0].wire_start_time()
        end_time = burst[len(burst) - 1].wire_end_time()
        total_time_in_use = Decimal(
            sum([packet.wire_length_time() for packet in burst]))
        bandwidths.append(
            Decimal(10000.0) * (total_time_in_use / (end_time - start_time)))
        if MICROBURST_DEBUG:
            for packet in burst:
                print "Packet time is ", packet.wire_start_time()
                print "Packet size is ", packet.length
                print "Packet end time is ", packet.wire_end_time()
            print "Time in use", total_time_in_use
            print "Total time is ", end_time - start_time
            print "Usage is ", bandwidths[-1]

    for i in range(len(bandwidths)):
        bandwidths[i] = float(bandwidths[i])

    plt.figure(3 + id_base)
    min_lim = min(bandwidths)
    max_lim = max(bandwidths)
    small_diff = (min_lim + max_lim) / 10000.0
    bins = np.append(np.linspace(min_lim, max_lim + small_diff, 1000), np.inf)
    plt.hist(bandwidths,
             bins=bins,
             cumulative=True,
             histtype='step',
             normed=True,
             label=label)

    # And do the logarithmic version.
    non_zero_bandwidths = graph_utils.no_zeroes(bandwidths)
    if len(non_zero_bandwidths) > 0:
        min_lim = min(non_zero_bandwidths)
        max_lim = max(non_zero_bandwidths)
        bins = graph_utils.get_logspace(min_lim, max_lim)
        plt.figure(4 + id_base)
        plt.hist(non_zero_bandwidths,
                 bins=bins,
                 cumulative=True,
                 histtype='step',
                 normed=True,
                 label=label)
    else:
        print "There were non non-zero bandwidths!"