Example #1
0
def plotIpt(directory):
    """Build the CDF plot of inter-packet arrival times for one experiment.

    Loads the pickled sniffer output from *directory*, scales the raw
    nanosecond timings to microseconds, draws per-port CDFs with their
    average / 99th-percentile markers, and overlays a vertical line at
    the ideal inter-packet time for the most frequent packet length.

    Returns the boomslang plot object.
    """
    # Per-port inter-packet times and their (avg, pc99) summaries
    ipt = readPickledFile(os.path.join(directory, "pickled/ipt.txt"))
    summary = readPickledFile(
        os.path.join(directory, "pickled/ipt_summary.txt"))

    # Most frequently observed packet length in the trace
    (most_freq_pkt_len, pkt_len_freq) = readPickledFile(
        os.path.join(directory, "pickled/pkt_len_freq.txt"))

    NSEC_PER_USEC = 1000.0

    cdf_data = []
    avg_data = []
    pc99_data = []
    for port in summary.keys():
        # Plot in microseconds rather than nanoseconds
        cdf_data.append([t / NSEC_PER_USEC for t in ipt[port]])
        port_avg, port_pc99 = summary[port]
        avg_data.append(port_avg / NSEC_PER_USEC)
        pc99_data.append(port_pc99 / NSEC_PER_USEC)

    plot = plotCDFGraphSimple(
        cdf_data, avg_data, pc99_data,
        "Inter-packet time (in microseconds)", "Fractiles",
        "CDF of inter-packet time (not inter-packet gap) "
        "in microseconds")

    # Ideal inter-packet time at the per-class line rate
    # (rate_mbps is split evenly across nclasses)
    rate_mbps = float(readDirTagFileProperty(directory, "rate_mbps"))
    nclasses = int(readDirTagFileProperty(directory, "nclasses"))
    ideal_nsec = idealIptNsec(most_freq_pkt_len,
                              rate_mbps / (1000.0 * nclasses))

    # Mark the ideal time on the plot, also converted to microseconds
    ideal_vline = boomslang.VLine(color="magenta", lineStyle="-.", width=2)
    ideal_vline.xValues = [ideal_nsec / NSEC_PER_USEC]
    ideal_vline.label = "Ideal"
    plot.add(ideal_vline)

    return plot
Example #2
0
def plotIpt(directory):
    """Construct the inter-packet-time CDF graph for an experiment.

    All sniffer timings are pickled in nanoseconds; everything here is
    converted to microseconds before plotting.  A magenta dash-dot
    vertical line marks the ideal inter-packet time computed from the
    per-class rate and the most frequent packet length in the trace.
    """
    # Pickled sniffer outputs
    ipt = readPickledFile(os.path.join(directory, 'pickled/ipt.txt'))
    summary = readPickledFile(os.path.join(directory,
                                           'pickled/ipt_summary.txt'))
    (most_freq_pkt_len, _pkt_len_freq) = readPickledFile(
        os.path.join(directory, 'pickled/pkt_len_freq.txt'))

    usec = lambda nsec: nsec / 1000.0  # nanoseconds -> microseconds

    ports = summary.keys()
    cdf_data = [map(usec, ipt[port]) for port in ports]
    avg_data = [usec(summary[port][0]) for port in ports]
    pc99_data = [usec(summary[port][1]) for port in ports]

    # Generate CDF plot
    plot = plotCDFGraphSimple(
        cdf_data, avg_data, pc99_data, "Inter-packet time (in microseconds)",
        "Fractiles", "CDF of inter-packet time (not inter-packet gap) "
        "in microseconds")

    # Ideal inter-packet arrival time for the most frequently seen
    # packet length, at the per-class rate (rate_mbps split over nclasses)
    rate_mbps = float(readDirTagFileProperty(directory, "rate_mbps"))
    nclasses = int(readDirTagFileProperty(directory, "nclasses"))
    ideal_nsec = idealIptNsec(most_freq_pkt_len,
                              rate_mbps / (1000.0 * nclasses))

    # Overlay the ideal inter-packet time as a vertical line
    ideal_vline = boomslang.VLine(color="magenta", lineStyle="-.", width=2)
    ideal_vline.xValues = [usec(ideal_nsec)]
    ideal_vline.label = "Ideal"
    plot.add(ideal_vline)

    return plot
Example #3
0
def main(argv):
    # Parse flags
    args = parser.parse_args()

    # Check if the base directory is valid
    exists = os.path.exists(args.base_dir)
    isdir = os.path.isdir(args.base_dir)
    if not exists:
        print 'Base directory does not exist'
        sys.exit(1)
    if not isdir:
        print 'Base directory not valid'
        sys.exit(1)
    args.base_dir = os.path.abspath(args.base_dir)

    # Create Plumbum commands for pickling data and plotting
    pickle_cmd = plumbum.local[os.path.join(config['PLOT_SCRIPTS_DIR'],
                                            'pickleExptLogs.py')]
    plot_sniffer_cmd = plumbum.local[os.path.join(config['PLOT_SCRIPTS_DIR'],
                                                  'plotSniffer.py')]
    plot_mcperf_lat_cmd = plumbum.local[os.path.join(
        config['PLOT_SCRIPTS_DIR'], 'plotMcperfLatency.py')]

    # Temp directory for plotting graphs
    plot_tmpdir = config['PLOT_TMPDIR']

    print 'Exploring base directory :', args.base_dir

    num_exp_dirs = 0
    for (path, dirs, files) in os.walk(args.base_dir, followlinks=True):
        # Check if an experiment directory was found
        if os.path.exists(os.path.join(path, 'expsift_tags')):
            print 'Found experiment directory:', path
            num_exp_dirs += 1

            # Pickle experiment logs
            print '... Pickling experiment data'
            if args.force_replot:
                pickle_cmd('-f', path, plot_tmpdir)
            else:
                pickle_cmd(path, plot_tmpdir)

            # Read the workload type for the experiment directory
            workload = readDirTagFileProperty(path, 'workload')
            if (not workload in trafgen_workloads
                    and not workload in memcached_workloads):
                print 'Workload not recognized for expt: %s' % args.expt_dir
                sys.exit(1)

            # Plot experiment graphs
            print '... Plotting experiment graphs'
            expt_plot_dir = os.path.join(path, 'plot/')
            if not os.path.exists(expt_plot_dir):
                os.makedirs(expt_plot_dir)
            if workload in trafgen_workloads:
                if args.force_replot:
                    plot_sniffer_cmd('-f', path, expt_plot_dir)
                else:
                    plot_sniffer_cmd(path, expt_plot_dir)
            elif workload in memcached_workloads:
                if args.force_replot:
                    plot_mcperf_lat_cmd('-f', path, expt_plot_dir)
                else:
                    plot_mcperf_lat_cmd(path, expt_plot_dir)

    print 'Plotted graphs for %d experiments under %s' % (num_exp_dirs,
                                                          args.base_dir)
def main(argv):
    """Pickle the log files of a single experiment directory.

    Reads the experiment's 'workload' tag and, depending on whether it
    is a trafgen or a memcached workload, extracts and pickles the
    corresponding log files into <expt_dir>/pickled and saves derived
    statistics into <expt_dir>/stats.  Exits with status 1 if the
    workload tag is not recognized.
    """
    # Parse flags
    args = parser.parse_args()

    # Read the workload type for the experiment directory
    workload = readDirTagFileProperty(args.expt_dir, 'workload')
    if (not workload in trafgen_workloads and
        not workload in memcached_workloads):
       print 'Workload not recognized for expt: %s' % args.expt_dir
       sys.exit(1)

    # Create directory for pickled files
    pickle_dir = os.path.join(args.expt_dir, 'pickled')
    if not os.path.exists(pickle_dir):
        os.makedirs(pickle_dir)

    # Create directory for saving statistics
    stats_dir = os.path.join(args.expt_dir, 'stats')
    if not os.path.exists(stats_dir):
        os.makedirs(stats_dir)

    # Pickle data for trafgen workloads
    if workload in trafgen_workloads:

        # Temp directory to extract the sniffer data and pickle it
        snf_data_dir = os.path.join(args.tmp_dir, 'snf_data')
        if not os.path.exists(snf_data_dir):
            os.makedirs(snf_data_dir)

        # Extract the sniffer data to the temp directory
        # NOTE(review): tar is not closed if extractall raises -- a
        # try/finally (or contextlib.closing) would be safer.
        snf_tarfile = os.path.join(args.expt_dir, 'logs/pkt_snf.tar.gz')
        tar = tarfile.open(snf_tarfile)
        tar.extractall(snf_data_dir)
        tar.close()

        # Pickle sniffer data if required
        # (allFilesGenerated presumably reports whether this stage's
        # outputs already exist -- '-f' forces a rewrite regardless)
        if (args.force_rewrite or
            not allFilesGenerated('sniffer', pickle_dir, stats_dir)):
            # max_lines caps how much of the sniffer trace is processed
            pickleSnfFile(os.path.join(snf_data_dir, 'pkt_snf.txt'),
                          pickle_dir, stats_dir, max_lines = 1000000)

        # Pickle mpstat data
        if (args.force_rewrite or
            not allFilesGenerated('mpstat', pickle_dir, stats_dir)):
            pickleMPStat(os.path.join(args.expt_dir, 'logs/mpstat.txt'),
                         pickle_dir, stats_dir)

        # Pickle ethstats data
        if (args.force_rewrite or
            not allFilesGenerated('ethstats', pickle_dir, stats_dir)):
            pickleEthstats(os.path.join(args.expt_dir, 'logs/net.txt'),
                           pickle_dir, stats_dir)

    # Pickle data for memcached workloads
    elif workload in memcached_workloads:

        # Read the list of server and client machines used for the experiment
        (servers, clients) = parseHostsFile(os.path.join(args.expt_dir,
                                                         'logs/hostsfile.txt'))

        # Pickle mcperf data
        if (args.force_rewrite or
            not allFilesGenerated('mcperf', pickle_dir, stats_dir)):

            # Pickle hosts info separately so later stages can load the
            # (servers, clients) split without re-parsing the hostsfile
            hosts_pfile = os.path.join(pickle_dir, 'hosts_p.txt')
            fd = open(hosts_pfile, 'wb')
            cPickle.dump((servers, clients), fd)
            fd.close()

            # Collect each client's per-run mcperf logs
            mcperf_files = []
            for client in clients:
                files = glob.glob(os.path.join(args.expt_dir, 'logs',
                                  client, 'mcperf-t*-c*-*.txt'))
                mcperf_files.append((client, files))

            pickleMcperf(mcperf_files, pickle_dir, stats_dir)

        # Pickle mpstat data for clients and servers
        if (args.force_rewrite or
            not allFilesGenerated('mpstat_mc', pickle_dir, stats_dir)):
            client_mpstat_files = [ os.path.join(args.expt_dir, 'logs',
                                                 client, 'mpstat.txt')
                                    for client in clients ]
            server_mpstat_files = [ os.path.join(args.expt_dir, 'logs',
                                                 server, 'mpstat.txt')
                                    for server in servers ]

            pickleMPStatMC(client_mpstat_files, server_mpstat_files,
                           pickle_dir, stats_dir)

        # Pickle trafgen data
        if (args.force_rewrite or
            not allFilesGenerated('trafgen', pickle_dir, stats_dir)):
            hosts = clients + servers
            pickleTrafgen(hosts, os.path.join(args.expt_dir, 'logs'),
                          pickle_dir, stats_dir)
Example #5
0
def main(argv):
    # Parse flags
    args = parser.parse_args()

    # Check if the base directory is valid
    exists = os.path.exists(args.base_dir)
    isdir = os.path.isdir(args.base_dir)
    if not exists:
        print 'Base directory does not exist'
        sys.exit(1)
    if not isdir:
        print 'Base directory not valid'
        sys.exit(1)
    args.base_dir = os.path.abspath(args.base_dir)

    # Create Plumbum commands for pickling data and plotting
    pickle_cmd = plumbum.local[os.path.join(config['PLOT_SCRIPTS_DIR'],
                                            'pickleExptLogs.py')]
    plot_sniffer_cmd = plumbum.local[os.path.join(config['PLOT_SCRIPTS_DIR'],
                                                  'plotSniffer.py')]
    plot_mcperf_lat_cmd = plumbum.local[os.path.join(config['PLOT_SCRIPTS_DIR'],
                                                     'plotMcperfLatency.py')]

    # Temp directory for plotting graphs
    plot_tmpdir = config['PLOT_TMPDIR']

    print 'Exploring base directory :', args.base_dir

    num_exp_dirs = 0
    for (path, dirs, files) in os.walk(args.base_dir, followlinks=True):
        # Check if an experiment directory was found
        if os.path.exists(os.path.join(path, 'expsift_tags')):
            print 'Found experiment directory:', path
            num_exp_dirs += 1

            # Pickle experiment logs
            print '... Pickling experiment data'
            if args.force_replot:
                pickle_cmd('-f', path, plot_tmpdir)
            else:
                pickle_cmd(path, plot_tmpdir)

            # Read the workload type for the experiment directory
            workload = readDirTagFileProperty(path, 'workload')
            if (not workload in trafgen_workloads and
                not workload in memcached_workloads):
               print 'Workload not recognized for expt: %s' % args.expt_dir
               sys.exit(1)

            # Plot experiment graphs
            print '... Plotting experiment graphs'
            expt_plot_dir = os.path.join(path, 'plot/')
            if not os.path.exists(expt_plot_dir):
                os.makedirs(expt_plot_dir)
            if workload in trafgen_workloads:
                if args.force_replot:
                    plot_sniffer_cmd('-f', path, expt_plot_dir)
                else:
                    plot_sniffer_cmd(path, expt_plot_dir)
            elif workload in memcached_workloads:
                if args.force_replot:
                    plot_mcperf_lat_cmd('-f', path, expt_plot_dir)
                else:
                    plot_mcperf_lat_cmd(path, expt_plot_dir)

    print 'Plotted graphs for %d experiments under %s' % (num_exp_dirs,
            args.base_dir)
def main(argv):
    """Pickle the logs of one experiment directory into pickled/ and stats/.

    Dispatches on the experiment's 'workload' tag: trafgen workloads get
    their sniffer/mpstat/ethstats logs pickled; memcached workloads get
    mcperf/mpstat/trafgen logs pickled per client and server host.
    Exits with status 1 for an unrecognized workload.
    """
    # Parse flags
    args = parser.parse_args()

    # Read the workload type for the experiment directory
    workload = readDirTagFileProperty(args.expt_dir, 'workload')
    if (not workload in trafgen_workloads
            and not workload in memcached_workloads):
        print 'Workload not recognized for expt: %s' % args.expt_dir
        sys.exit(1)

    # Create directory for pickled files
    pickle_dir = os.path.join(args.expt_dir, 'pickled')
    if not os.path.exists(pickle_dir):
        os.makedirs(pickle_dir)

    # Create directory for saving statistics
    stats_dir = os.path.join(args.expt_dir, 'stats')
    if not os.path.exists(stats_dir):
        os.makedirs(stats_dir)

    # Pickle data for trafgen workloads
    if workload in trafgen_workloads:

        # Temp directory to extract the sniffer data and pickle it
        snf_data_dir = os.path.join(args.tmp_dir, 'snf_data')
        if not os.path.exists(snf_data_dir):
            os.makedirs(snf_data_dir)

        # Extract the sniffer data to the temp directory
        # NOTE(review): tar stays open if extractall raises; a
        # try/finally would guarantee the close.
        snf_tarfile = os.path.join(args.expt_dir, 'logs/pkt_snf.tar.gz')
        tar = tarfile.open(snf_tarfile)
        tar.extractall(snf_data_dir)
        tar.close()

        # Pickle sniffer data if required ('-f' / force_rewrite skips
        # the allFilesGenerated freshness check)
        if (args.force_rewrite
                or not allFilesGenerated('sniffer', pickle_dir, stats_dir)):
            # max_lines caps how much of the sniffer trace is processed
            pickleSnfFile(os.path.join(snf_data_dir, 'pkt_snf.txt'),
                          pickle_dir,
                          stats_dir,
                          max_lines=1000000)

        # Pickle mpstat data
        if (args.force_rewrite
                or not allFilesGenerated('mpstat', pickle_dir, stats_dir)):
            pickleMPStat(os.path.join(args.expt_dir, 'logs/mpstat.txt'),
                         pickle_dir, stats_dir)

        # Pickle ethstats data
        if (args.force_rewrite
                or not allFilesGenerated('ethstats', pickle_dir, stats_dir)):
            pickleEthstats(os.path.join(args.expt_dir, 'logs/net.txt'),
                           pickle_dir, stats_dir)

    # Pickle data for memcached workloads
    elif workload in memcached_workloads:

        # Read the list of server and client machines used for the experiment
        (servers, clients) = parseHostsFile(
            os.path.join(args.expt_dir, 'logs/hostsfile.txt'))

        # Pickle mcperf data
        if (args.force_rewrite
                or not allFilesGenerated('mcperf', pickle_dir, stats_dir)):

            # Pickle hosts info separately so downstream stages can load
            # the (servers, clients) split without re-parsing hostsfile
            hosts_pfile = os.path.join(pickle_dir, 'hosts_p.txt')
            fd = open(hosts_pfile, 'wb')
            cPickle.dump((servers, clients), fd)
            fd.close()

            # Gather each client's per-run mcperf log files
            mcperf_files = []
            for client in clients:
                files = glob.glob(
                    os.path.join(args.expt_dir, 'logs', client,
                                 'mcperf-t*-c*-*.txt'))
                mcperf_files.append((client, files))

            pickleMcperf(mcperf_files, pickle_dir, stats_dir)

        # Pickle mpstat data for clients and servers
        if (args.force_rewrite
                or not allFilesGenerated('mpstat_mc', pickle_dir, stats_dir)):
            client_mpstat_files = [
                os.path.join(args.expt_dir, 'logs', client, 'mpstat.txt')
                for client in clients
            ]
            server_mpstat_files = [
                os.path.join(args.expt_dir, 'logs', server, 'mpstat.txt')
                for server in servers
            ]

            pickleMPStatMC(client_mpstat_files, server_mpstat_files,
                           pickle_dir, stats_dir)

        # Pickle trafgen data
        if (args.force_rewrite
                or not allFilesGenerated('trafgen', pickle_dir, stats_dir)):
            hosts = clients + servers
            pickleTrafgen(hosts, os.path.join(args.expt_dir, 'logs'),
                          pickle_dir, stats_dir)