Exemplo n.º 1
0
    def read_and_start(self):
        """Read every JSON parameter file under params/ and hand each parsed
        parameter set to the callback (wrapped in a single-element list)."""
        for filename in glob("params/*.json"):
            # echo which parameter file is being processed
            print(filename)
            with open(filename) as handle:
                param = json.loads(handle.read())
            self.callback([param])
Exemplo n.º 2
0
def get_opts(configfilename):
    """
    Read the config file "<configfilename>.json" and parse it (including our
    own json deserialization).
    :param configfilename: base name of the config file, str or bytes
                           (bytes arrive from tensorflow under python3)
    :return: deserialized options object
    """
    # tensorflow results and python3 require decoding;
    # use isinstance instead of type comparison (handles subclasses, idiomatic)
    if isinstance(configfilename, bytes):
        configfilename = configfilename.decode()

    # read config file and deserialize with the project's JSON layer
    with open("%s.json" % configfilename) as f:
        opts = JsonSerialization.loads(f.read())

    return opts
Exemplo n.º 3
0
def get_filenames_and_config():
    """
    Find all preprocessed pcap json files, verify that both the
    before-bottleneck and after-bottleneck capture exist for each measurement
    id, and read the matching config file.
    :return: list of (config, before-pcap path, after-pcap path, id) tuples
    """
    # where to search for preprocessed data
    # (renamed from "dir": that name shadows the builtin dir())
    data_dir = "../data/preprocessed/"

    # find all files, keeping only the basename (strip the directory prefix)
    files = [f[len(data_dir):] for f in glob(data_dir + "*.pcap.json")]

    # consistency check: do pcaps before and after bottleneck exist?
    # per id, accumulate a bitmask: 1 = before present, 2 = after present
    names = {}
    for f in files:
        name = None
        add = 0
        if f.endswith("+before.pcap.json"):
            name = f[:-len("+before.pcap.json")]
            add = 1
        elif f.endswith("+after.pcap.json"):
            name = f[:-len("+after.pcap.json")]
            add = 2
        if name:
            # OR instead of += so duplicate sightings cannot overcount
            names[name] = names.get(name, 0) | add

    filenames = []
    for k, v in names.items():
        if v < 3:
            # one of both pcaps is missing
            print("missing files for %s" % k)
            continue
        configpath = "../data/jsons/%s.json" % k
        if not os.path.exists(configpath):
            # configfile missing
            print("missing files for %s" % k)
            continue
        # everything alright: build the pcap paths and parse the config
        beforepcap = "%s%s+before.pcap.json" % (data_dir, k)
        afterpcap = "%s%s+after.pcap.json" % (data_dir, k)
        with open(configpath) as f:
            config = json.loads(f.read())
        # add filenames, config and general id
        filenames.append((config, beforepcap, afterpcap, k))

    return filenames
Exemplo n.º 4
0
def mp_create_hist(outdir, filenames_and_config):
    """
    Create packet-arrival histograms (1ms bins) for one measurement.
    :param outdir: dir to output histograms to
    :param filenames_and_config: tuple of (preparsed config, before-pcap path,
                                 after-pcap path, measurement id)
    :return: list of two csv label lines (before and after bottleneck),
             or None if the input files could not be read/parsed
    """
    (config, before, after, k) = filenames_and_config
    cong = config['config']['cong']

    # get variant and pacing label from name
    variant_idx = names_to_variant_idx[cong]
    pacing_idx = names_to_pacing_idx[cong]

    # the two csv label file lines this measurement yields (before and after)
    result = [(variant_idx, pacing_idx, 0, "%s" % k, "%s_before.csv" % k),
              (variant_idx, pacing_idx, 1, "%s" % k, "%s_after.csv" % k)]

    # check whether files already exist - if yes, skip histogram creation
    # (bugfix: the second isfile() call used to re-test the _before file,
    # so a missing _after histogram was never regenerated)
    file_exists = (os.path.isfile("%s/1ms/%s_before.csv" % (outdir, k))
                   and os.path.isfile("%s/1ms/%s_after.csv" % (outdir, k)))
    if file_exists:
        return result

    # read packet arrival jsons
    # (bugfix: use with-blocks so the file handles are always closed;
    # the originals were opened and never closed)
    try:
        with open(before, 'r') as before_file:
            before_data = json.loads(before_file.read())
        with open(after, 'r') as after_file:
            after_data = json.loads(after_file.read())
    except Exception as e:
        # best-effort: report the broken pair and let the caller skip it
        print(e, "problem with", (before, after))
        return None

    # extract flows from packet arrival
    before_flow_data = get_right_flow(before_data, config)
    after_flow_data = get_right_flow(after_data, config)

    if before_flow_data is None or after_flow_data is None:
        print("problem parsing", (before, after))
        return None

    # extract flow directions (forward flow, reverse flow, rest unused)
    before_flow, before_flow_back, _ = before_flow_data
    after_flow, after_flow_back, _ = after_flow_data

    # extract timestamp, length and sequence number of data-carrying packets
    packets_before = [(packet['t'], packet['l'], packet['seq'])
                      for packet in before_flow if packet['l'] > 0]
    packets_after = [(packet['t'], packet['l'], packet['seq'])
                     for packet in after_flow if packet['l'] > 0]

    # sort if wrong order
    packets_before = sort_if_necessary(packets_before)
    packets_after = sort_if_necessary(packets_after)

    # create histograms (1 ms bins over the first 60 s)
    hist_before = get_histogram([packets_before], steps=0.001, end=60.0)
    hist_after = get_histogram([packets_after], steps=0.001, end=60.0)

    # write histograms to csv files
    _csv("%s/1ms/%s_before.csv" % (outdir, k), hist_before)
    _csv("%s/1ms/%s_after.csv" % (outdir, k), hist_after)

    # return two csv label file lines (before and after bottleneck)
    return result