# NOTE(review): fragment — the script's start (argparse setup, data_path /
# json_dict_file assignment) lies outside this span, and the first line has
# lost its leading indentation (paste artifact). Code left byte-identical.
key_part1_str = args.key_part1_str
    key_part2_str = args.key_part2_str
    probe_id = args.probe_id
    summaries_file = args.summaries_file

    # Compound heading "part1 ; part2" first, then the individual parts and
    # per-measurement-type columns. (Not consumed later in the visible span —
    # TODO confirm it is used further on.)
    list_of_headings = [key_part1_str + " ; " + key_part2_str, key_part1_str, key_part2_str, 'ping', 'dns', 'traceroute', 'num_origins', 'origins_list']

    # Regular files directly under data_path; subdirectories are skipped.
    # (data_path is not assigned in the visible span — presumably from args.)
    list_of_entries = listdir(data_path)
    list_of_files = [join(data_path, f) for f in list_of_entries if isfile(join(data_path, f))]

    results_dict = {}

    # build results_dict: one pass over every data file
    for this_file in list_of_files:
        print("reading " + this_file)
        measurement_id = Measurement_Data.get_measurement_id_from_file(this_file)

        # handle empty file (helper apparently returns None for empty files,
        # per the message below); NOTE(review): `is None` is the idiom here
        if measurement_id == None:
            print(this_file + " appears to be empty.  Skipping.")
            continue

        # handle nonempty file: accumulate results under the compound key
        measurement_data = Measurement_Data(measurement_id, this_file, summaries_file)
        measurement_data.add_compound_key_results(probe_id, results_dict, key_part1_str, key_part2_str)

    # write results_dict to file (json_dict_file is not assigned in the
    # visible span — presumably set from args earlier; verify)
    with open(json_dict_file, 'w') as f:
            json.dump(results_dict, f)

    # print(Measurement_Data.calc_results_summary(json_dict_file))
Exemple #2
0
                    help="A file containing a dictionary of measurement_id : measurement_summary.  If ommitted, summaries are obtained from internet.")
    return parser.parse_args()


if __name__ == "__main__":
    # Script entry: read every file under data_path and export ping
    # measurements for one probe to CSV.
    args = parse_args()
    summaries_file = args.summaries_file
    data_path = args.data_path
    probe_id = args.probe_id
    # NOTE(review): out_file is read from args but never used in this span.
    out_file = args.out_file

    # Regular files directly under data_path; subdirectories are skipped.
    list_of_entries = listdir(data_path)
    list_of_files = [join(data_path, f) for f in list_of_entries if isfile(join(data_path, f))]

    for this_file in list_of_files:
        print("reading "+this_file)

        # Get measurement ID (None signals an empty file)
        # NOTE(review): `is None` is the preferred idiom.
        measurement_id = Measurement_Data.get_measurement_id_from_file(this_file)
        if measurement_id == None:
            print(this_file + " appears to be empty.  Skipping.")
            continue    # go to next file

        # Get data from this file
        measurement_data = Measurement_Data(measurement_id, this_file, summaries_file)

        # Only ping measurements are exported; other types are ignored.
        if measurement_data.type == 'ping':
            ping_data = Ping_Data(measurement_id, this_file, summaries_file)
            # NOTE(review): csv_file is not defined in the visible span —
            # confirm it is assigned before this point in the full file.
            ping_data.write_data_to_csv(csv_file, probe_id)

    
    # NOTE(review): fragment — begins mid-script; `args` and json_dict_file
    # are set outside the visible span. Code left byte-identical.
    csv_file = args.csv_file
    data_path = args.data_path
    probe_id = args.probe_id
    summaries_file = args.summaries_file

    # Headings for the probe/target results: compound key first, then the
    # parts, then per-type counters. (Not consumed later in the visible
    # span — TODO confirm.)
    list_of_headings = ['probe_id ; target', 'probe_id', 'target', 'ping', 'dns', 'traceroute', 'num_origins', 'origins_list']

    # Regular files directly under data_path; subdirectories are skipped.
    list_of_entries = listdir(data_path)
    list_of_files = [join(data_path, f) for f in list_of_entries if isfile(join(data_path, f))]

    results_dict = {}

    # build results_dict
    for this_file in list_of_files:
        print("reading "+this_file)
        measurement_id = Measurement_Data.get_measurement_id_from_file(this_file)

        # handle empty file; NOTE(review): `is None` is the preferred idiom
        if measurement_id == None:
            print(this_file + " appears to be empty.  Skipping.")
            continue

        # handle nonempty file: accumulate per probe/target results in place
        measurement_data = Measurement_Data(measurement_id, this_file, summaries_file)
        measurement_data.add_probe_and_target_results(results_dict)

    # write results_dict to file (json_dict_file not defined in this span)
    with open(json_dict_file, 'w') as f:
            json.dump(results_dict, f)

    # print(Measurement_Data.calc_results_summary(json_dict_file))
Exemple #4
0
    )
    return parser.parse_args()


# Call the script by typing:
#    $ python explore_data.py filename [limit] [summaries_file]
# where filename is the name of a file containing a ripe atlas response
if __name__ == "__main__":
    # Script entry: prepare a per-probe CSV, then iterate the data files.
    args = parse_args()
    summaries_file = args.summaries_file
    data_path = args.data_path
    probe_id = args.probe_id
    # One CSV per probe: <csv_path>/<CSV_FILE_PREFIX><probe_id>.csv
    csv_file = join(args.csv_path, CSV_FILE_PREFIX + str(probe_id) + ".csv")

    # Write the ping-specific heading row before appending data rows.
    list_of_headings = Ping_Data.get_ping_headings()
    Measurement_Data.prep_csv_file(csv_file, list_of_headings)

    # Regular files directly under data_path; subdirectories are skipped.
    list_of_entries = listdir(data_path)
    list_of_files = [
        join(data_path, f) for f in list_of_entries
        if isfile(join(data_path, f))
    ]

    for this_file in list_of_files:
        print("reading " + this_file)
        measurement_id = Measurement_Data.get_measurement_id_from_file(
            this_file)
        # Empty file: helper returns None, skip it.
        # NOTE(review): `is None` is the preferred idiom.
        if measurement_id == None:
            print(this_file + " appears to be empty.  Skipping.")
            continue
        # NOTE(review): fragment truncated here — the handling of non-empty
        # files is not visible in this span.
Exemple #5
0
    # NOTE(review): fragment — begins mid-script; `args` and json_dict_file
    # are set outside the visible span. Code left byte-identical.
    key_name = args.key_name
    data_path = args.data_path
    # NOTE(review): probe_id is read but unused in the visible span.
    probe_id = args.probe_id
    summaries_file = args.summaries_file

    # Per-probe headings. (Not consumed later in the visible span — TODO
    # confirm it is used further on.)
    list_of_headings = ['probe_id', 'ping', 'dns', 'traceroute', 'num_targets', 'targets_list', 'num_origins', 'origins_list']

    # Regular files directly under data_path; subdirectories are skipped.
    list_of_entries = listdir(data_path)
    list_of_files = [join(data_path, f) for f in list_of_entries if isfile(join(data_path, f))]

    results_dict = {}

    # build results_dict
    for this_file in list_of_files:
        print("reading "+this_file)
        measurement_id = Measurement_Data.get_measurement_id_from_file(this_file)

        # handle empty file; NOTE(review): `is None` is the preferred idiom
        if measurement_id == None:
            print(this_file + " appears to be empty.  Skipping.")
            continue

        # handle nonempty file: accumulate results under key_name
        measurement_data = Measurement_Data(measurement_id, this_file, summaries_file)
        measurement_data.add_to_results_dict(key_name, results_dict)

    # write results_dict to file (json_dict_file not defined in this span)
    with open(json_dict_file, 'w') as f:
            json.dump(results_dict, f)

    # print(Measurement_Data.calc_results_summary(json_dict_file))
Exemple #6
0
    parser = argparse.ArgumentParser(
            description='Given a dictionary with RIPE measurement data, builds a CDF showing the time between measurements.')
    parser.add_argument('json_dict_files', type=str, nargs="+",
                    help="A the paths and filenames containing data in json format.  e.g. timestamp_probe1.json timestamp_probe22.json ...")
    return parser.parse_args()


if __name__ == "__main__":
    # Entry point: collect inter-measurement time gaps from each input JSON
    # dictionary file, then render their empirical CDF.
    args = parse_args()

    time_gaps = []
    for path in args.json_dict_files:
        # Helper appends this file's time differences onto time_gaps in place.
        Measurement_Data.write_compound_key_dict_to_list(path, time_gaps)

    print("num measurements: " + str(len(time_gaps)))

    # Plot the empirical CDF of the collected gaps and display it.
    plt.figure(figsize=(4.5, 3.5))
    ecdf = ECDF(time_gaps)
    plt.plot(ecdf.x, ecdf.y, lw=3)
    plt.xlabel("Time between measurements (s)")
    plt.ylabel("CDF of measurements")
    plt.ylim(0, 1)
    plt.show()
        'probe_id ; target', 'probe_id', 'target', 'ping', 'dns', 'traceroute',
        'num_origins', 'origins_list'
    ]

    # NOTE(review): fragment — begins mid-script; data_path, summaries_file
    # and json_dict_file are set outside the visible span.
    # Regular files directly under data_path; subdirectories are skipped.
    list_of_entries = listdir(data_path)
    list_of_files = [
        join(data_path, f) for f in list_of_entries
        if isfile(join(data_path, f))
    ]

    results_dict = {}

    # build results_dict
    for this_file in list_of_files:
        print("reading " + this_file)
        measurement_id = Measurement_Data.get_measurement_id_from_file(
            this_file)

        # handle empty file; NOTE(review): `is None` is the preferred idiom
        if measurement_id == None:
            print(this_file + " appears to be empty.  Skipping.")
            continue

        # handle nonempty file: accumulate per probe/target results in place
        measurement_data = Measurement_Data(measurement_id, this_file,
                                            summaries_file)
        measurement_data.add_probe_and_target_results(results_dict)

    # write results_dict to file
    with open(json_dict_file, 'w') as f:
        json.dump(results_dict, f)
Exemple #8
0
                    help="A file containing a dictionary of measurement_id : measurement_summary.  If ommitted, summaries are obtained from internet.")
    return parser.parse_args()


# Call the script by typing:
#    $ python explore_data.py filename [limit] [summaries_file]
# where filename is the name of a file containing a ripe atlas response
if __name__ == "__main__":
    # Script entry: prepare a per-probe CSV and scan data files for ping
    # measurements.
    args = parse_args()
    summaries_file = args.summaries_file
    data_path = args.data_path
    probe_id = args.probe_id
    # One CSV per probe: <csv_path>/<CSV_FILE_PREFIX><probe_id>.csv
    csv_file = join(args.csv_path, CSV_FILE_PREFIX+str(probe_id)+".csv")

    # Write the ping-specific heading row before appending data rows.
    list_of_headings = Ping_Data.get_ping_headings()
    Measurement_Data.prep_csv_file(csv_file, list_of_headings)

    # Regular files directly under data_path; subdirectories are skipped.
    list_of_entries = listdir(data_path)
    list_of_files = [join(data_path, f) for f in list_of_entries if isfile(join(data_path, f))]

    for this_file in list_of_files:
        print("reading "+this_file)
        measurement_id = Measurement_Data.get_measurement_id_from_file(this_file)
        # Empty file: helper returns None, skip it.
        # NOTE(review): `is None` is the preferred idiom.
        if measurement_id == None:
            print(this_file + " appears to be empty.  Skipping.")
            continue

        measurement_data = Measurement_Data(measurement_id, this_file, summaries_file)

        # Only ping measurements are handled here.
        if measurement_data.type == 'ping':
            ping_data = Ping_Data(measurement_id, this_file, summaries_file)
            # NOTE(review): fragment truncated — the write call on ping_data
            # (cf. write_data_to_csv elsewhere in this file) is not visible.
Exemple #9
0
        'probe_id', 'ping', 'dns', 'traceroute', 'num_targets', 'targets_list',
        'num_origins', 'origins_list'
    ]

    # NOTE(review): fragment — begins mid-script; data_path, summaries_file,
    # key_name and json_dict_file are set outside the visible span.
    # Regular files directly under data_path; subdirectories are skipped.
    list_of_entries = listdir(data_path)
    list_of_files = [
        join(data_path, f) for f in list_of_entries
        if isfile(join(data_path, f))
    ]

    results_dict = {}

    # build results_dict
    for this_file in list_of_files:
        print("reading " + this_file)
        measurement_id = Measurement_Data.get_measurement_id_from_file(
            this_file)

        # handle empty file; NOTE(review): `is None` is the preferred idiom
        if measurement_id == None:
            print(this_file + " appears to be empty.  Skipping.")
            continue

        # handle nonempty file: accumulate results under key_name
        measurement_data = Measurement_Data(measurement_id, this_file,
                                            summaries_file)
        measurement_data.add_to_results_dict(key_name, results_dict)

    # write results_dict to file
    with open(json_dict_file, 'w') as f:
        json.dump(results_dict, f)