def generate_usability_table(guard_slection_filename='../Data/guard_selection_probs.json',
                             nsf="../Data/2016-10-01-00-00-00-network_state",
                             pfx2as="../Data/routeviews-rv2-20161001-1200.pfx2as",
                             libspookyhash_file="../Data/libspookyhash.so",
                             paths_text_file="../Data/20180601_paths.txt",
                             index_bin_file="../Data/20180601_index.bin",
                             output_file='../Data/usability_table.npy'):
    """Build the (client AS, guard) usability table and save it with np.save.

    Parameters
    ----------
    guard_slection_filename : str
        JSON file mapping client AS -> {guard fingerprint -> selection prob}.
        (Parameter name keeps the original "slection" typo so existing
        keyword-argument callers keep working.)
    nsf : str
        Path to a Tor network-state snapshot file.
    pfx2as : str
        CAIDA routeviews prefix-to-AS mapping file.
    libspookyhash_file, paths_text_file, index_bin_file : str
        Inputs backing the pfi.PFI path index.
    output_file : str
        Destination for the saved table; defaults to the previously
        hard-coded path, so existing behavior is unchanged.
    """
    pft = ip_to_asn.prefix_tree_from_pfx2as_file(pfx2as)
    my_pfi = pfi.PFI(libspookyhash_file, paths_text_file, index_bin_file)
    my_pfi.load()

    # Client ASes and guard fingerprints come from the selection-probs JSON.
    with open(guard_slection_filename) as f:
        inp_dict = json.load(f)

    top_ASes = list(inp_dict.keys())
    # NOTE(review): assumes AS '6128' is present and that every inner dict
    # shares the same guard-fingerprint keys -- confirm against the producer.
    guard_fps = list(inp_dict['6128'].keys())

    network_state_vars = relays.fat_network_state(nsf)

    guard_fp_to_ip = relays.get_guards(network_state_vars[0],
                                       network_state_vars[1])
    # (Removed dead code: a guard_fp -> ASNs mapping was computed here via
    # relays.make_relay_fp_to_asns_dict and never used.)

    usability_table = denasa.make_client_guard_usability_table(
        top_ASes, guard_fps, guard_fp_to_ip, my_pfi)

    np.save(output_file, usability_table)
def calculate_risk_similarity(outputChar='risk_similarity_metric',
                              nsf="../Data/2016-10-01-00-00-00-network_state"):
    """Compute a pairwise client-AS similarity matrix and dump it to JSON.

    s(AS1, AS2) is the sum, over guards g_i with normalized vanilla-Tor
    weight w_i (consensus bandwidth), of w_i for every guard usable from
    BOTH ASes, according to the precomputed usability table.

    Parameters
    ----------
    outputChar : str
        Tag embedded in the output filename.
    nsf : str
        Path to a Tor network-state snapshot file.
    """
    # np.save stored a dict as a 0-d object array; allow_pickle=True is
    # required to read it back under numpy >= 1.16.3 (default flipped to
    # False for security).
    usability_table = np.load('../Data/usability_table.npy',
                              allow_pickle=True).item()

    with open('../Data/guard_selection_probs.json') as f:
        guard_selection_probs = json.load(f)

    # Similarity matrix with every (AS_i, AS_j) entry initialized to 0.0.
    # (The original dict.fromkeys(keys, {}) created ONE shared inner dict
    # that was then overwritten in a loop; a comprehension is direct.)
    ases = list(guard_selection_probs.keys())
    output_similarity_dict = {i: dict.fromkeys(ases, 0.0) for i in ases}

    # Vanilla-Tor guard weights (consensus bandwidth), normalized to sum
    # to 1 so similarities are probabilities.
    network_state_vars = relays.fat_network_state(nsf)
    guard_fps = [guard_fp
                 for inner_dict in guard_selection_probs.values()
                 for guard_fp in inner_dict]

    guard_weights = relays.pathsim_get_position_weights(
        guard_fps, network_state_vars[0], 'g', network_state_vars[4],
        network_state_vars[5])
    total = sum(guard_weights.values())
    guard_weights = {k: v / total for k, v in guard_weights.items()}

    # NOTE(review): the original comments described agreement as "usable
    # for both or unusable for both", but the code only credits the
    # both-usable case; behavior is kept exactly as-is.
    for i, i_dict in output_similarity_dict.items():
        for j in i_dict:
            similarity = 0
            for guard, w in guard_weights.items():
                if usability_table[(i, guard)] is True and \
                        usability_table[(j, guard)] is True:
                    similarity += w
            i_dict[j] = similarity

    with open("../Data/ASSimilarityFile" + str(outputChar) + ".json",
              'w') as file:
        json.dump(output_similarity_dict, file)
# Ejemplo n.º 3 -- scraper/pastebin artifact (was a bare "Ejemplo n.º 3" / "0"
# pair that made the file invalid Python); converted to a comment.
def jaccard(date,
            outputChar='jaccard',
            nsf="../Data/2016-10-01-00-00-00-network_state"):
    """Compute a bandwidth-weighted Jaccard similarity between client ASes.

    For each AS pair (i, j): numerator sums the normalized vanilla-Tor
    guard weights of guards usable from BOTH ASes (intersection); the
    denominator sums weights of guards usable from EITHER AS (union).

    Parameters
    ----------
    date : str
        Date tag embedded in the output filename.
    outputChar : str
        Metric tag embedded in the output filename.
    nsf : str
        Path to a Tor network-state snapshot file.

    Returns
    -------
    str
        Path of the JSON file the similarity matrix was written to.
    """
    # np.save stored a dict as a 0-d object array; allow_pickle=True is
    # required to read it back under numpy >= 1.16.3.
    usability_table = np.load('../Data/usability_table.npy',
                              allow_pickle=True).item()

    with open('../Data/guard_selection_probs.json') as f:
        guard_selection_probs = json.load(f)

    # Similarity matrix with every pair initialized to 0.0. (The original
    # dict.fromkeys(keys, {}) created one shared inner dict that was then
    # overwritten in a loop; a comprehension expresses this directly.)
    ases = list(guard_selection_probs.keys())
    output_similarity_dict = {i: dict.fromkeys(ases, 0.0) for i in ases}

    # Vanilla-Tor guard weights (consensus bandwidth), normalized so the
    # Jaccard ratio is scale-free; also persisted for later inspection.
    network_state_vars = relays.fat_network_state(nsf)
    guard_fps = [guard_fp
                 for inner_dict in guard_selection_probs.values()
                 for guard_fp in inner_dict]

    guard_weights = relays.pathsim_get_position_weights(
        guard_fps, network_state_vars[0], 'g', network_state_vars[4],
        network_state_vars[5])
    total = sum(guard_weights.values())
    guard_weights = {k: v / total for k, v in guard_weights.items()}
    with open('../Data/vanilla_tor_bandwidth.json', 'w') as file:
        json.dump(guard_weights, file)

    for i, i_dict in output_similarity_dict.items():
        for j in i_dict:
            numerator = 0
            denominator = 0
            for guard, w in guard_weights.items():
                usable_i = usability_table[(i, guard)] is True
                usable_j = usability_table[(j, guard)] is True
                if usable_i and usable_j:
                    numerator += w
                if usable_i or usable_j:
                    denominator += w
            # Empty union (no usable guards from either AS): define as 0.
            i_dict[j] = numerator / denominator if denominator != 0 else 0

    outputFileName = '../Data/ASSimilarityFile_' + str(
        outputChar) + '_' + date + '.json'
    # Reuse the computed name instead of rebuilding the identical string
    # (the original duplicated the concatenation inside open()).
    with open(outputFileName, 'w') as file:
        json.dump(output_similarity_dict, file)

    return outputFileName
# Ejemplo n.º 4 -- scraper/pastebin artifact (was a bare "Ejemplo n.º 4" / "0"
# pair that made the file invalid Python); converted to a comment.
from tempest.tor import relays

# --- Script: compute DeNASA guard-selection probabilities for the top ASes ---

# Load the client AS identifiers to evaluate (one per line in the text file).
inp_array = []
with open('../Data/Top95ASes.txt', 'r') as file:
    for i in file.readlines():
        inp_array.append(i.rstrip("\n"))

# Network-state snapshot plus the CAIDA routeviews prefix -> ASN mapping.
nsf = "../Data/2016-10-01-00-00-00-network_state"
pft = ip_to_asn.prefix_tree_from_pfx2as_file(
    "../Data/routeviews-rv2-20161001-1200.pfx2as")
# Path index backed by a native spookyhash shared library plus its
# paths/index data files.
my_pfi = pfi.PFI("../Data/libspookyhash.so", "../Data/20180601_paths.txt",
                 "../Data/20180601_index.bin")
my_pfi.load()

network_state_vars = relays.fat_network_state(nsf)

# Guard fingerprint -> IP for guards in the consensus snapshot.
guard_fp_to_ip = relays.get_guards(network_state_vars[0],
                                   network_state_vars[1])

guard_fp_to_asns =\
        relays.make_relay_fp_to_asns_dict(guard_fp_to_ip, pft)

# Keep only guards whose IP resolved to at least one ASN.
# (guard_fps is not used in the visible code below -- presumably consumed
# further down the file; verify before removing.)
guard_fps = []
for guard_fp, asns in guard_fp_to_asns.items():
    if asns is not None:
        guard_fps.append(guard_fp)

# Per-client-AS guard selection probabilities under the DeNASA scheme.
guard_selection_probs = denasa.compute_denasa_guard_selection_probs(
    inp_array, nsf, pft, my_pfi)