Example #1
def main():
    """Partition a sample of the TwitterSample2 graph across 512 servers.

    Loads the node list, keeps a prefix sample, restricts the graph to it,
    runs RandomP partitioning, then reports server load, locality, timing
    and inter-server cost before saving the result.
    """
    network_dataset = Dataset('twitters2')

    nl = read_file_to_dict(os.path.join(DATASET_PATH, 'TwitterSample2.txt'))

    # NOTE(review): the original comment said "10% sampling", but this slice
    # keeps the first half of the node list (len(nl) // 2) — confirm the
    # intended sampling rate.
    nbunch = nl[0:len(nl) // 2]  # // already yields an int; the int() cast was redundant
    network_dataset.graph = network_dataset.graph.subgraph(nbunch)

    server_list = [Server(k) for k in range(512)]
    vp_number = 0  # number of virtual primary copies per node

    node_list = list(network_dataset.graph.nodes)
    random.shuffle(node_list)  # randomize the assignment order
    print('Dataset information: TwitterSample2\nNodes Number:',
          network_dataset.graph.order(), '\nEdge Number:',
          network_dataset.graph.size())
    print('Using Random Partitioning Method...\nServer Number:',
          len(server_list), '\nVirtual Primary Copy Number:', vp_number,
          '\nWrite Frequency of Nodes: 1')
    start = time.time()
    m = RandomP(server_list, network_dataset, node_list)
    m.add_new_primary_node(server_list, vp_number)
    m.check_server_load()
    m.check_locality()
    end = time.time()
    print('Random Partitioning Time:', end - start, 'seconds')
    m.compute_inter_sever_cost()  # NOTE: the "sever" typo is the project's API name
    path = RANDOM_GRAPH_PATH
    m.save_all(path)
Example #2
def create_algo(server_count=4, node_count=10):
    """Build an OfflineAlgo over a fresh graph of isolated nodes.

    Creates a 'facebook' Dataset whose graph contains ``node_count``
    disconnected nodes and ``server_count`` servers.
    """
    dataset = Dataset(dataset_str='facebook')
    dataset.graph = nx.Graph()
    dataset.graph.add_nodes_from(range(node_count))
    # NOTE: 'serer_id' (sic) is the keyword the project's Server API expects.
    servers = [Server(serer_id=idx) for idx in range(server_count)]
    return OfflineAlgo(server_list=servers, network_dataset=dataset)
Example #3
 def test_relocate_process(self):
     """Relocation over a small star graph (hub 0, spokes 1-4) runs cleanly."""
     data = Dataset(dataset_str='facebook')
     data.graph = nx.Graph()
     data.graph.add_nodes_from(range(10))
     for spoke in (1, 2, 3, 4):
         data.graph.add_edge(0, spoke)
     server_list = [Server(serer_id=i) for i in range(8)]
     algo = OfflineAlgo(server_list=server_list, network_dataset=data)
     # Seed every graph node as a primary copy before relocating.
     for node in list(data.graph.nodes):
         algo.add_new_primary_node(node_id=node, write_freq=Constant.WRITE_FREQ)
     algo.node_relocation_process()
Example #4
    def test_merge_process(self):
        """Merging a star graph tracks internal/external connection counts."""
        data = Dataset(dataset_str='facebook')
        data.graph = nx.Graph()
        data.graph.add_nodes_from(range(10))
        for spoke in (1, 2, 3, 4):
            data.graph.add_edge(0, spoke)
        server_list = [Server(serer_id=i) for i in range(8)]
        algo = OfflineAlgo(server_list=server_list, network_dataset=data)
        for node in list(data.graph.nodes):
            algo.add_new_primary_node(node_id=node, write_freq=Constant.WRITE_FREQ)
        algo.init_merge_process()

        # Hub (id 0) has four external links, each spoke one, the rest none;
        # nothing is internal before any merge happens.
        for m_node in algo.merged_node_list:
            self.assertEqual(m_node.internal_connection, 0)
            if m_node.id == 0:
                self.assertEqual(m_node.external_connection, 4)
            elif m_node.id in (1, 2, 3, 4):
                self.assertEqual(m_node.external_connection, 1)
            else:
                self.assertEqual(m_node.external_connection, 0)
        # Every graph node must be covered by exactly one merged node.
        covered = []
        for m_node in algo.merged_node_list:
            covered.extend(m_node.node_id_list)
        self.assertEqual(sorted(covered), list(range(10)))
        # Fold all other merged nodes into the first without removing them.
        for idx in range(1, len(algo.merged_node_list)):
            algo.merged_node_list[0]._add_node(algo.merged_node_list[idx], algo=algo, remove_flag=False)
        merged_ids = algo.merged_node_list[0].node_id_list
        merged_ids.sort()
        self.assertEqual(merged_ids, list(range(10)))
        # After absorbing everything, the star's 4 edges are all internal.
        self.assertEqual(algo.merged_node_list[0].external_connection, 0)
        self.assertEqual(algo.merged_node_list[0].internal_connection, 4)
        self.assertEqual(algo.merged_node_list[0].node_count, 10)
Example #5
    def test_virtual_primary_copy_process(self):
        """Swapping virtual primary copies moves each copy to the other's server."""
        data = Dataset(dataset_str='facebook')
        data.graph = nx.Graph()
        data.graph.add_node(0)
        server_list = [Server(serer_id=i) for i in range(2)]
        Constant.LEAST_VIRTUAL_PRIMARY_COPY_NUMBER = 1
        algo = OfflineAlgo(server_list=server_list, network_dataset=data)
        for node in list(data.graph.nodes):
            algo.add_new_primary_node(node_id=node, write_freq=Constant.WRITE_FREQ)
        # Node 0's single virtual copy must sit on the *other* of the 2 servers.
        self.assertEqual(len(algo.node_list), 1)
        first_node = algo.node_list[0]
        self.assertEqual(first_node.id, 0)
        self.assertEqual(first_node.virtual_primary_copy_server_list[0].id,
                         1 - first_node.server.id)
        self.assertEqual(len(first_node.virtual_primary_copy_server_list), 1)
        data.graph.add_edge(0, 1)
        algo.server_list.append(Server(2))
        algo.server_list.append(Server(3))

        algo._add_node_to_server(node_id=1, node_type=Constant.PRIMARY_COPY, write_freq=10.0,
                                 server=algo.server_list[2])

        # Swap node 0's virtual copy with node 1's, picking a server pair
        # that actually differs so the swap is observable.
        second_node = algo.node_list[1]
        for vir_server in first_node.virtual_primary_copy_server_list:
            other_server = second_node.virtual_primary_copy_server_list[0]
            if vir_server.id != other_server.id:
                swapped_from = vir_server
                swapped_to = other_server
                Operation.swap_virtual_primary_copy(s_node=first_node,
                                                    t_node=second_node,
                                                    s_server=vir_server,
                                                    t_server=other_server,
                                                    algo=algo)
                break
        # Each node's virtual copy now lives on the server the other vacated.
        self.assertTrue(swapped_from.has_node(second_node.id, node_type=Constant.VIRTUAL_PRIMARY_COPY))
        self.assertTrue(swapped_to.has_node(first_node.id, node_type=Constant.VIRTUAL_PRIMARY_COPY))
        self.assertTrue(swapped_to in first_node.virtual_primary_copy_server_list)
        self.assertTrue(swapped_from in second_node.virtual_primary_copy_server_list)
Example #6
    def test_merged_node_swap_process(self):
        """After merge + swap, the merged nodes still cover every graph node."""
        data = Dataset(dataset_str='facebook')
        data.graph = nx.Graph()
        data.graph.add_nodes_from(range(10))
        for spoke in (1, 2, 3, 4):
            data.graph.add_edge(0, spoke)
        server_list = [Server(serer_id=i) for i in range(8)]
        algo = OfflineAlgo(server_list=server_list, network_dataset=data)
        for node in list(data.graph.nodes):
            algo.add_new_primary_node(node_id=node, write_freq=Constant.WRITE_FREQ)
        algo.init_merge_process()
        algo.start_merge_process()
        # The partition of node ids must be preserved by the swap process.
        covered = []
        for m_node in algo.merged_node_list:
            covered.extend(m_node.node_id_list)
        self.assertEqual(sorted(covered), list(range(10)))