Example #1
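# Wires up CLI argument parsing, logging, a Server on the configured port, and the Control/View layer before listening.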
class MainServer:
    def __init__(self):
        self._args = Args(self)
        logger.init(self._args.get_args())

        self.server = Server(self._args.get_args().port)
        self.control = Control(self.server)

    def start(self):
        self.server.listen_asych()

        View(self.control)
Example #2
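# Loads the 'twitters2' dataset, keeps the first half of the sampled node list, partitions the nodes
# randomly across 512 servers, and reports timing, load, locality and inter-server cost.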
def main():
    network_dataset = Dataset('twitters2')

    nl = read_file_to_dict(os.path.join(DATASET_PATH, 'TwitterSample2.txt'))

    # sample the node list: keep the first half of the nodes
    nbunch = nl[0:len(nl) // 2]
    network_dataset.graph = network_dataset.graph.subgraph(nbunch)

    server_list = [Server(k) for k in range(0, 512)]
    vp_number = 0

    node_list = list(network_dataset.graph.nodes)
    random.shuffle(node_list)
    print('Dataset information: TwitterSample2\nNodes Number:',
          network_dataset.graph.order(), '\nEdge Number:',
          network_dataset.graph.size())
    print('Using Random Partitioning Method...\nServer Number:',
          len(server_list), '\nVirtual Primary Copy Number:', vp_number,
          '\nWrite Frequency of Nodes: 1')
    start = time.time()
    m = RandomP(server_list, network_dataset, node_list)
    m.add_new_primary_node(server_list, vp_number)
    m.check_server_load()
    m.check_locality()
    end = time.time()
    print('Random Partitioning Time:', end - start, 'seconds')
    m.compute_inter_sever_cost()
    path = RANDOM_GRAPH_PATH
    m.save_all(path)
Example #3
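# Test helper: builds an OfflineAlgo over a small synthetic graph with the given number of servers and isolated nodes.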
def create_algo(server_count=4, node_count=10):
    data = Dataset(dataset_str='facebook')
    data.graph = nx.Graph()
    for i in range(node_count):
        data.graph.add_node(i)
    server_list = [Server(serer_id=i) for i in range(server_count)]
    algo = OfflineAlgo(server_list=server_list, network_dataset=data)
    return algo
Example #4
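    # Verifies that a single primary node gets exactly one virtual primary copy on the other server,
    # and that Operation.swap_virtual_primary_copy moves the copies between the two nodes' servers.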
    def test_virtual_primary_copy_process(self):
        data = Dataset(dataset_str='facebook')
        data.graph = nx.Graph()
        data.graph.add_node(0)
        server_list = [Server(serer_id=i) for i in range(2)]
        Constant.LEAST_VIRTUAL_PRIMARY_COPY_NUMBER = 1
        algo = OfflineAlgo(server_list=server_list, network_dataset=data)
        node_list = list(data.graph.nodes)
        node_len = len(node_list)
        for i in range(node_len):
            n = node_list[i]
            algo.add_new_primary_node(node_id=n, write_freq=Constant.WRITE_FREQ)
        self.assertEqual(len(algo.node_list), 1)
        self.assertEqual(algo.node_list[0].id, 0)
        self.assertEqual(algo.node_list[0].virtual_primary_copy_server_list[0].id, 1 - algo.node_list[0].server.id)
        self.assertEqual(len(algo.node_list[0].virtual_primary_copy_server_list), 1)
        data.graph.add_edge(0, 1)
        algo.server_list.append(Server(2))
        algo.server_list.append(Server(3))

        algo._add_node_to_server(node_id=1, node_type=Constant.PRIMARY_COPY, write_freq=10.0,
                                 server=algo.server_list[2])

        # algo.virtual_primary_copy_swap()
        for vir_server in algo.node_list[0].virtual_primary_copy_server_list:
            if vir_server.id != algo.node_list[1].virtual_primary_copy_server_list[0].id:
                tmp_server_1_id = vir_server
                tmp_server_2_id = algo.node_list[1].virtual_primary_copy_server_list[0]

                Operation.swap_virtual_primary_copy(s_node=algo.node_list[0],
                                                    t_node=algo.node_list[1],
                                                    s_server=vir_server,
                                                    t_server=algo.node_list[1].virtual_primary_copy_server_list[0],
                                                    algo=algo)
                break
        self.assertTrue(tmp_server_1_id.has_node(algo.node_list[1].id, node_type=Constant.VIRTUAL_PRIMARY_COPY))
        self.assertTrue(tmp_server_2_id.has_node(algo.node_list[0].id, node_type=Constant.VIRTUAL_PRIMARY_COPY))
        self.assertTrue(tmp_server_2_id in algo.node_list[0].virtual_primary_copy_server_list)
        self.assertTrue(tmp_server_1_id in algo.node_list[1].virtual_primary_copy_server_list)
Example #5
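    # Exercises node_relocation_process on a small star graph (node 0 linked to nodes 1-4) spread over 8 servers.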
    def test_relocate_process(self):
        data = Dataset(dataset_str='facebook')
        data.graph = nx.Graph()
        for i in range(10):
            data.graph.add_node(i)
        data.graph.add_edge(0, 1)
        data.graph.add_edge(0, 2)
        data.graph.add_edge(0, 3)
        data.graph.add_edge(0, 4)
        server_list = [Server(serer_id=i) for i in range(8)]
        algo = OfflineAlgo(server_list=server_list, network_dataset=data)
        node_list = list(data.graph.nodes)
        node_len = len(node_list)
        for i in range(node_len):
            n = node_list[i]
            algo.add_new_primary_node(node_id=n, write_freq=Constant.WRITE_FREQ)
        algo.node_relocation_process()
Example #6
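    # Checks the connection counts produced by init_merge_process, then folds every merged node into
    # the first one, which should end up with 4 internal connections and no external ones.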
    def test_merge_process(self):
        data = Dataset(dataset_str='facebook')
        data.graph = nx.Graph()
        for i in range(10):
            data.graph.add_node(i)
        data.graph.add_edge(0, 1)
        data.graph.add_edge(0, 2)
        data.graph.add_edge(0, 3)
        data.graph.add_edge(0, 4)
        server_list = [Server(serer_id=i) for i in range(8)]
        algo = OfflineAlgo(server_list=server_list, network_dataset=data)
        node_list = list(data.graph.nodes)
        node_len = len(node_list)
        for i in range(node_len):
            n = node_list[i]
            algo.add_new_primary_node(node_id=n, write_freq=Constant.WRITE_FREQ)
        algo.init_merge_process()

        for i in range(0, len(algo.merged_node_list)):
            m_node = algo.merged_node_list[i]
            if m_node.id == 0:
                self.assertEqual(m_node.internal_connection, 0)
                self.assertEqual(m_node.external_connection, 4)
            elif m_node.id in [1, 2, 3, 4]:
                self.assertEqual(m_node.internal_connection, 0)
                self.assertEqual(m_node.external_connection, 1)
            else:
                self.assertEqual(m_node.internal_connection, 0)
                self.assertEqual(m_node.external_connection, 0)
        node_count_list = []
        for m_node in algo.merged_node_list:
            node_count_list += m_node.node_id_list
        node_count_list.sort()
        self.assertEqual(node_count_list, [i for i in range(10)])
        for i in range(1, len(algo.merged_node_list)):
            algo.merged_node_list[0]._add_node(algo.merged_node_list[i], algo=algo, remove_flag=False)
        node_count_list = algo.merged_node_list[0].node_id_list
        node_count_list.sort()
        self.assertEqual(node_count_list, [i for i in range(10)])
        self.assertEqual(algo.merged_node_list[0].external_connection, 0)
        self.assertEqual(algo.merged_node_list[0].internal_connection, 4)
        self.assertEqual(algo.merged_node_list[0].node_count, 10)
Example #7
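# A 10x10 board of SUB and SEA cells; the server presumably accepts a client in begin() before the game is played on port 5555.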
def main():
    table = [[SUB, SUB, SUB, SEA, SEA, SEA, SEA, SEA, SEA, SEA],
             [SEA, SEA, SEA, SEA, SEA, SEA, SEA, SEA, SEA, SEA],
             [SUB, SEA, SEA, SUB, SEA, SUB, SUB, SUB, SUB, SEA],
             [SUB, SEA, SEA, SUB, SEA, SEA, SEA, SEA, SEA, SEA],
             [SUB, SEA, SEA, SUB, SEA, SEA, SEA, SEA, SEA, SEA],
             [SUB, SEA, SEA, SEA, SEA, SEA, SEA, SEA, SEA, SEA],
             [SUB, SEA, SEA, SEA, SEA, SEA, SEA, SEA, SEA, SEA],
             [SEA, SEA, SEA, SUB, SUB, SUB, SUB, SEA, SEA, SUB],
             [SEA, SEA, SEA, SEA, SEA, SEA, SEA, SEA, SEA, SUB],
             [SEA, SEA, SEA, SEA, SEA, SEA, SEA, SEA, SEA, SUB]]

    server = Server(5555)
    server.begin()
    Game(server, table, True).play()
    server.end()
Example #8
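    # After init_merge_process and start_merge_process, every original node should still appear in exactly one merged node.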
    def test_merged_node_swap_process(self):

        data = Dataset(dataset_str='facebook')
        data.graph = nx.Graph()
        for i in range(10):
            data.graph.add_node(i)
        data.graph.add_edge(0, 1)
        data.graph.add_edge(0, 2)
        data.graph.add_edge(0, 3)
        data.graph.add_edge(0, 4)
        server_list = [Server(serer_id=i) for i in range(8)]
        algo = OfflineAlgo(server_list=server_list, network_dataset=data)
        node_list = list(data.graph.nodes)
        node_len = len(node_list)
        for i in range(node_len):
            n = node_list[i]
            algo.add_new_primary_node(node_id=n, write_freq=Constant.WRITE_FREQ)
        algo.init_merge_process()
        algo.start_merge_process()
        node_count_list = []
        for m_node in algo.merged_node_list:
            node_count_list += m_node.node_id_list
        node_count_list.sort()
        self.assertEqual(node_count_list, [i for i in range(10)])
Example #9
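# Command-line entry point: loads the OSAD server configuration and either daemonizes the server or
# runs it in the foreground, stopping it on exit.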
import argparse

from src.server.config import ServerConfig
from src.server.server import Server
from src.daemonize import daemonize

DEFAULT_CONFIG_PATH = '/etc/rhn/osad/osad_server.cfg'

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('-c',
                        '--config',
                        default=DEFAULT_CONFIG_PATH,
                        dest='config_file',
                        help='configuration file')
    parser.add_argument('-d',
                        '--daemon',
                        dest='daemon',
                        action='store_true',
                        help='run as daemon',
                        default=False)
    args = parser.parse_args()

    config = ServerConfig(args.config_file)
    server = Server(config)

    if args.daemon:
        daemonize(server)
    else:
        try:
            server.start()
        finally:
            server.stop()
Example #10
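# Starts a peer on port 5555, plays a single game, and then calls end() to shut the peer down.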
def main():
    peer = Server(5555)
    peer.begin()
    Game().play(True)
    peer.end()
def server():
    return Server()
Example #12
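# Bootstraps the bot-backed server: a peewee PostgreSQL connection, a StorageManager, a Setuper,
# and a randomly generated 10-character secret key.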
import peewee
import string
import random
from src.server.bot import bot
from src.server.models import StorageManager
from src.server.server import Server
from src.common.settings import APP_NAME, PG_DATABASE, PG_HOST, PG_USER, PG_PASSWORD, PG_PORT
from src.server.setuper import Setuper

if __name__ == '__main__':
    psql_db = peewee.PostgresqlDatabase(database=PG_DATABASE,
                                        user=PG_USER,
                                        password=PG_PASSWORD,
                                        host=PG_HOST,
                                        port=PG_PORT,
                                        isolation_level=0)
    storage = StorageManager(psql_db)
    setuper = Setuper()
    server = Server(APP_NAME, bot, setuper)
    bot.set_server(server)
    bot.set_storage(storage)
    bot.set_setuper(setuper)
    server.secret_key = ''.join(
        random.choice(string.ascii_uppercase + string.digits)
        for _ in range(10))
    server.run_server()
Example #13
from src.server.server import Server

#creates a server instance and runs it
if __name__ == '__main__':
    server = Server()
    server.run()
Example #14
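# Creates the static image directories (if missing) and then runs the server.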
import os


def main():
    from src.server.server import Server
    os.makedirs("src/static/images", exist_ok=True)
    os.makedirs("src/static/images/tmp", exist_ok=True)
    Server().run()
for i, id in enumerate(ids):
    if i <= 3:
        cl = "python start_one_node.py " + str(id) + ' 0.3 2'
        #https://docs.python.org/3/library/subprocess.html
        args = shlex.split(cl)
        process = subprocess.Popen(args)
        server_list.append(process)
    else:
        time.sleep(1)
        test_follower_server = Server(str(id), [
            {
                "action": None,
                "term": 0,
            },
            {
                "action": None,
                "term": 1,
            },
            {
                "action": None,
                "term": 1,
            },
        ], Follower(None), [str(_) for _ in ids[:i] + ids[i + 1:]])

while len(test_follower_server.log) > 1:
    print(test_follower_server.log)
    time.sleep(0.1)

print("wrong extra log was removed" + str(test_follower_server.log))

while True:
    time.sleep(10)
import project
from src.server.server import Server

if __name__ == "__main__":
    PORT = 1234
    server = Server()
    server.serve(PORT)
Example #17
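# Spins up five Raft-style Server nodes in background threads; the first node is given an extra
# argument (0.15), which appears to be a shorter timeout than the default.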
from src.state.follower import Follower
from src.state.candidate import Candidate

server_list = []
ids = [i for i in range(1, 6)]

threads = []
server_list = []

for i, id in enumerate(ids):
    if i == 0:
        t = threading.Thread(
            target=server_list.append,
            args=(Server(str(id), [{
                "action": None,
                "term": 0,
            }], Follower(None), [str(_) for _ in ids[:i] + ids[i + 1:]],
                         0.15), ))
        t.start()
    else:
        t = threading.Thread(
            target=server_list.append,
            args=(Server(str(id), [
                {
                    "action": None,
                    "term": 0,
                },
                {
                    "action": {
                        'key': 1,
                        'value': 1
Example #18
from src.server.server import Server

if __name__ == '__main__':
    Server.start()
Example #19
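            # Plays the selected card when it matches the top of the discard pile by color or value,
            # broadcasts the move, and ends the turn; the code below registers the UNO server's
            # callbacks and the "!" chat commands that trigger them.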
            if my_card.color == game.discard_pile.get_top_card(
            ).color or my_card.value == game.discard_pile.get_top_card().value:
                # print(f"CARD PLAYED - Card({my_card.color}:{my_card.value})")
                removed_card = game.current_player.remove_from_hand(my_card)
                game.discard_pile.add_to_top(removed_card)
                server.send_message(
                    server_data.Data(
                        content_type=SERV_BROADCAST,
                        content_data=
                        f'{game.current_player.name} played ({my_card.color}:{my_card.value})'
                    ))
                game.end_turn(server)


game_started = False
server = Server(name='UnoServer')
server.callbacks.register_callback('on_client_connect', on_connect)
server.callbacks.register_callback('on_client_disconnect', on_disconnect)
server.callbacks.register_callback('on_server_start', on_start)
server.callbacks.register_callback('on_client_message', on_message)
server.callbacks.register_callback('on_draw_card', on_draw_card)
server.callbacks.register_callback('on_get_clients', on_get_clients)
server.callbacks.register_callback('on_game_start', on_game_start)
server.callbacks.register_callback('on_game_stop', on_game_stop)
server.callbacks.register_callback('on_view_cards', on_view_cards)
server.callbacks.register_callback('on_play_card', on_player_play_card)
server.commands.register_command('!say', 'on_client_message')
server.commands.register_command('!draw', 'on_draw_card')
server.commands.register_command('!cards', 'on_view_cards')
server.commands.register_command('!clients', 'on_get_clients')
server.commands.register_command('!start', 'on_game_start')
Example #20
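# Rebuilds a prepared test log and starts the node whose id matches sys.argv[1] as a Follower;
# the loop further below starts one node in-process and launches the others via subprocess.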
log_status = int(sys.argv[3])
complete_test_log = [
    {
        "action": None,
        "term": 0,
    },
    {
        "action": {'key': 1, 'value': 1},
        "term": 0,
    },
    {
        "action": {'key': 1, 'value': 1},
        "term": 0,
    },
    {
        "action": {'key': 1, 'value': 1},
        "term": 0,
    },
]

ids = [i for i in range(1, 6)]

for i, id in enumerate(ids):
    if str(id) == sys.argv[1]:
        Server(sys.argv[1], complete_test_log[:log_status + 1],
               Follower(None),
               [str(_) for _ in ids[:i] + ids[i + 1:]])

server_list = []
ids = [i for i in range(1, 6)]

threads = []
server_list = []

for i, id in enumerate(ids):
    if i == 0:
        test_leader_server = Server(str(id), [{
            "action": None,
            "term": 0,
        }, {
            "action": {
                'key': 1,
                'value': 1
            },
            "term": 0,
        }, {
            "action": {
                'key': 2,
                'value': 2
            },
            "term": 0,
        }], Follower(None), [str(_) for _ in ids[:i] + ids[i + 1:]])
    else:
        cl = "python start_one_node.py " + str(id) + ' 0.3 0'
        #https://docs.python.org/3/library/subprocess.html
        args = shlex.split(cl)
        process = subprocess.Popen(args)
        server_list.append(process)

time.sleep(0.3)
Example #22
def app(tg_bot):
    return Server(APP_NAME, tg_bot, DEBUG)
Example #23
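# Full offline partitioning pipeline: add primary nodes, relocate, merge, group-swap, swap virtual
# primary copies, remove redundant replicas, and validate/save the result, logging the inter-server
# cost after each stage.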
def main(dataset='amazon', part_flag=0.01, log_path_end='', tmp_log_flag=False):
    log_path = LOG_PATH
    log_path = os.path.join(log_path, '%s_%s_%s_%s_%s' % (
        time.strftime("%Y-%m-%d_%H-%M-%S"), 'offline', dataset, str(part_flag), log_path_end))
    if not os.path.exists(log_path):
        os.mkdir(log_path)
    logging.basicConfig(level=logging.DEBUG,
                        filename=log_path + '/log',
                        filemode='w')
    Constant().log_out()
    data = Dataset(dataset_str=dataset, part_flag=part_flag)

    server_list = [Server(serer_id=i) for i in range(Constant.SERVER_NUMBER)]
    algo = OfflineAlgo(server_list=server_list, network_dataset=data)
    node_list = list(data.graph.nodes)
    node_len = len(node_list)
    for i in range(node_len):
        n = node_list[i]
        log_str = "(%d/%d) Adding node: %d" % (i, node_len, n)
        logging.info(log_str)
        print(log_str)
        algo.add_new_primary_node(node_id=n, write_freq=Constant.WRITE_FREQ)
    print_graph(server_list)
    op.validate_result(dataset_g=algo.network_dataset.graph,
                       server_g_list=[x.graph for x in algo.server_list],
                       load_differ=Constant.MAX_LOAD_DIFFERENCE_AMONG_SERVER,
                       virtual_copy_number=Constant.LEAST_VIRTUAL_PRIMARY_COPY_NUMBER)
    log_str = 'Inter Server cost is %f' % algo.compute_inter_server_cost()
    print(log_str)
    logging.info(log_str)

    print("Running relocation process-------------")
    logging.info("Running relocation process-------------")
    algo.node_relocation_process()
    log_str = 'Inter Server cost is %f' % algo.compute_inter_server_cost()
    print(log_str)
    logging.info(log_str)
    print_graph(server_list)
    op.validate_result(dataset_g=algo.network_dataset.graph,
                       server_g_list=[x.graph for x in algo.server_list],
                       load_differ=Constant.MAX_LOAD_DIFFERENCE_AMONG_SERVER,
                       virtual_copy_number=Constant.LEAST_VIRTUAL_PRIMARY_COPY_NUMBER)
    print("Init merge process-------------")
    logging.info("Init merge process-------------")
    algo.init_merge_process()
    print_graph(server_list)
    op.validate_result(dataset_g=algo.network_dataset.graph,
                       server_g_list=[x.graph for x in algo.server_list],
                       load_differ=Constant.MAX_LOAD_DIFFERENCE_AMONG_SERVER,
                       virtual_copy_number=Constant.LEAST_VIRTUAL_PRIMARY_COPY_NUMBER)
    print("Start merge process-------------")
    logging.info("Start merge process-------------")
    algo.start_merge_process()
    print_graph(server_list)
    log_str = 'Inter Server cost is %f' % algo.compute_inter_server_cost()
    print(log_str)
    logging.info(log_str)
    op.validate_result(dataset_g=algo.network_dataset.graph,
                       server_g_list=[x.graph for x in algo.server_list],
                       load_differ=Constant.MAX_LOAD_DIFFERENCE_AMONG_SERVER,
                       virtual_copy_number=Constant.LEAST_VIRTUAL_PRIMARY_COPY_NUMBER)
    print("Init Group Swap process-------------")
    logging.info("Init Group Swap process-------------")
    algo.init_group_swap_process(algo)
    print_graph(server_list)
    op.validate_result(dataset_g=algo.network_dataset.graph,
                       server_g_list=[x.graph for x in algo.server_list],
                       load_differ=Constant.MAX_LOAD_DIFFERENCE_AMONG_SERVER,
                       virtual_copy_number=Constant.LEAST_VIRTUAL_PRIMARY_COPY_NUMBER)
    print("Virtual Primary Copy Swap process-------------")
    logging.info("Virtual Swap Copy Swap process-------------")
    algo.virtual_primary_copy_swap()
    print_graph(server_list)
    log_str = 'Inter Server cost is %f' % algo.compute_inter_server_cost()
    print(log_str)
    logging.info(log_str)
    op.validate_result(dataset_g=algo.network_dataset.graph,
                       server_g_list=[x.graph for x in algo.server_list],
                       load_differ=Constant.MAX_LOAD_DIFFERENCE_AMONG_SERVER,
                       virtual_copy_number=Constant.LEAST_VIRTUAL_PRIMARY_COPY_NUMBER)

    print("Remove redundant replica-------------")
    logging.info("Remove redundant replica-------------")
    algo.remove_unnecessary_replica()
    log_str = 'Inter Server cost is %f' % algo.compute_inter_server_cost()
    print(log_str)
    logging.info(log_str)

    algo.save_all(path=log_path)
    g, server = op.load_log(log_path)
    op.validate_result(dataset_g=g,
                       server_g_list=server,
                       load_differ=Constant.MAX_LOAD_DIFFERENCE_AMONG_SERVER,
                       virtual_copy_number=Constant.LEAST_VIRTUAL_PRIMARY_COPY_NUMBER)
    log_str = 'Inter Server cost is %f' % algo.compute_inter_server_cost()
    print(log_str)
    logging.info(log_str)
    print(log_path)
    for handler in logging.root.handlers[:]:
        logging.root.removeHandler(handler)
Example #24
    def test_remove_util(self):
        server_list = [Server(serer_id=i) for i in range(10)]
        server_list.remove(server_list[1])
        self.assertEqual(len(server_list), 9)
Example #25
    def __init__(self):
        self._args = Args(self)
        logger.init(self._args.get_args())

        self.server = Server(self._args.get_args().port)
        self.control = Control(self.server)
Example #26
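# Runs the gevent monkey-patched server (the banner reports port 31337) until interrupted with Ctrl+C.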
from src.server.server import Server

if __name__ == "__main__":
    from gevent import monkey; monkey.patch_all()
    try:
        print("[+] Launching server")
        print("[+] Listening on port 31337")
        Server().run()

    except KeyboardInterrupt:
        print("\n[+] User terminated server")
Example #27
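# Builds a test client around the Server instance, in the style of Flask's test_client().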
def create_test_client(bot):
    server = Server(APP_NAME, bot, DEBUG)
    return server.test_client()
from src.state.follower import Follower
from src.state.candidate import Candidate

server_list = []
ids = [i for i in range(1, 6)]

threads = []
server_list = []

for i, id in enumerate(ids):
    if i == 0:
        t = threading.Thread(
            target=server_list.append,
            args=(Server(str(id), [{
                "action": None,
                "term": 0,
            }], Follower(None), [str(_) for _ in ids[:i] + ids[i + 1:]],
                         0.15), ))
        t.start()
    else:
        t = threading.Thread(
            target=server_list.append,
            args=(Server(str(id), [{
                "action": None,
                "term": 0,
            }], Follower(None), [str(_) for _ in ids[:i] + ids[i + 1:]]), ))
        t.start()
    threads.append(t)

for thread in threads:
    thread.join()
Example #29
import argparse

from src.server.config import ServerConfig
from src.server.server import Server
from src.daemonize import daemonize

DEFAULT_CONFIG_PATH = '/etc/rhn/osad/osad_server.cfg'

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config',
                        default=DEFAULT_CONFIG_PATH,
                        dest='config_file',
                        help='configuration file')
    parser.add_argument('-d', '--daemon',
                        dest='daemon',
                        action='store_true',
                        help='run as daemon',
                        default=False)
    args = parser.parse_args()

    config = ServerConfig(args.config_file)
    server = Server(config)

    if args.daemon:
        daemonize(server)
    else:
        try:
            server.start()
        finally:
            server.stop()
Example #30
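    # Helper (likely a test fixture) that returns a fresh Server instance.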
    def server(self):
        return Server()
import threading
import time
import sys
sys.path.append("..")
from src.server.server import Server
from src.state.follower import Follower
from src.state.candidate import Candidate

server_list = []
ids = [i for i in range(1, 6)]

threads = []

for i, id in enumerate(ids):
    if i <= 2:
        t = threading.Thread(
            target=server_list.append,
            args=(Server(str(id), [{
                "action": None,
                "term": 0,
            }], Follower(None), [str(_) for _ in ids[:i] + ids[i + 1:]], 0.15), ))
        t.start()
    else:
        t = threading.Thread(
            target=server_list.append,
            args=(Server(str(id), [{
                "action": None,
                "term": 0,
            }], Follower(None), [str(_) for _ in ids[:i] + ids[i + 1:]]), ))
        t.start()
    threads.append(t)

for thread in threads:
    thread.join()