Example #1
def setup_logging(logging_level):
    """
    Setup the loggers.

    """
    gl(logging_level)  # setup gateway logging
    gl.info("Started Gateway logging with level '{}'".format(logging_level))
    bl(logging_level)  # setup backend logging
    bl.info("Started Backend logging with level '{}'".format(logging_level))
Example #2
def setup_logging(logging_level):
    """
    Setup the loggers.

    """
    cl(logging_level)  # setup core logging
    cl.info("Started Core logging with level '{}'".format(logging_level))
    sl(logging_level)  # setup simulation logging
    sl.info("Started Simulation logging with level '{}'".format(logging_level))
    bl(logging_level)  # setup backend logging
    bl.info("Started Backend logging with level '{}'".format(logging_level))
    def setUp(self):

        cl("debug")
        bl("debug")
        sl("debug")

        # queues for pushing information about new files over the socket
        # send from server
        self.new_file_server_queue = multiprocessing.Queue()
        # receive at client
        self.new_file_client_queue = multiprocessing.Queue()

        #
        # a queue for returning the requested index
        self.queue_datacopy_backend_index_data = multiprocessing.Queue()
        #
        # queue for index data on client side
        self.queue_client_index_data = multiprocessing.Queue()

        # server and client side of index exchange
        # index request events
        self.get_index_server_event = multiprocessing.Event()
        self.get_index_client_event = multiprocessing.Event()
        # index avail events
        self.index_avail_server_event = multiprocessing.Event()
        self.index_avail_client_event = multiprocessing.Event()
        # index pipes
        self.server_index_pipe = multiprocessing.Pipe()
        self.client_index_pipe = multiprocessing.Pipe()
        (self.server_index_pipe_local,
         self.server_index_pipe_remote) = self.server_index_pipe
        (self.client_index_pipe_local,
         self.client_index_pipe_remote) = self.client_index_pipe

        # queues for getting files from the ceph cluster
        # request file at server
        self.file_name_request_server_queue = multiprocessing.Queue()
        # send file contents, name and hash from server
        self.file_contents_name_hash_server_queue = multiprocessing.Queue()
        # request file name at client
        self.file_name_request_client_queue = multiprocessing.Queue()
        # receive file contents, name and hash on client
        self.file_contents_name_hash_client_queue = multiprocessing.Queue()

        self.shutdown_backend_manager_event = multiprocessing.Event()
        self.shutdown_client_event = multiprocessing.Event()

        print()
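The setUp above only creates queues, pipes and events and starts no processes; its tearDown is not shown. A minimal sketch of a matching tearDown, assuming the loggers accept a "quiet" level as in the later examples:

    def tearDown(self):
        # quiet the loggers again, mirroring the "debug" setup above
        cl("quiet")
        bl("quiet")
        sl("quiet")
        # close the queues so their feeder threads can exit cleanly
        for queue in (self.new_file_server_queue,
                      self.new_file_client_queue,
                      self.queue_datacopy_backend_index_data,
                      self.queue_client_index_data):
            queue.close()
            queue.join_thread()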
Example #4
    def setUp(self):
        cl("debug")
        bl("debug")
        sl("debug")

        self.ceph_config = pathlib.Path.home() / ".ccphi/simuser.ceph.conf"
        self.ceph_pool = "simdata"
        self.pool_user = "******"

        self.queue_ceph_tasks = multiprocessing.Queue()
        self.event_shutdown_process = multiprocessing.Event()

        self.queue_index = multiprocessing.Queue()
        self.queue_namespace_index = multiprocessing.Queue()
        self.queue_object_tags = multiprocessing.Queue()
        self.queue_object_data = multiprocessing.Queue()
        self.queue_object_hash = multiprocessing.Queue()

        num_conns = 10
        print()

        self.conns = []

        for _ in range(num_conns):
            conn = multiprocessing.Process(
                target=cc.CephConnection,
                args=(self.ceph_config, self.ceph_pool, self.pool_user,
                      self.queue_ceph_tasks, self.event_shutdown_process,
                      self.queue_index, self.queue_namespace_index,
                      self.queue_object_tags, self.queue_object_data,
                      self.queue_object_hash))
            self.conns.append(conn)

        for conn in self.conns:
            conn.start()

        time.sleep(.1)
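No tearDown is shown for this example. A sketch of one, assuming the CephConnection processes watch event_shutdown_process and exit once it is set (the one-second join timeout is illustrative):

    def tearDown(self):
        # signal the CephConnection processes to stop, then reap them
        self.event_shutdown_process.set()
        for conn in self.conns:
            conn.join(timeout=1)
            if conn.is_alive():
                conn.terminate()
        cl("quiet")
        bl("quiet")
        sl("quiet")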
Example #5
    def tearDown(self):
        sl("quiet")
        bl("quiet")
        cl("quiet")
    def setUp(self):

        sl("debug")
        bl("debug")
        cl("debug")

        ceph_conf = pathlib.Path.home() / ".ccphi/simuser.ceph.conf"
        ceph_pool = "simdata"
        ceph_user = "******"

        host = "localhost"
        backend_port = 9009
        simulation_port = 9010

        # create all necessary queues, pipes and events for inter process
        # communication
        #
        # inter process communication for registering new files
        #
        # a queue for sending information about new files from the simulation to the
        # data copy process
        self.queue_sim_datacopy_new_file = multiprocessing.Queue()
        #
        # a queue for requesting the hash for a new file from the ceph cluster
        queue_datacopy_ceph_request_hash_for_new_file = multiprocessing.Queue()
        #
        # a queue for answering the request for a hash for a new file from the ceph
        # cluster; contains the name and the hash
        queue_datacopy_ceph_answer_hash_for_new_file = multiprocessing.Queue()
        #
        # a queue for sending the name and hash of a new file to the backend manager
        self.queue_datacopy_backend_new_file_and_hash = multiprocessing.Queue()

        # inter process communication for requesting files from the ceph cluster
        #
        # a queue for sending a request for a file to the ceph manager
        queue_backend_ceph_request_file = multiprocessing.Queue()
        #
        # a queue for answering the request for a file with the file name, contents
        # and hash
        queue_backend_ceph_answer_file_name_contents_hash = (
            multiprocessing.Queue())

        # inter process communication for requesting the index for the backend
        # manager from the data copy
        #
        # an event for requesting the index for the backend from the data copy
        event_datacopy_backend_get_index = multiprocessing.Event()
        #
        # a queue for returning the requested index
        queue_datacopy_backend_index_data = multiprocessing.Queue()
        # #
        # # an event for telling the backend that the index from the data copy is
        # # ready for pickup
        # self.event_datacopy_backend_index_ready = multiprocessing.Event()
        # #
        # # a pipe that connects the datacopy mgr and the backend class, for
        # # transferring the requested index
        # (
        #     self.pipe_this_end_datacopy_backend_index,
        #     self.pipe_that_end_datacopy_backend_index
        # ) = multiprocessing.Pipe()

        # inter process communication for requesting the index for the data manager
        # from the ceph cluster
        #
        # an event for requesting the index for the data copy from the ceph cluster
        event_datacopy_ceph_update_index = multiprocessing.Event()
        #
        # a queue for updating the local datacopy with these names and hashes
        queue_datacopy_ceph_filename_and_hash = multiprocessing.Queue()

        # inter process communication for shutting down processes
        #
        # an event for shutting down the backend manager
        self.event_backend_manager_shutdown = multiprocessing.Event()
        #
        # an event for shutting down the ceph manager
        self.event_ceph_shutdown = multiprocessing.Event()

        self.localdata_manager = multiprocessing.Process(
            target=LocalDataManager,
            args=(
                self.queue_sim_datacopy_new_file,
                queue_datacopy_ceph_request_hash_for_new_file,
                queue_datacopy_ceph_answer_hash_for_new_file,
                self.queue_datacopy_backend_new_file_and_hash,
                event_datacopy_backend_get_index,
                queue_datacopy_backend_index_data,
                # event_datacopy_backend_index_ready,
                # pipe_this_end_datacopy_backend_index,
                event_datacopy_ceph_update_index,
                queue_datacopy_ceph_filename_and_hash))
        simulation_manager = multiprocessing.Process(
            target=SimulationManager,
            args=(
                host,
                simulation_port,
                self.queue_sim_datacopy_new_file,
            ))
        backend_manager = multiprocessing.Process(
            target=BackendManager,
            args=(
                host,
                backend_port,
                self.queue_datacopy_backend_new_file_and_hash,
                event_datacopy_backend_get_index,
                queue_datacopy_backend_index_data,
                # event_datacopy_backend_index_ready,
                # pipe_that_end_datacopy_backend_index,
                queue_backend_ceph_request_file,
                queue_backend_ceph_answer_file_name_contents_hash,
                self.event_backend_manager_shutdown))
        self.ceph_manager = multiprocessing.Process(
            target=CephManager,
            args=(ceph_conf, ceph_pool, ceph_user, self.event_ceph_shutdown,
                  queue_datacopy_ceph_request_hash_for_new_file,
                  queue_datacopy_ceph_answer_hash_for_new_file,
                  queue_backend_ceph_request_file,
                  queue_backend_ceph_answer_file_name_contents_hash,
                  event_datacopy_ceph_update_index,
                  queue_datacopy_ceph_filename_and_hash))

        print()
        try:
            self.ceph_manager.start()
            time.sleep(.1)
            self.localdata_manager.start()
            time.sleep(.1)
            # self.client.start()
            # time.sleep(.1)
        except KeyboardInterrupt:
            # self.event_backend_manager_shutdown.set()
            self.event_ceph_shutdown.set()
            # self.shutdown_client_event.set()
            time.sleep(1)
            self.localdata_manager.terminate()
            # self.client.terminate()
            self.ceph_manager.terminate()
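Only ceph_manager and localdata_manager are actually started above; simulation_manager and backend_manager are created but never started, and the tearDown shown at the start of this example only quiets the loggers. A sketch of how the started processes could also be shut down, mirroring the KeyboardInterrupt branch (the one-second grace period is taken from that handler):

    def tearDown(self):
        # quiet the loggers as in the tearDown above
        sl("quiet")
        bl("quiet")
        cl("quiet")
        # ask the ceph manager to shut down, then terminate whatever is
        # still alive, as in the KeyboardInterrupt handler in setUp
        self.event_ceph_shutdown.set()
        time.sleep(1)
        for proc in (self.localdata_manager, self.ceph_manager):
            if proc.is_alive():
                proc.terminate()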
Example #7
    def setUp(self):
        cl("debug")
        bl("debug")
        sl("debug")
    def setUp(self):

        cl("debug")
        bl("debug")
        sl("debug")

        # queues for pushing information about new files over the socket
        # send from server
        self.new_file_server_queue = multiprocessing.Queue()
        # receive at client
        self.new_file_client_queue = multiprocessing.Queue()

        # server and client side of index exchange
        # index request events
        self.get_index_server_event = multiprocessing.Event()
        self.get_index_client_event = multiprocessing.Event()

        #
        # a queue for returning the requested index
        self.queue_datacopy_backend_index_data = multiprocessing.Queue()
        #
        # a queue for index data on the client side
        self.queue_client_index_data = multiprocessing.Queue()

        # #
        # # an event for telling the backend that the index from the data copy is
        # # ready for pickup
        # self.event_datacopy_backend_index_ready = multiprocessing.Event()
        # #
        # # a pipe that connects the datacopy mgr and the backend class, for
        # # transferring the requested index
        # (
        #     self.pipe_this_end_datacopy_backend_index,
        #     self.pipe_that_end_datacopy_backend_index
        # ) = multiprocessing.Pipe()



        # # index avail events
        # # self.index_avail_server_event = multiprocessing.Event()
        # self.index_avail_client_event = multiprocessing.Event()
        # # index pipes
        # # self.server_index_pipe = multiprocessing.Pipe()
        # self.client_index_pipe = multiprocessing.Pipe()
        # # (
        # #     self.server_index_pipe_local,
        # #     self.server_index_pipe_remote
        # # ) = self.server_index_pipe
        # (
        #     self.client_index_pipe_local,
        #     self.client_index_pipe_remote
        # ) = self.client_index_pipe

        # queues for getting files from the ceph cluster
        # request file at server
        self.file_name_request_server_queue = multiprocessing.Queue()
        # send file contents, name and hash from server
        self.file_contents_name_hash_server_queue = multiprocessing.Queue()
        # request file name at client
        self.file_name_request_client_queue = multiprocessing.Queue()
        # receive file contents, name and hash on client
        self.file_contents_name_hash_client_queue = multiprocessing.Queue()

        self.shutdown_backend_manager_event = multiprocessing.Event()
        self.shutdown_client_event = multiprocessing.Event()

        print()
        self.server = multiprocessing.Process(
            target=backend_manager.BackendManager,
            args=(
                "localhost", 9001,
                self.new_file_server_queue,
                self.get_index_server_event,
                self.queue_datacopy_backend_index_data,
                # self.index_avail_server_event,
                # self.server_index_pipe_remote,
                self.file_name_request_server_queue,
                self.file_contents_name_hash_server_queue,
                self.shutdown_backend_manager_event,
            )
        )
        self.server.start()
        time.sleep(.1)
        self.client = multiprocessing.Process(
            target=client.Client,
            args=(
                "localhost", 9001,
                self.new_file_client_queue,
                self.get_index_client_event,
                self.queue_client_index_data,
                # self.index_avail_client_event,
                # self.client_index_pipe_remote,
                self.file_name_request_client_queue,
                self.file_contents_name_hash_client_queue,
                self.shutdown_client_event,
            )
        )
        self.client.start()
        time.sleep(.1)
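No tearDown is shown for this example. A sketch of one that signals both processes through their shutdown events and then reaps them (the join timeout is illustrative):

    def tearDown(self):
        # ask the client and the backend manager to shut down via their events
        self.shutdown_client_event.set()
        self.shutdown_backend_manager_event.set()
        for proc in (self.client, self.server):
            proc.join(timeout=1)
            if proc.is_alive():
                proc.terminate()
        cl("quiet")
        bl("quiet")
        sl("quiet")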