def test_single_file(self):
    """Simulate single file creation.

    Simulate incoming files, one created after the other, and check
    if received events are correct.
    """
    create_dir(self.target_file_base)
    self._start_eventdetector()

    for index in range(self.start, self.stop):
        # trigger exactly one event by copying a file into the
        # monitored directory
        filename = "{}.cbf".format(index)
        target_file = os.path.join(self.target_file_base,
                                   "{}".format(filename))
        self.log.debug("copy %s", target_file)
        copyfile(self.source_file, target_file)

        # give the event detector time to pick the file up
        time.sleep(self.time_all_events_detected)

        # fetch everything detected so far
        event_list = self.eventdetector.get_new_event()

        expected_event = {
            u'filename': filename,
            u'source_path': self.target_base_path,
            u'relative_path': self.target_relative_path
        }

        # the generated event must be the one and only detected event;
        # on failure, dump what was actually seen before re-raising
        try:
            self.assertEqual(len(event_list), 1)
            self.assertDictEqual(event_list[0], expected_event)
        except AssertionError:
            self.log.debug("event_list %s", event_list)
            raise
def setUp(self):
    """Prepare the configuration for the zmq_events event detector."""
    super().setUp()

    # attributes inherited from parent class:
    # self.config
    # self.con_ip
    # self.ext_ip

    ipc_dir = self.config["ipc_dir"]
    create_dir(directory=ipc_dir, chmod=0o777)

    self.context = zmq.Context()

    self.module_name = "zmq_events"
    self.config_module = {
        "context": self.context,
        "number_of_streams": 1,
        "eventdetector_port": 50003
    }

    # needed for later reuse
    self.conf_structure = {
        "network": {
            "context": self.context,
            "ipc_dir": ipc_dir,
            "main_pid": self.config["main_pid"],
            "ext_ip": self.ext_ip,
            "con_ip": self.con_ip,
        },
        "eventdetector": {
            "type": self.module_name,
            self.module_name: None
        }
    }

    # fill the module slot on a deep copy so conf_structure stays pristine
    self.ed_config = copy.deepcopy(self.conf_structure)
    self.ed_config["eventdetector"][self.module_name] = self.config_module

    self.start = 100
    self.stop = 101

    base_dir_part = os.path.join(self.base_dir, "data", "source")
    relative_part = os.path.join("local", "raw")
    self.target_dir = os.path.join(base_dir_part, relative_part)

    self.ipc_addresses = zmq_events.get_ipc_addresses(self.ed_config)
    self.tcp_addresses = zmq_events.get_tcp_addresses(self.ed_config)
    self.endpoints = zmq_events.get_endpoints(
        ipc_addresses=self.ipc_addresses,
        tcp_addresses=self.tcp_addresses)

    # created lazily by the individual tests
    self.eventdetector = None
    self.event_socket = None
def test_memory_usage(self):
    """Testing the memory usage of the event detector.

    This should not be tested automatically but only if really needed.
    """
    import resource
    import gc
    # don't care about stuff that would be garbage collected properly
    gc.collect()

    # from guppy import hpy

    # self._init_logging(loglevel="info")

    create_dir(self.target_file_base)
    self._start_eventdetector()

    self.start = 100
    self.stop = 30000
    steps = 10

    memory_usage_old = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
    self.log.debug("Memory usage at start: %s (kb)", memory_usage_old)

    # hp = hpy()
    # hp.setrelheap()

    step_loop = (self.stop - self.start) / steps
    self.log.debug("Used steps: %s", steps)

    for step in range(steps):
        start = int(self.start + step * step_loop)
        stop = int(start + step_loop)
        # print ("start=", start, "stop=", stop)
        for i in range(start, stop):
            target_file = os.path.join(self.target_file_base,
                                       "{}.cbf".format(i))
            copyfile(self.source_file, target_file)

            if i % 100 == 0:
                self.log.debug("copy index %s", i)

            # drain detected events so they do not pile up and distort
            # the measurement
            self.eventdetector.get_new_event()

        # time.sleep(0.5)
        memory_usage_new = (resource.getrusage(
            resource.RUSAGE_SELF).ru_maxrss)
        # BUG FIX: the original used the invalid format specifier "%$",
        # so this log line could never be rendered correctly
        self.log.debug("Memory usage in iteration %s: %s (kb)",
                       step, memory_usage_new)
        if memory_usage_new > memory_usage_old:
            memory_usage_old = memory_usage_new
def test_multiple_files(self):
    """Simulate multiple file creation.

    Simulate incoming files, all created at once, and check if received
    events are correct.
    """
    create_dir(self.target_file_base)
    self._start_eventdetector()

    expected_result = []
    # generate multiple events
    for i in range(self.start, self.stop):
        filename = "{}.cbf".format(i)
        target_file = os.path.join(self.target_file_base,
                                   "{}".format(filename))
        self.log.debug("copy %s", target_file)
        copyfile(self.source_file, target_file)

        expected_result_dict = {
            u'filename': filename,
            u'source_path': self.target_base_path,
            u'relative_path': self.target_relative_path
        }
        expected_result.append(expected_result_dict)

    # wait until the event detector considers all copied files as closed
    time.sleep(self.config_module["action_time"]
               + self.config_module["time_till_closed"])

    # get all detected events
    event_list = self.eventdetector.get_new_event()

    # check that the generated events (and only these) were detected
    try:
        self.assertEqual(len(event_list), self.stop - self.start)
        for res_dict in expected_result:
            self.assertIn(res_dict, event_list)
    except AssertionError:
        # FIX: the diagnostic log was commented out, leaving an except
        # clause that only re-raised; log the received events before
        # re-raising, consistent with test_single_file
        self.log.debug("event_list %s", event_list)
        raise
def setUp(self):
    """Prepare the configuration for the datadispatcher tests."""
    super().setUp()

    # attributes inherited from parent class:
    # self.config
    # self.con_ip
    # self.ext_ip
    # self.base_dir

    self.context = zmq.Context()

    ipc_dir = self.config["ipc_dir"]
    create_dir(directory=ipc_dir, chmod=0o777)

    self.local_target = os.path.join(self.base_dir, "data", "target")
    # 1024 * 1024 * 10 = 10 MiB
    self.chunksize = 10485760

    # module-specific settings of the file fetcher
    fetcher_module_params = {
        "fix_subdirs": ["commissioning", "current", "local"],
        "store_data": False,
        "remove_data": False,
    }

    datafetcher_params = {
        "type": "file_fetcher",
        "local_target": self.local_target,
        "store_data": False,
        "remove_data": False,
        "use_cleaner": False,
        "chunksize": self.chunksize,
        "file_fetcher": fetcher_module_params
    }

    self.datadispatcher_config = {
        "datafetcher": datafetcher_params,
        "network": {
            "main_pid": self.config["main_pid"],
            "endpoints": self.config["endpoints"],
        },
        "general": {}
    }

    self.receiving_ports = ["6005", "6006"]
def setUp(self):
    """Create the base keyword set shared by the data fetcher tests."""
    super().setUp()

    # attributes inherited from parent class:
    # self.config
    # self.con_ip
    # self.ext_ip

    ipc_dir = self.config["ipc_dir"]
    create_dir(directory=ipc_dir, chmod=0o777)

    self.context = zmq.Context()
    self.lock = threading.Lock()

    # keyword arguments handed to every data fetcher under test;
    # "config" is filled in by the individual test cases
    self.df_base_config = dict(
        config=None,
        log_queue=self.log_queue,
        fetcher_id="0",
        context=self.context,
        lock=self.lock,
        stop_request=mock.MagicMock(),
        check_dep=True,
    )
def test_taskprovider(self):
    """Simulate incoming data and check if received events are correct.
    """

    stop_request = Event()

    endpoints = self.config["endpoints"]

    provider_args = dict(config=self.taskprovider_config,
                         endpoints=endpoints,
                         log_queue=self.log_queue,
                         log_level="debug",
                         stop_request=stop_request)
    provider = Process(target=run_taskprovider, kwargs=provider_args)
    provider.start()

    responder = RequestResponder(self.config, self.log_queue)
    responder.start()

    router = self.start_socket(name="router_socket",
                               sock_type=zmq.PULL,
                               sock_con="connect",
                               endpoint=endpoints.router_con)

    # it is the sub endpoint because originally this is handled with
    # a zmq thread device
    control = self.start_socket(name="control_socket",
                                sock_type=zmq.PUB,
                                sock_con="bind",
                                endpoint=endpoints.control_sub_bind)

    src_file = os.path.join(self.base_dir, "test", "test_files",
                            "test_file.cbf")
    target_dir = os.path.join(self.base_dir, "data", "source",
                              "local", "raw")
    create_dir(target_dir)

    # give it time to start up
    time.sleep(0.5)

    try:
        for index in range(self.start, self.stop):
            target_file = os.path.join(target_dir,
                                       "{}.cbf".format(index))
            self.log.debug("copy to %s", target_file)
            copyfile(src_file, target_file)

            workload = router.recv_multipart()
            self.log.info("next workload %s", workload)
    except KeyboardInterrupt:
        pass
    finally:
        # shut everything down regardless of how the loop ended
        self.log.info("send exit signal")
        control.send_multipart([b"control", b"EXIT"])

        responder.stop()
        self.stop_socket(name="router_socket", socket=router)
        self.stop_socket(name="control_socket", socket=control)

        # remove the files generated during the test run
        for index in range(self.start, self.stop):
            target_file = os.path.join(target_dir,
                                       "{}.cbf".format(index))
            self.log.debug("remove %s", target_file)
            os.remove(target_file)
def setUp(self):
    """Build the taskprovider configuration used by the tests.

    The inotify and inotifyx event detectors use identical settings, so
    the parameter set is defined once and deep-copied instead of being
    duplicated literally (the original repeated the whole dict).
    """
    # local import so this block stays self-contained
    import copy

    super().setUp()

    # see https://docs.python.org/2/library/multiprocessing.html#windows
    freeze_support()

    # attributes inherited from parent class:
    # self.config
    # self.con_ip
    # self.ext_ip
    # self.base_dir

    self.context = zmq.Context()

    ipc_dir = self.config["ipc_dir"]
    create_dir(directory=ipc_dir, chmod=0o777)

    monitored_dir = os.path.join(self.base_dir, "data", "source")

    # inotifyx is only available for Python 2
    if sys.version_info[0] < 3:
        used_eventdetector = "inotifyx_events"
    else:
        used_eventdetector = "inotify_events"

    # settings shared by both event detector implementations
    ed_params = {
        "monitored_dir": monitored_dir,
        "fix_subdirs": ["commissioning", "current", "local"],
        "monitored_events": {
            "IN_CLOSE_WRITE": [".tif", ".cbf"],
            "IN_MOVED_TO": [".log"]
        },
        "event_timeout": 0.1,
        "history_size": 0,
        "use_cleanup": False,
        "time_till_closed": 5,
        "action_time": 120
    }

    self.taskprovider_config = {
        "eventdetector": {
            "type": used_eventdetector,
            "inotify_events": ed_params,
            # deep copy so each detector owns an independent dict, as
            # in the original duplicated literals
            "inotifyx_events": copy.deepcopy(ed_params)
        },
        "general": {
            "config_file": pathlib.Path("testnotconfig.yaml")
        }
    }

    self.start = 100
    self.stop = 105
def setUp(self):
    """Assemble a complete datamanager configuration for the tests."""
    super().setUp()

    # see https://docs.python.org/2/library/multiprocessing.html#windows
    freeze_support()

    # attributes inherited from parent class:
    # self.config
    # self.con_ip
    # self.ext_ip
    # self.base_dir

    # Register context
    self.context = zmq.Context()

    # sockets are created lazily by the tests and torn down later
    self.com_socket = None
    self.fixed_recv_socket = None
    self.receiving_sockets = None

    ipc_dir = self.config["ipc_dir"]
    create_dir(directory=ipc_dir, chmod=0o777)

    self.local_target = os.path.join(self.base_dir, "data", "target")
    self.chunksize = 10485760  # = 1024*1024*10 = 10 MiB

    self.config["fixed_recv"] = 50100
    self.config["receiving_ports"] = [50102, 50103]

    fix_subdirs = [
        "commissioning/raw",
        "commissioning/scratch_bl",
        "current/raw",
        "current/scratch_bl",
        "local"
    ]

    ports = self.config["ports"]

    # inotifyx is used on Python 2, inotify otherwise -- presumably
    # because the inotifyx bindings are Python-2 only; confirm
    if sys.version_info[0] < 3:
        used_eventdetector = "inotifyx_events"
    else:
        used_eventdetector = "inotify_events"

    source_dir = os.path.join(self.base_dir, "data", "source")

    self.datamanager_config = {
        "general": {
            "com_port": ports["com"],
            "control_pub_port": ports["control_pub"],
            "control_sub_port": ports["control_sub"],
            "request_fw_port": ports["request_fw"],
            "request_port": 50001,
            "ext_ip": self.ext_ip,
            "ldapuri": "it-ldap-slave.desy.de:1389",
            "log_name": "datamanager.log",
            "log_path": os.path.join(self.base_dir, "logs"),
            "log_size": 10485760,
            # "onscreen": "debug",
            "onscreen": False,
            "procname": "hidra",
            "username": pwd.getpwuid(os.geteuid()).pw_name,
            "verbose": False,
            "whitelist": None,
            "use_statserver": False,
            "config_file": pathlib.Path("something_not_config.yaml")
        },
        "eventdetector": {
            "type": used_eventdetector,
            "eventdetector_port": 50003,
            "ext_data_port": 50101,
            # both detector variants get identical settings; only the
            # one selected via "type" above is actually used
            "inotify_events": {
                "monitored_dir": source_dir,
                "fix_subdirs": fix_subdirs,
                "create_fix_subdirs": False,
                "monitored_events": {
                    "IN_CLOSE_WRITE": [""]
                },
                "event_timeout": 0.1,
                "use_cleanup": False,
                "history_size": 0,
                "action_time": 10,
                "time_till_closed": 2,
            },
            "inotifyx_events": {
                "monitored_dir": source_dir,
                "fix_subdirs": fix_subdirs,
                "create_fix_subdirs": False,
                "monitored_events": {
                    "IN_CLOSE_WRITE": [""]
                },
                "event_timeout": 0.1,
                "use_cleanup": False,
                "history_size": 0,
                "action_time": 10,
                "time_till_closed": 2,
            },
        },
        "datafetcher": {
            "type": "file_fetcher",
            "chunksize": self.chunksize,
            "data_stream_targets": [[self.con_ip,
                                     self.config["fixed_recv"]]],
            "local_target": os.path.join(self.base_dir, "data",
                                         "target"),
            "use_data_stream": True,
            "number_of_streams": 1,
            "store_data": False,
            "remove_data": False,
            "cleaner_port": ports["cleaner"],
            "cleaner_trigger_port": ports["cleaner_trigger"],
            "confirmation_port": ports["confirmation"],
            "confirmation_resp_port": 50012,
            "datafetcher_port": 50010,
            "router_port": ports["router"],
            "status_check_port": 50050,
            "status_check_resp_port": 50011,
            "file_fetcher": {
                "store_data": False,
                "remove_data": False,
                "fix_subdirs": fix_subdirs,
            }
        },
    }

    # derive the full log file path from the configured name and path
    self.datamanager_config["general"]["log_file"] = (os.path.join(
        self.datamanager_config["general"]["log_path"],
        self.datamanager_config["general"]["log_name"]))

    self.start = 100
    self.stop = 105

    # identifier sent alongside requests to hidra
    self.appid = str(self.config["main_pid"]).encode("utf-8")