def load_config():
    """Parse the command line arguments and load the config files.

    Returns:
        A dictionary with the merged configuration (base config file,
        detailed config file and command line arguments).
    """
    base_config_file = utils.determine_config_file(fname_base="base_sender")
    arguments = argument_parsing()

    # normalize the config file path; fall back to the default location
    # when none was given on the command line
    if arguments.config_file is None:
        default_file = utils.determine_config_file(fname_base="datamanager")
        arguments.config_file = default_file.as_posix()
    else:
        arguments.config_file = Path(arguments.config_file).as_posix()

    # make sure both config files actually exist
    for conf_file in (base_config_file, arguments.config_file):
        utils.check_existence(conf_file)

    # ------------------------------------------------------------------------
    # Get arguments from config file and command line
    # ------------------------------------------------------------------------
    merged = utils.load_config(base_config_file)
    detailed = utils.load_config(arguments.config_file)

    # conf and yaml formats may be mixed, so everything has to be mapped
    # to one common format before merging
    conf_type = "sender"
    merged = utils.map_conf_format(merged, conf_type)
    detailed = utils.map_conf_format(detailed, conf_type)
    cli_args = utils.map_conf_format(arguments, conf_type, is_namespace=True)

    # precedence: command line > detailed config file > base config file
    utils.update_dict(detailed, merged)
    utils.update_dict(cli_args, merged)

    return merged
def main():
    """Parse the settings from the configure file and display them."""
    args = get_arguments()
    params = utils.load_config(args.config_file)

    # in case the config file was in the old config format
    # (for backwards compatibility to 4.0.x)
    config = utils.map_conf_format(params, "sender")

    datafetcher = config["datafetcher"]
    eventdetector = config["eventdetector"]
    # the event detector options live under the key of the configured type
    config_ed = eventdetector[eventdetector["type"]]

    print("Configured settings:")
    print("Monitored directory: {}".format(config_ed["monitored_dir"]))
    print("Watched subdirectories are: {}".format(config_ed["fix_subdirs"]))

    store_info = (datafetcher["local_target"]
                  if datafetcher["store_data"]
                  else "Data is not stored locally")
    print("Data is written to: {}".format(store_info))

    stream_info = (datafetcher["data_stream_targets"]
                   if datafetcher["use_data_stream"]
                   else "Data is not sent as priority stream anywhere")
    print("Data is sent to: {}".format(stream_info))

    print("Remove data from the detector: {}".format(
        datafetcher["remove_data"]))
    print("Whitelist: {}".format(config["general"]["whitelist"]))
def main():
    """Connect to the receiver and show its status.

    Determines the receiver host either from --host or from the
    data_stream_targets entry of the (sender) config file, then queries
    the receiver's status and prints a colored summary.
    """
    parser = get_arguments()
    args = parser.parse_args()

    host = args.host
    default_config_file = "/opt/hidra/conf/datamanager.yaml"

    # --host and an explicit --config_file are mutually exclusive.
    # BUGFIX: check args.config_file BEFORE applying the default value;
    # otherwise config_file is never None and the error fired on every
    # use of --host.
    if host is not None and args.config_file is not None:
        parser.error("Either use --host or --config_file but not both.")

    config_file = args.config_file or default_config_file

    if host is None:
        params = utils.load_config(config_file)

        # in case the config file was in the old config format
        # (for backwards compatibility to 4.0.x)
        config = utils.map_conf_format(params, "sender")

        # BUGFIX: read from the mapped config (not the raw params) so that
        # old-format config files work as well
        data_stream_targets = config["datafetcher"]["data_stream_targets"]

        hosts = [target[0] for target in data_stream_targets]
        # TODO make generic
        host = hosts[0]

    control = ReceiverControl(host)

    print("Checking for service hidra receiver on", host, ": ", end="")
    try:
        status = control.get_status()

        if status == ["OK"]:
            print(CGREEN + "running." + CEND)
        else:
            print(CYELLOW + "running but in error state:" + CEND)
            print(status)
    except CommunicationFailed as excp:
        print(CRED + "not reachable." + CEND)
        print(traceback.format_exception_only(type(excp), excp)[0], end="")
    except Exception:
        # catch-all at the CLI boundary: report and exit non-zero
        print(CRED + "not reachable. Unknown Error.\n" + CEND)
        print(traceback.format_exc())
        sys.exit(1)
def test_map_conf_format_minimal(self):
    """Flat options without a section end up under 'general'."""
    flat_config = {
        'config_file': '/my_dir/hidra/conf/datamanager_test.yaml',
        'onscreen': 'debug',
        'verbose': True
    }

    # the mapping is expected to nest all flat options into 'general'
    # without modifying any of the values
    expected_result = {
        u'general': {
            u'config_file': '/my_dir/hidra/conf/datamanager_test.yaml',
            u'onscreen': 'debug',
            u'verbose': True
        }
    }

    res = utils.map_conf_format(
        flat_config, config_type="sender", is_namespace=False)

    self.assertDictEqual(res, expected_result)
def test_map_config_format(self):
    """A config already in sectioned format passes through unchanged."""
    sectioned_config = {
        'datafetcher': {
            'chunksize': 10485760,
            'cleaner_port': 50051,
            'cleaner_trigger_port': 50052,
            'confirmation_port': 50053,
            'confirmation_resp_port': 50012,
            'data_fetcher_port': 50010,
            'router_port': 50004,
            'status_check_port': 50050,
            'status_check_resp_port': 50011
        },
        'eventdetector': {
            'dirs_not_to_create': [],
            'event_det_port': 50003,
            'ext_data_port': 50101
        },
        'general': {
            'com_port': 50000,
            'control_pub_port': 50005,
            'control_sub_port': 50006,
            'ldapuri': 'it-ldap-slave.desy.de:1389',
            'log_size': 10485760,
            'request_fw_port': 50002,
            'request_port': 50001
        }
    }

    # input and expectation are identical; build an independent copy so
    # a mutating implementation could not fake a pass
    expected_result = {
        section: dict(options)
        for section, options in sectioned_config.items()
    }

    res = utils.map_conf_format(
        sectioned_config, config_type="sender", is_namespace=False)

    self.assertDictEqual(res, expected_result)
def test_map_conf_format_full(self):
    """A full sectioned sender config passes through unchanged."""

    def _deep_copy(obj):
        # minimal deep copy (dicts/lists only) so the expected result is
        # fully independent from the input passed to map_conf_format
        if isinstance(obj, dict):
            return {key: _deep_copy(value) for key, value in obj.items()}
        if isinstance(obj, list):
            return [_deep_copy(item) for item in obj]
        return obj

    full_config = {
        'datafetcher': {
            'data_stream_targets': [['my_pc', 50100]],
            'file_fetcher': {
                'fix_subdirs': self.fix_subdirs,
            },
            'http_fetcher': {
                'fix_subdirs': self.fix_subdirs,
            },
            'local_target': '/my_dir/hidra/data/target',
            'number_of_streams': 1,
            'remove_data': True,
            'store_data': False,
            'type': 'file_fetcher',
            'use_data_stream': False
        },
        'eventdetector': {
            'http_events': {
                'det_api_version': '1.6.0',
                'det_ip': 'asap3-mon',
                'fix_subdirs': self.fix_subdirs,
                'history_size': 0
            },
            'inotifyx_events': {
                'action_time': 150,
                'create_fix_subdirs': False,
                'fix_subdirs': self.fix_subdirs,
                'history_size': 0,
                'monitored_dir': '/my_dir/hidra/data/source',
                'monitored_events': {
                    'IN_MOVED_TO': ['.metadata']
                },
                'time_till_closed': 2,
                'use_cleanup': False
            },
            'type': 'watchdog_events',
            'watchdog_events': {
                'action_time': 5,
                'create_fix_subdirs': False,
                'fix_subdirs': self.fix_subdirs,
                'monitored_dir': '/my_dir/hidra/data/source',
                'monitored_events': {
                    'IN_MOVED_TO': ['.metadata']
                },
                'time_till_closed': 1
            }
        },
        'general': {
            'ext_ip': '0.0.0.0',
            'log_name': 'datamanager.log',
            'log_path': '/my_dir/hidra/logs',
            'procname': 'hidra',
            'username': '******',
            'whitelist': None
        }
    }

    # input and expectation are identical for an already-mapped config
    expected_result = _deep_copy(full_config)

    res = utils.map_conf_format(
        full_config, config_type="sender", is_namespace=False)

    self.assertDictEqual(res, expected_result)
def argument_parsing():
    """Parses and checks the command line arguments used.

    Merges the base config file, the detailed config file and the command
    line arguments (command line takes precedence) and validates that all
    required receiver options are present.

    Returns:
        A dictionary with the merged and checked configuration.

    Raises:
        utils.WrongConfiguration: If the configuration check failed.
    """
    base_config_file = utils.determine_config_file(fname_base="base_receiver")

    # ------------------------------------------------------------------------
    # Get command line arguments
    # ------------------------------------------------------------------------
    parser = argparse.ArgumentParser()

    parser.add_argument("--config_file",
                        type=str,
                        help="Location of the configuration file")

    parser.add_argument("--log_path",
                        type=str,
                        help="Path where logfile will be created")
    parser.add_argument("--log_name",
                        type=str,
                        help="Filename used for logging")
    parser.add_argument("--log_size",
                        type=int,
                        help="File size before rollover in B (linux only)")
    parser.add_argument("--verbose",
                        help="More verbose output",
                        action="store_true")
    parser.add_argument("--onscreen",
                        type=str,
                        help="Display logging on screen "
                             "(options are CRITICAL, ERROR, WARNING, "
                             "INFO, DEBUG)",
                        default=False)

    parser.add_argument("--procname",
                        type=str,
                        help="Name with which the service should be running")

    parser.add_argument("--whitelist",
                        type=str,
                        help="List of hosts allowed to connect")
    parser.add_argument("--target_dir",
                        type=str,
                        help="Where incoming data will be stored to")
    parser.add_argument("--dirs_not_to_create",
                        type=str,
                        help="Subdirectories which should not be created when "
                             "data is stored")
    parser.add_argument("--data_stream_ip",
                        type=str,
                        help="Ip of dataStream-socket to pull new files from")
    parser.add_argument("--data_stream_port",
                        type=str,
                        help="Port number of dataStream-socket to pull new "
                             "files from")

    arguments = parser.parse_args()
    arguments.config_file = (
        arguments.config_file
        or utils.determine_config_file(fname_base="datareceiver"))

    # check if config_file exist
    utils.check_existence(arguments.config_file)

    # ------------------------------------------------------------------------
    # Get arguments from config file
    # ------------------------------------------------------------------------
    config = utils.load_config(base_config_file)
    config_detailed = utils.load_config(arguments.config_file)

    # if config and yaml is mixed mapping has to take place before merging
    # them
    config_type = "receiver"
    config = utils.map_conf_format(config, config_type)
    config_detailed = utils.map_conf_format(config_detailed, config_type)
    arguments_dict = utils.map_conf_format(arguments,
                                           config_type,
                                           is_namespace=True)

    utils.update_dict(config_detailed, config)
    utils.update_dict(arguments_dict, config)

    # ------------------------------------------------------------------------
    # Check given arguments
    # ------------------------------------------------------------------------
    # BUGFIX: "procname" was listed twice in the general section
    required_params = {
        "general": [
            "log_path",
            "log_name",
            "procname",
            "ldapuri",
            "dirs_not_to_create",
            "whitelist"
        ],
        "datareceiver": [
            "target_dir",
            "data_stream_ip",
            "data_stream_port",
        ]
    }

    # Check format of config
    check_passed, _ = utils.check_config(required_params, config, logging)

    if not check_passed:
        logging.error("Configuration check failed")
        raise utils.WrongConfiguration

    # check target directory for existence
    utils.check_existence(config["datareceiver"]["target_dir"])

    # check if logfile is writable
    config["general"]["log_file"] = os.path.join(
        config["general"]["log_path"],
        config["general"]["log_name"])

    return config