def main():
    """Entry point: parse CLI arguments, initialise config and logging,
    then consume messages from the queue until interrupted.

    Blocks in ``mq.consume`` until Ctrl-C, then stops consuming and
    closes the channel.
    """
    parser = argparse.ArgumentParser(description='consumes a mq ')
    # NOTE(review): the original also passed default="conductor.yml" here,
    # but required=True makes a default unreachable — dropped the dead default.
    parser.add_argument('-c', '--config', required=True,
                        help="conductor config file ")
    parser.add_argument('-l', '--logfile', default=None,
                        help="Logfile to write to, default is stdout")
    # action="count" with default=4: each -v increments verbosity above 4.
    parser.add_argument('-v', '--verbose', default=4, action="count",
                        help="Increase the verbosity of logging output")
    args = parser.parse_args()

    config = init(args.config)

    if args.logfile:
        logger.init(name='nga_runner', log_file=args.logfile, rotate_logs=True)
    else:
        logger.init(name='nga_runner')
    logger.set_log_level(args.verbose)
    logger.info(f'startup (v:{version})')

    api_requests.set_token(config['key'])

    global mq
    # prefetch_count=1: handle one message at a time.
    mq.connect(uri=config['mq_uri'], prefetch_count=1)
    try:
        mq.consume(route='default', callback=do_work)
    except KeyboardInterrupt:
        mq.channel.stop_consuming()

    # Wait for all to complete
    # logger.debug('waiting for threads')
    mq.channel.close()
def init(config_file: str) -> dict:
    """Read the runner config file, set module-level globals, and return
    the parsed config.

    Fixes the original return annotation ``-> {}`` (a dict literal, not a
    type) to ``-> dict``.

    Sets globals: master_url, nels_url, instances, master_api, tmp_dir,
    sleep_time, nels_storage_client_key, nels_storage_client_secret,
    nels_storage_url.
    """
    config = config_utils.readin_config_file(config_file)
    logger.info("init from config ")

    # set incoming and proxy keys
    api_requests.set_token(config.get('proxy_key', None))

    global master_url, nels_url, instances, master_api, tmp_dir, sleep_time
    master_url = config['master_url'].rstrip("/")
    nels_url = config['nels_url'].rstrip("/")
    instances = {}
    master_api = api_requests.ApiRequests(master_url, config['key'])

    global nels_storage_client_key, nels_storage_client_secret, nels_storage_url, sleep_time
    nels_storage_client_key = config['nels_storage_client_key']
    nels_storage_client_secret = config['nels_storage_client_secret']
    nels_storage_url = config['nels_storage_url'].rstrip("/")

    # Fall back to the pre-existing module-level defaults when the config
    # does not override them — assumes tmp_dir/sleep_time are defined at
    # module scope (TODO confirm against full file).
    tmp_dir = config.get('tmp_dir', tmp_dir)
    sleep_time = config.get('sleep_time', sleep_time)

    tmp_instances = config['instances']
    for iid in tmp_instances:
        # Skip instances not explicitly marked active.
        if 'active' not in tmp_instances[iid] or not tmp_instances[iid]['active']:
            continue

        # Register each active instance under both its id and its name;
        # both keys reference the same dict, so the 'api' client added
        # below is visible through either lookup.
        instances[iid] = tmp_instances[iid]
        instance = tmp_instances[iid]
        instance['api'] = api_requests.ApiRequests(
            instance['nga_url'].rstrip("/"), instance['nga_key'])
        instances[instance['name']] = instance

    return config
def init(config_file: str) -> dict:
    """Initialise setup from the config file and the galaxy config file,
    and populate global state.

    Fixes the original annotations: ``config_file`` is a path passed to
    ``config_utils.readin_config_file`` (not a dict), and the function
    returns the parsed config (not None).

    Sets globals: galaxy_url, master_url, nels_url, instance_id,
    tos_grace_period (when the tos-server is enabled), proxy_keys,
    instances, no_proxy.
    """
    config = config_utils.readin_config_file(config_file)
    galaxy_config = config_utils.readin_config_file(config['galaxy_config'])
    galaxy_init(galaxy_config)
    logger.info("init from config ")

    # set incoming and proxy keys
    tornado.set_token(config.get('key', None))
    api_requests.set_token(config.get('proxy_key', None))

    global galaxy_url, master_url, nels_url, instance_id
    galaxy_url = config['galaxy_url'].rstrip("/")
    master_url = config['master_url'].rstrip("/")
    instance_id = config['id'].rstrip("/")
    nels_url = config['nels_url'].rstrip("/")

    if 'tos_server' in config and config['tos_server']:
        logger.info("Running with the tos-server")
        db.create_tos_table()
        global tos_grace_period
        tos_grace_period = config.get('grace_period', 14)

    if 'master' in config and config['master']:
        logger.info("Running with the master API")
        db.create_export_tracking_table()
        db.create_export_tracking_logs_table()
        db.create_import_tracking_table()
        db.create_import_tracking_logs_table()
        mq.connect(uri=config['mq_uri'])

    global proxy_keys, instances, no_proxy
    proxy_keys = {}
    instances = {}
    no_proxy = True

    tmp_instances = config['instances']
    for iid in tmp_instances:
        # Skip instances not explicitly marked active.
        if 'active' not in tmp_instances[iid] or not tmp_instances[iid]['active']:
            continue

        # Register each active instance under both its id and its name;
        # both keys reference the same dict.
        instances[iid] = tmp_instances[iid]
        instance = tmp_instances[iid]
        instances[instance['name']] = instance
        instances[instance['name']]['api'] = api_requests.ApiRequests(
            instance['nga_url'].rstrip("/"), instance['nga_key'])

        # Proxy keys must be unique per instance; warn on reuse.
        if instance['proxy_key'] in proxy_keys.keys():
            logger.warn(
                f"Proxy key for {instance['name']} is also used for {proxy_keys[instance['proxy_key']]}"
            )

        proxy_keys[instance['proxy_key']] = instance['name']

    # global mq
    return config
def test_encrypt():
    """Smoke test: encrypt() must complete without raising."""
    requests.set_token(token)
    result = requests.encrypt(base_url, 13)
    assert True
def test_get_user_history_exports():
    """Smoke test: get_user_history_exports() must complete without raising."""
    requests.set_token(token)
    result = requests.get_user_history_exports(base_url, 3)
    assert True
def test_get_history_export():
    """Smoke test: get_history_export() must complete without raising."""
    requests.set_token(token)
    result = requests.get_history_export(base_url, 13)
    assert True
def test_get_info():
    """Smoke test: get_info() must complete without raising."""
    requests.set_token(token)
    result = requests.get_info(base_url)
    assert True
def test_get_users():
    """Smoke test: get_users() must complete without raising."""
    requests.set_token(token)
    result = requests.get_users(base_url)
    assert True
def test_request_get():
    """request_get() must echo back the token that was set."""
    requests.set_token(token)
    _response, returned_token = requests.request_get(base_url)
    assert returned_token == token
def test_request_delete():
    """request_delete() must echo back the token that was set."""
    requests.set_token(token)
    _response, returned_token = requests.request_delete(base_url, {'user': '******'})
    assert returned_token == token
def test_get_all_exports():
    """Smoke test: get_instance_exports() must complete without raising."""
    requests.set_token(token)
    result = requests.get_instance_exports(base_url, user)
    assert True
def test_get_user_exports():
    """Smoke test: get_user_exports() must complete without raising."""
    requests.set_token(token)
    result = requests.get_user_exports(base_url, user)
    assert True
def test_update_export():
    """Smoke test: update_export() must accept a state change without raising."""
    requests.set_token(token)
    result = requests.update_export(base_url, 3, data={"state": "new"})
    assert True
def test_set_token():
    """Smoke test: set_token() must accept the token without raising."""
    requests.set_token(token)
def test_add_bulk_history_export():
    """Smoke test: add_bulk_export() must accept a bulk payload without raising."""
    requests.set_token(token)
    payload = {"nels_id": 3, "selectedFiles": ["goes_here"]}
    result = requests.add_bulk_export(base_url, instance, user, payload)
    assert True