def on_startup(config_path: str) -> Dict:
    """Initialize the SM engine from a config file and return the loaded config.

    Loads the config from ``config_path``, configures logging, exports AWS
    credentials into the environment when an ``aws`` section is present, and
    initializes image storage.
    """
    # Imported lazily to avoid a cyclic import at module load time.
    from sm.engine import image_storage  # pylint: disable=import-outside-toplevel,cyclic-import

    SMConfig.set_path(config_path)
    conf = SMConfig.get_conf()
    init_loggers(conf['logs'])
    if 'aws' in conf:
        populate_aws_env_vars(conf['aws'])
    image_storage.init(conf)
    return conf
def main():
    """CLI entry point: merge mol_dbs and adducts into existing annotation rows.

    Parses the config path from the command line, sets up logging, and runs the
    three population passes inside one DB connection pool.
    """
    arg_parser = argparse.ArgumentParser(description='Merge mol_dbs and adducts into config')
    arg_parser.add_argument('--config', default='conf/config.json', help='SM config path')
    cli_args = arg_parser.parse_args()

    SMConfig.set_path(cli_args.config)
    init_loggers(SMConfig.get_conf()['logs'])
    conf = SMConfig.get_conf()

    with ConnectionPool(conf['db']):
        db = DB()
        # Order matters: formulas first, then ions, then their ids.
        populate_ion_formula(db)
        populate_ions(db)
        populate_ion_id(db)
def sm_config():
    """Return the SM config patched for an isolated test run.

    Reloads the config from the test config path, then rewrites the cached
    config in place so that DB, Elasticsearch, RabbitMQ and Lithops storage
    names are unique per pytest-xdist worker and easy to clean up afterwards.
    """
    SMConfig.set_path(Path(proj_root()) / TEST_CONFIG_PATH)
    # Force reload in case previous tests modified it
    SMConfig.get_conf(update=True)

    xdist_worker = os.environ.get('PYTEST_XDIST_WORKER', 'gw0')
    test_id = f'sm_test_{xdist_worker}'

    # Update the internal cached copy of the config, so independent calls to SMConfig.get_conf()
    # also get the updated config
    cached = SMConfig._config_dict
    cached['db']['database'] = test_id
    cached['elasticsearch']['index'] = test_id
    cached['rabbitmq']['prefix'] = f'test_{xdist_worker}__'
    # prefix keys with test ID so they can be cleaned up later
    for storage_path in cached['lithops']['sm_storage'].values():
        storage_path[1] = f'{test_id}/{storage_path[1]}'

    # __LITHOPS_SESSION_ID determines the prefix to use for anonymous cloudobjects
    os.environ['__LITHOPS_SESSION_ID'] = f'{test_id}/cloudobjects'

    return SMConfig.get_conf()
def main():
    """CLI entry point: migrate MolDB data from the mol_db service DB into the SM DB.

    Dumps ``molecule`` / ``molecular_db`` tables from the local mol_db database
    to CSV files under /tmp, imports them into the SM database configured in
    the given config file, and removes the temporary CSV files.
    """
    parser = argparse.ArgumentParser(description='Migrate MolDB data from service to database')
    parser.add_argument('--config', default='conf/config.json', help='SM config path')
    args = parser.parse_args()

    SMConfig.set_path(args.config)
    config = SMConfig.get_conf()
    init_loggers(config['logs'])

    # NOTE(review): connection details for the legacy mol_db service database;
    # the password placeholder presumably comes from the environment/pgpass.
    moldb_db_config = {'host': 'localhost', 'database': 'mol_db', 'user': '******'}
    try:
        dump_moldb_tables(moldb_db_config)
        import_moldb_tables(config['db'])
    finally:
        # Clean up the dump files even when the import fails (the original code
        # leaked them on error). Skip files the dump step never created.
        for tmp_file in ('/tmp/molecule.csv', '/tmp/molecular_db.csv'):
            try:
                os.remove(tmp_file)
            except FileNotFoundError:
                pass
# Sub-commands for managing the Elasticsearch index (`parser`/`subparsers` are
# created earlier in this script, outside this span).
create_subparser = subparsers.add_parser('create')
create_subparser.add_argument('--drop', action='store_true', help='Delete existing index if exists')
swap_subparser = subparsers.add_parser('swap', help='Swap the active and inactive indexes')
drop_subparser = subparsers.add_parser('drop', help='Drop the index. Can only be used on the inactive index')
status_subparser = subparsers.add_parser('status', help='Show current index mapping')
args = parser.parse_args()

# Load the SM config and set up logging before touching Elasticsearch.
SMConfig.set_path(args.config_path)
init_loggers(SMConfig.get_conf()['logs'])
es_config = SMConfig.get_conf()['elasticsearch']
es_man = ESIndexManager(es_config)

# The configured index name is an alias; resolve it to the concrete internal
# index and its blue/green counterpart, then pick the one to operate on.
alias = es_config['index']
active_index = es_man.internal_index_name(alias)
inactive_index = es_man.another_index_name(active_index)
index = inactive_index if args.inactive else active_index

if args.action == 'create':
    if args.drop:
        es_man.delete_index(index)
    es_man.create_index(index)
    # Only point the alias at the new index when (re)creating the active one.
    if not args.inactive:
        es_man.remap_alias(index, alias)
# Continuation of an `argparse.ArgumentParser(` call opened before this span.
description='Script for updating metadata in DB on per dataset basis')
parser.add_argument('--md-type', dest='md_type', type=str, help='Target metadata type')
parser.add_argument(
    '--ds-name',
    dest='ds_name',
    type=str,
    help="DS name prefix mask ({} for all datasets)".format(ALL_DS_MASK),
)
parser.add_argument(
    '--config',
    dest='sm_config_path',
    default='conf/config.json',
    type=str,
    help='SM config path',
)
args = parser.parse_args()

SMConfig.set_path(args.sm_config_path)
sm_config = SMConfig.get_conf()
# NOTE(review): other entry points in this project call
# init_loggers(sm_config['logs']); confirm the no-argument call here is
# intentional and not a missing-config bug.
init_loggers()
logger = logging.getLogger('engine')

# Both the metadata type and the dataset-name mask are required; otherwise
# show usage instead of running.
if args.ds_name and args.md_type:
    set_metadata_type(DB(), args.md_type, args.ds_name)
else:
    parser.print_help()