def test_helper_python(hm, make_db, db_src, make_mongodb, capsys):
    """Run the helper described by *hm* through ``main()`` on a filesystem- or
    mongo-backed repo, then compare captured stdout and any generated outputs
    against the expected fixtures under ``outputs/``."""
    this_test = Path(__file__)
    if db_src == "fs":
        repo = Path(make_db)
    elif db_src == "mongo":
        if make_mongodb is False:
            pytest.skip("Mongoclient failed to start")
        else:
            repo = Path(make_mongodb)
    os.chdir(repo)
    main(args=hm[0])
    captured_out, _ = capsys.readouterr()
    assert captured_out == hm[1]
    expecteddir = this_test.parent / "outputs" / hm[0][1]
    # Only some helpers produce on-disk output; skip the comparison otherwise.
    if expecteddir.is_dir():
        if db_src == "fs":
            assert_outputs(repo / "db", expecteddir)
        elif db_src == "mongo":
            from regolith.database import connect
            from regolith.runcontrol import DEFAULT_RC, load_rcfile
            os.chdir(repo)
            rc = copy.copy(DEFAULT_RC)
            rc._update(load_rcfile("regolithrc.json"))
            with connect(rc) as client:
                mongo_database = client[rc.databases[0]['name']]
                assert_mongo_vs_yaml_outputs(expecteddir, mongo_database)
def main(args=None):
    """Parse the command line, assemble the run control object, and
    dispatch to the requested regolith command.

    Parameters
    ----------
    args : list of str, optional
        Argument vector; ``None`` means use ``sys.argv``.
    """
    rc = DEFAULT_RC
    cli = create_parser()
    parsed = cli.parse_args(args)
    needs_rc = parsed.cmd in NEED_RC
    if needs_rc:
        # Layer the user-level config (if present) under the project rc file.
        if os.path.exists(rc.user_config):
            rc._update(load_rcfile(rc.user_config))
        rc._update(load_rcfile("regolithrc.json"))
    rc._update(parsed.__dict__)
    # Merge any user-supplied schemas on top of the shipped defaults.
    if "schemas" in rc._dict:
        rc.schemas = update_schemas(
            copy.deepcopy(SCHEMAS), copy.deepcopy(rc.schemas))
    else:
        rc.schemas = SCHEMAS
    if needs_rc:
        filter_databases(rc)
    if rc.cmd in DISCONNECTED_COMMANDS:
        # Commands that never touch the databases.
        DISCONNECTED_COMMANDS[rc.cmd](rc)
    else:
        dbs = commands.build_db_check(rc) if rc.cmd == 'build' else None
        with connect(rc, dbs=dbs) as rc.client:
            CONNECTED_COMMANDS[rc.cmd](rc)
def test_fs_to_mongo_python(make_fs_to_mongo_migration_db):
    """Migrate a filesystem database to mongo via ``fs-to-mongo`` and verify
    that every collection round-trips unchanged (after ISO-string dates are
    normalised to ``date`` objects on both sides)."""
    if BILLINGE_TEST:
        repo = str(
            Path(__file__).parent.parent.parent.joinpath(
                'rg-db-group', 'local'))
    else:
        repo = make_fs_to_mongo_migration_db
    os.chdir(repo)
    try:
        main(['fs-to-mongo'])
    except Exception as e:
        # Fix: the old `print(e); assert True == False` swallowed the
        # traceback and used a non-idiomatic comparison (E712); fail loudly
        # with the cause chained instead.
        raise AssertionError(f"'fs-to-mongo' raised unexpectedly: {e}") from e
    replace_rc_dbs(repo)
    os.chdir(repo)
    rc = copy.copy(DEFAULT_RC)
    rc._update(load_rcfile("regolithrc.json"))
    with connect(rc) as rc.client:
        fs_db = rc.client[FS_DB_NAME]
        mongo_db = rc.client[ALTERNATE_REGOLITH_MONGODB_NAME]
        for coll in fs_db.keys():
            original_fs_collection = fs_db[coll]
            migrated_mongo_collection = load_mongo_col(mongo_db[coll])
            # Normalise ISO date strings to date objects on both sides so
            # the equality check compares semantics, not serialisation.
            for k, v in migrated_mongo_collection.items():
                migrated_mongo_collection[k] = convert_doc_iso_to_date(v)
            for k, v in original_fs_collection.items():
                original_fs_collection[k] = convert_doc_iso_to_date(v)
            assert original_fs_collection == migrated_mongo_collection
def test_connect_db(make_db):
    """``connect_db`` must hand back the same ``dbs`` and ``chained_db``
    that ``connect()`` exposes on its client."""
    os.chdir(make_db)
    rc = copy.copy(DEFAULT_RC)
    rc._update(load_rcfile("regolithrc.json"))
    filter_databases(rc)
    # Capture the reference values straight from a live connection.
    with connect(rc) as rc.client:
        expected_dbs = rc.client.dbs
        expected_chained = rc.client.chained_db
    chained_db, dbs = connect_db(rc)
    assert chained_db == expected_chained
    assert dbs == expected_dbs
def main(args=None):
    """Minimal CLI entry point: load the project rc file, fold in the parsed
    arguments, and dispatch the requested command.

    Parameters
    ----------
    args : list of str, optional
        Argument vector; ``None`` means use ``sys.argv``.
    """
    rc = DEFAULT_RC
    rc._update(load_rcfile('regolithrc.json'))
    parsed = create_parser().parse_args(args)
    rc._update(parsed.__dict__)
    filter_databases(rc)
    if rc.cmd in DISCONNECTED_COMMANDS:
        # No database connection needed for these commands.
        DISCONNECTED_COMMANDS[rc.cmd](rc)
    else:
        with connect(rc) as rc.client:
            CONNECTED_COMMANDS[rc.cmd](rc)
def main(args=None):
    """Top-level regolith entry point.

    Parses the command line in stages (the ``helper`` subcommand grows its
    own sub-parser at runtime), assembles the run control object, and
    dispatches to the matching command handler.  Returns the populated run
    control instance.
    """
    rc = copy.copy(DEFAULT_RC)
    parser = create_parser()
    args0 = Namespace()
    # First pass: pick off global flags/command; leftovers go to `rest`.
    args1, rest = parser.parse_known_args(args, namespace=args0)
    if args1.version:
        print(__version__)
        return rc
    if args1.cmd == 'helper':
        # Build a dedicated parser for the helper target on the fly.
        p = ArgumentParser(prog='regolith helper')
        p.add_argument(
            "helper_target",
            help="helper target to run. Currently valid targets are: \n{}".
            format([k for k in HELPERS]),
        )
        if len(rest) == 0:
            p.print_help()
        args2, rest2 = p.parse_known_args(rest, namespace=args0)
        # Not obvious from here: the selected helper's subparser-setup
        # function is called to add the helper-specific arguments to `p`
        # before the final parse below.
        HELPERS[args2.helper_target][1](p)
        if len(rest2) == 0:
            p.print_help()
        # Re-parse the full leftover vector now that `p` knows the
        # helper-specific options.
        args3, rest3 = p.parse_known_args(rest, namespace=args0)
        ns = args3
    else:
        ns = args1
    if ns.cmd in NEED_RC:
        # Layer user-level config (if present) under the project rc file.
        if os.path.exists(rc.user_config):
            rc._update(load_rcfile(rc.user_config))
        rc._update(load_rcfile("regolithrc.json"))
    rc._update(ns.__dict__)
    # Merge user-supplied schemas over the shipped defaults.
    if "schemas" in rc._dict:
        user_schema = copy.deepcopy(rc.schemas)
        default_schema = copy.deepcopy(SCHEMAS)
        rc.schemas = update_schemas(default_schema, user_schema)
    else:
        rc.schemas = SCHEMAS
    if ns.cmd in NEED_RC:
        filter_databases(rc)
    if rc.cmd in DISCONNECTED_COMMANDS:
        DISCONNECTED_COMMANDS[rc.cmd](rc)
    else:
        # Some commands can restrict which databases get loaded.
        dbs = None
        if rc.cmd == 'build':
            dbs = commands.build_db_check(rc)
        elif rc.cmd == 'helper':
            dbs = commands.helper_db_check(rc)
        with connect(rc, dbs=dbs) as rc.client:
            CONNECTED_COMMANDS[rc.cmd](rc)
    return rc
def test_collection_retrieval_python(make_mixed_db):
    """Retrieve one fs-backed and one mongo-backed collection through
    ``all_docs_from_collection`` and compare each against its exemplar."""
    if make_mixed_db is False:
        pytest.skip("Mongoclient failed to start")
    repo, fs_coll, mongo_coll = make_mixed_db
    os.chdir(repo)
    rc = copy(DEFAULT_RC)
    rc._update(load_rcfile("regolithrc.json"))
    with connect(rc) as rc.client:
        # Fix: use the collection names reported by the fixture instead of
        # hard-coding "abstracts"/"assignments", so the lookups stay in sync
        # with the EXEMPLARS indexing below if the fixture ever changes.
        fs_test_dict = dict(
            list(all_docs_from_collection(rc.client, fs_coll))[0])
        mongo_test_dict = dict(
            list(all_docs_from_collection(rc.client, mongo_coll))[0])
    fs_expected_dict = deepcopy(EXEMPLARS[fs_coll])
    mongo_expected_dict = deepcopy(EXEMPLARS[mongo_coll])
    assert fs_test_dict == fs_expected_dict
    assert mongo_test_dict == mongo_expected_dict
def test_mongo_to_fs_python(make_mongo_to_fs_backup_db):
    """Back up a mongo database to the filesystem via ``mongo-to-fs`` and
    verify every migrated collection equals its mongo original."""
    repo = make_mongo_to_fs_backup_db
    os.chdir(repo)
    try:
        main(['mongo-to-fs'])
    except Exception as e:
        # Fix: the old `print(e); assert True == False` swallowed the
        # traceback and used a non-idiomatic comparison (E712); fail loudly
        # with the cause chained instead.
        raise AssertionError(f"'mongo-to-fs' raised unexpectedly: {e}") from e
    replace_rc_dbs(repo)
    os.chdir(repo)
    rc = copy.copy(DEFAULT_RC)
    rc._update(load_rcfile("regolithrc.json"))
    with connect(rc) as rc.client:
        fs_db = rc.client[FS_DB_NAME]
        mongo_db = rc.client[ALTERNATE_REGOLITH_MONGODB_NAME]
        for coll in mongo_db.list_collection_names():
            migrated_fs_collection = fs_db[coll]
            original_mongo_collection = load_mongo_col(mongo_db[coll])
            assert migrated_fs_collection == original_mongo_collection
def test_mongo_invalid_insertion(make_mongodb):
    """An insertion violating the projecta schema must raise ``ValueError``
    with the expected validation report."""
    # proof that valid insertion is allowed is provided by helper tests on mongo
    if make_mongodb is False:
        pytest.skip("Mongoclient failed to start")
    repo = Path(make_mongodb)
    from regolith.database import connect
    from regolith.runcontrol import DEFAULT_RC, load_rcfile
    os.chdir(repo)
    rc = copy.copy(DEFAULT_RC)
    rc.schemas = SCHEMAS
    rc._update(load_rcfile("regolithrc.json"))
    with connect(rc) as rc.client:
        only_database_in_test = rc.databases[0]['name']
        # Fix: the old try/except fell through silently (and left `result`
        # unbound) when no exception was raised, so a broken validator made
        # the test pass. pytest.raises makes the expectation explicit.
        with pytest.raises(ValueError) as excinfo:
            rc.client.insert_one(only_database_in_test, 'projecta',
                                 BAD_PROJECTUM)
        expected = 'ERROR in sb_firstprojectum:\n{\'lead\': [\'required field\'], \'status\': [\'required field\']}\nNone\nNone\n---------------\n'
        assert excinfo.value.args[0] == expected
def connect_db(rc, colls=None):
    """Load up the db's.

    Parameters
    ----------
    rc :
        The runcontrol instance.
    colls :
        The list of collections that should be loaded.

    Returns
    -------
    chained_db :
        The chained databases in the form of a document.
    dbs :
        The databases in the form of a runcontrol client.
    """
    # Open a connection just long enough to grab both views of the data.
    with connect(rc, dbs=colls) as rc.client:
        snapshot = (rc.client.chained_db, rc.client.dbs)
    chained_db, dbs = snapshot
    return chained_db, dbs
def main(args=None):
    """CLI entry point: build the run control, overlay user schemas onto the
    defaults, and dispatch the requested command.

    Parameters
    ----------
    args : list of str, optional
        Argument vector; ``None`` means use ``sys.argv``.
    """
    rc = DEFAULT_RC
    parsed = create_parser().parse_args(args)
    needs_rc = parsed.cmd in NEED_RC
    if needs_rc:
        # Layer user-level config (if present) under the project rc file.
        if os.path.exists(rc.user_config):
            rc._update(load_rcfile(rc.user_config))
        rc._update(load_rcfile('regolithrc.json'))
    rc._update(parsed.__dict__)
    if 'schemas' in rc._dict:
        # Overlay user-provided schema fragments field-by-field onto a
        # deep copy of the shipped defaults.
        overrides = copy.deepcopy(rc.schemas)
        rc.schemas = copy.deepcopy(SCHEMAS)
        for coll, fields in overrides.items():
            for field, spec in fields.items():
                rc.schemas[coll][field].update(spec)
    else:
        rc.schemas = SCHEMAS
    if needs_rc:
        filter_databases(rc)
    if rc.cmd in DISCONNECTED_COMMANDS:
        # No database connection needed for these commands.
        DISCONNECTED_COMMANDS[rc.cmd](rc)
    else:
        with connect(rc) as rc.client:
            CONNECTED_COMMANDS[rc.cmd](rc)
"""Loads the dbs for interactive sessions.

Run/import this module to get `rc`, `dbs`, and `chained_db` bound at module
level for ad-hoc exploration of the regolith databases.
"""
from regolith.database import connect
from regolith.runcontrol import DEFAULT_RC, load_rcfile, filter_databases

# Build the run control from the defaults plus the project rc file.
rc = DEFAULT_RC
rc._update(load_rcfile("regolithrc.json"))
filter_databases(rc)
# Open a connection just long enough to snapshot both database views;
# the dicts remain usable after the connection closes.
with connect(rc) as rc.client:
    dbs = rc.client.dbs
    chained_db = rc.client.chained_db