def test_import_inputset():
    """Round-trip an input set through import/backup/restore/export for
    each supported registry (github, npm, pypi) plus the vanilla case.

    Temporary artifacts (../test.p, ../test.json) are always removed,
    even when a step fails partway through.
    """

    def _roundtrip(path, **kwargs):
        # One full cycle: import -> rename -> backup -> restore -> export.
        ds = Dataset.import_inputset(path, cache_dir=CACHE_DIR,
                                     debug=True, **kwargs)
        ds.update(**{'name': 'test', 'version': '1.0'})
        ds.backup('../test.p')
        ds = Dataset.restore('../test.p')
        ds.export_inputset('../test.json')

    try:
        pat = os.getenv('GITHUB_PAT')

        # test github (repo-level and commit-level input sets)
        _roundtrip('files/git_repo.json', registry='github', github_pat=pat)
        _roundtrip('files/git_repo_commit.json', registry='github',
                   github_pat=pat)

        # test npm
        _roundtrip('files/name_version.json', registry='npm')

        # test pypi
        _roundtrip('files/name_version.json', registry='pypi')

        # test vanilla (no registry)
        _roundtrip('files/http_url.json')
    finally:
        # cleanup files — runs even if a round-trip above raised,
        # so failed runs don't leave stale artifacts behind
        for leftover in ('../test.p', '../test.json'):
            if os.path.exists(leftover):
                os.remove(leftover)
def import_(ctx, registry, filepath):
    """Imports an input set json file."""
    saved_ds = None
    try:
        # Snapshot the current dataset so it can be restored on failure.
        saved_ds = deepcopy(ctx.obj.get('dataset', None))

        # 'noreg' is the CLI sentinel for "no registry".
        reg = None if registry == 'noreg' else registry

        global TEMP_SETTINGS
        ctx.obj['dataset'] = Dataset.import_inputset(
            filepath, reg, **TEMP_SETTINGS)

        # reset the temporary api/metadata dict
        TEMP_SETTINGS = dict()

    except Exception as e:
        print_error(e, DEBUG)

        # silently restore the dataset
        ctx.obj['dataset'] = saved_ds