def test_dataverse_json_dv_up_valid(self, import_dataverse_min_dict):
    """Test Dataverse.json() with format=`dv_up` and valid data.

    Parameters
    ----------
    import_dataverse_min_dict : dict
        Fixture, which returns a flat dataset dict() coming from
        `tests/data/dataverse_min.json`.

    """
    dataverse = Dataverse()
    dataverse.set(import_dataverse_min_dict)
    # Serialization of a minimally-valid Dataverse must yield a truthy string.
    assert dataverse.json()
    assert isinstance(dataverse.json(), str)
def test_dataverse_json_dv_up_valid_not(self, import_dataverse_min_dict):
    """Test Dataverse.json() with format=`dv_up` and non-valid data.

    Parameters
    ----------
    import_dataverse_min_dict : dict
        Fixture, which returns a flat dataset dict() coming from
        `tests/data/dataverse_min.json`.

    """
    dataverse = Dataverse()
    dataverse.set(import_dataverse_min_dict)
    # Invalidate the metadata: a missing name must make serialization fail.
    dataverse.name = None
    assert not dataverse.json()
def test_dataverse_json_format_wrong_valid(self, import_dataverse_min_dict):
    """Test Dataverse.json() with non-valid format and valid data.

    Parameters
    ----------
    import_dataverse_min_dict : dict
        Fixture, which returns a flat dataset dict() coming from
        `tests/data/dataverse_min.json`.

    """
    dataverse = Dataverse()
    dataverse.set(import_dataverse_min_dict)
    dataverse.datasets = [Dataset()]
    dataverse.dataverses = [Dataverse()]
    dataverse.pid = 'doi:10.11587/EVMUHP'
    # An unknown export format must produce a falsy result.
    serialized = dataverse.json('wrong')
    assert not serialized
def test_dataverse_json_all_valid(self, import_dataverse_min_dict):
    """Test Dataverse.json() with format=`all` and valid data.

    Parameters
    ----------
    import_dataverse_min_dict : dict
        Fixture, which returns a flat dataset dict() coming from
        `tests/data/dataverse_min.json`.

    """
    dataverse = Dataverse()
    dataverse.set(import_dataverse_min_dict)
    dataverse.datasets = [Dataset()]
    dataverse.dataverses = [Dataverse()]
    dataverse.pid = 'doi:10.11587/EVMUHP'
    # The `all` export format must serialize the full object to a string.
    serialized = dataverse.json('all')
    assert serialized
    assert isinstance(serialized, str)
def create_testdata(config_file: str, force: bool) -> None:
    """Create testdata defined in a config file.

    Creates a pre-defined set of testdata on your instance. By default, the
    function uses the AUSSDA test data repository, which is so far not
    publicly available.

    If `PRODUCTION` is `true`, this function will not execute, as long as
    you not add `--force` to the function call. This is to protect from
    unwanted changes on a production instance.
    """
    # Init
    # Guard: refuse to run against a production instance unless the caller
    # explicitly passed force=True.
    if config.PRODUCTION and not force:
        print(
            "Create testdata on a PRODUCTION instance not allowed. Use --force to force it."
        )
        sys.exit()
    # pid_idx collects the persistent ID of each created dataset, in creation
    # order; the datafiles phase later looks PIDs up by position.
    pid_idx = []
    # users maps user handles to credentials (incl. "api-token"); workflow is
    # the parsed config describing dataverses, datasets and datafiles.
    users = read_json(config.USER_FILENAME)
    workflow = read_json(os.path.join(ROOT_DIR, config_file))

    # Dataverses
    # Phase 1: create and/or publish each configured dataverse.
    for dv_conf in workflow["dataverses"]:
        dv_alias = None
        if "create" in dv_conf:
            # API client authenticated as the user named in the config entry.
            api = NativeApi(
                config.BASE_URL,
                users[dv_conf["create"]["user-handle"]]["api-token"])
            dv = Dataverse()
            dv_filename = os.path.join(
                ROOT_DIR, dv_conf["create"]["metadata-filename"])
            dv.from_json(read_file(dv_filename))
            # Optional per-entry metadata overrides, applied one key at a time.
            if "update" in dv_conf["create"]:
                for key, val in dv_conf["create"]["update"].items():
                    kwargs = {key: val}
                    dv.set(kwargs)
            dv_alias = dv.get()["alias"]
            resp = api.create_dataverse(
                dv_conf["create"]["parent"], dv.json())
        if "publish" in dv_conf:
            api = NativeApi(
                config.BASE_URL,
                users[dv_conf["publish"]["user-handle"]]["api-token"])
            # Publish-only entries (no "create") must supply the alias
            # themselves.
            if not dv_alias and "alias" in dv_conf["publish"]:
                dv_alias = dv_conf["publish"]["alias"]
            resp = api.publish_dataverse(dv_alias)

    # Datasets
    # Phase 2: create and/or publish datasets.
    # NOTE(review): create_dataset() below reuses dv_alias from the last
    # iteration of the dataverse loop — presumably all datasets go into that
    # one dataverse; confirm against the config format.
    for ds_conf in workflow["datasets"]:
        pid = None
        if "create" in ds_conf:
            api = NativeApi(
                config.BASE_URL,
                users[ds_conf["create"]["user-handle"]]["api-token"])
            ds = Dataset()
            ds_filename = os.path.join(
                ROOT_DIR, ds_conf["create"]["metadata-filename"])
            ds.from_json(read_file(ds_filename))
            if "update" in ds_conf["create"]:
                for key, val in ds_conf["create"]["update"].items():
                    kwargs = {key: val}
                    ds.set(kwargs)
            resp = api.create_dataset(dv_alias, ds.json())
            # Remember the new dataset's PID for the datafiles phase.
            pid = resp.json()["data"]["persistentId"]
            pid_idx.append(pid)
        if "publish" in ds_conf:
            # Publishing requires a PID from the create step above; this
            # function never looks one up independently.
            if not pid:
                print("ERROR: PID missing!")
                sys.exit()
            api = NativeApi(
                config.BASE_URL,
                users[ds_conf["publish"]["user-handle"]]["api-token"])
            resp = api.publish_dataset(pid, release_type="major")

    # Datafiles
    # Phase 3: upload datafiles to the datasets created above. The config's
    # dataset_id key doubles as an index into both workflow["datasets"] and
    # pid_idx; the check below asserts the two stay in sync.
    # NOTE(review): this assumes ids are 0..n-1 in order — an out-of-range id
    # would raise IndexError before the error branch; verify config schema.
    for dataset_id, ds_datafiles in workflow["datafiles"].items():
        if int(dataset_id) == workflow["datasets"][int(dataset_id)]["id"]:
            pid = pid_idx[int(dataset_id)]
        else:
            print("ERROR: Dataset ID not matching.")
            sys.exit()
        for df_conf in ds_datafiles:
            if "upload" in df_conf:
                api = NativeApi(
                    config.BASE_URL,
                    users[df_conf["upload"]["user-handle"]]["api-token"],
                )
                metadata = read_json(df_conf["upload"]["metadata-filename"])
                df = Datafile()
                df.set(metadata)
                if "update" in df_conf["upload"]:
                    for key, val in df_conf["upload"]["update"].items():
                        kwargs = {key: val}
                        df.set(kwargs)
                # Attach the file to the target dataset via its PID.
                df.set({"pid": pid})
                filename = df_conf["upload"]["filename"]
                resp = api.upload_datafile(pid, filename, df.json())
                # Tabular formats (.sav/.dta) are ingested server-side, which
                # takes longer — wait before the next request.
                if filename[-4:] == ".sav" or filename[-4:] == ".dta":
                    sleep(30)
                else:
                    sleep(3)
            if "publish-dataset" in df_conf:
                api = NativeApi(
                    config.BASE_URL,
                    users[df_conf["publish-dataset"]["user-handle"]]["api-token"],
                )
                # Only publish when the config value is truthy.
                if df_conf["publish-dataset"]:
                    resp = api.publish_dataset(pid, release_type="major")