def test_token_right_create_dataset_rights(self):
    BASE_URL = os.getenv("BASE_URL")
    api_su = NativeApi(BASE_URL, os.getenv("API_TOKEN_SUPERUSER"))
    api_nru = NativeApi(BASE_URL, os.getenv("API_TOKEN_TEST_NO_RIGHTS"))
    resp = api_su.get_info_version()
    assert resp.json()["data"]["version"] == "4.18.1"
    assert resp.json()["data"]["build"] == "267-a91d370"
    # resp = api_nru.get_info_version()
    # assert resp.json()["data"]["version"] == "4.18.1"
    # assert resp.json()["data"]["build"] == "267-a91d370"
    ds = Dataset()
    ds.from_json(
        read_file(
            os.path.join(BASE_DIR, "tests/data/dataset_upload_min_default.json")
        )
    )
    resp = api_su.create_dataset(":root", ds.json())
    pid = resp.json()["data"]["persistentId"]
    assert resp.json()["status"] == "OK"
    # with pytest.raises(ApiAuthorizationError):
    #     resp = api_nru.get_dataset(pid)
    resp = api_su.delete_dataset(pid)
    assert resp.json()["status"] == "OK"
def test_token_empty_string(self):
    BASE_URL = os.getenv("BASE_URL")
    api = NativeApi(BASE_URL, "")
    resp = api.get_info_version()
    assert resp.json()["data"]["version"] == "4.18.1"
    assert resp.json()["data"]["build"] == "267-a91d370"
    ds = Dataset()
    ds.from_json(
        read_file(
            os.path.join(BASE_DIR, "tests/data/dataset_upload_min_default.json")
        )
    )
    # Only the API call belongs inside the raises block, so setup errors
    # cannot be mistaken for the expected authorization failure.
    with pytest.raises(ApiAuthorizationError):
        api.create_dataset(":root", ds.json())
def test_token_no_rights(self):
    BASE_URL = os.getenv("BASE_URL")
    API_TOKEN = os.getenv("API_TOKEN_NO_RIGHTS")
    api = NativeApi(BASE_URL, API_TOKEN)
    resp = api.get_info_version()
    # All tests in this suite hit the same BASE_URL instance, so the
    # expected version/build must match the values asserted above.
    assert resp.json()["data"]["version"] == "4.18.1"
    assert resp.json()["data"]["build"] == "267-a91d370"
    ds = Dataset()
    ds.from_json(
        read_file(
            os.path.join(BASE_DIR, "tests/data/dataset_upload_min_default.json")
        )
    )
    with pytest.raises(ApiAuthorizationError):
        api.create_dataset(":root", ds.json())
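# The tests above assume BASE_URL, API_TOKEN_SUPERUSER, API_TOKEN_TEST_NO_RIGHTS
# and API_TOKEN_NO_RIGHTS are exported before the suite runs. A minimal sketch
# of a guard that skips the tests when they are missing -- the fixture name,
# the REQUIRED_ENV_VARS list, and its placement (e.g. in conftest.py) are
# illustrative assumptions, not part of the original module:

REQUIRED_ENV_VARS = [
    "BASE_URL",
    "API_TOKEN_SUPERUSER",
    "API_TOKEN_TEST_NO_RIGHTS",
    "API_TOKEN_NO_RIGHTS",
]


@pytest.fixture(autouse=True)
def require_env_vars():
    """Skip a test when the instance credentials are not configured."""
    missing = [var for var in REQUIRED_ENV_VARS if not os.getenv(var)]
    if missing:
        pytest.skip("missing environment variables: {}".format(", ".join(missing)))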
def create_testdata(config_file: str, force: bool) -> None:
    """Create testdata defined in a config file.

    Creates a pre-defined set of testdata on your instance. By default,
    the function uses the AUSSDA test data repository, which is so far
    not publicly available.

    If `PRODUCTION` is `true`, this function will not execute unless you
    add `--force` to the call. This protects a production instance from
    unwanted changes.
    """
    # Init
    if config.PRODUCTION and not force:
        print(
            "Creating testdata on a PRODUCTION instance is not allowed. "
            "Use --force to override."
        )
        sys.exit()
    pid_idx = []
    users = read_json(config.USER_FILENAME)
    workflow = read_json(os.path.join(ROOT_DIR, config_file))

    # Dataverses
    for dv_conf in workflow["dataverses"]:
        dv_alias = None
        if "create" in dv_conf:
            api = NativeApi(
                config.BASE_URL, users[dv_conf["create"]["user-handle"]]["api-token"]
            )
            dv = Dataverse()
            dv_filename = os.path.join(
                ROOT_DIR, dv_conf["create"]["metadata-filename"]
            )
            dv.from_json(read_file(dv_filename))
            if "update" in dv_conf["create"]:
                for key, val in dv_conf["create"]["update"].items():
                    dv.set({key: val})
            dv_alias = dv.get()["alias"]
            resp = api.create_dataverse(dv_conf["create"]["parent"], dv.json())
        if "publish" in dv_conf:
            api = NativeApi(
                config.BASE_URL, users[dv_conf["publish"]["user-handle"]]["api-token"]
            )
            if not dv_alias and "alias" in dv_conf["publish"]:
                dv_alias = dv_conf["publish"]["alias"]
            resp = api.publish_dataverse(dv_alias)

    # Datasets
    for ds_conf in workflow["datasets"]:
        pid = None
        if "create" in ds_conf:
            api = NativeApi(
                config.BASE_URL, users[ds_conf["create"]["user-handle"]]["api-token"]
            )
            ds = Dataset()
            ds_filename = os.path.join(
                ROOT_DIR, ds_conf["create"]["metadata-filename"]
            )
            ds.from_json(read_file(ds_filename))
            if "update" in ds_conf["create"]:
                for key, val in ds_conf["create"]["update"].items():
                    ds.set({key: val})
            resp = api.create_dataset(dv_alias, ds.json())
            pid = resp.json()["data"]["persistentId"]
            pid_idx.append(pid)
        if "publish" in ds_conf:
            if not pid:
                print("ERROR: PID missing!")
                sys.exit()
            api = NativeApi(
                config.BASE_URL, users[ds_conf["publish"]["user-handle"]]["api-token"]
            )
            resp = api.publish_dataset(pid, release_type="major")

    # Datafiles
    for dataset_id, ds_datafiles in workflow["datafiles"].items():
        # The datafile block's key must match the "id" of the dataset at the
        # same position in the datasets list.
        if int(dataset_id) == workflow["datasets"][int(dataset_id)]["id"]:
            pid = pid_idx[int(dataset_id)]
        else:
            print("ERROR: Dataset ID does not match.")
            sys.exit()
        for df_conf in ds_datafiles:
            if "upload" in df_conf:
                api = NativeApi(
                    config.BASE_URL,
                    users[df_conf["upload"]["user-handle"]]["api-token"],
                )
                metadata = read_json(df_conf["upload"]["metadata-filename"])
                df = Datafile()
                df.set(metadata)
                if "update" in df_conf["upload"]:
                    for key, val in df_conf["upload"]["update"].items():
                        df.set({key: val})
                df.set({"pid": pid})
                filename = df_conf["upload"]["filename"]
                resp = api.upload_datafile(pid, filename, df.json())
                # Tabular files get ingested server-side; give that time to finish.
                if filename.endswith((".sav", ".dta")):
                    sleep(30)
                else:
                    sleep(3)
            if "publish-dataset" in df_conf:
                api = NativeApi(
                    config.BASE_URL,
                    users[df_conf["publish-dataset"]["user-handle"]]["api-token"],
                )
                if df_conf["publish-dataset"]:
                    resp = api.publish_dataset(pid, release_type="major")
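# A sketch of the workflow config this function expects, reconstructed from
# the lookups above. The file paths, user handle, and alias are illustrative
# placeholders, not shipped fixtures:
#
# {
#     "dataverses": [
#         {
#             "create": {
#                 "user-handle": "admin",
#                 "parent": ":root",
#                 "metadata-filename": "tests/data/dataverse_upload_min.json",
#                 "update": {"alias": "test_dv"}
#             },
#             "publish": {"user-handle": "admin", "alias": "test_dv"}
#         }
#     ],
#     "datasets": [
#         {
#             "id": 0,
#             "create": {
#                 "user-handle": "admin",
#                 "metadata-filename": "tests/data/dataset_upload_min_default.json"
#             },
#             "publish": {"user-handle": "admin"}
#         }
#     ],
#     "datafiles": {
#         "0": [
#             {
#                 "upload": {
#                     "user-handle": "admin",
#                     "metadata-filename": "tests/data/datafile_upload_min.json",
#                     "filename": "tests/data/datafile.txt"
#                 },
#                 "publish-dataset": {"user-handle": "admin"}
#             }
#         ]
#     }
# }
#
# With a config like that saved relative to ROOT_DIR, a call might look like
# (the path is a hypothetical example):
#
#     create_testdata("utils/config_default.json", force=False)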