Code example #1
import json

def parse_dataset_keys(dataset, data, terms_filename):
    """Map flattened 'dataverse.*' keys onto a Dataverse metadata dict.

    `read_file` is a helper defined elsewhere in the project.
    """
    ds_tmp = {}
    ds_id = None
    ds_tmp['termsOfAccess'] = read_file(terms_filename)
    for key, val in dataset.items():
        if val == '':
            continue
        if key == 'organization.dataset_id':
            ds_id = val
        elif key == 'dataverse.title':
            ds_tmp['title'] = val
        elif key == 'dataverse.subtitle':
            ds_tmp['subtitle'] = val
        elif key == 'dataverse.author':
            # These values are JSON-encoded lists/objects.
            ds_tmp['author'] = json.loads(val)
        elif key == 'dataverse.dsDescription':
            ds_tmp['dsDescription'] = [{'dsDescriptionValue': val}]
        elif key == 'dataverse.keywordValue':
            ds_tmp['keyword'] = json.loads(val)
        elif key == 'dataverse.topicClassification':
            ds_tmp['topicClassification'] = json.loads(val)
        elif key == 'dataverse.language':
            ds_tmp['language'] = json.loads(val)
        elif key == 'dataverse.subject':
            ds_tmp['subject'] = [val]
        elif key == 'dataverse.kindOfData':
            ds_tmp['kindOfData'] = json.loads(val)
        elif key == 'dataverse.datasetContact':
            ds_tmp['datasetContact'] = json.loads(val)
    data[ds_id] = {'metadata': ds_tmp}
    return data
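
A minimal usage sketch for the parser above, assuming the input dict comes from a flattened CSV row; `read_file` is the project's own helper, and every filename and value below is hypothetical:

# Hypothetical flattened row; keys follow the 'dataverse.*' naming above.
row = {
    'organization.dataset_id': 'ds_001',
    'dataverse.title': 'Example Survey 2020',
    'dataverse.subject': 'Social Sciences',
    'dataverse.author': '[{"authorName": "Doe, Jane"}]',  # JSON-encoded list
}
datasets = parse_dataset_keys(row, {}, 'terms_of_access.txt')
print(datasets['ds_001']['metadata']['title'])  # -> Example Survey 2020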
Code example #2
File: test_api.py Project: atrisovic/pyDataverse
        def test_token_right_create_dataset_rights(self):
            BASE_URL = os.getenv("BASE_URL")
            api_su = NativeApi(BASE_URL, os.getenv("API_TOKEN_SUPERUSER"))
            api_nru = NativeApi(BASE_URL,
                                os.getenv("API_TOKEN_TEST_NO_RIGHTS"))

            resp = api_su.get_info_version()
            assert resp.json()["data"]["version"] == "4.18.1"
            assert resp.json()["data"]["build"] == "267-a91d370"
            # resp = api_nru.get_info_version()
            # assert resp.json()["data"]["version"] == "4.18.1"
            # assert resp.json()["data"]["build"] == "267-a91d370"

            ds = Dataset()
            ds.from_json(
                read_file(
                    os.path.join(
                        BASE_DIR,
                        "tests/data/dataset_upload_min_default.json")))
            resp = api_su.create_dataset(":root", ds.json())
            assert resp.json()["status"] == "OK"
            pid = resp.json()["data"]["persistentId"]

            # with pytest.raises(ApiAuthorizationError):
            #     resp = api_nru.get_dataset(pid)

            resp = api_su.delete_dataset(pid)
            assert resp.json()["status"] == "OK"
Code example #3
def json_upload_full():
    """Get JSON string of full Datafile.

    Returns
    -------
    str
        JSON string.

    """
    return read_file(test_config["datafile_upload_full_filename"])
Code example #4
def json_upload_min():
    """Get JSON string of minimum Datafile.

    Returns
    -------
    str
        JSON string.

    """
    return read_file(test_config["datafile_upload_min_filename"])
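
Both fixtures depend on `read_file` and a `test_config` dict that the test suite defines elsewhere. A minimal sketch of those helpers, assuming plain UTF-8 JSON files; the paths are placeholders, not the project's real ones:

import os

BASE_DIR = os.path.dirname(os.path.abspath(__file__))

# Maps fixture names to metadata files on disk (hypothetical paths).
test_config = {
    "datafile_upload_min_filename": os.path.join(
        BASE_DIR, "tests/data/datafile_upload_min.json"),
    "datafile_upload_full_filename": os.path.join(
        BASE_DIR, "tests/data/datafile_upload_full.json"),
}

def read_file(filename, mode="r"):
    # Return the file's content as a single string.
    with open(filename, mode) as f:
        return f.read()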
Code example #5
        def test_dataverse_from_json_to_json_valid(self):
            """Test Dataverse to JSON from JSON with valid data."""
            data = [
                ((json_upload_min(),), {}),
                ((json_upload_full(),), {}),
                ((json_upload_min(),), {"data_format": "dataverse_upload"}),
                ((json_upload_min(),), {"validate": False}),
                ((json_upload_min(),),
                 {"filename_schema": "wrong", "validate": False}),
                ((json_upload_min(),),
                 {"filename_schema":
                      test_config["datafile_upload_schema_filename"],
                  "validate": True}),
                (("{}",), {"validate": False}),
            ]

            for args, kwargs in data:
                pdv_start = data_object()
                pdv_start.from_json(*args, **kwargs)
                # json() only needs the validate flag; drop the other kwargs
                # when validation is disabled.
                json_kwargs = kwargs
                if "validate" in kwargs and not kwargs["validate"]:
                    json_kwargs = {"validate": False}
                write_json(
                    test_config["datafile_json_output_filename"],
                    json.loads(pdv_start.json(**json_kwargs)),
                )
                pdv_end = data_object()
                pdv_end.from_json(
                    read_file(test_config["datafile_json_output_filename"]),
                    **kwargs)

                for key in pdv_end.get():
                    assert getattr(pdv_start, key) == getattr(pdv_end, key)
                assert len(pdv_start.__dict__) == len(pdv_end.__dict__)
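
The round trip exercised above, distilled into a standalone sketch: parse JSON into a model, dump it, and parse it again; the two objects must compare equal. `Datafile`, `from_json`, and `json` are pyDataverse names; the metadata string is a hypothetical minimal payload, and validation is switched off so the sketch does not depend on a schema file or required fields:

from pyDataverse.models import Datafile

src = '{"description": "A minimal example datafile"}'  # hypothetical metadata
df_start = Datafile()
df_start.from_json(src, validate=False)
df_end = Datafile()
df_end.from_json(df_start.json(validate=False), validate=False)
assert df_start.get() == df_end.get()  # parse -> dump -> parse is lossless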
Code example #6
File: test_api.py Project: atrisovic/pyDataverse
        def test_token_empty_string(self):
            BASE_URL = os.getenv("BASE_URL")
            api = NativeApi(BASE_URL, "")
            resp = api.get_info_version()
            assert resp.json()["data"]["version"] == "4.18.1"
            assert resp.json()["data"]["build"] == "267-a91d370"

            ds = Dataset()
            ds.from_json(
                read_file(
                    os.path.join(
                        BASE_DIR,
                        "tests/data/dataset_upload_min_default.json")))
            # Only the call under test belongs inside the raises block.
            with pytest.raises(ApiAuthorizationError):
                api.create_dataset(":root", ds.json())
Code example #7
File: test_api.py Project: MRIPAS/pyDataverse
        def test_token_no_rights(self):
            BASE_URL = os.getenv("BASE_URL")
            API_TOKEN = os.getenv("API_TOKEN_NO_RIGHTS")
            api = NativeApi(BASE_URL, API_TOKEN)
            resp = api.get_info_version()
            assert resp.json()["data"]["version"] == "4.15.1"
            assert resp.json()["data"]["build"] == "1377-701b56b"

            ds = Dataset()
            ds.from_json(
                read_file(
                    os.path.join(
                        BASE_DIR,
                        "tests/data/dataset_upload_min_default.json")))
            # Only the call under test belongs inside the raises block.
            with pytest.raises(ApiAuthorizationError):
                api.create_dataset(":root", ds.json())
Code example #8
File: __init__.py Project: AUSSDA/dataverse_tests
def create_testdata(config_file: str, force: bool) -> None:
    """Create testdata defined in a config file.

    Creates a pre-defined set of testdata on your
    instance. By default, the function uses the
    AUSSDA test data repository, which is so far not
    publicly available. If `PRODUCTION` is `true`,
    this function will not execute, as long as you
    not add `--force` to the function call. This is
    to protect from unwanted changes on a production
    instance.

    """
    # Init
    if config.PRODUCTION and not force:
        print(
            "Creating testdata on a PRODUCTION instance is not allowed. "
            "Use --force to override."
        )
        sys.exit()
    pid_idx = []
    users = read_json(config.USER_FILENAME)
    workflow = read_json(os.path.join(ROOT_DIR, config_file))

    # Dataverses
    for dv_conf in workflow["dataverses"]:
        dv_alias = None
        if "create" in dv_conf:
            api = NativeApi(
                config.BASE_URL,
                users[dv_conf["create"]["user-handle"]]["api-token"])
            dv = Dataverse()
            dv_filename = os.path.join(ROOT_DIR,
                                       dv_conf["create"]["metadata-filename"])
            dv.from_json(read_file(dv_filename))
            if "update" in dv_conf["create"]:
                for key, val in dv_conf["create"]["update"].items():
                    kwargs = {key: val}
                    dv.set(kwargs)
            dv_alias = dv.get()["alias"]
            resp = api.create_dataverse(dv_conf["create"]["parent"], dv.json())

        if "publish" in dv_conf:
            api = NativeApi(
                config.BASE_URL,
                users[dv_conf["publish"]["user-handle"]]["api-token"])
            if not dv_alias and "alias" in dv_conf["publish"]:
                dv_alias = dv_conf["publish"]["alias"]
            resp = api.publish_dataverse(dv_alias)

    # Datasets
    for ds_conf in workflow["datasets"]:
        pid = None
        if "create" in ds_conf:
            api = NativeApi(
                config.BASE_URL,
                users[ds_conf["create"]["user-handle"]]["api-token"])
            ds = Dataset()
            ds_filename = os.path.join(ROOT_DIR,
                                       ds_conf["create"]["metadata-filename"])
            ds.from_json(read_file(ds_filename))
            if "update" in ds_conf["create"]:
                for key, val in ds_conf["create"]["update"].items():
                    kwargs = {key: val}
                    ds.set(kwargs)
            resp = api.create_dataset(dv_alias, ds.json())
            pid = resp.json()["data"]["persistentId"]
            pid_idx.append(pid)

        if "publish" in ds_conf:
            if not pid:
                print("ERROR: PID missing!")
                sys.exit()
            api = NativeApi(
                config.BASE_URL,
                users[ds_conf["publish"]["user-handle"]]["api-token"])
            resp = api.publish_dataset(pid, release_type="major")

    # Datafiles
    for dataset_id, ds_datafiles in workflow["datafiles"].items():
        # The datafile block's dataset_id must match the dataset's declared "id".
        if int(dataset_id) == workflow["datasets"][int(dataset_id)]["id"]:
            pid = pid_idx[int(dataset_id)]
        else:
            print("ERROR: Dataset ID does not match.")
            sys.exit()
        for df_conf in ds_datafiles:
            if "upload" in df_conf:
                api = NativeApi(
                    config.BASE_URL,
                    users[df_conf["upload"]["user-handle"]]["api-token"],
                )
                metadata = read_json(df_conf["upload"]["metadata-filename"])
                df = Datafile()
                df.set(metadata)
                if "update" in df_conf["upload"]:
                    for key, val in df_conf["upload"]["update"].items():
                        kwargs = {key: val}
                        df.set(kwargs)
                df.set({"pid": pid})
                filename = df_conf["upload"]["filename"]
                resp = api.upload_datafile(pid, filename, df.json())
                if filename.endswith((".sav", ".dta")):
                    # Tabular files trigger server-side ingest; wait longer.
                    sleep(30)
                else:
                    sleep(3)
        if "publish-dataset" in df_conf:
            api = NativeApi(
                config.BASE_URL,
                users[df_conf["publish-dataset"]["user-handle"]]["api-token"],
            )
            if df_conf["publish-dataset"]:
                resp = api.publish_dataset(pid, release_type="major")
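
For reference, the shape of the workflow config that `create_testdata` consumes, reconstructed from the keys the function reads above; all user handles, filenames, and aliases are hypothetical placeholders:

# Hypothetical workflow config matching the keys read by create_testdata.
workflow = {
    "dataverses": [{
        "create": {
            "user-handle": "admin",
            "parent": ":root",
            "metadata-filename": "data/dataverse_min.json",
            "update": {"alias": "test_dv"},
        },
        "publish": {"user-handle": "admin", "alias": "test_dv"},
    }],
    "datasets": [{
        "id": 0,  # must equal the entry's position; checked before uploads
        "create": {"user-handle": "admin",
                   "metadata-filename": "data/dataset_min.json"},
        "publish": {"user-handle": "admin"},
    }],
    "datafiles": {
        "0": [{
            "upload": {"user-handle": "admin",
                       "metadata-filename": "data/datafile_min.json",
                       "filename": "data/file.csv"},
            "publish-dataset": {"user-handle": "admin"},
        }],
    },
}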