Example #1
    def test_dataverse_init_valid(self):
        """Test Dataverse.__init__() with valid data."""
        # (Dataverse instance, expected attribute values) pairs
        data = [
            (Dataverse(), {}),
            (Dataverse(dict_flat_set_min()), object_data_min()),
            (Dataverse(dict_flat_set_full()), object_data_full()),
            (Dataverse({}), {}),
        ]

        for pdv, data_eval in data:
            for key, val in data_eval.items():
                assert getattr(pdv, key) == val
            assert len(pdv.__dict__) - len(object_data_init()) == len(data_eval)
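For orientation, a hedged sketch of the helper pair this test relies on; the real helpers live in the suite's conftest, and the values below are only illustrative (inferred from the assertions in Examples #7 and #14), not authoritative:

def dict_flat_set_min():
    # Hypothetical minimal flat dict accepted by Dataverse().
    return {
        "alias": "test-pyDataverse",
        "name": "Test pyDataverse",
        "dataverseContacts": [{"contactEmail": "*****@*****.**"}],
    }

def object_data_min():
    # Hypothetical expected attribute values after Dataverse(dict_flat_set_min()).
    return dict_flat_set_min()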
Example #2
    def test_dataverse_init_invalid(self):
        """Test Dataverse.init() with invalid data."""
        pdv = Dataverse()

        # invalid data
        for data in test_config["invalid_set_types"]:
            with pytest.raises(AssertionError):
                pdv.set(data)
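The shared `test_config` is not shown in these examples. Assuming `set()` rejects any non-dict input via an `assert`, a plausible (hypothetical) shape of the entry used above is:

test_config = {
    # hypothetical values; anything that is not a dict should trip the assert
    "invalid_set_types": [None, True, 1, "string", ["list"], ("tuple",)],
}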
Example #3
def data_object():
    """Get Dataverse object.

    Returns
    -------
    pyDataverse.models.Dataverse
        :class:`Dataverse` object.
    """
    return Dataverse()
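In a pytest suite, a factory like this is typically registered as a fixture so every test receives a fresh object; a minimal sketch (assuming pytest is available):

import pytest

from pyDataverse.models import Dataverse


@pytest.fixture()
def data_object():
    # Return a fresh, empty Dataverse object for each test.
    return Dataverse()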
Example #4
    def test_dataverse_json_all_valid(self, import_dataverse_min_dict):
        """Test Dataverse.json() with format=`all` and valid data.

        Parameters
        ----------
        import_dataverse_min_dict : dict
            Fixture that returns a flat Dataverse dict() loaded from
            `tests/data/dataverse_min.json`.

        """
        data = import_dataverse_min_dict
        dv = Dataverse()
        dv.set(data)
        dv.datasets = [Dataset()]
        dv.dataverses = [Dataverse()]
        dv.pid = 'doi:10.11587/EVMUHP'
        data = dv.json('all')

        assert data
        assert isinstance(data, str)
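Because `json('all')` returns a string, a natural follow-up check is to parse it back and inspect a known key; this sketch assumes the `all` JSON mirrors the `all` dict shown in Example #9:

import json

parsed = json.loads(data)
assert parsed["alias"] == "test-pyDataverse"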
Example #5
    def test_dataverse_json_format_wrong_valid(self,
                                               import_dataverse_min_dict):
        """Test Dataverse.json() with non-valid format and valid data.

        Parameters
        ----------
        import_dataverse_min_dict : dict
            Fixture that returns a flat Dataverse dict() loaded from
            `tests/data/dataverse_min.json`.

        """
        data = import_dataverse_min_dict
        dv = Dataverse()
        dv.set(data)
        dv.datasets = [Dataset()]
        dv.dataverses = [Dataverse()]
        dv.pid = 'doi:10.11587/EVMUHP'
        data = dv.json('wrong')

        assert not data
Example #6
    def test_dataverse_is_valid_valid(self, import_dataverse_min_dict):
        """Test Dataverse.is_valid() with valid data.

        Parameters
        ----------
        import_dataverse_min_dict : dict
            Fixture that returns a flat Dataverse dict() loaded from
            `tests/data/dataverse_min.json`.

        """
        data = import_dataverse_min_dict
        dv = Dataverse()
        dv.set(data)

        assert dv.is_valid()
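A hedged negative counterpart: Example #10 suggests `name` is a required attribute, so removing it should make validation fail (an assumption about `is_valid()`, not confirmed by these examples):

dv.name = None
assert not dv.is_valid()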
Example #7
    def test_dataverse_import_metadata_dv_up(self):
        """Test Dataverse.import_metadata() with format=`dv_up`."""
        dv = Dataverse()
        dv.import_metadata(TEST_DIR + '/data/dataverse_min.json')

        assert isinstance(dv.datasets, list)
        assert not dv.datasets
        assert isinstance(dv.dataverses, list)
        assert not dv.dataverses
        assert not dv.pid
        assert dv.alias == 'test-pyDataverse'
        assert dv.name == 'Test pyDataverse'
        assert isinstance(dv.dataverseContacts, list)
        assert len(dv.dataverseContacts) == 1
        assert dv.dataverseContacts[0]['contactEmail'] == '*****@*****.**'
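From these assertions, `tests/data/dataverse_min.json` must contain roughly the following, shown here as the parsed Python dict (the contact email is redacted in the source):

dataverse_min = {
    "alias": "test-pyDataverse",
    "name": "Test pyDataverse",
    "dataverseContacts": [{"contactEmail": "*****@*****.**"}],  # redacted
}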
Example #8
    def test_dataverse_dict_format_wrong(self, import_dataverse_min_dict):
        """Test Dataverse.dict() with non-valid format.

        Parameters
        ----------
        import_dataverse_min_dict : dict
            Fixture that returns a flat Dataverse dict() loaded from
            `tests/data/dataverse_min.json`.

        """
        data = import_dataverse_min_dict
        dv = Dataverse()
        dv.set(data)

        assert not dv.dict('wrong')
Example #9
    def test_dataverse_dict_all_valid(self, import_dataverse_min_dict):
        """Test Dataverse.dict() with format=`all` and valid data.

        Parameters
        ----------
        import_dataverse_min_dict : dict
            Fixture that returns a flat Dataverse dict() loaded from
            `tests/data/dataverse_min.json`.

        """
        data = import_dataverse_min_dict
        dv = Dataverse()
        dv.set(data)
        dv.datasets = [Dataset()]
        dv.dataverses = [Dataverse()]
        dv.pid = 'doi:10.11587/EVMUHP'
        data = dv.dict('all')

        assert data
        assert isinstance(data, dict)
        assert data['alias'] == 'test-pyDataverse'
        assert data['name'] == 'Test pyDataverse'
        assert data['dataverseContacts'][0]['contactEmail'] == '*****@*****.**'
        assert data['pid'] == 'doi:10.11587/EVMUHP'
Example #10
    def test_dataverse_json_dv_up_valid_not(self, import_dataverse_min_dict):
        """Test Dataverse.json() with format=`dv_up` and non-valid data.

        Parameters
        ----------
        import_dataverse_min_dict : dict
            Fixture that returns a flat Dataverse dict() loaded from
            `tests/data/dataverse_min.json`.

        """
        data = import_dataverse_min_dict
        dv = Dataverse()
        dv.set(data)
        dv.name = None

        assert not dv.json()
Example #11
    def test_dataverse_json_dv_up_valid(self, import_dataverse_min_dict):
        """Test Dataverse.json() with format=`dv_up` and valid data.

        Parameters
        ----------
        import_dataverse_min_dict : dict
            Fixture that returns a flat Dataverse dict() loaded from
            `tests/data/dataverse_min.json`.

        """
        data = import_dataverse_min_dict
        dv = Dataverse()
        dv.set(data)

        assert dv.json()
        assert isinstance(dv.json(), str)
Example #12
    def test_dataverse_init(self):
        """Test Dataverse.__init__()."""
        dv = Dataverse()

        assert isinstance(dv.datasets, list)
        assert len(dv.datasets) == 0
        assert isinstance(dv.dataverses, list)
        assert len(dv.dataverses) == 0
        assert not dv.pid
        assert not dv.name
        assert not dv.alias
        assert isinstance(dv.dataverseContacts, list)
        assert len(dv.dataverseContacts) == 0
        assert not dv.affiliation
        assert not dv.description
        assert not dv.dataverseType
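Read together, these assertions pin down the defaults; a sketch of what `Dataverse.__init__()` presumably sets (not the actual pyDataverse source):

class Dataverse:
    def __init__(self):
        self.datasets = []
        self.dataverses = []
        self.pid = None
        self.name = None
        self.alias = None
        self.dataverseContacts = []
        self.affiliation = None
        self.description = None
        self.dataverseType = None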
Example #13
    def test_dataverse_import_metadata_format_wrong(self):
        """Test Dataverse.import_metadata() with non-valid format."""
        dv = Dataverse()
        dv.import_metadata(TEST_DIR + '/data/dataverse_min.json', 'wrong')

        assert isinstance(dv.datasets, list)
        assert not dv.datasets
        assert isinstance(dv.dataverses, list)
        assert not dv.dataverses
        assert not dv.pid
        assert not dv.name
        assert not dv.alias
        assert isinstance(dv.dataverseContacts, list)
        assert not dv.dataverseContacts
        assert not dv.affiliation
        assert not dv.description
        assert not dv.dataverseType
Example #14
    def test_dataverse_set_dv_up(self, import_dataverse_min_dict):
        """Test Dataverse.set() with format=`dv_up`.

        Parameters
        ----------
        import_dataverse_min_dict : dict
            Fixture that returns a flat Dataverse dict() loaded from
            `tests/data/dataverse_min.json`.

        """
        data = import_dataverse_min_dict
        dv = Dataverse()
        dv.set(data)

        assert isinstance(dv.datasets, list)
        assert not dv.datasets
        assert isinstance(dv.dataverses, list)
        assert not dv.dataverses
        assert not dv.pid
        assert dv.alias == 'test-pyDataverse'
        assert dv.name == 'Test pyDataverse'
        assert len(dv.dataverseContacts) == 1
        assert dv.dataverseContacts[0]['contactEmail'] == '*****@*****.**'
Example #15
def create_testdata(config_file: str, force: bool) -> None:
    """Create testdata defined in a config file.

    Creates a pre-defined set of testdata on your
    instance. By default, the function uses the
    AUSSDA test data repository, which is so far not
    publicly available. If `PRODUCTION` is `true`,
    this function will not execute, as long as you
    not add `--force` to the function call. This is
    to protect from unwanted changes on a production
    instance.

    """
    # Init
    if config.PRODUCTION and not force:
        print(
            "Creating testdata on a PRODUCTION instance is not allowed. "
            "Use --force to override."
        )
        sys.exit()
    pid_idx = []
    users = read_json(config.USER_FILENAME)
    workflow = read_json(os.path.join(ROOT_DIR, config_file))

    # Dataverses
    for dv_conf in workflow["dataverses"]:
        dv_alias = None
        if "create" in dv_conf:
            api = NativeApi(
                config.BASE_URL,
                users[dv_conf["create"]["user-handle"]]["api-token"])
            dv = Dataverse()
            dv_filename = os.path.join(ROOT_DIR,
                                       dv_conf["create"]["metadata-filename"])
            dv.from_json(read_file(dv_filename))
            if "update" in dv_conf["create"]:
                for key, val in dv_conf["create"]["update"].items():
                    kwargs = {key: val}
                    dv.set(kwargs)
            dv_alias = dv.get()["alias"]
            resp = api.create_dataverse(dv_conf["create"]["parent"], dv.json())

        if "publish" in dv_conf:
            api = NativeApi(
                config.BASE_URL,
                users[dv_conf["publish"]["user-handle"]]["api-token"])
            if not dv_alias and "alias" in dv_conf["publish"]:
                dv_alias = dv_conf["publish"]["alias"]
            resp = api.publish_dataverse(dv_alias)

    # Datasets
    for ds_conf in workflow["datasets"]:
        pid = None
        if "create" in ds_conf:
            api = NativeApi(
                config.BASE_URL,
                users[ds_conf["create"]["user-handle"]]["api-token"])
            ds = Dataset()
            ds_filename = os.path.join(ROOT_DIR,
                                       ds_conf["create"]["metadata-filename"])
            ds.from_json(read_file(ds_filename))
            if "update" in ds_conf["create"]:
                for key, val in ds_conf["create"]["update"].items():
                    kwargs = {key: val}
                    ds.set(kwargs)
            resp = api.create_dataset(dv_alias, ds.json())
            pid = resp.json()["data"]["persistentId"]
            pid_idx.append(pid)

        if "publish" in ds_conf:
            if not pid:
                print("ERROR: PID missing!")
                sys.exit()
            api = NativeApi(
                config.BASE_URL,
                users[ds_conf["publish"]["user-handle"]]["api-token"])
            resp = api.publish_dataset(pid, release_type="major")

    # Datafiles
    for dataset_id, ds_datafiles in workflow["datafiles"].items():
        # The datafiles key must match the corresponding dataset's "id" in
        # the workflow config, so the correct PID is looked up.
        if int(dataset_id) == workflow["datasets"][int(dataset_id)]["id"]:
            pid = pid_idx[int(dataset_id)]
        else:
            print("ERROR: Dataset ID does not match.")
            sys.exit()
        for df_conf in ds_datafiles:
            if "upload" in df_conf:
                api = NativeApi(
                    config.BASE_URL,
                    users[df_conf["upload"]["user-handle"]]["api-token"],
                )
                metadata = read_json(df_conf["upload"]["metadata-filename"])
                df = Datafile()
                df.set(metadata)
                if "update" in df_conf["upload"]:
                    for key, val in df_conf["upload"]["update"].items():
                        kwargs = {key: val}
                        df.set(kwargs)
                df.set({"pid": pid})
                filename = df_conf["upload"]["filename"]
                resp = api.upload_datafile(pid, filename, df.json())
                # SPSS/Stata files presumably trigger tabular ingest, which
                # takes longer to finish.
                if filename.endswith((".sav", ".dta")):
                    sleep(30)
                else:
                    sleep(3)
        if "publish-dataset" in df_conf:
            api = NativeApi(
                config.BASE_URL,
                users[df_conf["publish-dataset"]["user-handle"]]["api-token"],
            )
            if df_conf["publish-dataset"]:
                resp = api.publish_dataset(pid, release_type="major")
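The lookups above imply the shape of the workflow config file. A hedged sketch of what `read_json(config_file)` is expected to return; all handles, filenames, and aliases are hypothetical placeholders:

workflow = {
    "dataverses": [
        {
            "create": {
                "user-handle": "admin",               # hypothetical
                "metadata-filename": "data/dv.json",  # hypothetical
                "parent": ":root",                    # hypothetical
                "update": {"alias": "my-dv"},         # optional overrides
            },
            "publish": {"user-handle": "admin", "alias": "my-dv"},
        },
    ],
    "datasets": [
        {
            "id": 0,  # must match the corresponding key in "datafiles"
            "create": {"user-handle": "admin", "metadata-filename": "data/ds.json"},
            "publish": {"user-handle": "admin"},
        },
    ],
    "datafiles": {
        "0": [
            {
                "upload": {
                    "user-handle": "admin",
                    "metadata-filename": "data/df.json",
                    "filename": "data/file.csv",
                },
                "publish-dataset": {"user-handle": "admin"},
            },
        ],
    },
}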