def test_update_dataset(self):
    """Create a dataset from the minimal template, update its English title,
    and verify the updated record is still viewable through Etsin."""
    # Push a fresh dataset built from the minimal template into Metax.
    template = get_minimal_dataset_template()
    create_status, created = metax.create_dataset(template)
    self.assertIn(create_status, self.OK, "could not create dataset")

    # Mutate the English title and send the record back to Metax.
    created['research_dataset']['title']['en'] = 'title updated'
    update_status, updated = metax.update_dataset(created['id'], created)
    self.assertIn(update_status, self.OK, "Metax update failure")

    # The updated dataset must remain resolvable via its identifier in Etsin.
    etsin_status, etsin_data = etsin.view_dataset(updated["identifier"])
    self.assertIn(etsin_status, self.OK, "Etsin failure")
def test_delete_dataset(self):
    """Create a dataset, delete it from Metax, and verify Etsin no longer serves it."""
    data = get_minimal_dataset_template()
    status, cdata = metax.create_dataset(data)
    self.assertIn(status, self.OK, "could not create dataset")
    urn = cdata["identifier"]
    # Short pause so the new dataset can propagate before it is deleted.
    time.sleep(2)
    status = metax.delete_dataset(cdata['id'])
    self.assertIn(status, self.OK, "Metax dataset delete failure")
    # BUG FIX: the Etsin response was fetched but never checked. Assert the
    # deleted dataset is no longer found, mirroring the basic_dataset.json
    # variant of this test.
    etsin_status, etsin_data = etsin.view_dataset(urn)
    self.assertIn(etsin_status, self.FAIL, "Etsin found the deleted dataset")
def test_create_dataset(self):
    """Create a dataset in Metax from the minimal template and verify
    Etsin can resolve it by identifier."""
    # loading the example dataset
    data = get_minimal_dataset_template()
    status, cdata = metax.create_dataset(data)
    self.assertIn(status, self.OK, "could not create dataset")
    urn = cdata["identifier"]
    # Allow time for the dataset to propagate from Metax to Etsin.
    time.sleep(10)
    etsin_status, etsin_data = etsin.view_dataset(urn)
    # Fixed failure-message grammar ("could not found" -> "could not find").
    self.assertIn(etsin_status, self.OK, "Etsin could not find the dataset")
def test_update_dataset(self):
    """Create a dataset from basic_dataset.json, update its English title,
    and verify the updated record is viewable through Etsin."""
    data = load_json_file('basic_dataset.json')
    status, dataset = metax.create_dataset(data)
    self.assertIn(status, self.OK, "could not create dataset")
    # (removed dead commented-out load of 'metax_dataset.json')
    dataset['research_dataset']['title']['en'] = 'title updated'
    status, updated_data = metax.update_dataset(dataset['id'], dataset)
    self.assertIn(status, self.OK, "Metax update failure")
    urn = updated_data["identifier"]
    etsin_status, etsin_data = etsin.view_dataset(urn)
    self.assertIn(etsin_status, self.OK, "Etsin failure")
def test_create_dataset(self):
    """Create a dataset in Metax from basic_dataset.json and verify Etsin
    can resolve it by identifier."""
    # loading the example dataset
    data = load_json_file('basic_dataset.json')
    status, cdata = metax.create_dataset(data)
    self.assertIn(status, self.OK, "could not create dataset")
    urn = cdata["identifier"]
    # Allow time for the dataset to propagate from Metax to Etsin.
    time.sleep(10)
    etsin_status, etsin_data = etsin.view_dataset(urn)
    # Fixed failure-message grammar ("could not found" -> "could not find").
    self.assertIn(etsin_status, self.OK, "Etsin could not find the dataset")
def test_delete_dataset(self):
    """Ensure that a dataset deleted from Metax is no longer found in Etsin."""
    payload = load_json_file('basic_dataset.json')
    create_status, created = metax.create_dataset(payload)
    self.assertIn(create_status, self.OK, "could not create dataset")
    identifier = created["identifier"]

    # Brief pause so the dataset has time to propagate before deletion.
    time.sleep(2)
    delete_status = metax.delete_dataset(created['id'])
    self.assertIn(delete_status, self.OK, "Metax dataset delete failure")

    # Looking the dataset up in Etsin is now expected to fail.
    etsin_status, etsin_data = etsin.view_dataset(identifier)
    self.assertIn(etsin_status, self.FAIL, "Etsin found the deleted dataset")
def test_reject_dataset(self):
    """Create a dataset in Metax and reject the dataset for preservation."""
    # loading the example dataset
    data = get_minimal_dataset_template()
    # creating a dataset
    status, cdata = metax.create_dataset(data)
    self.assertIn(status, self.OK, "Metax create dataset fails")
    # Renamed local from 'id' to avoid shadowing the builtin.
    dataset_id = cdata['id']
    # rejecting the dataset
    status = pas.reject_dataset(dataset_id)
    self.assertIn(status, self.OK, "PAS dataset rejection fails")
def test_reject_dataset(self):
    """Create a dataset in Metax and reject the dataset for preservation."""
    # loading the example dataset
    data = load_json_file('basic_dataset.json')
    # creating a dataset
    status, cdata = metax.create_dataset(data)
    self.assertIn(status, self.OK, "Metax create dataset fails")
    # Renamed local from 'id' to avoid shadowing the builtin.
    dataset_id = cdata['id']
    # rejecting the dataset
    status = pas.reject_dataset(dataset_id)
    self.assertIn(status, self.OK, "PAS dataset rejection fails")
def test_delete_dataset(self):
    """Create a dataset, delete it from Metax, and verify Etsin no longer finds it."""
    data = get_minimal_dataset_template()
    status, response = metax.create_dataset(data)
    self.assertIn(status, self.OK, "could not create dataset: " + str(response))
    cdata = response.json()
    urn = cdata["identifier"]
    # Short pause so the new dataset can propagate before it is deleted.
    time.sleep(2)
    status, response = metax.delete_dataset(cdata['id'])
    self.assertIn(status, self.OK, "Metax dataset delete failure: " + str(response))
    # BUG FIX: the Etsin lookup result was fetched but never asserted; a
    # deleted dataset must not be found.
    etsin_status, etsin_data = etsin.view_dataset(urn)
    self.assertIn(etsin_status, self.FAIL, "Etsin found the deleted dataset")
def test_preserve_dataset(self):
    """Create a dataset in Metax and preserve the dataset."""
    # loading the example dataset
    data = get_minimal_dataset_template()
    # creating a dataset
    status, cdata = metax.create_dataset(data)
    self.assertIn(status, self.OK, "Metax create dataset fails")
    # Renamed local from 'id' to avoid shadowing the builtin.
    dataset_id = cdata['id']
    # preserving the dataset
    status = pas.preserve_dataset(dataset_id)
    self.assertIn(status, self.OK, "PAS preserve fails")
def test_preserve_dataset(self):
    """Create a dataset in Metax and preserve the dataset."""
    # loading the example dataset
    data = load_json_file('basic_dataset.json')
    # creating a dataset
    status, cdata = metax.create_dataset(data)
    self.assertIn(status, self.OK, "Metax create dataset fails")
    # Renamed local from 'id' to avoid shadowing the builtin.
    dataset_id = cdata['id']
    # preserving the dataset
    status = pas.preserve_dataset(dataset_id)
    self.assertIn(status, self.OK, "PAS preserve fails")
def test_reset_dataset(self):
    """Create a dataset in Metax, preserve the dataset and then reset the dataset."""
    # loading the example dataset
    data = get_minimal_dataset_template()
    # creating a dataset
    status, cdata = metax.create_dataset(data)
    self.assertIn(status, self.OK, "create dataset fails")
    # Renamed local from 'id' to avoid shadowing the builtin.
    dataset_id = cdata['id']
    # preserving the dataset
    status = pas.preserve_dataset(dataset_id)
    # Fixed message typo ("preservations fails"), matching the sibling
    # removal test's wording.
    self.assertIn(status, self.OK, "dataset preservation fails")
    # Give PAS time to process the preservation request before resetting.
    time.sleep(5)
    # resetting the dataset
    status = pas.reset_dataset(dataset_id)
    self.assertIn(status, self.OK, "dataset reset fails")
def test_remove_dataset(self):
    """Create a dataset in Metax, preserve the dataset and then remove the
    dataset from preservation."""
    # loading the example dataset
    data = load_json_file('basic_dataset.json')
    # creating a dataset
    status, cdata = metax.create_dataset(data)
    self.assertIn(status, self.OK, "create dataset fails")
    # Renamed local from 'id' to avoid shadowing the builtin.
    dataset_id = cdata['id']
    # preserving the dataset
    status = pas.preserve_dataset(dataset_id)
    self.assertIn(status, self.OK, "dataset preservation fails")
    # Give PAS time to process the preservation request before removal.
    time.sleep(5)
    # removing the dataset
    status = pas.remove_dataset(dataset_id)
    self.assertIn(status, self.OK, "dataset removal fails")
def _create_dataset_for_preservation(self, file_storage, key, data):
    """Build and create a preservation-ready dataset in Metax.

    Fetches a contract, fills a minimal dataset template with the metadata
    fields required for preservation, attaches the given files/directories
    payload, creates the dataset in Metax, links it to the contract via PAS,
    re-sends the dataset to Metax, and finally updates the file metadata.

    :param file_storage: suffix of the data catalog urn
                         ("urn:nbn:fi:att:data-catalog-" + file_storage)
    :param key: research_dataset key to set ('files' or 'directories',
                presumably -- both are initialised empty below)
    :param data: value stored under research_dataset[key]

    Side effect: stores the created dataset's Metax id in self.dataset_id.
    """
    # A contract is required before a dataset can be preserved.
    response = get_contract()
    self.assertIn(response.status_code, self.OK,
                  "Metax get contract fails: " + str(response))
    contract_id = response.json()['id']

    # Start from the minimal template and clear any file references;
    # the caller's payload is attached under `key` further below.
    dataset = get_minimal_dataset_template()
    dataset["research_dataset"]['files'] = []
    dataset["research_dataset"]['directories'] = []
    dataset["data_catalog"] = "urn:nbn:fi:att:data-catalog-" + file_storage
    # NOTE(review): hard-coded Qvain editor/owner ids -- presumably a fixed
    # test account; confirm against the test environment configuration.
    dataset["editor"] = {
        "owner_id": "053d18ecb29e752cb7a35cd77b34f5fd",
        "creator_id": "053d18ecb29e752cb7a35cd77b34f5fd",
        "identifier": "qvain",
        "record_id": "100"
    }
    dataset["research_dataset"]["access_rights"]["restriction_grounds"] = [
        {
            "identifier": ("http://uri.suomi.fi/codelist/fairdata/"
                           "restriction_grounds/code/other"),
            "pref_label": {
                "fi": "Avoin, ei tiedossa olevia rajoituksia",
                "und": "Avoin, ei tiedossa olevia rajoituksia"
            }
        }
    ]
    dataset["version_notes"] = ["This version is initial version."]
    # Provenance with a "Creation" preservation event.
    dataset["research_dataset"]["provenance"] = [{
        "preservation_event": {
            "in_scheme": ("http://uri.suomi.fi/codelist/fairdata/"
                          "preservation_event"),
            "identifier": ("http://uri.suomi.fi/codelist/fairdata/"
                           "preservation_event/code/cre"),
            "pref_label": {
                "en": "Creation",
                "fi": "Luonti",
                "und": "Luonti"
            }
        },
        "temporal": {
            "start_date": "2018-06-01T17:41:59+03:00",
            "end_date": "2018-06-02T17:41:59+03:00"
        },
        "description": {
            "en": "Provenance description"
        }
    }]
    # Publisher organisation (Aalto University test fixture).
    dataset["research_dataset"]["publisher"] = {
        "name": {
            "fi": "School services, ARTS",
            "und": "School services, ARTS"
        },
        "@type": "Organization",
        "homepage": {
            "title": {
                "en": "Publisher website",
                "fi": "Julkaisijan kotisivu"
            },
            "identifier": "http://www.publisher.fi/"
        },
        "identifier": "http://uri.suomi.fi/codelist/fairdata/organization/code/10076-A800",
        "is_part_of": {
            "name": {
                "en": "Aalto University",
                "fi": "Aalto yliopisto",
                "sv": "Aalto universitetet",
                "und": "Aalto yliopisto"
            },
            "@type": "Organization",
            "homepage": {
                "title": {
                    "en": "Publisher parent website",
                    # NOTE(review): this literal was split across lines in the
                    # extracted source; rejoined with a single space -- confirm
                    # against the original file.
                    "fi": "Julkaisijan yläorganisaation kotisivu"
                },
                "identifier": "http://www.publisher_parent.fi/"
            },
            "identifier": "http://uri.suomi.fi/codelist/fairdata/organization/code/10076"
        },
        "contributor_type": [{
            "in_scheme": "http://uri.suomi.fi/codelist/fairdata/contributor_type",
            "identifier": "http://uri.suomi.fi/codelist/fairdata/contributor_type/code/Distributor",
            "pref_label": {
                "en": "Distributor",
                "fi": "Jakelija",
                "sv": "Distributör",
                "und": "Jakelija"
            }
        }]
    }
    # Timestamp the record and make the title unique per test run.
    st = datetime.datetime.fromtimestamp(time.time())
    time_stamp = str(st.replace(microsecond=0).isoformat()) + "+00:00"
    dataset['research_dataset']['modified'] = time_stamp
    dataset['research_dataset']['title']['en'] = (
        "Fairdata Integration Test Dataset " + time_stamp)
    dataset['research_dataset']['issued'] = "1997-02-21"
    # Attach the caller-supplied files/directories payload.
    dataset['research_dataset'][key] = data
    status, response = metax.create_dataset(dataset,
                                            datasetuser=metax_user,
                                            datasetpwd=metax_pwd)
    self.assertIn(status, self.OK,
                  'Metax create dataset fails: ' + str(response.json()))
    dataset = response.json()
    self.dataset_id = dataset['id']
    # Link the created dataset to the preservation contract.
    # NOTE(review): the return value of set_dataset_contract is not checked.
    pas.set_dataset_contract(self.dataset_id, contract_id)
    # Re-send the created dataset back to Metax (per the message below this
    # is what attaches the files to the dataset).
    status, response = metax.update_dataset(dataset['identifier'], dataset,
                                            datasetuser=metax_user,
                                            datasetpwd=metax_pwd)
    self.assertIn(status, self.OK,
                  'Adding files to dataset fails: ' + str(response))
    self._update_file_metadata()
    print("File metadata updated in Metax")