def test_concept_label_import(self):
    """Importing the concept-label CSV fixture should add exactly one tile."""
    tiles_before = TileModel.objects.count()
    BusinessDataImporter("tests/fixtures/data/csv/concept_label_import.csv").import_business_data()
    tiles_after = TileModel.objects.count()
    self.assertEqual(tiles_after - tiles_before, 1)
def test_json_export(self):
    """Round-trip a JSON business-data fixture through the exporter and
    compare the export (order-insensitively) against the truth file."""

    def deep_sort(obj):
        """
        Recursively sort list or dict nested lists
        Taken from https://stackoverflow.com/questions/18464095/how-to-achieve-assertdictequal-with-assertsequenceequal-applied-to-values
        """
        if isinstance(obj, dict):
            _sorted = {}
            for key in sorted(obj):
                _sorted[key] = deep_sort(obj[key])
        elif isinstance(obj, list):
            new_list = []
            for val in obj:
                new_list.append(deep_sort(val))
            _sorted = sorted(new_list)
        else:
            _sorted = obj
        return _sorted

    # The import fixture doubles as the expected ("truth") export output.
    truth_path = 'tests/fixtures/data/json/resource_export_business_data_truth.json'
    BusinessDataImporter(truth_path).import_business_data()
    export = BusinessDataExporter('json').export('ab74af76-fa0e-11e6-9e3e-026d961c88e6')
    json_export = deep_sort(json.loads(export[0]['outputfile'].getvalue()))
    # Context manager closes the fixture deterministically; the original
    # leaked the file handle returned by open().
    with open(truth_path) as f:
        json_truth = deep_sort(json.loads(f.read()))
    self.assertDictEqual(json_export, json_truth)
def test_required_node_import(self):
    """Rows missing a required node must not create any tiles."""
    initial_tiles = TileModel.objects.count()
    BusinessDataImporter('tests/fixtures/data/csv/required_node_import.csv').import_business_data()
    self.assertEqual(TileModel.objects.count() - initial_tiles, 0)
def test_1_1(self):
    """A 1-1 cardinality import fixture should produce two new tiles."""
    initial_tiles = TileModel.objects.count()
    BusinessDataImporter('tests/fixtures/data/csv/cardinality_test_data/1-1.csv').import_business_data()
    self.assertEqual(TileModel.objects.count() - initial_tiles, 2)
def test_single_n_to_1(self):
    """A single n-to-1 cardinality import should produce exactly one tile."""
    initial_tiles = TileModel.objects.count()
    BusinessDataImporter("tests/fixtures/data/csv/cardinality_test_data/single-n_to_1.csv").import_business_data()
    self.assertEqual(TileModel.objects.count() - initial_tiles, 1)
def __init__(self, *args, **kwargs):
    """Bootstrap the test database on first use: run migrations, import the
    system-settings graph and its business data, then refresh settings."""
    super(ArchesTestCase, self).__init__(*args, **kwargs)
    # DEFAULT_BOUNDS is only populated once the system-settings resource
    # exists, so it doubles as a "database not yet initialized" sentinel.
    # (`is None` replaces the non-idiomatic `== None` comparison.)
    if settings.DEFAULT_BOUNDS is None:
        management.call_command('migrate')
        # 'r' replaces the deprecated 'rU' mode (removed in Python 3.11);
        # universal newlines are the default in Python 3 text mode.
        with open(os.path.join('tests/fixtures/system_settings/Arches_System_Settings_Model.json'), 'r') as f:
            archesfile = JSONDeserializer().deserialize(f)
        ResourceGraphImporter(archesfile['graph'], True)
        BusinessDataImporter('tests/fixtures/system_settings/Arches_System_Settings_Local.json').import_business_data()
        settings.update_from_db()
def test_csv_export(self):
    """Export the previously imported CSV fixture and verify the first data
    row round-trips unchanged."""
    BusinessDataImporter("tests/fixtures/data/csv/resource_export_test.csv").import_business_data()
    export = BusinessDataExporter(
        "csv", configs="tests/fixtures/data/csv/resource_export_test.mapping", single_file=True
    ).export()
    csv_output = list(csv.DictReader(export[0]["outputfile"].getvalue().split("\r\n")))[0]
    csvinputfile = "tests/fixtures/data/csv/resource_export_test.csv"
    # 'r' replaces the deprecated 'rU' mode (removed in Python 3.11), and the
    # context manager closes the fixture handle the original leaked.
    with open(csvinputfile, "r", encoding="utf-8") as f:
        csv_input = list(csv.DictReader(f, restkey="ADDITIONAL", restval="MISSING"))[0]
    self.assertDictEqual(dict(csv_input), dict(csv_output))
def test_csv_export(self):
    """Export the previously imported CSV fixture and verify the first data
    row round-trips unchanged (unicodecsv / Python 2 era variant)."""
    BusinessDataImporter('tests/fixtures/data/csv/resource_export_test.csv').import_business_data()
    export = BusinessDataExporter('csv', configs='tests/fixtures/data/csv/resource_export_test.mapping', single_file=True).export()
    # NOTE(review): the original also computed an unused `csv_export` via
    # `filter(lambda export: 'csv' in export['name'], export)[0]...`; besides
    # being dead code, subscripting filter() breaks on Python 3, so it is
    # dropped here.
    csv_output = list(unicodecsv.DictReader(BytesIO(export[0]['outputfile'].getvalue()), encoding='utf-8-sig'))[0]
    csvinputfile = 'tests/fixtures/data/csv/resource_export_test.csv'
    # Context manager closes the fixture handle the original leaked.
    with open(csvinputfile, 'rU') as f:
        csv_input = list(unicodecsv.DictReader(f, encoding='utf-8-sig', restkey='ADDITIONAL', restval='MISSING'))[0]
    self.assertDictEqual(csv_input, csv_output)
def test_7b_5121_branches(self):
    """PUT a resource whose P67i branch has three child nodes — concept+note,
    note-only, and a bare resource-instance reference — and verify all three
    load unambiguously in a single request."""
    # This loads the referenced resource, 2a615f66...001122
    BusinessDataImporter("tests/fixtures/jsonld_base/data/test_5121b_reference_instances.json").import_business_data()
    # The third node is the resource-instance, as has_note is required in the semantic branch
    # So none of the three nodes are ambiguous and should all load at the same time
    # NOTE(review): the "http://*****:*****@..." runs inside the payload below
    # look garbled by URL masking — confirm against the original fixture.
    data = """ { "@id": "http://*****:*****@type": "http://www.cidoc-crm.org/cidoc-crm/E21_Person", "http://www.cidoc-crm.org/cidoc-crm/P67i_is_referred_to_by": [{ "@type": "http://www.cidoc-crm.org/cidoc-crm/E33_Linguistic_Object", "http://www.cidoc-crm.org/cidoc-crm/P2_has_type": { "@id": "http://*****:*****@type": "http://www.cidoc-crm.org/cidoc-crm/E55_Type", "http://www.w3.org/2000/01/rdf-schema#label": "Concept 1" }, "http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "Test Content" }, { "@type": "http://www.cidoc-crm.org/cidoc-crm/E33_Linguistic_Object", "http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "No Concept, still unique" }, { "@id": "http://*****:*****@type": "http://www.cidoc-crm.org/cidoc-crm/E33_Linguistic_Object" }] } """
    url = reverse(
        "resources_graphid",
        kwargs={"graphid": "9f716aa2-bf96-11e9-bd39-0242ac160002", "resourceid": "87654321-c000-1100-b400-0242ac160002"},
    )
    response = self.client.put(url, data=data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
    print(f"Test 7b response: {response.content}")
    self.assertTrue(response.status_code == 201)
    js = response.json()
    # The endpoint may wrap the result in a list; unwrap the first element.
    if type(js) == list:
        js = js[0]
    rtb = "http://www.cidoc-crm.org/cidoc-crm/P67i_is_referred_to_by"
    note = "http://www.cidoc-crm.org/cidoc-crm/P3_has_note"
    self.assertTrue(rtb in js)
    self.assertTrue(len(js[rtb]) == 3)
    for r in js[rtb]:
        hasnote = note in r
        isres = r["@id"].startswith("http://localhost:8000/resources/")
        # Each branch is either a literal note or a resource reference —
        # exactly one of the two, never both and never neither.
        self.assertTrue((hasnote and not isres) or (isres and not hasnote))
        self.assertTrue(not (hasnote and isres))
def test_5_5098_resinst_branch(self):
    """PUT a resource whose resource-instance references live inside a branch
    (P67i -> P128i) and verify both referenced instances come back."""
    # 2019-11-01 - Conversely this fails, as it is in a branch
    BusinessDataImporter("tests/fixtures/jsonld_base/data/test_2_instances.json").import_business_data()
    # NOTE(review): the "http://*****:*****@..." runs inside the payload below
    # look garbled by URL masking — confirm against the original fixture.
    data = """ { "@id": "http://*****:*****@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object", "http://www.cidoc-crm.org/cidoc-crm/P67i_is_referred_to_by": { "@id": "http://*****:*****@type": "http://www.cidoc-crm.org/cidoc-crm/E33_Linguistic_Object", "http://www.cidoc-crm.org/cidoc-crm/P128i_is_carried_by": [ { "@id": "http://*****:*****@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object" }, { "@id": "http://*****:*****@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object" } ] } } """
    # Load up the models and data only once
    with open(os.path.join("tests/fixtures/jsonld_base/models/5098_b_resinst.json"), "rU") as f:
        archesfile = JSONDeserializer().deserialize(f)
    ResourceGraphImporter(archesfile["graph"])
    url = reverse(
        "resources_graphid",
        kwargs={"graphid": "40dbcffa-faa1-11e9-84de-3af9d3b32b71", "resourceid": "7fffffff-faa1-11e9-84de-3af9d3b32b71"},
    )
    response = self.client.put(url, data=data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
    self.assertEqual(response.status_code, 201)
    js = response.json()
    # The endpoint may wrap the result in a list; unwrap the first element.
    if type(js) == list:
        js = js[0]
    print(f"Got json for test 5: {js}")
    self.assertTrue("@id" in js)
    self.assertTrue(js["@id"] == "http://localhost:8000/resources/7fffffff-faa1-11e9-84de-3af9d3b32b71")
    self.assertTrue("http://www.cidoc-crm.org/cidoc-crm/P67i_is_referred_to_by" in js)
    # Both resource-instance references nested in the branch must survive.
    feats = js["http://www.cidoc-crm.org/cidoc-crm/P67i_is_referred_to_by"]["http://www.cidoc-crm.org/cidoc-crm/P128i_is_carried_by"]
    self.assertTrue(type(feats) == list)
    self.assertTrue(len(feats) == 2)
def test_4_5098_resinst(self):
    """PUT a resource with a top-level list of resource-instance references
    (P130) and verify both referenced instances come back."""
    # Make instances for this new one to reference
    BusinessDataImporter("tests/fixtures/jsonld_base/data/test_2_instances.json").import_business_data()
    # NOTE(review): the "http://*****:*****@..." runs inside the payload below
    # look garbled by URL masking — confirm against the original fixture.
    data = """ { "@id": "http://*****:*****@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object", "http://www.cidoc-crm.org/cidoc-crm/P130_shows_features_of": [ { "@id": "http://*****:*****@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object" }, { "@id": "http://*****:*****@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object" } ], "http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "res inst list import" } """
    url = reverse(
        "resources_graphid",
        kwargs={"graphid": "ee72fb1e-fa6c-11e9-b369-3af9d3b32b71", "resourceid": "abcd1234-1234-1129-b6e7-3af9d3b32b71"},
    )
    response = self.client.put(url, data=data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
    print(f"Test 4: {response.content}")
    self.assertEqual(response.status_code, 201)
    js = response.json()
    # The endpoint may wrap the result in a list; unwrap the first element.
    if type(js) == list:
        js = js[0]
    # print(f"Got json for test 4: {js}")
    self.assertTrue("@id" in js)
    # NOTE(review): the remainder of this method appears corrupted by URL
    # masking in the source — the expected-@id assertion, the `feats`/`rids`
    # definitions, and the first membership assertion have been fused into
    # one unparsable span. Restore from version control before relying on it.
    self.assertTrue(js["@id"] == "http://*****:*****@id"] in rids)
    self.assertTrue(feats[1]["@id"] in rids)
def import_business_data(self, data_source, config_file=None, overwrite=None, bulk_load=False):
    """
    Imports business data from all formats

    data_source -- a single path or an iterable of absolute file paths;
                   '' falls back to settings.BUSINESS_DATA_FILES
    config_file -- optional mapping/config file handed to BusinessDataImporter
    overwrite   -- required strategy flag; '' aborts with a usage message
    bulk_load   -- passed through as the importer's `bulk` flag
    """
    # An explicit overwrite strategy is mandatory; abort rather than guess
    # whether existing resources should be replaced.
    if overwrite == '':
        print '*' * 80
        print 'No overwrite option indicated. Please rerun command with \'-ow\' parameter.'
        print '*' * 80
        sys.exit()
    if data_source == '':
        data_source = settings.BUSINESS_DATA_FILES
    # Normalize a single path into a list so the loop below handles both.
    # (basestring: this block is Python 2 code.)
    if isinstance(data_source, basestring):
        data_source = [data_source]
    if data_source != ():
        for path in data_source:
            # Only absolute paths are accepted; each failure mode exits
            # with a banner-framed message rather than raising.
            if os.path.isabs(path):
                if os.path.isfile(os.path.join(path)):
                    BusinessDataImporter(path, config_file).import_business_data(
                        overwrite=overwrite, bulk=bulk_load)
                else:
                    print '*' * 80
                    print 'No file found at indicated location: {0}'.format(
                        path)
                    print '*' * 80
                    sys.exit()
            else:
                print '*' * 80
                print 'ERROR: The specified file path appears to be relative. Please rerun command with an absolute file path.'
                print '*' * 80
                sys.exit()
    else:
        print '*' * 80
        print 'No BUSINESS_DATA_FILES locations specified in your settings file. Please rerun this command with BUSINESS_DATA_FILES locations specified or pass the locations in manually with the \'-s\' parameter.'
        print '*' * 80
        sys.exit()
def test_csv_export(self):
    """Export the previously imported CSV fixture and verify the first data
    row round-trips unchanged."""
    BusinessDataImporter('tests/fixtures/data/csv/resource_export_test.csv').import_business_data()
    export = BusinessDataExporter(
        'csv',
        configs='tests/fixtures/data/csv/resource_export_test.mapping',
        single_file=True).export()
    csv_output = list(
        csv.DictReader(
            export[0]['outputfile'].getvalue().split('\r\n')))[0]
    csvinputfile = 'tests/fixtures/data/csv/resource_export_test.csv'
    # 'r' replaces the deprecated 'rU' mode (removed in Python 3.11), and the
    # context manager closes the fixture handle the original leaked.
    with open(csvinputfile, 'r', encoding="utf-8") as f:
        csv_input = list(
            csv.DictReader(f, restkey='ADDITIONAL', restval='MISSING'))[0]
    self.assertDictEqual(dict(csv_input), dict(csv_output))
def setUp(self):
    """Create and persist a test MobileSurvey, then load the survey graph and
    its business data fixtures."""
    mobile_survey = MobileSurvey()
    mobile_survey.name = "TEST MOBILE SURVEY"
    mobile_survey.description = "FOR TESTING"
    mobile_survey.active = True
    mobile_survey.createdby = User.objects.get(id=1)
    mobile_survey.lasteditedby = User.objects.get(id=1)
    mobile_survey.iconclass = "fa fa-building"
    mobile_survey.nodegroups = []
    mobile_survey.datadownloadconfig = {"download": False, "count": 10, "resources": [], "custom": ""}
    mobile_survey.id = '08960fb5-385b-11e8-add6-c4b301baab9f'
    mobile_survey.save()
    # NOTE(review): the original re-fetches and saves a second time —
    # presumably to pick up DB-side defaults before caching on self; confirm.
    mobile_survey = MobileSurvey.objects.get(pk=mobile_survey.id)
    mobile_survey.save()
    self.mobile_survey = mobile_survey
    self.client = Client()
    # 'r' replaces the deprecated 'rU' mode (removed in Python 3.11).
    with open(os.path.join('tests/fixtures/resource_graphs/Mobile Survey Test.json'), 'r') as f:
        archesfile = JSONDeserializer().deserialize(f)
    ResourceGraphImporter(archesfile['graph'])
    BusinessDataImporter('tests/fixtures/data/mobile_survey_test_data.json').import_business_data()
def setUp(self):
    """Reset resource instances, load the concept scheme/collection used by
    the export tests, then import the resource-export graph and data."""
    ResourceInstance.objects.all().delete()
    skos = SKOSReader()
    rdf = skos.read_file('tests/fixtures/data/concept_label_test_scheme.xml')
    ret = skos.save_concepts_from_skos(rdf)
    skos = SKOSReader()
    rdf = skos.read_file('tests/fixtures/data/concept_label_test_collection.xml')
    ret = skos.save_concepts_from_skos(rdf)
    # 'r' replaces the deprecated 'rU' mode (removed in Python 3.11).
    with open(os.path.join('tests/fixtures/resource_graphs/resource_export_test.json'), 'r') as f:
        archesfile = JSONDeserializer().deserialize(f)
    ResourceGraphImporter(archesfile['graph'])
    BusinessDataImporter('tests/fixtures/data/csv/resource_export_test.csv').import_business_data()
def setUp(self):
    """Reset resource instances and load every concept, graph, and instance
    fixture needed by the CSV and RDF/JSON-LD export tests."""
    ResourceInstance.objects.all().delete()
    skos = SKOSReader()
    rdf = skos.read_file('tests/fixtures/data/concept_label_test_scheme.xml')
    ret = skos.save_concepts_from_skos(rdf)
    skos = SKOSReader()
    rdf = skos.read_file('tests/fixtures/data/concept_label_test_collection.xml')
    ret = skos.save_concepts_from_skos(rdf)
    # 'r' replaces the deprecated 'rU' mode (removed in Python 3.11).
    with open(os.path.join('tests/fixtures/resource_graphs/resource_export_test.json'), 'r') as f:
        archesfile = JSONDeserializer().deserialize(f)
    ResourceGraphImporter(archesfile['graph'])

    # loading RDF/JSONLD export fixtures
    skos = SKOSReader()
    rdf = skos.read_file('tests/fixtures/data/rdf_export_thesaurus.xml')
    ret = skos.save_concepts_from_skos(rdf)
    skos = SKOSReader()
    rdf = skos.read_file('tests/fixtures/data/rdf_export_collections.xml')
    ret = skos.save_concepts_from_skos(rdf)

    # Models
    for model_name in ['object_model', 'document_model']:
        with open(os.path.join(
                'tests/fixtures/resource_graphs/rdf_export_{0}.json'.format(model_name)), 'r') as f:
            archesfile = JSONDeserializer().deserialize(f)
        ResourceGraphImporter(archesfile['graph'])

    # Fixture Instance Data for tests
    for instance_name in ['document', 'object']:
        BusinessDataImporter(
            'tests/fixtures/data/rdf_export_{0}.json'.format(instance_name)).import_business_data()

    # for RDF/JSON-LD export tests
    self.DT = DataTypeFactory()
    self.archesproject = Namespace(test_settings.ARCHES_NAMESPACE_FOR_DATA_EXPORT)
    self.cidoc = Namespace("http://www.cidoc-crm.org/cidoc-crm/")
def import_business_data(self, data_source, config_file=None, overwrite=None, bulk_load=False, create_concepts=False):
    """
    Imports business data from all formats. A config file (mapping file) is required for .csv format.

    data_source     -- a single path or an iterable of paths;
                       '' falls back to settings.BUSINESS_DATA_FILES
    config_file     -- optional mapping/config file handed to BusinessDataImporter
    overwrite       -- required strategy flag; '' aborts with a usage message
    bulk_load       -- passed through as the importer's `bulk` flag
    create_concepts -- falsy, 'create' (new collections), or 'append'
                       (existing collections)
    """
    # An explicit overwrite strategy is mandatory; abort rather than guess
    # whether existing resources should be replaced.
    if overwrite == '':
        utils.print_message('No overwrite option indicated. Please rerun command with \'-ow\' parameter.')
        sys.exit()
    if data_source == '':
        data_source = settings.BUSINESS_DATA_FILES
    # Normalize a single path into a list so the loop below handles both.
    # (basestring: this block is Python 2 code.)
    if isinstance(data_source, basestring):
        data_source = [data_source]
    create_collections = False
    # Map the CLI string to the two importer flags: 'create' makes new
    # collections as well as concepts; 'append' only adds concepts.
    if create_concepts:
        create_concepts = str(create_concepts).lower()
        if create_concepts == 'create':
            create_collections = True
            print 'Creating new collections . . .'
        elif create_concepts == 'append':
            print 'Appending to existing collections . . .'
        create_concepts = True
    if len(data_source) > 0:
        for source in data_source:
            # utils.get_valid_path returns None for a bad location.
            path = utils.get_valid_path(source)
            if path is not None:
                print 'Importing {0}. . .'.format(path)
                BusinessDataImporter(path, config_file).import_business_data(overwrite=overwrite, bulk=bulk_load, create_concepts=create_concepts, create_collections=create_collections)
            else:
                utils.print_message('No file found at indicated location: {0}'.format(source))
                sys.exit()
    else:
        utils.print_message('No BUSINESS_DATA_FILES locations specified in your settings file. Please rerun this command with BUSINESS_DATA_FILES locations specified or pass the locations in manually with the \'-s\' parameter.')
        sys.exit()
def setUpClass(cls):
    """One-time fixture load shared by every test in this case: ontology,
    RDM concepts, resource graphs, instance data, and custom datatypes."""
    # This runs once per instantiation
    cls.loadOntology()
    cls.factory = RequestFactory()
    cls.client = Client()
    #cls.client.login(username='******', password='******')
    #cls.user = User.objects.get(username='******')

    def load_graph(path):
        # Deserialize a graph fixture and import it. 'r' replaces the
        # deprecated 'rU' mode (removed in Python 3.11).
        with open(os.path.join(path), 'r') as f:
            ResourceGraphImporter(JSONDeserializer().deserialize(f)['graph'])

    # RDM thesaurus + collections used by concept-valued nodes.
    skos = SKOSReader()
    skos.save_concepts_from_skos(skos.read_file('tests/fixtures/jsonld_base/rdm/jsonld_test_thesaurus.xml'))
    skos = SKOSReader()
    skos.save_concepts_from_skos(skos.read_file('tests/fixtures/jsonld_base/rdm/jsonld_test_collections.xml'))

    # Load up the models and data only once
    load_graph('tests/fixtures/jsonld_base/models/test_1_basic_object.json')
    BusinessDataImporter('tests/fixtures/jsonld_base/data/test_1_instance.json').import_business_data()
    load_graph('tests/fixtures/jsonld_base/models/test_2_complex_object.json')
    BusinessDataImporter('tests/fixtures/jsonld_base/data/test_2_instances.json').import_business_data()
    load_graph('tests/fixtures/jsonld_base/models/5136_res_inst_plus_res_inst.json')
    BusinessDataImporter('tests/fixtures/jsonld_base/data/test_3_instances.json').import_business_data()
    load_graph('tests/fixtures/jsonld_base/models/nesting_test.json')
    BusinessDataImporter('tests/fixtures/jsonld_base/data/test_nest_instances.json').import_business_data()

    # 4564: person/group models plus the resources they reference.
    load_graph('tests/fixtures/jsonld_base/models/4564-person.json')
    load_graph('tests/fixtures/jsonld_base/models/4564-group.json')
    load_graph('tests/fixtures/jsonld_base/models/4564-referenced.json')
    BusinessDataImporter('tests/fixtures/jsonld_base/data/test_4564_group.json').import_business_data()
    BusinessDataImporter('tests/fixtures/jsonld_base/data/test_4564_reference.json').import_business_data()

    # Custom datatypes exercised by the 5299 tests.
    management.call_command('datatype', 'register', source='tests/fixtures/datatypes/color.py')
    management.call_command('datatype', 'register', source='tests/fixtures/datatypes/semantic_like.py')

    load_graph('tests/fixtures/jsonld_base/models/5299-basic.json')
    BusinessDataImporter('tests/fixtures/jsonld_base/data/test_5299_instances.json').import_business_data()
    load_graph('tests/fixtures/jsonld_base/models/5299_complex.json')
    BusinessDataImporter('tests/fixtures/jsonld_base/data/test_5299_complex.json').import_business_data()