def index_resources_by_type(resource_types, result_summary): """ Collects and indexes all resources """ for resource_type in resource_types: resources = archesmodels.Entities.objects.filter(entitytypeid = resource_type) print "Indexing {0} {1} resources".format(len(resources), resource_type[0]) result_summary[resource_type[0]] = {'database':len(resources), 'indexed':0} errors = [] for resource in resources: try: resource = Resource().get(resource.entityid) resource.index() except Exception as e: if e not in errors: errors.append(e) if len(errors) > 0: print errors[0], ':', len(errors) se = SearchEngineFactory().create() related_resource_records = archesmodels.RelatedResource.objects.all() for related_resource_record in related_resource_records: se.index_data(index='resource_relations', doc_type='all', body=model_to_dict(related_resource_record), idfield='resourcexid') return result_summary
def related_resources(request, resourceid):
    """
    GET: return resources related to resourceid, filtered to the types the
    requesting user is allowed to see.
    DELETE (editors only): remove one relationship from the database and
    from the search index.
    """
    # work out which resource types this user may see
    allowedtypes = get_allowed_types(request)
    is_anon = request.user.username == "anonymous"

    if request.method == 'GET':
        lang = request.GET.get('lang', settings.LANGUAGE_CODE)
        start = request.GET.get('start', 0)
        resources = get_related_resources(resourceid, lang, start=start, limit=15, allowedtypes=allowedtypes, is_anon=is_anon)
        return JSONResponse(resources, indent=4)

    if 'edit' in request.user.user_groups and request.method == 'DELETE':
        se = SearchEngineFactory().create()
        payload = JSONDeserializer().deserialize(request.body)
        entityid1 = payload.get('entityid1')
        entityid2 = payload.get('entityid2')
        resourcexid = payload.get('resourcexid')
        # NOTE: the client sends this key misspelled as 'realtionshiptype'
        relationship_type = payload.get('realtionshiptype')
        resource = Resource(entityid1)
        resource.delete_resource_relationship(entityid2, relationship_type)
        se.delete(index='resource_relations', doc_type='all', id=resourcexid)
        return JSONResponse({ 'success': True })
def write_resources(self, resources, resource_export_configs): json_resources = [] json_resources_for_export = [] iso_date = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S") json_file_name = os.path.join('{0}_{1}.{2}'.format('EAMENA', iso_date, 'json')) f = StringIO() for count, resource in enumerate(resources, 1): if count % 1000 == 0: print "%s Resources exported" % count errors = [] try: a_resource = Resource().get(resource['_id']) a_resource.form_groups = None json_resources.append(a_resource) except Exception as e: if e not in errors: errors.append(e) if len(errors) > 0: print errors[0], ':', len(errors) f.write((JSONSerializer().serialize({'resources':json_resources}, indent = 4, separators=(',',':')))) json_resources_for_export.append({'name': json_file_name, 'outputfile': f}) return json_resources_for_export
def build_master_graph(self, resource, schema):
    """
    Build one merged entity graph for *resource*.

    Each row of each group becomes a Resource graph built from the mapping
    schema; rows within a group are merged together, and each group graph
    is merged into the master graph at the schema's merge node.

    Returns the merged graph, or None when the resource has no rows.
    """
    master_graph = None
    # (removed dead code: an always-empty 'entity_data' list was checked
    # for content and could never contribute to the graph)
    for group in resource.groups:
        group_graphs = []
        for row in group.rows:
            entity = Resource()
            entity.create_from_mapping(row.resourcetype, schema[row.attributename]['steps'], row.attributename, row.attributevalue)
            group_graphs.append(entity)
        # robustness: a group with no rows previously raised IndexError;
        # skip it instead
        if not group_graphs:
            continue
        mapping_graph = group_graphs[0]
        for mapping in group_graphs[1:]:
            mapping_graph.merge(mapping)
        if master_graph is None:
            master_graph = mapping_graph
        else:
            # the merge point comes from the last row of the group, as in
            # the original implementation
            node_type_to_merge_at = schema[row.attributename]['mergenodeid']
            master_graph.merge_at(mapping_graph, node_type_to_merge_at)
    return master_graph
def createBacklogIds():
    # Assigns an EAMENA_ID.E42 unique identifier to every ACTOR.E39 entity
    # that does not yet have one, then re-indexes each affected resource.
    entitytype = archesmodels.EntityTypes.objects.get(pk = "ACTOR.E39")
    # NOTE(review): 'type' shadows the builtin; it is the uniqueids id_type label
    type = 'ACTOR'
    all_entities = archesmodels.Entities.objects.filter(entitytypeid__exact = entitytype)
    entities =[]
    errors = []
    # Pass 1: collect entities that have no EAMENA_ID.E42 relation yet
    for count, entity in enumerate(all_entities, 1):
        if count % 5000 == 0:
            print "%s resources inspected" % count
        try:
            relation = archesmodels.Relations.objects.get(ruleid=archesmodels.Rules.objects.get(entitytypedomain=entitytype, entitytyperange="EAMENA_ID.E42").ruleid, entityiddomain =entity.entityid)
        except ObjectDoesNotExist:
            # no EAMENA_ID relation found -> this entity needs a backlog id
            entities.append(entity)
    print "There are %s resources and %s which do not have a EAMENA_ID.E42" % (all_entities.count(), len(entities))
    # Pass 2: create the EAMENA_ID.E42 entity, its relation and a uniqueids
    # row for each collected entity
    for count, entity in enumerate(entities, 1):
        if count % 1000 == 0:
            print "%s UniqueIds created" % count
        entity2 = archesmodels.Entities()
        entity2.entitytypeid = archesmodels.EntityTypes.objects.get(pk = "EAMENA_ID.E42")
        entity2.entityid = str(uuid.uuid4())
        entity2.save()
        rule = archesmodels.Rules.objects.get(entitytypedomain = entity.entitytypeid, entitytyperange = entity2.entitytypeid, propertyid = 'P1')
        archesmodels.Relations.objects.get_or_create(entityiddomain = entity, entityidrange = entity2, ruleid = rule)
        uniqueidmodel = Entity._get_model('uniqueids')
        uniqueidmodelinstance = uniqueidmodel()
        uniqueidmodelinstance.entityid = entity2
        uniqueidmodelinstance.id_type = type
        try:
            # next sequential value after the most recently issued id of this type
            lastID = uniqueidmodel.objects.filter(id_type__exact=type).latest()
            IdInt = int(lastID.val) + 1
            uniqueidmodelinstance.val = str(IdInt)
        except ObjectDoesNotExist:
            # first id ever issued for this type
            print "The resource %s has been assigned the first ID with entityid %s" % (entity.entityid,entity2.entityid)
            uniqueidmodelinstance.val = str(1)
        uniqueidmodelinstance.order_date = datetime.datetime.now()
        uniqueidmodelinstance.save()
        # zero-pad the numeric part to at least settings.ID_LENGTH digits
        zerosLength = settings.ID_LENGTH if settings.ID_LENGTH > len(uniqueidmodelinstance.val) else len(uniqueidmodelinstance.val)
        value = type +"-"+uniqueidmodelinstance.val.zfill(zerosLength)
        # ReindexResource(entity.entityid, entity2.entityid, value)
        try:
            resource = Resource().get(entity.entityid)
            resource.index()
        except Exception as e:
            # NOTE(review): exception instances compare by identity, so this
            # dedup check rarely filters anything
            if e not in errors:
                errors.append(e)
    if len(errors) > 0:
        print errors[0], ':', len(errors)
def test_set_entity_value(self):
    """A value set on a fetched resource survives a save/reload round trip."""
    seed = {
        "entityid":"",
        "entitytypeid":"PERSON.E1",
        "value":"",
        "property":"P1",
        "child_entities":[]
    }
    saved = Resource(seed)
    saved.save()
    # a fresh entityid must have been assigned on save
    self.assertNotEqual(seed['entityid'], saved.entityid)
    fetched = Resource().get(saved.entityid)
    fetched.set_entity_value('ADDRESS.E1', '5703')
    fetched.save()
    reloaded = Resource().get(fetched.entityid)
    leaf = reloaded.child_entities[0].child_entities[0].child_entities[0]
    self.assertEqual(int(leaf.value), 5703)

# def get_db_stats():
#     return {
#         'entities': models.Entities.objects.count(),
#         'relations': models.Relations.objects.count(),
#         'strings': models.Strings.objects.count(),
#         'numbers': models.Numbers.objects.count(),
#         'domains': models.Domains.objects.count()
#     }
def get(self, request, resourceid=None):
    """Return a paginated JSON list of resources related to resourceid."""
    lang = request.GET.get('lang', settings.LANGUAGE_CODE)
    start = request.GET.get('start', 0)
    try:
        resource = Resource.objects.get(pk=resourceid)
    except ObjectDoesNotExist:
        # unknown id: fall back to an empty resource (yields no relations)
        resource = Resource()
    # an explicit empty 'page' parameter means page 1
    raw_page = request.GET.get('page', 1)
    page = 1 if request.GET.get('page') == '' else int(raw_page)
    related_resources = resource.get_related_resources(lang=lang, start=start, limit=1000, page=page)
    ret = []
    if related_resources is not None:
        ret = self.paginate_related_resources(related_resources, page, request)
    return JSONResponse(ret)
def related_resources(request, resourceid):
    """
    GET: list resources related to resourceid.
    DELETE (editors only): remove one relationship from the database and
    from the search index.
    """
    if request.method == 'GET':
        lang = request.GET.get('lang', settings.LANGUAGE_CODE)
        start = request.GET.get('start', 0)
        related = get_related_resources(resourceid, lang, start=start, limit=15)
        return JSONResponse(related, indent=4)

    if 'edit' in request.user.user_groups and request.method == 'DELETE':
        se = SearchEngineFactory().create()
        payload = JSONDeserializer().deserialize(request.body)
        entityid1 = payload.get('entityid1')
        entityid2 = payload.get('entityid2')
        resourcexid = payload.get('resourcexid')
        # NOTE: the client sends this key misspelled as 'realtionshiptype'
        relationship_type = payload.get('realtionshiptype')
        resource = Resource(entityid1)
        resource.delete_resource_relationship(entityid2, relationship_type)
        se.delete(index='resource_relations', doc_type='all', id=resourcexid)
        return JSONResponse({ 'success': True })
def delete_resources_from_csv(data_source): """Reads a list of Resource IDs from a csv file and deletes them""" with open(data_source, 'rb') as csvfile: try: dialect = csv.Sniffer().sniff(csvfile.read(1024)) csvfile.seek(0) except csv.Error: print "The source data is not a CSV file" resource_list = csv.reader(csvfile, delimiter = ',') print "There are",sum(1 for line in open(data_source))," resources that will be deleted" for r_id in resource_list: try: uuid.UUID(r_id[0]) try: resource = Resource(r_id[0]) resource.delete_index() note = '{0} Deleted'.format(r_id[0]) resource.delete_all_resource_relationships() resource.delete(note=note) except ObjectDoesNotExist: print 'Entity ',r_id[0],' does not exist. Nothing to delete' except(ValueError): print r_id[0], "is not a valid UUID" break
def resource_list_to_entities(self, resource_list): '''Takes a collection of imported resource records and saves them as arches entities''' start = time() d = datetime.datetime.now() load_id = 'LOADID:{0}-{1}-{2}-{3}-{4}-{5}'.format(d.year, d.month, d.day, d.hour, d.minute, d.microsecond) #Should we append the timestamp to the exported filename? ret = {'successfully_saved':0, 'failed_to_save':[]} schema = None current_entitiy_type = None legacyid_to_entityid = {} errors = [] progress_interval = 25 for count, resource in enumerate(resource_list): if count % 2 == 0 and count % progress_interval != 0: print "." if count >= progress_interval and count % progress_interval == 0: print count, 'of', len(resource_list), 'loaded' print " flushing memory" call("sync && echo 3 | sudo tee /proc/sys/vm/drop_caches") masterGraph = None entityData = [] if current_entitiy_type != resource.entitytypeid: schema = Resource.get_mapping_schema(resource.entitytypeid) master_graph = self.build_master_graph(resource, schema) self.pre_save(master_graph) master_graph.save(user=self.user, note=load_id) master_graph.index() resource.entityid = master_graph.entityid legacyid_to_entityid[resource.resource_id] = master_graph.entityid ret['successfully_saved'] += 1 # self.resources.append({ # '_index': 'entity', # '_type': master_graph.entitytypeid, # '_id': master_graph.entityid, # '_source': master_graph.prepare_documents_for_search_index()[0] # }) ret['legacyid_to_entityid'] = legacyid_to_entityid elapsed = (time() - start) print len(resource_list), 'resources loaded' if len(resource_list) > 0: print 'total time to etl = %s' % (elapsed) print 'average time per entity = %s' % (elapsed/len(resource_list)) print 'Load Identifier =', load_id print '***You can reverse this load with the following command:' print 'python manage.py packages -o remove_resources --load_id', load_id return ret
def save_resource(self, populated_tiles, resourceinstanceid, legacyid, resources, target_resource_model, bulk, save_count): # create a resource instance only if there are populated_tiles errors = [] if len(populated_tiles) > 0: newresourceinstance = Resource( resourceinstanceid=resourceinstanceid, graph_id=target_resource_model, legacyid=legacyid, createdtime=datetime.datetime.now() ) # add the tiles to the resource instance newresourceinstance.tiles = populated_tiles # if bulk saving then append the resources to a list otherwise just save the resource if bulk: resources.append(newresourceinstance) if len(resources) >= settings.BULK_IMPORT_BATCH_SIZE: Resource.bulk_save(resources=resources) del resources[:] #clear out the array else: try: newresourceinstance.save() except TransportError as e: cause = json.dumps(e.info['error']['caused_by'],indent=1) msg = '%s: WARNING: failed to index document in resource: %s. Exception detail:\n%s\n' % (datetime.datetime.now(), resourceinstanceid, cause) errors.append({'type': 'WARNING', 'message': msg}) newresourceinstance.delete() save_count=save_count-1 except Exception as e: msg = '%s: WARNING: failed to index document in resource: %s. Exception detail:\n%s\n' % (datetime.datetime.now(), resourceinstanceid, e) errors.append({'type': 'WARNING', 'message': msg}) newresourceinstance.delete() save_count=save_count-1 else: errors.append({'type': 'WARNING', 'message': 'No resource created for legacyid: {0}. Make sure there is data to be imported for this resource and it is mapped properly in your mapping file.'.format(legacyid)}) if len(errors) > 0: self.errors += errors if save_count % (settings.BULK_IMPORT_BATCH_SIZE/4) == 0: print '%s resources processed' % str(save_count)
def write_resources(self, dest_dir): cursor = connection.cursor() cursor.execute("""select entitytypeid from data.entity_types where isresource = TRUE""") resource_types = cursor.fetchall() json_resources = [] with open(dest_dir, 'w') as f: for resource_type in resource_types: resources = archesmodels.Entities.objects.filter(entitytypeid = resource_type) print "Writing {0} {1} resources".format(len(resources), resource_type[0]) errors = [] for resource in resources: try: a_resource = Resource().get(resource.entityid) a_resource.form_groups = None json_resources.append(a_resource) except Exception as e: if e not in errors: errors.append(e) if len(errors) > 0: print errors[0], ':', len(errors) f.write((JSONSerializer().serialize({'resources':json_resources}, separators=(',',':'))))
def save_resource(self, populated_tiles, resourceinstanceid, legacyid, resources, target_resource_model, bulk, save_count): # create a resource instance only if there are populated_tiles errors = [] if len(populated_tiles) > 0: newresourceinstance = Resource( resourceinstanceid=resourceinstanceid, graph_id=target_resource_model, legacyid=legacyid, createdtime=datetime.datetime.now() ) # add the tiles to the resource instance newresourceinstance.tiles = populated_tiles # if bulk saving then append the resources to a list otherwise just save the resource if bulk: resources.append(newresourceinstance) if len(resources) == settings.BULK_IMPORT_BATCH_SIZE: Resource.bulk_save(resources=resources) del resources[:] #clear out the array else: newresourceinstance.save() else: errors.append({'type': 'WARNING', 'message': 'No resource created for legacyid: {0}. Make sure there is data to be imported for this resource and it is mapped properly in your mapping file.'.format(legacyid)}) if len(errors) > 0: self.errors += errors if save_count % (settings.BULK_IMPORT_BATCH_SIZE/4) == 0: print '%s resources processed' % str(save_count)
def resource_list_to_entities(self, resource_list, archesjson=False): '''Takes a collection of imported resource records and saves them as arches entities''' start = time() d = datetime.datetime.now() load_id = 'LOADID:{0}-{1}-{2}-{3}-{4}-{5}'.format(d.year, d.month, d.day, d.hour, d.minute, d.microsecond) #Should we append the timestamp to the exported filename? ret = {'successfully_saved':0, 'failed_to_save':[]} schema = None current_entitiy_type = None legacyid_to_entityid = {} errors = [] progress_interval = 250 for count, resource in enumerate(resource_list): if count >= progress_interval and count % progress_interval == 0: print count, 'of', len(resource_list), 'loaded' if archesjson == False: masterGraph = None if current_entitiy_type != resource.entitytypeid: schema = Resource.get_mapping_schema(resource.entitytypeid) master_graph = self.build_master_graph(resource, schema) self.pre_save(master_graph) try: uuid.UUID(resource.resource_id) entityid = resource.resource_id except(ValueError): entityid = '' master_graph.save(user=self.user, note=load_id, resource_uuid=entityid) master_graph.index() resource.entityid = master_graph.entityid legacyid_to_entityid[resource.resource_id] = master_graph.entityid else: new_resource = Resource(resource) new_resource.save(user=self.user, note=load_id, resource_uuid=new_resource.entityid) try: new_resource.index() except: print 'Could not index resource. This may be because the valueid of a concept is not in the database.' 
legacyid_to_entityid[new_resource.entityid] = new_resource.entityid ret['successfully_saved'] += 1 ret['legacyid_to_entityid'] = legacyid_to_entityid elapsed = (time() - start) print len(resource_list), 'resources loaded' if len(resource_list) > 0: print 'total time to etl = %s' % (elapsed) print 'average time per entity = %s' % (elapsed/len(resource_list)) print 'Load Identifier =', load_id print '***You can reverse this load with the following command:' print 'python manage.py packages -o remove_resources --load_id', load_id return ret
def UnloadRelations(source): """ Simple utility to unload relations AZ 17/1/17 """ with open(source, 'rb') as csvfile: reader = csv.DictReader(csvfile, delimiter= ',') se = SearchEngineFactory().create() for row in reader: entity = Resource() entity.entityid = row['RESOURCEID_FROM'] related_oldindex = get_related_resources(row['RESOURCEID_FROM']) if related_oldindex: for releted_res in related_oldindex['resource_relationships']: if str(releted_res['entityid2']) == str(row['RESOURCEID_TO']): se.delete(index='resource_relations', doc_type='all', id=releted_res['resourcexid']) try: relationship = RelatedResource.objects.get(entityid1=entity.entityid, entityid2=row['RESOURCEID_TO'],relationshiptype=row['RELATION_TYPE']) entity.delete_resource_relationship(row['RESOURCEID_TO'], row['RELATION_TYPE']) except: print "Issues deleting DB instance of relation with entity1 %s and entity2 %s . Most likely, the instance has already been deleted" % (row['RESOURCEID_FROM'], row['RESOURCEID_TO']) pass
def delete_resources(load_id): """Takes the load id stored in the note column of the edit log and deletes each resource with that id""" resources_for_removal = archesmodels.EditLog.objects.filter( Q(note=load_id) ) resourceids = set([editlog.resourceid for editlog in resources_for_removal]) for r_id in resourceids: try: resource = Resource(r_id) resource.delete_index() note = '{0} Deleted'.format(load_id) resource.delete_all_resource_relationships() resource.delete(note=note) except ObjectDoesNotExist: print 'Entity does not exist. Nothing to delete'
def test_set_entity_value(self):
    """A value set on a fetched resource survives a save/reload round trip."""
    seed = {
        "entityid":"",
        "entitytypeid":"CAR.E1",
        "value":"",
        "property":"P1",
        "child_entities":[]
    }
    created = Resource(seed)
    created.save()
    # saving must assign a fresh entityid
    self.assertNotEqual(seed['entityid'], created.entityid)
    fetched = Resource().get(created.entityid)
    fetched.set_entity_value('HORSEPOWER.E1', '300')
    fetched.save()
    reloaded = Resource().get(fetched.entityid)
    leaf = reloaded.child_entities[0].child_entities[0].child_entities[0]
    self.assertEqual(int(leaf.value), 300)
def test_post_save_data_integrity(self):
    """Re-saving a fetched graph must not duplicate entities or relations."""
    # build the CAR -> PROPULSION_SYSTEM -> ENGINE -> HORSEPOWER chain
    horsepower = { "entityid":"", "entitytypeid":"HORSEPOWER.E1", "value":"300", "property":"P1", "child_entities":[] }
    engine = { "entityid":"", "entitytypeid":"ENGINE.E1", "value":"", "property":"P1", "child_entities":[horsepower] }
    propulsion = { "entityid":"", "entitytypeid":"PROPULSION_SYSTEM.E1", "value":"", "property":"P1", "child_entities":[engine] }
    python_object = { "entityid":"", "entitytypeid":"CAR.E1", "value":"", "property":"P1", "child_entities":[propulsion] }
    entity = Resource(python_object)
    entity.save()
    self.assertNotEqual(python_object['entityid'], entity.entityid)
    entity = Resource().get(entity.entityid)
    leaf = entity.child_entities[0].child_entities[0].child_entities[0]
    self.assertEqual(int(leaf.value), 300)
    # blank two entityids and save again; the graph must be re-linked,
    # not duplicated
    entity.child_entities[0].child_entities[0].entityid = ''
    entity.child_entities[0].child_entities[0].child_entities[0].entityid = ''
    entity.save()
    # test for database integrity: still 4 entities and 3 relations
    self.assertEqual(models.Entities.objects.count(), 4)
    self.assertEqual(models.Relations.objects.count(), 3)
def test_post_save_data_integrity2(self):
    """Removing a subtree and saving deletes its entities and relations."""
    # build the PERSON -> LOCATION -> PERIOD -> ADDRESS chain
    address = { "entityid":"", "entitytypeid":"ADDRESS.E1", "value":"859", "property":"P1", "child_entities":[] }
    period = { "entityid":"", "entitytypeid":"PERIOD.E1", "value":"", "property":"P1", "child_entities":[address] }
    location = { "entityid":"", "entitytypeid":"LOCATION.E1", "value":"", "property":"P1", "child_entities":[period] }
    python_object = { "entityid":"", "entitytypeid":"PERSON.E1", "value":"", "property":"P1", "child_entities":[location] }
    entity = Resource(python_object)
    entity.save()
    self.assertNotEqual(python_object['entityid'], entity.entityid)
    entity = Resource().get(entity.entityid)
    self.assertEqual(int(entity.child_entities[0].child_entities[0].child_entities[0].value), 859)
    # drop everything below LOCATION and re-save
    entity.child_entities[0].child_entities = []
    count_of_entities_before_save = models.Entities.objects.count()
    entity.save()
    count_of_entities_after_save = models.Entities.objects.count()
    # test for database integrity: 3 entities removed, no relations left
    self.assertEqual(count_of_entities_before_save - count_of_entities_after_save, 3)
    self.assertEqual(models.Relations.objects.count(), 0)
def test_set_entity_value(self):
    """Round trip: set a child value on a fetched resource, save, reload, verify."""
    person = {
        "entityid": "",
        "entitytypeid": "PERSON.E1",
        "value": "",
        "property": "P1",
        "child_entities": []
    }
    first = Resource(person)
    first.save()
    # save must replace the blank entityid with a real one
    self.assertNotEqual(person['entityid'], first.entityid)
    second = Resource().get(first.entityid)
    second.set_entity_value('ADDRESS.E1', '5703')
    second.save()
    third = Resource().get(second.entityid)
    deepest = third.child_entities[0].child_entities[0].child_entities[0]
    self.assertEqual(int(deepest.value), 5703)

# def get_db_stats():
#     return {
#         'entities': models.Entities.objects.count(),
#         'relations': models.Relations.objects.count(),
#         'strings': models.Strings.objects.count(),
#         'numbers': models.Numbers.objects.count(),
#         'domains': models.Domains.objects.count()
#     }
def post(self, request, resourceid=None):
    # Create new resource-to-resource relationships and/or update existing
    # ones from a form POST, then return the refreshed, paginated list of
    # related resources for the root resource instance.
    lang = request.GET.get('lang', settings.LANGUAGE_CODE)
    se = SearchEngineFactory().create()
    res = dict(request.POST)
    relationship_type = res['relationship_properties[relationship_type]'][0]
    datefrom = res['relationship_properties[datefrom]'][0]
    dateto = res['relationship_properties[dateto]'][0]
    # empty strings from the form mean "no date"
    dateto = None if dateto == '' else dateto
    datefrom = None if datefrom == '' else datefrom
    notes = res['relationship_properties[notes]'][0]
    root_resourceinstanceid = res['root_resourceinstanceid']
    instances_to_relate = []
    relationships_to_update = []
    if 'instances_to_relate[]' in res:
        instances_to_relate = res['instances_to_relate[]']
    if 'relationship_ids[]' in res:
        relationships_to_update = res['relationship_ids[]']

    def get_relatable_resources(graphid):
        """
        Takes the graphid of a resource, finds the graphs root node, and returns the relatable graphids
        """
        nodes = models.Node.objects.filter(graph_id=graphid)
        top_node = [node for node in nodes if node.istopnode == True][0]
        relatable_resources = [str(node.graph_id) for node in top_node.get_relatable_resources()]
        return relatable_resources

    def confirm_relationship_permitted(to_id, from_id):
        # A relationship is permitted only when each instance's graph lists
        # the other's graph as relatable.
        resource_instance_to = models.ResourceInstance.objects.filter(resourceinstanceid=to_id)[0]
        resource_instance_from = models.ResourceInstance.objects.filter(resourceinstanceid=from_id)[0]
        relatable_to = get_relatable_resources(resource_instance_to.graph_id)
        relatable_from = get_relatable_resources(resource_instance_from.graph_id)
        relatable_to_is_valid = str(resource_instance_to.graph_id) in relatable_from
        relatable_from_is_valid = str(resource_instance_from.graph_id) in relatable_to
        return (relatable_to_is_valid == True and relatable_from_is_valid == True)

    # create a new relationship record for each permitted target instance
    for instanceid in instances_to_relate:
        permitted = confirm_relationship_permitted(instanceid, root_resourceinstanceid[0])
        if permitted == True:
            rr = models.ResourceXResource(
                resourceinstanceidfrom=Resource(root_resourceinstanceid[0]),
                resourceinstanceidto=Resource(instanceid),
                notes=notes,
                relationshiptype=relationship_type,
                datestarted=datefrom,
                dateended=dateto)
            rr.save()
        else:
            print 'relationship not permitted'

    # apply the posted properties to any existing relationships
    for relationshipid in relationships_to_update:
        rr = models.ResourceXResource.objects.get(pk=relationshipid)
        rr.notes = notes
        rr.relationshiptype = relationship_type
        rr.datestarted = datefrom
        rr.dateended = dateto
        rr.save()

    start = request.GET.get('start', 0)
    # refresh so the just-saved relation documents are searchable immediately
    se.es.indices.refresh(index=se._add_prefix("resource_relations"))
    resource = Resource.objects.get(pk=root_resourceinstanceid[0])
    # an explicit empty 'page' parameter means page 1
    page = 1 if request.GET.get('page') == '' else int(request.GET.get('page', 1))
    related_resources = resource.get_related_resources(lang=lang, start=start, limit=1000, page=page)
    ret = []
    if related_resources is not None:
        ret = self.paginate_related_resources(related_resources, page, request)
    return JSONResponse(ret, indent=4)
def import_business_data(self, business_data=None, mapping=None, overwrite='append', bulk=False): # errors = businessDataValidator(self.business_data) def process_resourceid(resourceid, overwrite): # Test if resourceid is a UUID. try: resourceinstanceid = uuid.UUID(resourceid) # If resourceid is a UUID check if it is already an arches resource. try: ret = Resource.objects.filter( resourceinstanceid=resourceid) # If resourceid is an arches resource and overwrite is true, delete the existing arches resource. if overwrite == 'overwrite': Resource.objects.get( pk=str(ret[0].resourceinstanceid)).delete() resourceinstanceid = resourceinstanceid # If resourceid is not a UUID create one. except: resourceinstanceid = resourceinstanceid except: # Get resources with the given legacyid ret = Resource.objects.filter(legacyid=resourceid) # If more than one resource is returned than make resource = None. This should never actually happen. if len(ret) > 1: resourceinstanceid = None # If no resource is returned with the given legacyid then create an archesid for the resource. elif len(ret) == 0: resourceinstanceid = uuid.uuid4() # If a resource is returned with the give legacyid then return its archesid else: if overwrite == 'overwrite': Resource.objects.get( pk=str(ret[0].resourceinstanceid)).delete() resourceinstanceid = ret[0].resourceinstanceid return resourceinstanceid try: with transaction.atomic(): save_count = 0 try: resourceinstanceid = process_resourceid( business_data[0]['ResourceID'], overwrite) except KeyError: print '*' * 80 print 'ERROR: No column \'ResourceID\' found in business data file. Please add a \'ResourceID\' column with a unique resource identifier.' 
print '*' * 80 sys.exit() blanktilecache = {} populated_nodegroups = {} populated_nodegroups[resourceinstanceid] = [] previous_row_resourceid = None populated_tiles = [] target_resource_model = None single_cardinality_nodegroups = [ str(nodegroupid) for nodegroupid in NodeGroup.objects.values_list( 'nodegroupid', flat=True).filter(cardinality='1') ] node_datatypes = { str(nodeid): datatype for nodeid, datatype in Node.objects.values_list( 'nodeid', 'datatype').filter(~Q(datatype='semantic'), graph__isresource=True) } all_nodes = Node.objects.all() datatype_factory = DataTypeFactory() concept_lookup = ConceptLookup() new_concepts = {} required_nodes = {} for node in Node.objects.filter(isrequired=True).values_list( 'nodeid', 'name'): required_nodes[str(node[0])] = node[1] # This code can probably be moved into it's own module. resourceids = [] non_contiguous_resource_ids = [] previous_row_for_validation = None for row_number, row in enumerate(business_data): # Check contiguousness of csv file. if row['ResourceID'] != previous_row_for_validation and row[ 'ResourceID'] in resourceids: non_contiguous_resource_ids.append(row['ResourceID']) else: resourceids.append(row['ResourceID']) previous_row_for_validation = row['ResourceID'] if len(non_contiguous_resource_ids) > 0: print '*' * 80 for non_contiguous_resource_id in non_contiguous_resource_ids: print 'ResourceID: ' + non_contiguous_resource_id print 'ERROR: The preceding ResourceIDs are non-contiguous in your csv file. Please sort your csv file by ResourceID and try import again.' 
print '*' * 80 sys.exit() def cache(blank_tile): if blank_tile.data != {}: for key in blank_tile.data.keys(): if key not in blanktilecache: blanktilecache[str(key)] = blank_tile else: for nodegroup, tile in blank_tile.tiles.iteritems(): for key in tile[0].data.keys(): if key not in blanktilecache: blanktilecache[str(key)] = blank_tile def column_names_to_targetids(row, mapping, row_number): errors = [] new_row = [] if 'ADDITIONAL' in row or 'MISSING' in row: errors.append({ 'type': 'WARNING', 'message': 'No resource created for ResourceID {0}. Line {1} has additional or missing columns.' .format(row['ResourceID'], str(int(row_number.split('on line ')[1]))) }) if len(errors) > 0: self.errors += errors for key, value in row.iteritems(): if value != '': for row in mapping['nodes']: if key.upper() == row['file_field_name'].upper( ): new_row.append( {row['arches_nodeid']: value}) return new_row def transform_value(datatype, value, source, nodeid): ''' Transforms values from probably string/wkt representation to specified datatype in arches. This code could probably move to somehwere where it can be accessed by other importers. 
''' request = '' if datatype != '': errors = [] datatype_instance = datatype_factory.get_instance( datatype) if datatype in [ 'concept', 'domain-value', 'concept-list', 'domain-value-list' ]: try: uuid.UUID(value) except: if datatype in [ 'domain-value', 'domain-value-list' ]: collection_id = nodeid else: collection_id = Node.objects.get( nodeid=nodeid).config['rdmCollection'] if collection_id != None: value = concept_lookup.lookup_labelid_from_label( value, collection_id) try: value = datatype_instance.transform_import_values( value, nodeid) errors = datatype_instance.validate(value, source) except Exception as e: errors.append({ 'type': 'ERROR', 'message': 'datatype: {0} value: {1} {2} - {3}'.format( datatype_instance.datatype_model.classname, value, source, e) }) if len(errors) > 0: value = None self.errors += errors else: print _('No datatype detected for {0}'.format(value)) return {'value': value, 'request': request} def get_blank_tile(source_data): if len(source_data) > 0: if source_data[0] != {}: key = str(source_data[0].keys()[0]) if key not in blanktilecache: blank_tile = Tile.get_blank_tile(key) cache(blank_tile) else: blank_tile = blanktilecache[key] else: blank_tile = None else: blank_tile = None # return deepcopy(blank_tile) return cPickle.loads(cPickle.dumps(blank_tile, -1)) def check_required_nodes(tile, required_nodes, all_nodes): # Check that each required node in a tile is populated. errors = [] if len(required_nodes) > 0: if target_tile.data != {}: for target_k, target_v in target_tile.data.iteritems( ): if target_k in required_nodes.keys( ) and target_v is None: populated_tiles.pop( populated_tiles.index(target_tile)) errors.append({ 'type': 'WARNING', 'message': 'The {0} node is required and must be populated in order to populate the {1} nodes. This data was not imported.' .format( required_nodes[target_k], ', '.join( all_nodes. 
filter(nodegroup_id=str( target_tile.nodegroup_id )).values_list('name', flat=True))) }) elif target_tile.tiles != None: for tile in tiles: check_required_nodes(tile) if len(errors) > 0: self.errors += errors resources = [] for row_number, row in enumerate(business_data): row_number = 'on line ' + unicode( row_number + 2 ) #to represent the row in a csv accounting for the header and 0 index if row['ResourceID'] != previous_row_resourceid and previous_row_resourceid is not None: save_count = save_count + 1 self.save_resource(populated_tiles, resourceinstanceid, legacyid, resources, target_resource_model, bulk, save_count) # reset values for next resource instance populated_tiles = [] resourceinstanceid = process_resourceid( row['ResourceID'], overwrite) populated_nodegroups[resourceinstanceid] = [] source_data = column_names_to_targetids( row, mapping, row_number) if len(source_data) > 0: if source_data[0].keys(): try: target_resource_model = all_nodes.get( nodeid=source_data[0].keys()[0]).graph_id except: print '*' * 80 print 'ERROR: No resource model found. Please make sure the resource model this business data is mapped to has been imported into Arches.' print '*' * 80 sys.exit() target_tile = get_blank_tile(source_data) def populate_tile(source_data, target_tile): ''' source_data = [{nodeid:value},{nodeid:value},{nodeid:value} . . .] All nodes in source_data belong to the same resource. A dictionary of nodeids would not allow for multiple values for the same nodeid. Grouping is enforced by having all grouped attributes in the same row. ''' need_new_tile = False # Set target tileid to None because this will be a new tile, a new tileid will be created on save. target_tile.tileid = uuid.uuid4() target_tile.resourceinstance_id = resourceinstanceid # Check the cardinality of the tile and check if it has been populated. # If cardinality is one and the tile is populated the tile should not be populated again. 
if str(target_tile.nodegroup_id ) in single_cardinality_nodegroups: target_tile_cardinality = '1' else: target_tile_cardinality = 'n' if str( target_tile.nodegroup_id ) not in populated_nodegroups[resourceinstanceid]: # Check if we are populating a parent tile by inspecting the target_tile.data array. if target_tile.data != {}: # Iterate through the target_tile nodes and begin populating by iterating througth source_data array. # The idea is to populate as much of the target_tile as possible, before moving on to the next target_tile. for target_key in target_tile.data.keys(): for source_tile in source_data: for source_key in source_tile.keys( ): # Check for source and target key match. if source_key == target_key: if target_tile.data[ source_key] == None: # If match populate target_tile node with transformed value. value = transform_value( node_datatypes[ source_key], source_tile[ source_key], row_number, source_key) target_tile.data[ source_key] = value[ 'value'] # target_tile.request = value['request'] # Delete key from source_tile so we do not populate another tile based on the same data. del source_tile[ source_key] # Cleanup source_data array to remove source_tiles that are now '{}' from the code above. source_data[:] = [ item for item in source_data if item != {} ] # Check if we are populating a child tile(s) by inspecting the target_tiles.tiles array. elif target_tile.tiles != None: populated_child_nodegroups = [] for nodegroupid, childtile in target_tile.tiles.iteritems( ): prototype_tile = childtile.pop() if str( prototype_tile.nodegroup_id ) in single_cardinality_nodegroups: child_tile_cardinality = '1' else: child_tile_cardinality = 'n' def populate_child_tiles(source_data): prototype_tile_copy = cPickle.loads( cPickle.dumps( prototype_tile, -1)) prototype_tile_copy.tileid = uuid.uuid4( ) prototype_tile_copy.parenttile = target_tile prototype_tile_copy.resourceinstance_id = resourceinstanceid if str( prototype_tile_copy. 
nodegroup_id ) not in populated_child_nodegroups: for target_key in prototype_tile_copy.data.keys( ): for source_column in source_data: for source_key in source_column.keys( ): if source_key == target_key: if prototype_tile_copy.data[ source_key] == None: value = transform_value( node_datatypes[ source_key], source_column[ source_key], row_number, source_key ) prototype_tile_copy.data[ source_key] = value[ 'value'] # target_tile.request = value['request'] del source_column[ source_key] else: populate_child_tiles( source_data ) if prototype_tile_copy.data != {}: if len([ item for item in prototype_tile_copy. data.values() if item != None ]) > 0: if str( prototype_tile_copy .nodegroup_id ) not in populated_child_nodegroups: childtile.append( prototype_tile_copy ) if prototype_tile_copy != None: if child_tile_cardinality == '1': populated_child_nodegroups.append( str(prototype_tile_copy .nodegroup_id)) source_data[:] = [ item for item in source_data if item != {} ] populate_child_tiles(source_data) if not target_tile.is_blank(): populated_tiles.append(target_tile) if len(source_data) > 0: need_new_tile = True if target_tile_cardinality == '1': populated_nodegroups[ resourceinstanceid].append( str(target_tile.nodegroup_id)) if need_new_tile: new_tile = get_blank_tile(source_data) if new_tile != None: populate_tile(source_data, new_tile) # mock_request_object = HttpRequest() if target_tile != None and len(source_data) > 0: populate_tile(source_data, target_tile) # Check that required nodes are populated. If not remove tile from populated_tiles array. 
check_required_nodes(target_tile, required_nodes, all_nodes) previous_row_resourceid = row['ResourceID'] legacyid = row['ResourceID'] if 'legacyid' in locals(): self.save_resource(populated_tiles, resourceinstanceid, legacyid, resources, target_resource_model, bulk, save_count) if bulk: Resource.bulk_save(resources=resources) print _('%s total resource saved' % (save_count + 1)) except Exception as e: exc_type, exc_value, exc_traceback = sys.exc_info() formatted = traceback.format_exception(exc_type, exc_value, exc_traceback) if len(formatted): for message in formatted: print message finally: pass
def resource_manager(request, resourcetypeid='', form_id='default', resourceid=''):
    """
    Resource editor view.

    GET    -> render the resource-manager page for ``form_id``.
    POST   -> deserialize submitted form data onto the resource, save,
              re-index, and redirect back to the editor.
    DELETE -> remove the resource, its relationship rows and all of its
              search-index entries.

    NOTE(review): if both resourcetypeid and resourceid are empty strings,
    ``resource`` is never bound and the code below raises NameError --
    URL routing presumably guarantees one of them is supplied; confirm.
    """
    if resourceid != '':
        resource = Resource(resourceid)
    elif resourcetypeid != '':
        # A new, unsaved resource of the requested type.
        resource = Resource({'entitytypeid': resourcetypeid})

    if form_id == 'default':
        form_id = resource.form_groups[0]['forms'][0]['id']

    form = resource.get_form(form_id)

    if request.method == 'DELETE':
        resource.delete_index()
        se = SearchEngineFactory().create()
        # Remove each relationship's index document before deleting its row.
        relationships = resource.get_related_resources(return_entities=False)
        for relationship in relationships:
            se.delete(index='resource_relations', doc_type='all', id=relationship.resourcexid)
            relationship.delete()
        resource.delete()
        return JSONResponse({'success': True})

    if request.method == 'POST':
        data = JSONDeserializer().deserialize(request.POST.get('formdata', {}))
        form.update(data, request.FILES)
        with transaction.atomic():
            if resourceid != '':
                # Drop the stale index entry; resource.index() below recreates it.
                resource.delete_index()
            resource.save(user=request.user)
            resource.index()
            resourceid = resource.entityid
            return redirect('resource_manager', resourcetypeid=resourcetypeid, form_id=form_id, resourceid=resourceid)

    min_max_dates = models.Dates.objects.aggregate(Min('val'), Max('val'))

    if request.method == 'GET':
        if form != None:
            lang = request.GET.get('lang', settings.LANGUAGE_CODE)
            form.load(lang)
            return render(request, 'resource-manager.htm', {
                'form': form,
                'formdata': JSONSerializer().serialize(form.data),
                'form_template': 'views/forms/' + form_id + '.htm',
                'form_id': form_id,
                'resourcetypeid': resourcetypeid,
                'resourceid': resourceid,
                'main_script': 'resource-manager',
                # NOTE(review): 'ResourceManger' misspelling kept on purpose --
                # templates may key off this exact value; fix them together.
                'active_page': 'ResourceManger',
                'resource': resource,
                'resource_name': resource.get_primary_name(),
                'resource_type_name': resource.get_type_name(),
                'form_groups': resource.form_groups,
                'min_date': min_max_dates['val__min'].year if min_max_dates['val__min'] != None else 0,
                # FIX: guard max_date on val__max (was testing val__min -- the
                # wrong aggregate key for this value).
                'max_date': min_max_dates['val__max'].year if min_max_dates['val__max'] != None else 1,
                'timefilterdata': JSONSerializer().serialize(Concept.get_time_filter_data()),
            })
        else:
            return HttpResponseNotFound('<h1>Arches form not found.</h1>')
def test_delete_of_entity(self):
    """Save a small PERSON.E1 entity graph, delete it, and verify that the
    expected number of rows disappears from each backing table."""
    val = models.Values.objects.get(value='Legal')
    python_object = {
        "entityid": "",
        "entitytypeid": "PERSON.E1",
        "value": "",
        "property": "P1",
        "child_entities": [
            {
                "entityid": "",
                "entitytypeid": "NAME.E1",
                "value": "Alexei",
                "property": "P1",
                "child_entities": [
                    {
                        "entityid": "",
                        "entitytypeid": "NAME_TYPE.E1",
                        "value": val.pk,
                        "property": "P1",
                        "child_entities": []
                    }
                ]
            },
            {
                "entityid": "",
                "entitytypeid": "LOCATION.E1",
                "value": "",
                "property": "P1",
                "child_entities": [
                    {
                        "entityid": "",
                        "entitytypeid": "PERIOD.E1",
                        "value": "",
                        "property": "P1",
                        "child_entities": [
                            {
                                "entityid": "",
                                "entitytypeid": "ADDRESS.E1",
                                "value": "859",
                                "property": "P1",
                                "child_entities": []
                            }
                        ]
                    }
                ]
            }
        ]
    }

    entity = Resource(python_object)
    entity.save()

    # (model, expected number of rows removed by the delete)
    tracked = [
        (models.Entities, 6),
        (models.Relations, 5),
        (models.Strings, 1),
        (models.Numbers, 1),
        (models.Domains, 1),
    ]
    counts_before = [model.objects.count() for model, _ in tracked]

    entity.delete()

    counts_after = [model.objects.count() for model, _ in tracked]

    # The deleted entity must no longer be retrievable.
    with self.assertRaises(models.Entities.DoesNotExist):
        Resource().get(entity.entityid)

    for (model, expected), before, after in zip(tracked, counts_before, counts_after):
        self.assertEqual(before - after, expected)
def resource_manager(request, resourcetypeid='', form_id='default', resourceid=''):
    """
    Permission-aware resource editor view.

    Builds a map of {resource type -> granted permission verbs} from the
    user's Django permissions, then requires CREATE to open a blank editor
    and EDIT to proceed at all.  Otherwise behaves like the plain
    resource_manager view: GET renders, POST saves + redirects, DELETE
    removes the resource, its relationships and index entries.
    """
    ## get and check all permissions here
    permissions = request.user.get_all_permissions()
    res_perms = {k: [] for k in settings.RESOURCE_TYPE_CONFIGS().keys()}
    for k, v in res_perms.iteritems():
        for p in permissions:
            # Django permission strings look like "<app_label>.<codename>".
            t, res = p.split(".")[:2]
            if k.startswith(res):
                v.append(t)
    if resourceid == '' and not 'CREATE' in res_perms[resourcetypeid]:
        return redirect(settings.LOGIN_URL)
    if not 'EDIT' in res_perms[resourcetypeid]:
        return redirect(settings.LOGIN_URL)
    ## finish permission testing

    if resourceid != '':
        resource = Resource(resourceid)
    elif resourcetypeid != '':
        resource = Resource({'entitytypeid': resourcetypeid})

    if form_id == 'default':
        form_id = resource.form_groups[0]['forms'][0]['id']

    form = resource.get_form(form_id)

    if request.method == 'DELETE':
        resource.delete_index()
        se = SearchEngineFactory().create()
        relationships = resource.get_related_resources(return_entities=False)
        for relationship in relationships:
            se.delete(index='resource_relations', doc_type='all', id=relationship.resourcexid)
            relationship.delete()
        resource.delete()
        return JSONResponse({'success': True})

    if request.method == 'POST':
        data = JSONDeserializer().deserialize(request.POST.get('formdata', {}))
        form.update(data, request.FILES)
        with transaction.atomic():
            if resourceid != '':
                resource.delete_index()
            resource.save(user=request.user)
            resource.index()
            resourceid = resource.entityid
            return redirect('resource_manager', resourcetypeid=resourcetypeid, form_id=form_id, resourceid=resourceid)

    min_max_dates = models.Dates.objects.aggregate(Min('val'), Max('val'))

    if request.method == 'GET':
        if form != None:
            lang = request.GET.get('lang', settings.LANGUAGE_CODE)
            form.load(lang)
            return render_to_response('resource-manager.htm', {
                'form': form,
                'formdata': JSONSerializer().serialize(form.data),
                'form_template': 'views/forms/' + form_id + '.htm',
                'form_id': form_id,
                'resourcetypeid': resourcetypeid,
                'resourceid': resourceid,
                'main_script': 'resource-manager',
                # NOTE(review): 'ResourceManger' misspelling kept -- templates
                # may depend on this exact value.
                'active_page': 'ResourceManger',
                'resource': resource,
                'resource_name': resource.get_primary_name(),
                'resource_type_name': resource.get_type_name(),
                'form_groups': resource.form_groups,
                'min_date': min_max_dates['val__min'].year if min_max_dates['val__min'] != None else 0,
                # FIX: guard max_date on val__max (was testing val__min -- the
                # wrong aggregate key for this value).
                'max_date': min_max_dates['val__max'].year if min_max_dates['val__max'] != None else 1,
                'timefilterdata': JSONSerializer().serialize(Concept.get_time_filter_data()),
            }, context_instance=RequestContext(request))
        else:
            return HttpResponseNotFound('<h1>Arches form not found.</h1>')
def post(self, request):
    """
    Handle tile POST actions, dispatched on ``self.action``:

    - 'update_tile'            : create/update a single tile (and a stub
                                 resource instance if the referenced one
                                 does not exist yet).
    - 'reorder_tiles'          : persist a new sortorder for a list of tiles.
    - 'delete_provisional_tile': remove provisional edits, singly or in bulk.

    Falls through to 404 when no action matched or required data was missing.
    """
    if self.action == 'update_tile':
        # NOTE: 'json' shadows any module-level `json` import for this branch.
        json = request.POST.get('data', None)
        if json != None:
            data = JSONDeserializer().deserialize(json)
            try:
                models.ResourceInstance.objects.get(pk=data['resourceinstance_id'])
            except ObjectDoesNotExist:
                # Tile references a resource that doesn't exist yet: create a
                # stub instance whose graph is inferred from the tile's nodegroup.
                resource = Resource()
                resource.resourceinstanceid = data['resourceinstance_id']
                graphid = models.Node.objects.filter(nodegroup=data['nodegroup_id'])[0].graph_id
                resource.graph_id = graphid
                resource.save(user=request.user)
                resource.index()
            tile_id = data['tileid']
            if tile_id != None and tile_id != '':
                # Invalidate cached renderings of the resource the old tile belongs to.
                old_tile = Tile.objects.get(pk=tile_id)
                clean_resource_cache(old_tile)
            tile = Tile(data)
            if tile.filter_by_perm(request.user, 'write_nodegroup'):
                with transaction.atomic():
                    try:
                        tile.save(request=request)
                        # HACK(review): hard-coded tile id -- this whole branch
                        # looks like leftover debug code that pushes one specific
                        # tile into every mobile-survey CouchDB database.
                        # Candidate for removal; confirm before deleting.
                        if tile_id == '4345f530-aa90-48cf-b4b3-92d1185ca439':
                            import couchdb
                            import json as json_json  # local `json` name is taken by the POST payload above
                            couch = couchdb.Server(settings.COUCHDB_URL)
                            for project in models.MobileSurveyModel.objects.all():
                                db = couch['project_' + str(project.id)]
                                #tile = models.TileModel.objects.get(pk='4345f530-aa90-48cf-b4b3-92d1185ca439')
                                tile_json = json_json.loads(JSONSerializer().serialize(tile))
                                tile_json['_id'] = tile_json['tileid']
                                # Carry over the CouchDB revision so the save is an update, not a conflict.
                                for row in db.view('_all_docs', include_docs=True):
                                    if 'tileid' in row.doc and tile_json['_id'] == row.doc['_id']:
                                        tile_json['_rev'] = row.doc['_rev']
                                db.save(tile_json)
                    except ValidationError as e:
                        return JSONResponse({'status':'false','message':e.args}, status=500)
                tile.after_update_all()
                clean_resource_cache(tile)
                update_system_settings_cache(tile)
                return JSONResponse(tile)
            else:
                return JSONResponse({'status':'false','message': [_('Request Failed'), _('Permission Denied')]}, status=500)

    if self.action == 'reorder_tiles':
        json = request.body
        if json != None:
            data = JSONDeserializer().deserialize(json)
            if 'tiles' in data and len(data['tiles']) > 0:
                sortorder = 0
                with transaction.atomic():
                    for tile in data['tiles']:
                        t = Tile(tile)
                        # Only tiles the user may write get renumbered; others keep their order.
                        if t.filter_by_perm(request.user, 'write_nodegroup'):
                            t.sortorder = sortorder
                            t.save(update_fields=['sortorder'], request=request)
                            sortorder = sortorder + 1
                return JSONResponse(data)

    if self.action == 'delete_provisional_tile':
        data = request.POST
        if 'tileid' in data:
            # Single-edit form: the POST itself identifies the edit.
            provisionaledits = self.delete_provisional_edit(data, request)
            return JSONResponse(provisionaledits)
        else:
            # Bulk form: a JSON 'payload' carries a list of edits.
            payload = data.get('payload', None)
            if payload is not None:
                edits = jsonparser.loads(payload)
                for edit in edits['edits']:
                    provisionaledits = self.delete_provisional_edit(edit, request)
                return JSONResponse({'result':'success'})

    return HttpResponseNotFound()
def get(self, request, graphid=None, resourceid=None, view_template='views/resource/editor.htm', main_script='views/resource/editor', nav_menu=True):
    """
    Render the resource editor.

    With ``graphid``: create a brand-new resource instance of that graph,
    index it, and redirect to its editor page.
    With ``resourceid``: assemble everything the editor template needs
    (graphs, forms filtered by nodegroup read permission, widgets, map and
    geocoding config, searchable nodes, etc.) and render it.
    With neither: 404.
    """
    if graphid is not None:
        # Create-and-redirect path: make an empty instance of the graph.
        resource_instance = Resource()
        resource_instance.graph_id = graphid
        resource_instance.save(**{'request': request})
        resource_instance.index()
        return redirect('resource_editor', resourceid=resource_instance.pk)

    if resourceid is not None:
        resource_instance = models.ResourceInstance.objects.get(pk=resourceid)
        # All editable resource graphs, excluding system settings and inactive/non-resource graphs.
        resource_graphs = Graph.objects.exclude(pk=settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID).exclude(isresource=False).exclude(isactive=False)
        graph = Graph.objects.get(graphid=resource_instance.graph.pk)
        relationship_type_values = get_resource_relationship_types()
        form = Form(resource_instance.pk)
        datatypes = models.DDataType.objects.all()
        widgets = models.Widget.objects.all()
        map_layers = models.MapLayer.objects.all()
        map_sources = models.MapSource.objects.all()
        geocoding_providers = models.Geocoder.objects.all()
        # Only show forms containing at least one card whose nodegroup the user may read.
        forms = resource_instance.graph.form_set.filter(visible=True)
        forms_x_cards = models.FormXCard.objects.filter(form__in=forms)
        forms_w_cards = []
        for form_x_card in forms_x_cards:
            cm = models.CardModel.objects.get(pk=form_x_card.card_id)
            if request.user.has_perm('read_nodegroup', cm.nodegroup):
                forms_w_cards.append(form_x_card.form)
        displayname = Resource.objects.get(pk=resourceid).displayname
        # 'undefined' is the literal produced when no name node is populated.
        if displayname == 'undefined':
            displayname = 'Unnamed Resource'
        date_nodes = models.Node.objects.filter(datatype='date', graph__isresource=True, graph__isactive=True)
        searchable_datatypes = [d.pk for d in models.DDataType.objects.filter(issearchable=True)]
        searchable_nodes = models.Node.objects.filter(graph__isresource=True, graph__isactive=True, datatype__in=searchable_datatypes, issearchable=True)
        resource_cards = models.CardModel.objects.filter(graph__isresource=True, graph__isactive=True)
        context = self.get_context_data(
            main_script=main_script,
            resource_type=resource_instance.graph.name,
            relationship_types=relationship_type_values,
            iconclass=resource_instance.graph.iconclass,
            form=JSONSerializer().serialize(form),
            forms=JSONSerializer().serialize(forms_w_cards),
            datatypes_json=JSONSerializer().serialize(datatypes),
            widgets=widgets,
            date_nodes=date_nodes,
            map_layers=map_layers,
            map_sources=map_sources,
            geocoding_providers=geocoding_providers,
            widgets_json=JSONSerializer().serialize(widgets),
            resourceid=resourceid,
            resource_graphs=resource_graphs,
            graph_json=JSONSerializer().serialize(graph),
            displayname=displayname,
            resource_cards=JSONSerializer().serialize(resource_cards),
            searchable_nodes=JSONSerializer().serialize(searchable_nodes),
            saved_searches=JSONSerializer().serialize(settings.SAVED_SEARCHES),
        )
        # Navigation chrome: icon/title from the graph, contextual help topic.
        if graph.iconclass:
            context['nav']['icon'] = graph.iconclass
        context['nav']['title'] = graph.name
        context['nav']['menu'] = nav_menu
        if resourceid == settings.RESOURCE_INSTANCE_ID:
            context['nav']['help'] = (_('Managing System Settings'), 'help/system-settings-help.htm')
        else:
            context['nav']['help'] = (_('Using the Resource Editor'), 'help/resource-editor-help.htm')
        return render(request, view_template, context)

    return HttpResponseNotFound()
def setUpClass(cls):
    """
    Build the shared fixtures for the search tests: a clean pair of search
    indexes, an imported 'Search Test Model' graph, a concept carrying
    min/max years, and three resource instances (cultural period only,
    creation date + name, name only).
    """
    # Start from empty indexes so test assertions see only fixture data.
    se = SearchEngineFactory().create()
    se.delete_index(index='strings')
    se.delete_index(index='resource')
    cls.client = Client()
    cls.client.login(username='******', password='******')
    models.ResourceInstance.objects.all().delete()
    # NOTE(review): 'rU' mode is deprecated in Python 3 -- plain 'r' behaves the same; confirm target runtime.
    with open(os.path.join('tests/fixtures/resource_graphs/Search Test Model.json'), 'rU') as f:
        archesfile = JSONDeserializer().deserialize(f)
        ResourceGraphImporter(archesfile['graph'])
    # Node/graph ids hard-coded to match the fixture graph above.
    cls.search_model_graphid = 'e503a445-fa5f-11e6-afa8-14109fd34195'
    cls.search_model_cultural_period_nodeid = '7a182580-fa60-11e6-96d1-14109fd34195'
    cls.search_model_creation_date_nodeid = '1c1d05f5-fa60-11e6-887f-14109fd34195'
    cls.search_model_name_nodeid = '2fe14de3-fa61-11e6-897b-14109fd34195'

    # Add a concept that defines a min and max date
    concept = {
        "id": "00000000-0000-0000-0000-000000000001",
        "legacyoid": "ARCHES",
        "nodetype": "ConceptScheme",
        "values": [],
        "subconcepts": [
            {
                "values": [
                    {
                        "value": "ANP TEST",
                        "language": "en-US",
                        "category": "label",
                        "type": "prefLabel",
                        "id": "",
                        "conceptid": ""
                    },
                    {
                        "value": "1950",
                        "language": "en-US",
                        "category": "note",
                        "type": "min_year",
                        "id": "",
                        "conceptid": ""
                    },
                    {
                        "value": "1980",
                        "language": "en-US",
                        "category": "note",
                        "type": "max_year",
                        "id": "",
                        "conceptid": ""
                    }
                ],
                "relationshiptype": "hasTopConcept",
                "nodetype": "Concept",
                "id": "",
                "legacyoid": "",
                "subconcepts": [],
                "parentconcepts": [],
                "relatedconcepts": []
            }
        ]
    }
    # Post the concept through the view layer so ids get assigned server-side.
    post_data = JSONSerializer().serialize(concept)
    content_type = 'application/x-www-form-urlencoded'
    response = cls.client.post(reverse('concept', kwargs={'conceptid': '00000000-0000-0000-0000-000000000001'}), post_data, content_type)
    response_json = json.loads(response.content)
    # The prefLabel value id of the newly created subconcept.
    valueid = response_json['subconcepts'][0]['values'][0]['id']

    # add resource instance with only a cultural period defined
    cls.cultural_period_resource = Resource(graph_id=cls.search_model_graphid)
    tile = Tile(data={cls.search_model_cultural_period_nodeid: [valueid]}, nodegroup_id=cls.search_model_cultural_period_nodeid)
    cls.cultural_period_resource.tiles.append(tile)
    cls.cultural_period_resource.save()

    # add resource instance with only a creation date defined
    cls.date_resource = Resource(graph_id=cls.search_model_graphid)
    tile = Tile(data={cls.search_model_creation_date_nodeid: '1941-01-01'}, nodegroup_id=cls.search_model_creation_date_nodeid)
    cls.date_resource.tiles.append(tile)
    tile = Tile(data={cls.search_model_name_nodeid: 'testing 123'}, nodegroup_id=cls.search_model_name_nodeid)
    cls.date_resource.tiles.append(tile)
    cls.date_resource.save()

    # add resource instance with with no dates or periods defined
    cls.name_resource = Resource(graph_id=cls.search_model_graphid)
    tile = Tile(data={cls.search_model_name_nodeid: 'some test name'}, nodegroup_id=cls.search_model_name_nodeid)
    cls.name_resource.tiles.append(tile)
    cls.name_resource.save()

    # add delay to allow for indexes to be updated
    time.sleep(1)
def resource_manager(request, resourcetypeid='', form_id='default', resourceid=''):
    """
    Resource editor view (AJAX-aware variant).

    GET    -> render the editor page, including the resource's geometry as a
              serialized GeoJSON FeatureCollection (or 'null').
    POST   -> save the submitted form data; AJAX callers get back a JSON
              object with the editor URL, others get a redirect.
    DELETE -> remove the resource, its relationships and index entries.
    """
    if resourceid != '':
        resource = Resource(resourceid)
    elif resourcetypeid != '':
        resource = Resource({'entitytypeid': resourcetypeid})

    if form_id == 'default':
        form_id = resource.form_groups[0]['forms'][0]['id']

    form = resource.get_form(form_id)

    if request.method == 'DELETE':
        resource.delete_index()
        se = SearchEngineFactory().create()
        # Drop relationship index documents before deleting the rows.
        relationships = resource.get_related_resources(return_entities=False)
        for relationship in relationships:
            se.delete(index='resource_relations', doc_type='all', id=relationship.resourcexid)
            relationship.delete()
        resource.delete()
        return JSONResponse({'success': True})

    if request.method == 'POST':
        data = JSONDeserializer().deserialize(request.POST.get('formdata', {}))
        form.set_user(request.user)
        form.update(data, request.FILES)
        with transaction.atomic():
            if resourceid != '':
                resource.delete_index()
            resource.save(user=request.user)
            resource.index()
            resourceid = resource.entityid
            if request.is_ajax():
                # AJAX clients navigate themselves; hand back the target URL.
                return JSONResponse({
                    "url": reverse('resource_manager', kwargs={
                        'resourcetypeid': resourcetypeid,
                        'form_id': form_id,
                        'resourceid': resourceid
                    })
                })
            return redirect('resource_manager', resourcetypeid=resourcetypeid, form_id=form_id, resourceid=resourceid)

    min_max_dates = models.Dates.objects.aggregate(Min('val'), Max('val'))

    if request.method == 'GET':
        if form != None:
            ## geom will be a geojson FeatureCollection or 'null'
            geom = JSONSerializer().serialize(resource.get_geom())
            lang = request.GET.get('lang', request.LANGUAGE_CODE)
            form.load(lang)
            return render_to_response('resource-manager.htm', {
                'form': form,
                'formdata': JSONSerializer().serialize(form.data),
                'form_template': 'views/forms/' + form_id + '.htm',
                'form_id': form_id,
                'resourcetypeid': resourcetypeid,
                'resourceid': resourceid,
                'main_script': 'resource-manager',
                # NOTE(review): 'ResourceManger' misspelling kept -- templates
                # may depend on this exact value.
                'active_page': 'ResourceManger',
                'resource': resource,
                'resource_name': resource.get_primary_name(),
                'resource_type_name': resource.get_type_name(),
                'form_groups': resource.form_groups,
                'min_date': min_max_dates['val__min'].year if min_max_dates['val__min'] != None else 0,
                # FIX: guard max_date on val__max (was testing val__min -- the
                # wrong aggregate key for this value).
                'max_date': min_max_dates['val__max'].year if min_max_dates['val__max'] != None else 1,
                'timefilterdata': JSONSerializer().serialize(Concept.get_time_filter_data()),
                'resource_icon': settings.RESOURCE_TYPE_CONFIGS()[resourcetypeid]['icon_class'],
                'resource_geom': geom,
                'child_resource': 'HERITAGE_FEATURE.E24' if resourcetypeid == 'HERITAGE_RESOURCE_GROUP.E27' else 'HERITAGE_COMPONENT.B2'
            }, context_instance=RequestContext(request))
        else:
            return HttpResponseNotFound('<h1>Arches form not found.</h1>')
def create_indexes():
    """Ensure the search indexes exist: the resource-relations index and the
    search index for the PERSON.E1 resource type."""
    relations_resource = Resource()
    relations_resource.prepare_resource_relations_index(create=True)

    search_resource = Resource()
    search_resource.prepare_search_index('PERSON.E1', create=True)
def import_business_data(self, business_data, mapping=None, overwrite="append", prevent_indexing=False):
    """
    Import resources (and their tiles) from an Arches business-data file.

    With no mapping, delegates to import_business_data_without_mapping.
    With a mapping, walks each resource's tiles, groups sibling tiles by
    parenttile_id, and recursively copies their node values into blank
    target tiles (respecting nodegroup cardinality) before saving a new
    Resource per record.

    Only KeyError/TypeError are caught (printed); results are always
    reported via the ResourceImportReporter.
    """
    reporter = ResourceImportReporter(business_data)
    try:
        if mapping is None or mapping == "":
            self.import_business_data_without_mapping(business_data, reporter, overwrite=overwrite, prevent_indexing=prevent_indexing)
        else:
            blanktilecache = {}
            # nodegroupid -> cardinality ('1' or 'n'), looked up once up front.
            target_nodegroup_cardinalities = {}
            for nodegroup in JSONSerializer().serializeToPython(NodeGroup.objects.all()):
                target_nodegroup_cardinalities[nodegroup["nodegroupid"]] = nodegroup["cardinality"]
            for resource in business_data["resources"]:
                reporter.update_tiles(len(resource["tiles"]))
                parenttileids = []
                populated_tiles = []
                # Every imported record gets a brand-new instance id.
                resourceinstanceid = uuid.uuid4()
                populated_nodegroups = []
                target_resource_model = mapping["resource_model_id"]
                for tile in resource["tiles"]:
                    if tile["data"] != {}:

                        def get_tiles(tile):
                            # Group a tile with its siblings (same parenttile_id),
                            # processing each parent group only once.
                            if tile["parenttile_id"] is not None:
                                if tile["parenttile_id"] not in parenttileids:
                                    parenttileids.append(tile["parenttile_id"])
                                    ret = []
                                    for sibling_tile in resource["tiles"]:
                                        if sibling_tile["parenttile_id"] == tile["parenttile_id"]:
                                            ret.append(sibling_tile)
                                else:
                                    # Group already handled via an earlier sibling.
                                    ret = None
                            else:
                                ret = [tile]
                            # deletes nodes that don't have values
                            if ret is not None:
                                for tile in ret:
                                    for key, value in tile["data"].items():
                                        if value == "":
                                            del tile["data"][key]
                            return ret

                        tiles = get_tiles(tile)
                        if tiles is not None:
                            mapped_tiles = self.replace_source_nodeid(tiles, mapping)
                            blank_tile = self.get_blank_tile(tiles, blanktilecache, tiles, resourceinstanceid)

                            def populate_tile(sourcetilegroup, target_tile):
                                # Copy values from the source tile group into target_tile,
                                # recursing for leftover values and child tiles.
                                need_new_tile = False
                                target_tile_cardinality = target_nodegroup_cardinalities[str(target_tile.nodegroup_id)]
                                if str(target_tile.nodegroup_id) not in populated_nodegroups:
                                    if target_tile.data != {}:
                                        # Fill each empty node of the target from matching source nodeids,
                                        # consuming the source values as they are used.
                                        for source_tile in sourcetilegroup:
                                            for tiledata in source_tile["data"]:
                                                for nodeid in list(tiledata.keys()):
                                                    if nodeid in target_tile.data:
                                                        if target_tile.data[nodeid] is None:
                                                            target_tile.data[nodeid] = tiledata[nodeid]
                                                            for key in list(tiledata.keys()):
                                                                if key == nodeid:
                                                                    del tiledata[nodeid]
                                            for tiledata in source_tile["data"]:
                                                if tiledata == {}:
                                                    source_tile["data"].remove(tiledata)
                                    elif target_tile.tiles is not None:
                                        # Parent tile: populate copies of its child prototype tiles.
                                        populated_child_tiles = []
                                        populated_child_nodegroups = []
                                        for childtile in target_tile.tiles:
                                            childtile_empty = True
                                            child_tile_cardinality = target_nodegroup_cardinalities[str(childtile.nodegroup_id)]
                                            if str(childtile.nodegroup_id) not in populated_child_nodegroups:
                                                prototype_tile = childtile
                                                prototype_tile.tileid = None
                                                for source_tile in sourcetilegroup:
                                                    if prototype_tile.nodegroup_id not in populated_child_nodegroups:
                                                        prototype_tile_copy = deepcopy(prototype_tile)
                                                        for data in source_tile["data"]:
                                                            for nodeid in list(data.keys()):
                                                                if nodeid in list(prototype_tile.data.keys()):
                                                                    if prototype_tile.data[nodeid] is None:
                                                                        prototype_tile_copy.data[nodeid] = data[nodeid]
                                                                        for key in list(data.keys()):
                                                                            if key == nodeid:
                                                                                del data[nodeid]
                                                                        if child_tile_cardinality == "1":
                                                                            # Cardinality-1 child nodegroups may only be filled once.
                                                                            populated_child_nodegroups.append(prototype_tile.nodegroup_id)
                                                        for data in source_tile["data"]:
                                                            if data == {}:
                                                                source_tile["data"].remove(data)
                                                        # Discard the copy if nothing was actually populated.
                                                        for key in list(prototype_tile_copy.data.keys()):
                                                            if prototype_tile_copy.data[key] is not None:
                                                                childtile_empty = False
                                                        if prototype_tile_copy.data == {} or childtile_empty:
                                                            prototype_tile_copy = None
                                                        if prototype_tile_copy is not None:
                                                            populated_child_tiles.append(prototype_tile_copy)
                                                    else:
                                                        break
                                        target_tile.tiles = populated_child_tiles
                                    if target_tile.data:
                                        if target_tile.data == {} and target_tile.tiles == {}:
                                            target_tile = None
                                    populated_tiles.append(target_tile)
                                    # Any leftover source values mean another tile of this nodegroup is needed.
                                    for source_tile in sourcetilegroup:
                                        if source_tile["data"]:
                                            for data in source_tile["data"]:
                                                if len(data) > 0:
                                                    need_new_tile = True
                                    if need_new_tile:
                                        if self.get_blank_tile(sourcetilegroup, blanktilecache, tiles, resourceinstanceid) is not None:
                                            populate_tile(
                                                sourcetilegroup,
                                                self.get_blank_tile(sourcetilegroup, blanktilecache, tiles, resourceinstanceid),
                                            )
                                    if target_tile_cardinality == "1":
                                        populated_nodegroups.append(str(target_tile.nodegroup_id))
                                else:
                                    # Cardinality-1 nodegroup already populated for this resource.
                                    target_tile = None

                            if blank_tile is not None:
                                populate_tile(mapped_tiles, blank_tile)
                newresourceinstance = Resource(
                    resourceinstanceid=resourceinstanceid,
                    graph_id=target_resource_model,
                    legacyid=None,
                    createdtime=datetime.datetime.now(),
                )
                newresourceinstance.tiles = populated_tiles
                newresourceinstance.save(index=(not prevent_indexing))
                reporter.update_resources_saved()
    except (KeyError, TypeError) as e:
        print(e)
    finally:
        reporter.report_results()
def post(self, request):
    """
    Handle tile POST actions (provisional-edit-aware variant), dispatched on
    ``self.action``:

    - 'update_tile'            : create/update a tile, optionally accepting a
                                 provisional edit on behalf of its author.
    - 'reorder_tiles'          : persist a new sortorder for a list of tiles.
    - 'delete_provisional_tile': remove provisional edits for one user or a
                                 list of users.

    Falls through to 404 when no action matched.
    """
    if self.action == 'update_tile':
        # NOTE: 'json' shadows any module-level `json` import for this branch.
        json = request.POST.get('data', None)
        accepted_provisional = request.POST.get('accepted_provisional', None)
        if accepted_provisional != None:
            accepted_provisional_edit = JSONDeserializer().deserialize(accepted_provisional)
        if json != None:
            data = JSONDeserializer().deserialize(json)
            if data['resourceinstance_id'] == '':
                data['resourceinstance_id'] = uuid.uuid4()
            try:
                models.ResourceInstance.objects.get(pk=data['resourceinstance_id'])
            except ObjectDoesNotExist:
                # Tile references a resource that doesn't exist yet: create a
                # stub instance whose graph is inferred from the tile's nodegroup.
                resource = Resource()
                graphid = models.Node.objects.filter(nodegroup=data['nodegroup_id'])[0].graph_id
                resource.graph_id = graphid
                resource.save(user=request.user)
                data['resourceinstance_id'] = resource.pk
                resource.index()
            tile_id = data['tileid']
            if tile_id != None and tile_id != '':
                try:
                    old_tile = Tile.objects.get(pk=tile_id)
                    clean_resource_cache(old_tile)
                except ObjectDoesNotExist:
                    return JSONResponse(
                        {
                            'status': 'false',
                            'message': [
                                _('This tile is no longer available'),
                                _('It was likely deleted by another user')
                            ]
                        },
                        status=500)
            tile = Tile(data)
            if tile.filter_by_perm(request.user, 'write_nodegroup'):
                with transaction.atomic():
                    try:
                        if accepted_provisional == None:
                            # Plain save path.
                            try:
                                tile.save(request=request)
                            except TileValidationError as e:
                                # If this was the resource's only tile, remove the
                                # now-empty resource instance too.
                                resource_tiles = models.TileModel.objects.filter(resourceinstance=tile.resourceinstance)
                                if resource_tiles.count() == 0:
                                    Resource.objects.get(pk=tile.resourceinstance_id).delete(request.user, 'test')
                                return JSONResponse(
                                    {
                                        'status': 'false',
                                        'message': [
                                            e.message,
                                            _('Unable to Save. Please verify your input is valid')
                                        ]
                                    },
                                    status=500)
                            except Exception as e:
                                message = "Unable to save. A {0} has occurred. Arguments: {1!r}".format(type(e).__name__, e.args)
                                return JSONResponse(
                                    {
                                        'status': 'false',
                                        'message': [
                                            message,
                                            _('Please contact your system administrator')
                                        ]
                                    },
                                    status=500)
                        else:
                            # A reviewer is accepting another user's provisional edit:
                            # log who made the edit and who accepted it.
                            if accepted_provisional is not None:
                                provisional_editor = User.objects.get(pk=accepted_provisional_edit["user"])
                                tile.save(
                                    provisional_edit_log_details={
                                        "user": request.user,
                                        "action": "accept edit",
                                        "edit": accepted_provisional_edit,
                                        "provisional_editor": provisional_editor
                                    })
                        # If the current user has their own provisional edit on this
                        # tile, echo that version of the data back to them.
                        if tile.provisionaledits is not None and str(request.user.id) in tile.provisionaledits:
                            tile.data = tile.provisionaledits[str(request.user.id)]['value']
                    except ValidationError as e:
                        return JSONResponse(
                            {
                                'status': 'false',
                                'message': e.args
                            }, status=500)
                    except Exception as e:
                        exception_title = 'Saving tile failed'
                        exception_message = str(e)
                        if hasattr(e, 'message') and e.message:
                            exception_message += "({0})".format(e.message)
                        logger.error(exception_title + ''' [Tile id: {tile_id}] \
[Exception message: {message}] \
[Exception trace: {trace}]'''.format(
                            tile_id=tile_id,
                            message=exception_message,
                            trace=traceback.format_exc()))
                        return JSONResponse(
                            {
                                'status': 'false',
                                'message': [
                                    _(exception_title),
                                    _(str(exception_message))
                                ]
                            },
                            status=500)
                tile.after_update_all()
                clean_resource_cache(tile)
                update_system_settings_cache(tile)
                return JSONResponse(tile)
            else:
                return JSONResponse(
                    {
                        'status': 'false',
                        'message': [_('Request Failed'), _('Permission Denied')]
                    }, status=500)

    if self.action == 'reorder_tiles':
        json = request.body
        if json != None:
            data = JSONDeserializer().deserialize(json)
            if 'tiles' in data and len(data['tiles']) > 0:
                sortorder = 0
                with transaction.atomic():
                    for tile in data['tiles']:
                        t = Tile(tile)
                        # Only tiles the user may write get renumbered.
                        if t.filter_by_perm(request.user, 'write_nodegroup'):
                            t.sortorder = sortorder
                            t.save(update_fields=['sortorder'], request=request)
                            sortorder = sortorder + 1
                return JSONResponse(data)

    if self.action == 'delete_provisional_tile':
        user = request.POST.get('user', None)
        tileid = request.POST.get('tileid', None)
        users = request.POST.get('users', None)
        tile = Tile.objects.get(tileid=tileid)
        # Capture provisional status before edits are removed.
        is_provisional = tile.is_provisional()
        if tileid is not None and user is not None:
            # Single-user form.
            provisionaledits = self.delete_provisional_edit(tile, user, reviewer=request.user)
        elif tileid is not None and users is not None:
            # Bulk form: 'users' is a JSON list of user ids.
            users = jsonparser.loads(users)
            for user in users:
                self.delete_provisional_edit(tile, user, reviewer=request.user)
        if is_provisional == True:
            return JSONResponse({'result': 'delete'})
        else:
            return JSONResponse({'result': 'success'})

    return HttpResponseNotFound()
def test_delete_of_entity(self):
    """
    Save a small PERSON.E1 entity graph, delete it, and verify that the
    expected number of rows disappears from each backing table
    (6 entities, 5 relations, 1 string, 1 number, 1 domain value).
    """
    # A Values row used as the concept-backed value of NAME_TYPE.E1.
    val = models.Values.objects.get(value='Legal')
    # Nested entity graph: PERSON -> (NAME -> NAME_TYPE, LOCATION -> PERIOD -> ADDRESS).
    python_object = {
        "entityid":"",
        "entitytypeid":"PERSON.E1",
        "value":"",
        "property":"P1",
        "child_entities":[{
            "entityid":"",
            "entitytypeid":"NAME.E1",
            "value":"Alexei",
            "property":"P1",
            "child_entities":[{
                "entityid":"",
                "entitytypeid":"NAME_TYPE.E1",
                "value":val.pk,
                "property":"P1",
                "child_entities":[]
            }]
        },{
            "entityid":"",
            "entitytypeid":"LOCATION.E1",
            "value":"",
            "property":"P1",
            "child_entities":[{
                "entityid":"",
                "entitytypeid":"PERIOD.E1",
                "value":"",
                "property":"P1",
                "child_entities":[{
                    "entityid":"",
                    "entitytypeid":"ADDRESS.E1",
                    "value":"859",
                    "property":"P1",
                    "child_entities":[]
                }]
            }]
        }]
    }

    entity = Resource(python_object)
    entity.save()

    # Snapshot row counts before the delete.
    count_of_entities_before_delete = models.Entities.objects.count()
    count_of_relations_before_delete = models.Relations.objects.count()
    count_of_strings_before_delete = models.Strings.objects.count()
    count_of_numbers_before_delete = models.Numbers.objects.count()
    count_of_domains_before_delete = models.Domains.objects.count()

    entity.delete()

    count_of_entities_after_delete = models.Entities.objects.count()
    count_of_relations_after_delete = models.Relations.objects.count()
    count_of_strings_after_delete = models.Strings.objects.count()
    count_of_numbers_after_delete = models.Numbers.objects.count()
    count_of_domains_after_delete = models.Domains.objects.count()

    # The deleted entity must no longer be retrievable.
    with self.assertRaises(models.Entities.DoesNotExist):
        Resource().get(entity.entityid)

    self.assertEqual(count_of_entities_before_delete - count_of_entities_after_delete, 6)
    self.assertEqual(count_of_relations_before_delete - count_of_relations_after_delete, 5)
    self.assertEqual(count_of_strings_before_delete - count_of_strings_after_delete, 1)
    self.assertEqual(count_of_numbers_before_delete - count_of_numbers_after_delete, 1)
    self.assertEqual(count_of_domains_before_delete - count_of_domains_after_delete, 1)
def upload_attachments(request):
    """
    We'll enter this view for each file within the uploaded folder. So for each one
    we need to find which resource it belongs to (if any) and add that entry.

    We're pulling a dictionary of old and new resource ids out from the
    load_resources process (posted as JSON in 'resdict') and using that to update
    and edit the file entities. Returns a JSON body with 'foldervalid' indicating
    whether every matched row could be resolved to a known resource.
    """
    response_data = {
        'foldervalid': True,
    }
    if request.method == 'POST':
        resdict = json.loads(request.POST['resdict'])
        f = request._files['attachments[]']
        filename, ext = os.path.splitext(os.path.basename(str(f)))
        # Spreadsheets are not attachments; they are handled by the import itself.
        if ext == '.xlsx':
            return HttpResponse(json.dumps({}), content_type="application/json")
        archesfile = request.POST['archesfile']
        archesfilepath = os.path.join(settings.BULK_UPLOAD_DIR, archesfile)
        with open(archesfilepath, 'r') as ins:
            for l in ins:
                # Only FILE_PATH rows of the pipe-delimited .arches file are relevant.
                if 'FILE_PATH' not in l:
                    continue
                data = l.split('|')
                # Column 3 holds the file name; spaces were normalized to underscores.
                if data[3] != f._name.replace(" ", "_"):
                    continue
                if data[0] not in resdict:
                    # BUG FIX: previously this only flagged the folder invalid and
                    # then fell through to resdict[data[0]], raising KeyError.
                    # Flag and skip the unresolvable row instead.
                    response_data['foldervalid'] = False
                    continue
                resid = resdict[data[0]]
                res = Resource(resid)
                res.set_entity_value('FILE_PATH.E62', f)
                thumb = generate_thumbnail(f)
                if thumb is not None:
                    res.set_entity_value('THUMBNAIL.E62', thumb)
                res.save()
                ## reset the file names as the paths may have been modified
                ## by django during the above save process
                res.set_entity_value('FILE_PATH.E62', str(f).replace(" ", "_"))
                if thumb is not None:
                    # BUG FIX: previously this ran unconditionally, so a missing
                    # thumbnail stored the literal string 'None' as the value.
                    res.set_entity_value('THUMBNAIL.E62', str(thumb).replace(" ", "_"))
                res.save()
    return HttpResponse(json.dumps(response_data), content_type="application/json")
def read_resource(self, data, use_ids=False, resourceid=None, graphid=None):
    """
    Parse one or more JSON-LD documents into Resource instances.

    data -- a single JSON-LD document or a list of them
    use_ids -- when True, read resource instance ids from each document's @id;
               forced to True when more than one document is supplied
    resourceid -- optional pk to assign to the (single) created resource
    graphid -- graph to read against; required unless self.graphtree is
               already populated from a previous call

    Populates self.resources (and self.resource with the last one) as a side
    effect. Raises ValueError if a document's top-level @type does not match
    the graph's class, and Exception for a missing graphid or malformed @id.
    """
    if graphid is None and self.graphtree is None:
        raise Exception("No graphid supplied to read_resource")
    elif self.graphtree is None:
        self.graphtree = self.process_graph(graphid)

    # Ensure we've reset from any previous call
    self.errors = {}
    self.idcache = {}
    self.resources = []
    self.resource = None
    self.use_ids = use_ids
    if not isinstance(data, list):
        data = [data]

    # Force use_ids if there is more than one record being passed in
    if len(data) > 1:
        self.use_ids = True

    # Maybe calculate sort order for this node's tiles
    try:
        self.shouldSortTiles = settings.JSON_LD_SORT
    except:
        # setting not defined; default to no sorting
        self.shouldSortTiles = False

    for jsonld_document in data:
        jsonld_document = expand(jsonld_document)[0]

        # Possibly bail very early: the document's class must match the model's
        if jsonld_document["@type"][0] != self.graphtree["class"]:
            raise ValueError(
                "Instance does not have same top level class as model")

        if self.use_ids:
            resourceinstanceid = self.get_resource_id(
                jsonld_document["@id"])
            if resourceinstanceid is None:
                self.logger.error(
                    "The @id of the resource was not supplied, was null or URI was not correctly formatted"
                )
                raise Exception(
                    "The @id of the resource was not supplied, was null or URI was not correctly formatted"
                )
            self.logger.debug(
                "Using resource instance ID found: {0}".format(
                    resourceinstanceid))
            # NOTE(review): resourceinstanceid is validated/logged here but not
            # assigned to the Resource below — confirm downstream code applies it.
        else:
            self.logger.debug(
                "`use_ids` setting is set to False, ignoring @id from the data if any"
            )

        self.resource = Resource()
        if resourceid is not None:
            self.resource.pk = resourceid
        self.resource.graph_id = graphid
        self.resources.append(self.resource)

        ### --- Process Instance ---
        # now walk the instance and align to the tree
        if "@id" in jsonld_document:
            result = {"data": [jsonld_document["@id"]]}
        else:
            result = {"data": [None]}
        self.root_json_document = jsonld_document
        self.data_walk(jsonld_document, self.graphtree, result)
def setUpClass(cls):
    """
    Build the shared search-test fixture: reset the search indexes, import the
    'Resource Test Model' graph, create a restricted test user (no access to
    the destruction-date nodegroup), post a concept with min/max year notes,
    and save one resource carrying a name, cultural period, creation date and
    point geometry. Node/graph ids below are fixed ids from the test fixture.
    """
    se = SearchEngineFactory().create()
    se.delete_index(index='terms,concepts')
    se.delete_index(index='resources')
    cls.client = Client()
    cls.client.login(username='******', password='******')

    models.ResourceInstance.objects.all().delete()
    with open(
            os.path.join(
                'tests/fixtures/resource_graphs/Resource Test Model.json'),
            'rU') as f:
        archesfile = JSONDeserializer().deserialize(f)
    resource_graph_importer(archesfile['graph'])

    # Fixed ids taken from the imported test model.
    cls.search_model_graphid = 'e503a445-fa5f-11e6-afa8-14109fd34195'
    cls.search_model_cultural_period_nodeid = '7a182580-fa60-11e6-96d1-14109fd34195'
    cls.search_model_creation_date_nodeid = '1c1d05f5-fa60-11e6-887f-14109fd34195'
    cls.search_model_destruction_date_nodeid = 'e771b8a1-65fe-11e7-9163-14109fd34195'
    cls.search_model_name_nodeid = '2fe14de3-fa61-11e6-897b-14109fd34195'
    cls.search_model_sensitive_info_nodeid = '57446fae-65ff-11e7-b63a-14109fd34195'
    cls.search_model_geom_nodeid = '3ebc6785-fa61-11e6-8c85-14109fd34195'

    cls.user = User.objects.create_user('test', '*****@*****.**', 'test')
    cls.user.save()
    cls.user.groups.add(Group.objects.get(name='Guest'))

    # Deny this user access to the destruction date nodegroup so permission
    # filtering can be exercised by the tests.
    nodegroup = models.NodeGroup.objects.get(
        pk=cls.search_model_destruction_date_nodeid)
    assign_perm('no_access_to_nodegroup', cls.user, nodegroup)

    # Add a concept that defines a min and max date
    concept = {
        "id": "00000000-0000-0000-0000-000000000001",
        "legacyoid": "ARCHES",
        "nodetype": "ConceptScheme",
        "values": [],
        "subconcepts": [{
            "values": [{
                "value": "Mock concept",
                "language": "en-US",
                "category": "label",
                "type": "prefLabel",
                "id": "",
                "conceptid": ""
            }, {
                "value": "1950",
                "language": "en-US",
                "category": "note",
                "type": "min_year",
                "id": "",
                "conceptid": ""
            }, {
                "value": "1980",
                "language": "en-US",
                "category": "note",
                "type": "max_year",
                "id": "",
                "conceptid": ""
            }],
            "relationshiptype": "hasTopConcept",
            "nodetype": "Concept",
            "id": "",
            "legacyoid": "",
            "subconcepts": [],
            "parentconcepts": [],
            "relatedconcepts": []
        }]
    }

    post_data = JSONSerializer().serialize(concept)
    content_type = 'application/x-www-form-urlencoded'
    response = cls.client.post(
        reverse(
            'concept',
            kwargs={'conceptid': '00000000-0000-0000-0000-000000000001'}),
        post_data, content_type)
    response_json = json.loads(response.content)
    valueid = response_json['subconcepts'][0]['values'][0]['id']
    cls.conceptid = response_json['subconcepts'][0]['id']

    # Add resource with Name, Cultural Period, Creation Date and Geometry
    cls.test_resource = Resource(graph_id=cls.search_model_graphid)

    # Add Name
    tile = Tile(data={cls.search_model_name_nodeid: 'Test Name 1'},
                nodegroup_id=cls.search_model_name_nodeid)
    cls.test_resource.tiles.append(tile)

    # Add Cultural Period
    tile = Tile(data={cls.search_model_cultural_period_nodeid: [valueid]},
                nodegroup_id=cls.search_model_cultural_period_nodeid)
    cls.test_resource.tiles.append(tile)

    # Add Creation Date
    tile = Tile(data={cls.search_model_creation_date_nodeid: '1941-01-01'},
                nodegroup_id=cls.search_model_creation_date_nodeid)
    cls.test_resource.tiles.append(tile)

    # Add Geometry
    cls.geom = {
        "type": "FeatureCollection",
        "features": [{
            "geometry": {
                "type": "Point",
                "coordinates": [0, 0]
            },
            "type": "Feature",
            "properties": {}
        }]
    }
    tile = Tile(data={cls.search_model_geom_nodeid: cls.geom},
                nodegroup_id=cls.search_model_geom_nodeid)
    cls.test_resource.tiles.append(tile)
    cls.test_resource.save()

    # add delay to allow for indexes to be updated
    time.sleep(1)
def make_instance_public(self, resourceinstanceid, graphid=None):
    """
    Remove all instance-level permissions from a resource instance, making it
    publicly accessible, and return the resulting permission summary.

    resourceinstanceid -- pk of the resource instance to open up
    graphid -- optional graph id; looked up from the instance when omitted
    """
    resource = Resource(resourceinstanceid)
    if graphid:
        resource.graph_id = graphid
    else:
        # No graph supplied: read it off the stored instance.
        instance = models.ResourceInstance.objects.get(pk=resourceinstanceid)
        resource.graph_id = str(instance.graph_id)
    resource.remove_resource_instance_permissions()
    return self.get_instance_permissions(resource)
def import_business_data_without_mapping(self, business_data, reporter, overwrite="append", prevent_indexing=False):
    """
    Import resources (and their tiles) from an Arches JSON export that already
    carries node ids, so no field mapping is required.

    business_data -- dict with a "resources" list; each resource holds a
        "resourceinstance" dict and a flat "tiles" list
    reporter -- progress reporter; notified of tile and resource counts
    overwrite -- "overwrite" replaces existing rows outright; any other value
        ("append") updates an existing resource/tile in place if found
    prevent_indexing -- when True, save without pushing to the search index

    Resources whose graph is not installed are skipped silently.
    """
    errors = []
    for resource in business_data["resources"]:
        if resource["resourceinstance"] is not None:
            # Only import when the target graph exists in this installation.
            if GraphModel.objects.filter(graphid=str(
                    resource["resourceinstance"]["graph_id"])).count() > 0:
                resourceinstanceid = uuid.UUID(
                    str(resource["resourceinstance"]
                        ["resourceinstanceid"]))
                defaults = {
                    "graph_id": uuid.UUID(str(
                        resource["resourceinstance"]["graph_id"])),
                    "legacyid": resource["resourceinstance"]["legacyid"],
                }
                new_values = {
                    "resourceinstanceid": resourceinstanceid,
                    "createdtime": datetime.datetime.now()
                }
                new_values.update(defaults)
                if overwrite == "overwrite":
                    resourceinstance = Resource(**new_values)
                else:
                    # Append mode: update the existing instance if present,
                    # otherwise fall back to creating a fresh one.
                    try:
                        resourceinstance = Resource.objects.get(
                            resourceinstanceid=resourceinstanceid)
                        for key, value in defaults.items():
                            setattr(resourceinstance, key, value)
                    except Resource.DoesNotExist:
                        resourceinstance = Resource(**new_values)

                if resource["tiles"] != []:
                    reporter.update_tiles(len(resource["tiles"]))

                    def update_or_create_tile(src_tile):
                        # Recursively create/update this tile and its children,
                        # appending each to resourceinstance.tiles (closure).
                        tile = None
                        src_tile["parenttile_id"] = uuid.UUID(
                            str(src_tile["parenttile_id"])
                        ) if src_tile["parenttile_id"] else None
                        defaults = {
                            "resourceinstance": resourceinstance,
                            "parenttile_id": str(src_tile["parenttile_id"])
                            if src_tile["parenttile_id"] else None,
                            "nodegroup_id": str(src_tile["nodegroup_id"])
                            if src_tile["nodegroup_id"] else None,
                            "data": src_tile["data"],
                        }
                        new_values = {
                            "tileid": uuid.UUID(str(src_tile["tileid"]))
                        }
                        new_values.update(defaults)
                        if overwrite == "overwrite":
                            tile = Tile(**new_values)
                        else:
                            try:
                                tile = Tile.objects.get(tileid=uuid.UUID(
                                    str(src_tile["tileid"])))
                                for key, value in defaults.items():
                                    setattr(tile, key, value)
                            except Tile.DoesNotExist:
                                tile = Tile(**new_values)
                        if tile is not None:
                            resourceinstance.tiles.append(tile)
                            reporter.update_tiles_saved()
                        for child in src_tile["tiles"]:
                            update_or_create_tile(child)

                    # Rebuild the parent/child tile hierarchy from the flat
                    # list, then walk it starting at the root tiles.
                    for tile in resource["tiles"]:
                        tile["tiles"] = [
                            child for child in resource["tiles"]
                            if child["parenttile_id"] == tile["tileid"]
                        ]
                    for tile in [
                            k for k in resource["tiles"]
                            if k["parenttile_id"] is None
                    ]:
                        update_or_create_tile(tile)

                resourceinstance.save(index=(not prevent_indexing))
                reporter.update_resources_saved()
def import_business_data(self, business_data=None, mapping=None, overwrite='append', bulk=False):
    """
    Import CSV business data into Arches resources.

    business_data -- list of row dicts; rows for the same resource must be
        contiguous and keyed by a 'ResourceID' column (UUID or legacy id)
    mapping -- mapping file dict linking CSV column names to node ids
    overwrite -- 'overwrite' deletes any pre-existing resource with the same
        id before import; 'append' keeps it
    bulk -- when True, resources are collected and saved with Resource.bulk_save

    The whole import runs in a single transaction; any exception is printed
    as a traceback and the transaction rolls back.
    """
    # errors = businessDataValidator(self.business_data)

    def process_resourceid(resourceid, overwrite):
        # Resolve a CSV ResourceID (UUID or legacy id) to the resource
        # instance id to import into, honoring overwrite mode.
        # Test if resourceid is a UUID.
        try:
            resourceinstanceid = uuid.UUID(resourceid)
            # If resourceid is a UUID check if it is already an arches resource.
            try:
                ret = Resource.objects.filter(resourceinstanceid=resourceid)
                # If resourceid is an arches resource and overwrite is true, delete the existing arches resource.
                if overwrite == 'overwrite':
                    Resource(str(ret[0].resourceinstanceid)).delete()
                resourceinstanceid = resourceinstanceid
            # If resourceid is not a UUID create one.
            except:
                resourceinstanceid = resourceinstanceid
        except:
            # Get resources with the given legacyid
            ret = Resource.objects.filter(legacyid=resourceid)
            # If more than one resource is returned than make resource = None. This should never actually happen.
            if len(ret) > 1:
                resourceinstanceid = None
            # If no resource is returned with the given legacyid then create an archesid for the resource.
            elif len(ret) == 0:
                resourceinstanceid = uuid.uuid4()
            # If a resource is returned with the give legacyid then return its archesid
            else:
                if overwrite == 'overwrite':
                    Resource(str(ret[0].resourceinstanceid)).delete()
                resourceinstanceid = ret[0].resourceinstanceid
        return resourceinstanceid

    try:
        with transaction.atomic():
            save_count = 0
            try:
                resourceinstanceid = process_resourceid(business_data[0]['ResourceID'], overwrite)
            except KeyError:
                print '*'*80
                print 'ERROR: No column \'ResourceID\' found in business data file. Please add a \'ResourceID\' column with a unique resource identifier.'
                print '*'*80
                sys.exit()

            blanktilecache = {}
            populated_nodegroups = {}
            populated_nodegroups[resourceinstanceid] = []
            previous_row_resourceid = None
            populated_tiles = []
            single_cardinality_nodegroups = [str(nodegroupid) for nodegroupid in NodeGroup.objects.values_list('nodegroupid', flat=True).filter(cardinality = '1')]
            node_datatypes = {str(nodeid): datatype for nodeid, datatype in Node.objects.values_list('nodeid', 'datatype').filter(~Q(datatype='semantic'), graph__isresource=True)}
            all_nodes = Node.objects.all()
            datatype_factory = DataTypeFactory()

            # This code can probably be moved into it's own module.
            # Pre-pass: rows for the same ResourceID must be contiguous,
            # because the import saves a resource whenever the id changes.
            resourceids = []
            non_contiguous_resource_ids = []
            previous_row_for_validation = None

            for row_number, row in enumerate(business_data):
                # Check contiguousness of csv file.
                if row['ResourceID'] != previous_row_for_validation and row['ResourceID'] in resourceids:
                    non_contiguous_resource_ids.append(row['ResourceID'])
                else:
                    resourceids.append(row['ResourceID'])
                previous_row_for_validation = row['ResourceID']

            if len(non_contiguous_resource_ids) > 0:
                print '*'*80
                print 'ERROR: Resources in your csv file are non-contiguous. Please sort your csv file by ResourceID and try import again.'
                print '*'*80
                sys.exit()

            def cache(blank_tile):
                # Remember which blank tile serves each node id so
                # Tile.get_blank_tile is only called once per nodegroup.
                if blank_tile.data != {}:
                    for key in blank_tile.data.keys():
                        if key not in blanktilecache:
                            blanktilecache[str(key)] = blank_tile
                else:
                    for nodegroup, tile in blank_tile.tiles.iteritems():
                        for key in tile[0].data.keys():
                            if key not in blanktilecache:
                                blanktilecache[str(key)] = blank_tile

            def column_names_to_targetids(row, mapping, row_number):
                # Translate one CSV row into [{nodeid: value}, ...] using the
                # mapping file; warns (and records the warning) on rows with
                # extra or missing columns.
                errors = []
                new_row = []
                if 'ADDITIONAL' in row or 'MISSING' in row:
                    errors.append({'type': 'WARNING', 'message': 'No resource created for ResourceID {0}. Line {1} has additional or missing columns.'.format(row['ResourceID'], str(int(row_number.split('on line ')[1])))})
                if len(errors) > 0:
                    self.errors += errors
                for key, value in row.iteritems():
                    if value != '':
                        for row in mapping['nodes']:
                            if key.upper() == row['file_field_name'].upper():
                                new_row.append({row['arches_nodeid']: value})
                return new_row

            def transform_value(datatype, value, source):
                '''
                Transforms values from probably string/wkt representation to specified datatype in arches.
                This code could probably move to somehwere where it can be accessed by other importers.
                '''
                request = ''
                if datatype != '':
                    errors = []
                    datatype_instance = datatype_factory.get_instance(datatype)
                    try:
                        value = datatype_instance.transform_import_values(value)
                        errors = datatype_instance.validate(value, source)
                    except Exception as e:
                        errors.append({'type': 'ERROR', 'message': 'datatype: {0} value: {1} {2} - {3}'.format(datatype_instance.datatype_model.classname, value, source, e)})
                    if len(errors) > 0:
                        self.errors += errors
                else:
                    print _('No datatype detected for {0}'.format(value))
                return {'value': value, 'request': request}

            def get_blank_tile(source_data):
                # Return a deep copy of the blank tile for the nodegroup that
                # owns the first node in source_data (cached per node id).
                if len(source_data) > 0:
                    if source_data[0] != {}:
                        key = str(source_data[0].keys()[0])
                        if key not in blanktilecache:
                            blank_tile = Tile.get_blank_tile(key)
                            cache(blank_tile)
                        else:
                            blank_tile = blanktilecache[key]
                    else:
                        blank_tile = None
                else:
                    blank_tile = None
                # return deepcopy(blank_tile)
                # cPickle round-trip is a faster deep copy than copy.deepcopy.
                return cPickle.loads(cPickle.dumps(blank_tile, -1))

            resources = []

            for row_number, row in enumerate(business_data):
                row_number = 'on line ' + unicode(row_number + 2) #to represent the row in a csv accounting for the header and 0 index
                if row['ResourceID'] != previous_row_resourceid and previous_row_resourceid is not None:
                    # ResourceID changed: flush the completed resource.
                    save_count = save_count + 1
                    self.save_resource(populated_tiles, resourceinstanceid, legacyid, resources, target_resource_model, bulk, save_count)

                    # reset values for next resource instance
                    populated_tiles = []
                    resourceinstanceid = process_resourceid(row['ResourceID'], overwrite)
                    populated_nodegroups[resourceinstanceid] = []

                source_data = column_names_to_targetids(row, mapping, row_number)

                if len(source_data) > 0:
                    if source_data[0].keys():
                        try:
                            target_resource_model = all_nodes.get(nodeid=source_data[0].keys()[0]).graph_id
                        except:
                            print '*'*80
                            print 'ERROR: No resource model found. Please make sure the resource model this business data is mapped to has been imported into Arches.'
                            print '*'*80
                            sys.exit()

                    target_tile = get_blank_tile(source_data)

                    def populate_tile(source_data, target_tile):
                        '''
                        source_data = [{nodeid:value},{nodeid:value},{nodeid:value} . . .]
                        All nodes in source_data belong to the same resource.
                        A dictionary of nodeids would not allow for multiple values for the same nodeid.
                        Grouping is enforced by having all grouped attributes in the same row.
                        '''
                        need_new_tile = False
                        # Set target tileid to None because this will be a new tile, a new tileid will be created on save.
                        target_tile.tileid = uuid.uuid4()
                        target_tile.resourceinstance_id = resourceinstanceid
                        # Check the cardinality of the tile and check if it has been populated.
                        # If cardinality is one and the tile is populated the tile should not be populated again.
                        if str(target_tile.nodegroup_id) in single_cardinality_nodegroups:
                            target_tile_cardinality = '1'
                        else:
                            target_tile_cardinality = 'n'

                        if str(target_tile.nodegroup_id) not in populated_nodegroups[resourceinstanceid]:
                            # Check if we are populating a parent tile by inspecting the target_tile.data array.
                            if target_tile.data != {}:
                                # Iterate through the target_tile nodes and begin populating by iterating througth source_data array.
                                # The idea is to populate as much of the target_tile as possible, before moving on to the next target_tile.
                                for target_key in target_tile.data.keys():
                                    for source_tile in source_data:
                                        for source_key in source_tile.keys():
                                            # Check for source and target key match.
                                            if source_key == target_key:
                                                if target_tile.data[source_key] == None:
                                                    # If match populate target_tile node with transformed value.
                                                    value = transform_value(node_datatypes[source_key], source_tile[source_key], row_number)
                                                    target_tile.data[source_key] = value['value']
                                                    # target_tile.request = value['request']
                                                    # Delete key from source_tile so we do not populate another tile based on the same data.
                                                    del source_tile[source_key]
                                # Cleanup source_data array to remove source_tiles that are now '{}' from the code above.
                                source_data[:] = [item for item in source_data if item != {}]

                            # Check if we are populating a child tile(s) by inspecting the target_tiles.tiles array.
                            elif target_tile.tiles != None:
                                populated_child_nodegroups = []
                                for nodegroupid, childtile in target_tile.tiles.iteritems():
                                    prototype_tile = childtile.pop()
                                    if str(prototype_tile.nodegroup_id) in single_cardinality_nodegroups:
                                        child_tile_cardinality = '1'
                                    else:
                                        child_tile_cardinality = 'n'

                                    def populate_child_tiles(source_data):
                                        # Copy the prototype child tile and fill it from
                                        # source_data; recurses to create additional child
                                        # tiles for cardinality-n nodegroups.
                                        prototype_tile_copy = cPickle.loads(cPickle.dumps(prototype_tile, -1))
                                        prototype_tile_copy.tileid = uuid.uuid4()
                                        prototype_tile_copy.parenttile = target_tile
                                        prototype_tile_copy.resourceinstance_id = resourceinstanceid
                                        if str(prototype_tile_copy.nodegroup_id) not in populated_child_nodegroups:
                                            for target_key in prototype_tile_copy.data.keys():
                                                for source_column in source_data:
                                                    for source_key in source_column.keys():
                                                        if source_key == target_key:
                                                            if prototype_tile_copy.data[source_key] == None:
                                                                value = transform_value(node_datatypes[source_key], source_column[source_key], row_number)
                                                                prototype_tile_copy.data[source_key] = value['value']
                                                                # target_tile.request = value['request']
                                                                del source_column[source_key]
                                                            else:
                                                                populate_child_tiles(source_data)

                                        if prototype_tile_copy.data != {}:
                                            if len([item for item in prototype_tile_copy.data.values() if item != None]) > 0:
                                                if str(prototype_tile_copy.nodegroup_id) not in populated_child_nodegroups:
                                                    childtile.append(prototype_tile_copy)

                                        if prototype_tile_copy != None:
                                            if child_tile_cardinality == '1':
                                                populated_child_nodegroups.append(str(prototype_tile_copy.nodegroup_id))

                                        source_data[:] = [item for item in source_data if item != {}]

                                    populate_child_tiles(source_data)

                            populated_tiles.append(target_tile)

                            if len(source_data)>0:
                                need_new_tile = True

                            if target_tile_cardinality == '1':
                                populated_nodegroups[resourceinstanceid].append(str(target_tile.nodegroup_id))

                            if need_new_tile:
                                new_tile = get_blank_tile(source_data)
                                if new_tile != None:
                                    populate_tile(source_data, new_tile)

                    # mock_request_object = HttpRequest()

                    if target_tile != None and len(source_data) > 0:
                        populate_tile(source_data, target_tile)

                previous_row_resourceid = row['ResourceID']
                legacyid = row['ResourceID']

            # Flush the final resource (legacyid only exists if at least one
            # row was processed).
            if 'legacyid' in locals():
                self.save_resource(populated_tiles, resourceinstanceid, legacyid, resources, target_resource_model, bulk, save_count)

            if bulk:
                Resource.bulk_save(resources=resources)

            print _('%s total resource saved' % (save_count + 1))

    except Exception as e:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        formatted = traceback.format_exception(exc_type, exc_value, exc_traceback)
        if len(formatted):
            for message in formatted:
                print message
    finally:
        pass
def post(self, request):
    """
    Dispatch tile POST actions based on self.action:

    - 'update_tile': create/update a tile (creating its resource instance if
      needed), optionally accepting a provisional edit; returns the tile as
      JSON, or a 500 JSON error on permission/model-state failures.
    - 'reorder_tiles': persist a new sortorder for a list of tiles.
    - 'delete_provisional_tile': remove provisional edits for one user or a
      list of users.

    Falls through to HttpResponseNotFound for unknown actions.
    """
    if self.action == 'update_tile':
        # NOTE: 'json' here shadows any module named json in this scope.
        json = request.POST.get('data', None)
        accepted_provisional = request.POST.get('accepted_provisional', None)
        if accepted_provisional is not None:
            accepted_provisional_edit = JSONDeserializer().deserialize(
                accepted_provisional)
        if json is not None:
            data = JSONDeserializer().deserialize(json)
            data[
                'resourceinstance_id'] = '' if 'resourceinstance_id' not in data else data[
                    'resourceinstance_id']
            if data['resourceinstance_id'] == '':
                data['resourceinstance_id'] = uuid.uuid4()
            try:
                models.ResourceInstance.objects.get(
                    pk=data['resourceinstance_id'])
            except ObjectDoesNotExist:
                # First tile for a brand-new resource: create the instance,
                # deriving its graph from the tile's nodegroup.
                resource = Resource()
                graphid = models.Node.objects.filter(
                    nodegroup=data['nodegroup_id'])[0].graph_id
                resource.graph_id = graphid
                try:
                    resource.save(user=request.user)
                    data['resourceinstance_id'] = resource.pk
                    resource.index()
                except ModelInactiveError as e:
                    message = _(
                        'Unable to save. Please verify the model status is active'
                    )
                    return JSONResponse(
                        {
                            'status': 'false',
                            'message': [_(e.title), _(str(message))]
                        },
                        status=500)
            tile_id = data['tileid']
            resource_instance = models.ResourceInstance.objects.get(
                pk=data['resourceinstance_id'])
            is_active = resource_instance.graph.isactive
            if tile_id is not None and tile_id != '':
                # Updating an existing tile: invalidate its cached resource
                # before the save, and fail cleanly if it vanished.
                try:
                    old_tile = Tile.objects.get(pk=tile_id)
                    clean_resource_cache(old_tile)
                except ObjectDoesNotExist as e:
                    return self.handle_save_error(
                        e, _('This tile is no longer available'),
                        _('It was likely deleted by another user'))
            tile = Tile(data)
            if tile.filter_by_perm(
                    request.user, 'write_nodegroup') and is_active is True:
                try:
                    with transaction.atomic():
                        try:
                            if accepted_provisional is None:
                                try:
                                    tile.save(request=request)
                                except TileValidationError as e:
                                    # If this failed tile was the resource's
                                    # only tile, remove the now-empty resource.
                                    resource_tiles = models.TileModel.objects.filter(
                                        resourceinstance=tile.resourceinstance)
                                    if resource_tiles.count() == 0:
                                        Resource.objects.get(
                                            pk=tile.resourceinstance_id
                                        ).delete(request.user)
                                    title = _(
                                        'Unable to save. Please verify your input is valid'
                                    )
                                    return self.handle_save_error(
                                        e, tile_id, title=title)
                                except ModelInactiveError as e:
                                    message = _(
                                        'Unable to save. Please verify the model status is active'
                                    )
                                    return JSONResponse(
                                        {
                                            'status': 'false',
                                            'message':
                                            [_(e.title), _(str(message))]
                                        },
                                        status=500)
                            else:
                                if accepted_provisional is not None:
                                    # A reviewer is accepting another user's
                                    # provisional edit; log who and what.
                                    provisional_editor = User.objects.get(
                                        pk=accepted_provisional_edit[
                                            "user"])
                                    prov_edit_log_details = {
                                        "user": request.user,
                                        "action": "accept edit",
                                        "edit": accepted_provisional_edit,
                                        "provisional_editor": provisional_editor
                                    }
                                    tile.save(request=request,
                                              provisional_edit_log_details=
                                              prov_edit_log_details)
                            # Echo the user's own pending provisional data back
                            # so they see their edit rather than the approved data.
                            if tile.provisionaledits is not None and str(
                                    request.user.id
                            ) in tile.provisionaledits:
                                tile.data = tile.provisionaledits[str(
                                    request.user.id)]['value']
                        except Exception as e:
                            return self.handle_save_error(e, tile_id)
                        tile.after_update_all()
                        clean_resource_cache(tile)
                        update_system_settings_cache(tile)
                except Exception as e:
                    return self.handle_save_error(e, tile_id)
                return JSONResponse(tile)
            elif is_active is False:
                response = {
                    'status': 'false',
                    'message': [
                        _('Request Failed'),
                        _('Unable to Save. Verify model status is active')
                    ]
                }
                return JSONResponse(response, status=500)
            else:
                response = {
                    'status': 'false',
                    'message': [_('Request Failed'), _('Permission Denied')]
                }
                return JSONResponse(response, status=500)

    if self.action == 'reorder_tiles':
        json = request.body
        if json is not None:
            data = JSONDeserializer().deserialize(json)
            if 'tiles' in data and len(data['tiles']) > 0:
                sortorder = 0
                with transaction.atomic():
                    for tile in data['tiles']:
                        t = Tile(tile)
                        # Only tiles the user may write get renumbered.
                        if t.filter_by_perm(request.user, 'write_nodegroup'):
                            t.sortorder = sortorder
                            t.save(update_fields=['sortorder'], request=request)
                            sortorder = sortorder + 1
                return JSONResponse(data)

    if self.action == 'delete_provisional_tile':
        user = request.POST.get('user', None)
        tileid = request.POST.get('tileid', None)
        users = request.POST.get('users', None)
        tile = Tile.objects.get(tileid=tileid)
        is_provisional = tile.is_provisional()
        if tileid is not None and user is not None:
            provisionaledits = self.delete_provisional_edit(
                tile, user, request)
        elif tileid is not None and users is not None:
            users = jsonparser.loads(users)
            for user in users:
                self.delete_provisional_edit(tile, user, request)
        # 'delete' signals the whole tile was provisional and is now gone;
        # 'success' signals only the provisional edits were removed.
        if is_provisional == True:
            return JSONResponse({'result': 'delete'})
        else:
            return JSONResponse({'result': 'success'})

    return HttpResponseNotFound()
def migrate(settings=None):
    """
    Migrate resource graphs to a new node schema, driven by '*_altered_nodes.csv'
    files found in settings.ADDITIONAL_RESOURCE_GRAPH_LOCATIONS.

    Each CSV row maps an old entity type to a new one, grouped under an old/new
    group-root node pair. For every entity carrying an old group root, a new
    subgraph is built per migration step, merged at the new group root, and the
    merged graph is merged back into the owning resource, which is then trimmed
    and saved. Errors are accumulated and printed at the end.
    """
    if not settings:
        from django.conf import settings

    suffix = '_altered_nodes.csv'
    errors = []

    for path in settings.ADDITIONAL_RESOURCE_GRAPH_LOCATIONS:
        if os.path.exists(path):
            print '\nLOADING NODE MIGRATION INFO (%s)' % (path)
            print '--------------'
            for f in listdir(path):
                if isfile(join(path, f)) and f.endswith(suffix):
                    path_to_file = join(path, f)
                    # Strip the 18-character suffix to get the base path.
                    basepath = path_to_file[:-18]
                    name = basepath.split(os.sep)[-1]
                    migrations = get_list_dict(
                        basepath + '_altered_nodes.csv', [
                            'OLDENTITYTYPEID', 'NEWENTITYTYPEID',
                            'GROUPROOTNODEOLD', 'GROUPROOTNODENEW'
                        ])

                    # Identify nodes which must be migrated
                    resource_entity_type = 'HERITAGE_RESOURCE_GROUP.E27'
                    mapping_schema = Entity.get_mapping_schema(
                        resource_entity_type)

                    # group migrations by groupRootNodeNew
                    groups = groupby(
                        migrations,
                        lambda x: (x['GROUPROOTNODEOLD'], x['GROUPROOTNODENEW']))

                    for group_root_node_ids, group_migrations in groups:
                        # Convert group_migrations to a list as we need to iterate it multiple times
                        group_migrations_list = []
                        for group_migration in group_migrations:
                            group_migrations_list.append(group_migration)

                        group_root_node_id = group_root_node_ids[0]
                        new_group_root_node_id = group_root_node_ids[1]

                        # Find all entities with the old group root node
                        group_root_entities = models.Entities.objects.filter(
                            entitytypeid=group_root_node_id)
                        print "ENTITIES COUNT: ", group_root_entities.count()
                        for group_root_entity_model in group_root_entities.iterator(
                        ):
                            # Create a new subgraph for each of the migration steps, then merge them together at the group root node
                            # get full resource graph for the root entity
                            try:
                                group_root_entity = Entity(
                                    group_root_entity_model.pk)
                            except:
                                print "Faulty group entity's ID %s and entitytype %s" % (
                                    group_root_entity_model.pk,
                                    group_root_entity_model.entitytypeid)
                                continue
                            new_group_root_entity = Entity(
                            ).create_from_mapping(
                                resource_entity_type,
                                mapping_schema[new_group_root_node_id]
                                ['steps'], new_group_root_node_id, '')

                            if group_migrations_list[0][
                                    'NEWENTITYTYPEID'] != new_group_root_node_id:
                                # create a node for the new group root
                                group_root_is_new_data_node = False
                            else:
                                group_root_is_new_data_node = True

                            # get the root resource graph for this entity
                            resource_model = get_resource_for_entity(
                                group_root_entity, resource_entity_type)
                            if not resource_model:
                                continue
                            resource = Resource().get(resource_model.entityid)

                            for group_migration in group_migrations_list:
                                # get individual entities to be migrated in the source group
                                old_entities = group_root_entity.find_entities_by_type_id(
                                    group_migration['OLDENTITYTYPEID'])
                                for old_entity in old_entities:
                                    date_on = False
                                    # Create the corresponding entity in the new schema
                                    new_entity = Entity()
                                    # Disturbance dates need to be mapped to different nodes depending on the value of the now obsolete DISTURBANCE_DATE_TYPE.E55
                                    if group_migration['OLDENTITYTYPEID'] in [
                                            'DISTURBANCE_DATE_END.E49',
                                            'DISTURBANCE_DATE_START.E49'
                                    ]:
                                        date_type_node = group_root_entity.find_entities_by_type_id(
                                            'DISTURBANCE_DATE_TYPE.E55')
                                        if date_type_node:
                                            if date_type_node[
                                                    0].label == 'Occurred before':
                                                new_entity_type_id = 'DISTURBANCE_DATE_OCCURRED_BEFORE.E61'
                                            elif date_type_node[
                                                    0].label == 'Occurred on':
                                                # 'Occurred on' start dates are dropped (date_on)
                                                # rather than migrated.
                                                if group_migration[
                                                        'OLDENTITYTYPEID'] == 'DISTURBANCE_DATE_START.E49':
                                                    date_on = True
                                                else:
                                                    new_entity_type_id = 'DISTURBANCE_DATE_OCCURRED_ON.E61'
                                            else:
                                                new_entity_type_id = group_migration[
                                                    'NEWENTITYTYPEID']
                                    else:
                                        new_entity_type_id = group_migration[
                                            'NEWENTITYTYPEID']

                                    old_value = old_entity.value
                                    if old_entity.businesstablename == 'domains':
                                        # in some cases we move from domains to strings.
                                        newEntityType = models.EntityTypes.objects.get(
                                            entitytypeid=new_entity_type_id)
                                        if newEntityType.businesstablename == 'strings':
                                            old_value = old_entity.label

                                    if not date_on:
                                        new_entity.create_from_mapping(
                                            resource_entity_type,
                                            mapping_schema[new_entity_type_id]
                                            ['steps'], new_entity_type_id,
                                            old_value)

                                    # In some cases a newly created data node is the new group root.
                                    # In this case we should discard the previously created new group
                                    # root and use this one instead.
                                    if new_group_root_node_id == new_entity_type_id:
                                        new_group_root_entity = new_entity
                                        group_root_is_new_data_node = True

                                    # UNUSED
                                    # # If there is a node to be inserted, do it here
                                    # # if 'INSERT_NODE_RULE' in group_migration:
                                    # #     entityttypeid_to_insert = group_migration['INSERT_NODE_RULE'][1][1]
                                    # #     value_to_insert = group_migration['INSERT_NODE_RULE'][1][2]
                                    # #
                                    # #     inserted_entity = Entity()
                                    # #     inserted_entity.create_from_mapping(resource_entity_type, mapping_schema[entityttypeid_to_insert]['steps'], entityttypeid_to_insert, value_to_insert)
                                    # #
                                    # #     new_entity.merge(inserted_entity)

                                    # If there is a node in common with the existing node further down the chain than the group root node, merge there
                                    # follow links back from the parent
                                    shouldnt_merge_with_group_root = group_root_is_new_data_node and new_group_root_node_id == new_entity_type_id
                                    if not shouldnt_merge_with_group_root:
                                        has_merged = False
                                        reversed_steps = mapping_schema[
                                            new_entity_type_id]['steps'][::-1]
                                        for step in reversed_steps:
                                            # find the entitytypedomain in the new_group_root_entity
                                            if not has_merged:
                                                mergeable_nodes = new_group_root_entity.find_entities_by_type_id(
                                                    step['entitytypedomain'])
                                                if len(mergeable_nodes) > 0:
                                                    new_group_root_entity.merge_at(
                                                        new_entity, step[
                                                            'entitytypedomain'])
                                                    has_merged = True
                                                    new_entity = None
                                                    # gc.collect()
                                        if not has_merged:
                                            logging.warning(
                                                "Unable to merge newly created entity"
                                            )

                            # merge the new group root entity into the resource
                            resource.merge_at(new_group_root_entity,
                                              resource_entity_type)
                            logging.warning("SAVING RESOURCE, %s", resource)

                            # save the resource
                            resource.trim()
                            try:
                                resource._save()
                                resource = None
                            except Exception as e:
                                logging.warning("Error saving resource")
                                logging.warning(e)
                                errors.append("Error saving %s. Error was %s" % (resource, e))
                            group_root_entity.clear()
                            group_root_entity = None
                            new_group_root_entity.clear()
                            new_group_root_entity = None
                        # end for group root
                        # resource.index()
                        # logging.warning("SAVED RESOURCE, %s", resource)

    utils.write_to_file(
        os.path.join(settings.PACKAGE_ROOT, 'logs',
                     'migration_errors.txt'), '')
    if len(errors) > 0:
        # utils.write_to_file(os.path.join(settings.PACKAGE_ROOT, 'logs', 'migration_errors.txt'), '\n'.join(errors))
        print "\n\nERROR: There were errors migrating some resources. See below"
        print errors
def get(self, request, graphid=None, resourceid=None, view_template='views/resource/editor.htm', main_script='views/resource/editor', nav_menu=True):
    """
    Render the resource editor page for an existing or new resource instance.

    If `resourceid` matches a saved Resource, that instance (and its graph)
    is edited; otherwise a transient, unsaved Resource shell is built so a
    new instance can be created against `graphid` on first save.  Returns
    the rendered editor template, a search-manager fragment for AJAX search
    requests, or 404 when no resourceid was supplied.
    """
    # The 'copy' action is dispatched to a dedicated handler instead of rendering.
    if self.action == 'copy':
        return self.copy(request, resourceid)
    resource_instance_exists = False
    try:
        resource_instance = Resource.objects.get(pk=resourceid)
        resource_instance_exists = True
        # An existing instance dictates the graph, overriding the URL argument.
        graphid = resource_instance.graph_id
    except ObjectDoesNotExist:
        # New (unsaved) instance shell: keep the requested ids so the editor
        # can create the instance when the user first saves.
        resource_instance = Resource()
        resource_instance.resourceinstanceid = resourceid
        resource_instance.graph_id = graphid
    if resourceid is not None:
        # AJAX search requests get only the embedded search-manager markup.
        if request.is_ajax() and request.GET.get('search') == 'true':
            html = render_to_string('views/search/search-base-manager.htm', {}, request)
            return HttpResponse(html)
        # All active resource graphs except the system-settings graph.
        resource_graphs = models.GraphModel.objects.exclude(pk=settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID).exclude(isresource=False).exclude(isactive=False)
        graph = Graph.objects.get(graphid=graphid)
        relationship_type_values = get_resource_relationship_types()
        form = Form(resource_instance.pk)
        datatypes = models.DDataType.objects.all()
        widgets = models.Widget.objects.all()
        map_layers = models.MapLayer.objects.all()
        map_markers = models.MapMarker.objects.all()
        map_sources = models.MapSource.objects.all()
        geocoding_providers = models.Geocoder.objects.all()
        forms = graph.form_set.filter(visible=True)
        forms_x_cards = models.FormXCard.objects.filter(form__in=forms)
        forms_w_cards = []
        required_widgets = []  # NOTE(review): never populated or read below
        # Expose only forms whose card nodegroup the user may read.
        for form_x_card in forms_x_cards:
            if request.user.has_perm('read_nodegroup', form_x_card.card.nodegroup):
                forms_w_cards.append(form_x_card.form)
        # Restrict widgets to the datatypes actually present in this graph.
        widget_datatypes = [v.datatype for k, v in graph.nodes.iteritems()]
        widgets = widgets.filter(datatype__in=widget_datatypes)
        if resource_instance_exists == True:
            displayname = Resource.objects.get(pk=resourceid).displayname
            # The display function can yield the literal string 'undefined'.
            if displayname == 'undefined':
                displayname = 'Unnamed Resource'
        else:
            displayname = 'Unnamed Resource'
        date_nodes = models.Node.objects.filter(datatype='date', graph__isresource=True, graph__isactive=True)
        searchable_datatypes = [d.pk for d in models.DDataType.objects.filter(issearchable=True)]
        searchable_nodes = models.Node.objects.filter(graph__isresource=True, graph__isactive=True, datatype__in=searchable_datatypes, issearchable=True)
        resource_cards = models.CardModel.objects.filter(graph__isresource=True, graph__isactive=True)
        # Assemble everything the editor template and its knockout viewmodels need.
        context = self.get_context_data(
            main_script=main_script,
            resource_type=graph.name,
            relationship_types=relationship_type_values,
            iconclass=graph.iconclass,
            form=JSONSerializer().serialize(form),
            forms=JSONSerializer().serialize(forms_w_cards),
            datatypes_json=JSONSerializer().serialize(datatypes, exclude=['iconclass', 'modulename', 'classname']),
            datatypes=datatypes,
            widgets=widgets,
            date_nodes=date_nodes,
            map_layers=map_layers,
            map_markers=map_markers,
            map_sources=map_sources,
            geocoding_providers = geocoding_providers,
            widgets_json=JSONSerializer().serialize(widgets),
            resourceid=resourceid,
            resource_graphs=resource_graphs,
            graph_json=JSONSerializer().serialize(graph, exclude=['iconclass', 'functions', 'functions_x_graphs', 'name', 'description', 'deploymentfile', 'author', 'deploymentdate', 'version', 'isresource', 'isactive', 'iconclass', 'ontology']),
            displayname=displayname,
            resource_cards=JSONSerializer().serialize(resource_cards, exclude=['description','instructions','active','isvisible']),
            searchable_nodes=JSONSerializer().serialize(searchable_nodes, exclude=['description', 'ontologyclass','isrequired', 'issearchable', 'istopnode']),
            saved_searches=JSONSerializer().serialize(settings.SAVED_SEARCHES),
            resource_instance_exists=resource_instance_exists,
            user_is_reviewer=json.dumps(request.user.groups.filter(name='Resource Reviewer').exists()),
            active_report_count = models.Report.objects.filter(graph_id=resource_instance.graph_id, active=True).count(),
            userid=request.user.id
        )
        if graph.iconclass:
            context['nav']['icon'] = graph.iconclass
        context['nav']['title'] = graph.name
        context['nav']['menu'] = nav_menu
        # The system-settings resource gets its own contextual help content.
        if resourceid == settings.RESOURCE_INSTANCE_ID:
            context['nav']['help'] = (_('Managing System Settings'),'help/base-help.htm')
            context['help'] = 'system-settings-help'
        else:
            context['nav']['help'] = (_('Using the Resource Editor'),'help/base-help.htm')
            context['help'] = 'resource-editor-help'
        return render(request, view_template, context)
    return HttpResponseNotFound()
def convert_resource(resourceid, target_entitytypeid):
    """
    Change an existing resource's entity type to `target_entitytypeid`,
    remapping the rules on its first-level relations and refreshing the
    search index entry for the resource.
    """
    logging.warning("Loading resource: %s", resourceid)
    resource = Resource()
    resource.get(resourceid)

    resource_model = models.Entities.objects.get(pk=resourceid)
    logging.warning("Found resource: %s", resource_model)

    # Relations from the root entity to its first children carry rules bound
    # to the old entity type; remap each to the equivalent rule on the new
    # type (same range and property).  Subsequent entities reuse these rules.
    for rel in models.Relations.objects.filter(entityiddomain=resourceid):
        try:
            old_rule = rel.ruleid
            rel.ruleid = models.Rules.objects.get(
                entitytypedomain=target_entitytypeid,
                entitytyperange=old_rule.entitytyperange,
                propertyid=old_rule.propertyid)
            rel.save()
        except Exception as err:
            logging.warning("Unable to convert rule for relation: %s to %s", rel.ruleid.entitytypedomain, rel.ruleid.entitytyperange)
            logging.warning("error: %s", err)

    logging.info("Converted resource %s to %s", resource.entityid, target_entitytypeid)

    # Drop the stale index document, retype, persist, then reindex.
    resource.delete_index()
    resource.entitytypeid = target_entitytypeid
    resource.save()
    resource.index()
def test_set_entity_value(self):
    """Round-trip a resource through save/get and set a nested child value."""
    seed = {
        "entityid": "",
        "entitytypeid": "CAR.E1",
        "value": "",
        "property": "P1",
        "child_entities": []
    }

    created = Resource(seed)
    created.save()
    # Saving must assign a fresh entityid rather than keep the empty seed id.
    self.assertNotEqual(seed['entityid'], created.entityid)

    reloaded = Resource().get(created.entityid)
    reloaded.set_entity_value('HORSEPOWER.E1', '300')
    reloaded.save()

    # The value should land three levels deep in the child-entity graph.
    final = Resource().get(reloaded.entityid)
    self.assertEqual(
        int(final.child_entities[0].child_entities[0].child_entities[0].value),
        300)
def post(self, request, graphid=None):
    """
    Create resource instances (and their tiles) from uploaded, parsed file rows.

    Expects a JSON body containing 'file_data' and
    'column_name_to_node_data_map'; returns the file data annotated with
    the created resource ids, or a 500 response on any failure.
    """
    try:
        body = json.loads(request.body)
        file_data = body['file_data']
        column_name_to_node_data_map = body['column_name_to_node_data_map']

        # Group the mapped node ids by their owning nodegroup.
        nodegroup_data = {}
        for node_data in column_name_to_node_data_map.values():
            nodegroup_id = node_data.get('nodegroup_id')
            if nodegroup_id:
                nodegroup_data.setdefault(nodegroup_id, []).append(node_data['node_id'])

        for file_datum in file_data:
            for row_data in file_datum['data']:
                # One new resource instance per data row.
                resource_instance = Resource(graph_id=graphid)
                resource_instance.save()

                parsed_data = row_data['parsed_data']
                # Collect per-nodegroup tile payloads from the parsed row.
                tile_data = {
                    group_id: {node_id: parsed_data.get(node_id) for node_id in node_ids}
                    for group_id, node_ids in nodegroup_data.items()
                }

                # Persist one tile per nodegroup.
                for group_id, group_values in tile_data.items():
                    TileProxyModel(
                        data=group_values,
                        resourceinstance=resource_instance,
                        nodegroup_id=group_id,
                    ).save()

                file_datum['created_resources'][row_data['row_id']] = {
                    'resourceinstance_id': str(resource_instance.pk),
                    'row_id': row_data['row_id'],
                    'tile_data': tile_data,
                }

        return JSONResponse({
            'file_data': file_data,
        }, status=200)
    except Exception as e:
        if settings.DEBUG is True:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            formatted = traceback.format_exception(exc_type, exc_value, exc_traceback)
            if len(formatted):
                for message in formatted:
                    print(message)
        return JSONResponse(
            {"error": "resource data could not be saved: %s" % e},
            status=500,
            reason=e)
def resource_manager(request, resourcetypeid='', form_id='default', resourceid=''):
    """
    View for editing a single resource through its configured forms.

    GET renders the requested form; POST saves submitted form data and
    reindexes the resource; DELETE removes the resource, its search-index
    documents, and its relationships.

    Arguments:
        resourcetypeid -- entity type id used when creating a new resource
        form_id -- form to display; 'default' picks the first form of the
                   first form group
        resourceid -- id of an existing resource to edit ('' when creating)
    """
    if resourceid != '':
        resource = Resource(resourceid)
    elif resourcetypeid != '':
        resource = Resource({'entitytypeid': resourcetypeid})

    if form_id == 'default':
        form_id = resource.form_groups[0]['forms'][0]['id']

    form = resource.get_form(form_id)

    if request.method == 'DELETE':
        # Remove the resource's index document, then delete each relationship
        # (both its index document and its database row) before deleting the
        # resource itself.
        resource.delete_index()
        se = SearchEngineFactory().create()
        relationships = resource.get_related_resources(return_entities=False)
        for relationship in relationships:
            se.delete(index='resource_relations', doc_type='all', id=relationship.resourcexid)
            relationship.delete()
        resource.delete()
        return JSONResponse({'success': True})

    if request.method == 'POST':
        data = JSONDeserializer().deserialize(request.POST.get('formdata', {}))
        form.update(data, request.FILES)
        with transaction.atomic():
            # Existing resources are dropped from the index first so the
            # re-index below replaces rather than duplicates the document.
            if resourceid != '':
                resource.delete_index()
            resource.save(user=request.user)
            resource.index()
            resourceid = resource.entityid
            return redirect('resource_manager', resourcetypeid=resourcetypeid, form_id=form_id, resourceid=resourceid)

    min_max_dates = models.Dates.objects.aggregate(Min('val'), Max('val'))

    if request.method == 'GET':
        if form != None:
            lang = request.GET.get('lang', settings.LANGUAGE_CODE)
            form.load(lang)
            return render(
                request, 'resource-manager.htm', {
                    'form': form,
                    'formdata': JSONSerializer().serialize(form.data),
                    'form_template': 'views/forms/' + form_id + '.htm',
                    'form_id': form_id,
                    'resourcetypeid': resourcetypeid,
                    'resourceid': resourceid,
                    'main_script': 'resource-manager',
                    'active_page': 'ResourceManger',
                    'resource': resource,
                    'resource_name': resource.get_primary_name(),
                    'resource_type_name': resource.get_type_name(),
                    'form_groups': resource.form_groups,
                    'min_date': min_max_dates['val__min'].year if min_max_dates['val__min'] != None else 0,
                    # BUG FIX: this previously gated on val__min, which would
                    # raise AttributeError whenever only the max aggregate
                    # came back None; it must check val__max itself.
                    'max_date': min_max_dates['val__max'].year if min_max_dates['val__max'] != None else 1,
                    'timefilterdata': JSONSerializer().serialize(Concept.get_time_filter_data()),
                })
        else:
            return HttpResponseNotFound('<h1>Arches form not found.</h1>')
def setUpClass(cls):
    """
    Build shared search-test fixtures: import the test resource model,
    create a restricted test user, post a concept with min/max year notes,
    and save one resource carrying name, cultural period, creation date and
    geometry tiles.
    """
    # Start from a clean slate of resource instances.
    models.ResourceInstance.objects.all().delete()
    cls.client = Client()
    cls.client.login(username="******", password="******")
    # Import the 'Resource Test Model' graph fixture.
    with open(
            os.path.join(
                "tests/fixtures/resource_graphs/Resource Test Model.json"),
            "rU") as f:
        archesfile = JSONDeserializer().deserialize(f)
        resource_graph_importer(archesfile["graph"])
    # Node ids from the imported test model, referenced by the tests below.
    cls.search_model_graphid = "c9b37a14-17b3-11eb-a708-acde48001122"
    cls.search_model_cultural_period_nodeid = "c9b3882e-17b3-11eb-a708-acde48001122"
    cls.search_model_creation_date_nodeid = "c9b38568-17b3-11eb-a708-acde48001122"
    cls.search_model_destruction_date_nodeid = "c9b3828e-17b3-11eb-a708-acde48001122"
    cls.search_model_name_nodeid = "c9b37b7c-17b3-11eb-a708-acde48001122"
    cls.search_model_sensitive_info_nodeid = "c9b38aea-17b3-11eb-a708-acde48001122"
    cls.search_model_geom_nodeid = "c9b37f96-17b3-11eb-a708-acde48001122"
    # A 'Guest' user who is explicitly denied access to the destruction-date
    # nodegroup, for permission-filtering tests.
    cls.user = User.objects.create_user("test", "*****@*****.**", "password")
    cls.user.groups.add(Group.objects.get(name="Guest"))
    nodegroup = models.NodeGroup.objects.get(
        pk=cls.search_model_destruction_date_nodeid)
    assign_perm("no_access_to_nodegroup", cls.user, nodegroup)
    # Add a concept that defines a min and max date
    concept = {
        "id": "00000000-0000-0000-0000-000000000001",
        "legacyoid": "ARCHES",
        "nodetype": "ConceptScheme",
        "values": [],
        "subconcepts": [{
            "values": [
                {
                    "value": "Mock concept",
                    "language": "en-US",
                    "category": "label",
                    "type": "prefLabel",
                    "id": "",
                    "conceptid": ""
                },
                {
                    "value": "1950",
                    "language": "en-US",
                    "category": "note",
                    "type": "min_year",
                    "id": "",
                    "conceptid": ""
                },
                {
                    "value": "1980",
                    "language": "en-US",
                    "category": "note",
                    "type": "max_year",
                    "id": "",
                    "conceptid": ""
                },
            ],
            "relationshiptype": "hasTopConcept",
            "nodetype": "Concept",
            "id": "",
            "legacyoid": "",
            "subconcepts": [],
            "parentconcepts": [],
            "relatedconcepts": [],
        }],
    }
    post_data = JSONSerializer().serialize(concept)
    content_type = "application/x-www-form-urlencoded"
    response = cls.client.post(
        reverse(
            "concept",
            kwargs={"conceptid": "00000000-0000-0000-0000-000000000001"}),
        post_data, content_type)
    response_json = json.loads(response.content)
    # The server assigns ids on save; pull them back out of the response.
    valueid = response_json["subconcepts"][0]["values"][0]["id"]
    cls.conceptid = response_json["subconcepts"][0]["id"]
    # Add resource with Name, Cultural Period, Creation Date and Geometry
    cls.test_resource = Resource(graph_id=cls.search_model_graphid)
    # Add Name
    tile = Tile(data={cls.search_model_name_nodeid: "Test Name 1"},
                nodegroup_id=cls.search_model_name_nodeid)
    cls.test_resource.tiles.append(tile)
    # Add Cultural Period
    tile = Tile(data={cls.search_model_cultural_period_nodeid: [valueid]},
                nodegroup_id=cls.search_model_cultural_period_nodeid)
    cls.test_resource.tiles.append(tile)
    # Add Creation Date
    tile = Tile(data={cls.search_model_creation_date_nodeid: "1941-01-01"},
                nodegroup_id=cls.search_model_creation_date_nodeid)
    cls.test_resource.tiles.append(tile)
    # Add Gometry
    cls.geom = {
        "type": "FeatureCollection",
        "features": [{
            "geometry": {
                "type": "Point",
                "coordinates": [0, 0]
            },
            "type": "Feature",
            "properties": {}
        }],
    }
    tile = Tile(data={cls.search_model_geom_nodeid: cls.geom},
                nodegroup_id=cls.search_model_geom_nodeid)
    cls.test_resource.tiles.append(tile)
    cls.test_resource.save()
    # add delay to allow for indexes to be updated
    time.sleep(1)
def get(self, request, graphid=None, resourceid=None, view_template='views/resource/editor.htm', main_script='views/resource/editor', nav_menu=True):
    """
    Render the resource editor page.

    NOTE(review): this view appears to duplicate another editor `get`
    implementation elsewhere in this file — consider consolidating.

    If `resourceid` matches a saved Resource, that instance (and its graph)
    is edited; otherwise a transient, unsaved Resource shell is built so a
    new instance can be created against `graphid` on first save.  Returns
    the rendered editor template, a search-manager fragment for AJAX
    search requests, or 404 when no resourceid was supplied.
    """
    # 'copy' is handled by a dedicated action handler, not by rendering.
    if self.action == 'copy':
        return self.copy(request, resourceid)
    resource_instance_exists = False
    try:
        resource_instance = Resource.objects.get(pk=resourceid)
        resource_instance_exists = True
        # The stored instance's graph wins over the URL-supplied graphid.
        graphid = resource_instance.graph_id
    except ObjectDoesNotExist:
        # Unsaved shell for a brand-new instance; created on first save.
        resource_instance = Resource()
        resource_instance.resourceinstanceid = resourceid
        resource_instance.graph_id = graphid
    if resourceid is not None:
        # AJAX search requests receive only the search-manager fragment.
        if request.is_ajax() and request.GET.get('search') == 'true':
            html = render_to_string('views/search/search-base-manager.htm',
                                    {}, request)
            return HttpResponse(html)
        # All active resource graphs, excluding the system-settings graph.
        resource_graphs = models.GraphModel.objects.exclude(
            pk=settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID).exclude(
                isresource=False).exclude(isactive=False)
        graph = Graph.objects.get(graphid=graphid)
        relationship_type_values = get_resource_relationship_types()
        form = Form(resource_instance.pk)
        datatypes = models.DDataType.objects.all()
        widgets = models.Widget.objects.all()
        map_layers = models.MapLayer.objects.all()
        map_markers = models.MapMarker.objects.all()
        map_sources = models.MapSource.objects.all()
        geocoding_providers = models.Geocoder.objects.all()
        forms = graph.form_set.filter(visible=True)
        forms_x_cards = models.FormXCard.objects.filter(form__in=forms)
        forms_w_cards = []
        required_widgets = []  # NOTE(review): never populated or read below
        # Keep only forms whose card nodegroup is readable by this user.
        for form_x_card in forms_x_cards:
            if request.user.has_perm('read_nodegroup',
                                     form_x_card.card.nodegroup):
                forms_w_cards.append(form_x_card.form)
        # Limit widgets to datatypes actually used by this graph's nodes.
        widget_datatypes = [v.datatype for k, v in graph.nodes.iteritems()]
        widgets = widgets.filter(datatype__in=widget_datatypes)
        if resource_instance_exists == True:
            displayname = Resource.objects.get(pk=resourceid).displayname
            # The display function can yield the literal string 'undefined'.
            if displayname == 'undefined':
                displayname = 'Unnamed Resource'
        else:
            displayname = 'Unnamed Resource'
        date_nodes = models.Node.objects.filter(datatype='date',
                                                graph__isresource=True,
                                                graph__isactive=True)
        searchable_datatypes = [
            d.pk for d in models.DDataType.objects.filter(issearchable=True)
        ]
        searchable_nodes = models.Node.objects.filter(
            graph__isresource=True,
            graph__isactive=True,
            datatype__in=searchable_datatypes,
            issearchable=True)
        resource_cards = models.CardModel.objects.filter(
            graph__isresource=True, graph__isactive=True)
        # Everything the editor template and its viewmodels consume.
        context = self.get_context_data(
            main_script=main_script,
            resource_type=graph.name,
            relationship_types=relationship_type_values,
            iconclass=graph.iconclass,
            form=JSONSerializer().serialize(form),
            forms=JSONSerializer().serialize(forms_w_cards),
            datatypes_json=JSONSerializer().serialize(
                datatypes,
                exclude=['iconclass', 'modulename', 'classname']),
            datatypes=datatypes,
            widgets=widgets,
            date_nodes=date_nodes,
            map_layers=map_layers,
            map_markers=map_markers,
            map_sources=map_sources,
            geocoding_providers=geocoding_providers,
            widgets_json=JSONSerializer().serialize(widgets),
            resourceid=resourceid,
            resource_graphs=resource_graphs,
            graph_json=JSONSerializer().serialize(
                graph,
                exclude=[
                    'iconclass', 'functions', 'functions_x_graphs', 'name',
                    'description', 'deploymentfile', 'author',
                    'deploymentdate', 'version', 'isresource', 'isactive',
                    'iconclass', 'ontology'
                ]),
            displayname=displayname,
            resource_cards=JSONSerializer().serialize(resource_cards,
                                                      exclude=[
                                                          'description',
                                                          'instructions',
                                                          'active',
                                                          'isvisible'
                                                      ]),
            searchable_nodes=JSONSerializer().serialize(
                searchable_nodes,
                exclude=[
                    'description', 'ontologyclass', 'isrequired',
                    'issearchable', 'istopnode'
                ]),
            saved_searches=JSONSerializer().serialize(
                settings.SAVED_SEARCHES),
            resource_instance_exists=resource_instance_exists,
            user_is_reviewer=json.dumps(
                request.user.groups.filter(
                    name='Resource Reviewer').exists()),
            active_report_count=models.Report.objects.filter(
                graph_id=resource_instance.graph_id, active=True).count(),
            userid=request.user.id)
        if graph.iconclass:
            context['nav']['icon'] = graph.iconclass
        context['nav']['title'] = graph.name
        context['nav']['menu'] = nav_menu
        # The system-settings resource swaps in its own help content.
        if resourceid == settings.RESOURCE_INSTANCE_ID:
            context['nav']['help'] = (_('Managing System Settings'),
                                      'help/base-help.htm')
            context['help'] = 'system-settings-help'
        else:
            context['nav']['help'] = (_('Using the Resource Editor'),
                                      'help/base-help.htm')
            context['help'] = 'resource-editor-help'
        return render(request, view_template, context)
    return HttpResponseNotFound()
def post(self, request):
    """
    Dispatch tile POST actions: 'update_tile' (create/update a tile, creating
    its resource instance if needed), 'reorder_tiles' (persist a new sort
    order), and 'delete_provisional_tile' (remove provisional edits).
    Returns a JSONResponse on success/failure, or 404 for unknown actions.
    """
    if self.action == "update_tile":
        # NOTE: this local deliberately shadows the stdlib `json` module
        # within this method.
        json = request.POST.get("data", None)
        accepted_provisional = request.POST.get("accepted_provisional",
                                                None)
        if accepted_provisional is not None:
            accepted_provisional_edit = JSONDeserializer().deserialize(
                accepted_provisional)
        if json is not None:
            data = JSONDeserializer().deserialize(json)
            # Normalize a missing resourceinstance_id to '', then mint a
            # new UUID so a fresh resource can be created below.
            data[
                "resourceinstance_id"] = "" if "resourceinstance_id" not in data else data[
                    "resourceinstance_id"]
            if data["resourceinstance_id"] == "":
                data["resourceinstance_id"] = uuid.uuid4()
            try:
                models.ResourceInstance.objects.get(
                    pk=data["resourceinstance_id"])
            except ObjectDoesNotExist:
                # No instance yet: create one, deriving its graph from the
                # tile's nodegroup.
                try:
                    resource = Resource(
                        uuid.UUID(str(data["resourceinstance_id"])))
                except ValueError:
                    # Not a valid UUID; let Resource assign its own id.
                    resource = Resource()
                graphid = models.Node.objects.filter(
                    nodegroup=data["nodegroup_id"])[0].graph_id
                resource.graph_id = graphid
                try:
                    resource.save(user=request.user)
                    data["resourceinstance_id"] = resource.pk
                    resource.index()
                except ModelInactiveError as e:
                    message = _(
                        "Unable to save. Please verify the model status is active"
                    )
                    return JSONResponse(
                        {
                            "status": "false",
                            "message": [_(e.title), _(str(message))]
                        },
                        status=500)
            tile_id = data["tileid"]
            resource_instance = models.ResourceInstance.objects.get(
                pk=data["resourceinstance_id"])
            is_active = resource_instance.graph.isactive
            if tile_id is not None and tile_id != "":
                # Existence check only: fail fast if the tile was deleted
                # (e.g. by another user) since the client loaded it.
                try:
                    old_tile = Tile.objects.get(pk=tile_id)
                except ObjectDoesNotExist as e:
                    return self.handle_save_error(
                        e, _("This tile is no longer available"),
                        _("It was likely deleted by another user"))
            tile = Tile(data)
            if tile.filter_by_perm(
                    request.user, "write_nodegroup") and is_active is True:
                try:
                    with transaction.atomic():
                        try:
                            if accepted_provisional is None:
                                try:
                                    tile.save(request=request)
                                except TileValidationError as e:
                                    # If validation failed on the only tile
                                    # of a freshly created resource, delete
                                    # the orphaned resource.
                                    resource_tiles = models.TileModel.objects.filter(
                                        resourceinstance=tile.
                                        resourceinstance)
                                    if resource_tiles.count() == 0:
                                        Resource.objects.get(
                                            pk=tile.resourceinstance_id
                                        ).delete(request.user)
                                    title = _(
                                        "Unable to save. Please verify your input is valid"
                                    )
                                    return self.handle_save_error(
                                        e, tile_id, title=title)
                                except ModelInactiveError as e:
                                    message = _(
                                        "Unable to save. Please verify the model status is active"
                                    )
                                    return JSONResponse(
                                        {
                                            "status": "false",
                                            "message":
                                            [_(e.title), _(str(message))]
                                        },
                                        status=500)
                            else:
                                # Reviewer is accepting a provisional edit:
                                # record who made it in the edit log.
                                # NOTE(review): this inner check is redundant
                                # (the else branch already implies it).
                                if accepted_provisional is not None:
                                    provisional_editor = User.objects.get(
                                        pk=accepted_provisional_edit[
                                            "user"])
                                    prov_edit_log_details = {
                                        "user": request.user,
                                        "action": "accept edit",
                                        "edit": accepted_provisional_edit,
                                        "provisional_editor":
                                        provisional_editor,
                                    }
                                    tile.save(request=request,
                                              provisional_edit_log_details=
                                              prov_edit_log_details)
                                # Reflect the current user's provisional
                                # values back in the response payload.
                                if tile.provisionaledits is not None and str(
                                        request.user.id
                                ) in tile.provisionaledits:
                                    tile.data = tile.provisionaledits[str(
                                        request.user.id)]["value"]
                        except Exception as e:
                            return self.handle_save_error(e, tile_id)
                        tile.after_update_all()
                        update_system_settings_cache(tile)
                except Exception as e:
                    return self.handle_save_error(e, tile_id)
                return JSONResponse(tile)
            elif is_active is False:
                response = {
                    "status": "false",
                    "message": [
                        _("Request Failed"),
                        _("Unable to Save. Verify model status is active")
                    ]
                }
                return JSONResponse(response, status=500)
            else:
                return JSONErrorResponse(_("Request Failed"),
                                         _("Permission Denied"))
    if self.action == "reorder_tiles":
        json = request.body
        if json is not None:
            data = JSONDeserializer().deserialize(json)
            if "tiles" in data and len(data["tiles"]) > 0:
                sortorder = 0
                with transaction.atomic():
                    # Only tiles the user can write to take a new position;
                    # others retain their stored sortorder.
                    for tile in data["tiles"]:
                        t = Tile(tile)
                        if t.filter_by_perm(request.user,
                                            "write_nodegroup"):
                            t.sortorder = sortorder
                            t.save(update_fields=["sortorder"],
                                   request=request)
                            sortorder = sortorder + 1
                return JSONResponse(data)
    if self.action == "delete_provisional_tile":
        user = request.POST.get("user", None)
        tileid = request.POST.get("tileid", None)
        users = request.POST.get("users", None)
        tile = Tile.objects.get(tileid=tileid)
        # Captured before deletion: tells us whether the tile as a whole
        # was provisional (and hence removed entirely).
        is_provisional = tile.is_provisional()
        if tileid is not None and user is not None:
            provisionaledits = self.delete_provisional_edit(
                tile, user, request)
        elif tileid is not None and users is not None:
            users = jsonparser.loads(users)
            for user in users:
                self.delete_provisional_edit(tile, user, request)
        if is_provisional == True:
            return JSONResponse({"result": "delete"})
        else:
            return JSONResponse({"result": "success"})
    return HttpResponseNotFound()
def import_business_data(self, business_data=None, mapping=None, overwrite='append', bulk=False, create_concepts=False, create_collections=False): # errors = businessDataValidator(self.business_data) def get_display_nodes(graphid): display_nodeids = [] functions = FunctionXGraph.objects.filter(function_id='60000000-0000-0000-0000-000000000001', graph_id=graphid) for function in functions: f = function.config del f['triggering_nodegroups'] for k,v in f.iteritems(): v['node_ids'] = [] v['string_template'] = v['string_template'].replace('<', '').replace('>', '').split(', ') if v['nodegroup_id'] != '': nodes = Node.objects.filter(nodegroup_id=v['nodegroup_id']) for node in nodes: if node.name in v['string_template']: display_nodeids.append(str(node.nodeid)) for k,v in f.iteritems(): if v['string_template'] != ['']: print 'The {0} {1} in the {2} display function.'.format(', '.join(v['string_template']), 'nodes participate' if len(v['string_template']) > 1 else 'node participates', k) else: print 'No nodes participate in the {0} display function.'.format(k) return display_nodeids def process_resourceid(resourceid, overwrite): # Test if resourceid is a UUID. try: resourceinstanceid = uuid.UUID(resourceid) # If resourceid is a UUID check if it is already an arches resource. try: ret = Resource.objects.filter(resourceinstanceid=resourceid) # If resourceid is an arches resource and overwrite is true, delete the existing arches resource. if overwrite == 'overwrite': Resource.objects.get(pk=str(ret[0].resourceinstanceid)).delete() resourceinstanceid = resourceinstanceid # If resourceid is not a UUID create one. except: resourceinstanceid = resourceinstanceid except: # Get resources with the given legacyid ret = Resource.objects.filter(legacyid=resourceid) # If more than one resource is returned than make resource = None. This should never actually happen. 
if len(ret) > 1: resourceinstanceid = None # If no resource is returned with the given legacyid then create an archesid for the resource. elif len(ret) == 0: resourceinstanceid = uuid.uuid4() # If a resource is returned with the give legacyid then return its archesid else: if overwrite == 'overwrite': Resource.objects.get(pk=str(ret[0].resourceinstanceid)).delete() resourceinstanceid = ret[0].resourceinstanceid return resourceinstanceid try: with transaction.atomic(): save_count = 0 try: resourceinstanceid = process_resourceid(business_data[0]['ResourceID'], overwrite) except KeyError: print '*'*80 print 'ERROR: No column \'ResourceID\' found in business data file. Please add a \'ResourceID\' column with a unique resource identifier.' print '*'*80 sys.exit() blanktilecache = {} populated_nodegroups = {} populated_nodegroups[resourceinstanceid] = [] previous_row_resourceid = None populated_tiles = [] target_resource_model = None single_cardinality_nodegroups = [str(nodegroupid) for nodegroupid in NodeGroup.objects.values_list('nodegroupid', flat=True).filter(cardinality = '1')] node_datatypes = {str(nodeid): datatype for nodeid, datatype in Node.objects.values_list('nodeid', 'datatype').filter(~Q(datatype='semantic'), graph__isresource=True)} display_nodes = get_display_nodes(mapping['resource_model_id']) all_nodes = Node.objects.all() datatype_factory = DataTypeFactory() concepts_to_create = {} new_concepts = {} required_nodes = {} for node in Node.objects.filter(isrequired=True, graph_id=mapping['resource_model_id']).values_list('nodeid', 'name'): required_nodes[str(node[0])] = node[1] # This code can probably be moved into it's own module. resourceids = [] non_contiguous_resource_ids = [] previous_row_for_validation = None for row_number, row in enumerate(business_data): # Check contiguousness of csv file. 
if row['ResourceID'] != previous_row_for_validation and row['ResourceID'] in resourceids: non_contiguous_resource_ids.append(row['ResourceID']) else: resourceids.append(row['ResourceID']) previous_row_for_validation = row['ResourceID'] if create_concepts == True: for node in mapping['nodes']: if node['data_type'] in ['concept', 'concept-list', 'domain-value', 'domain-value-list'] and node['file_field_name'] in row.keys(): # print row[node['file_field_name']] concept = [] for val in csv.reader([row[node['file_field_name']]], delimiter=',', quotechar='"'): concept.append(val) concept = concept[0] # check if collection is in concepts_to_create, add collection to concepts_to_create if it's not and add first child concept if node['arches_nodeid'] not in concepts_to_create: concepts_to_create[node['arches_nodeid']] = {} for concept_value in concept: concepts_to_create[node['arches_nodeid']][str(uuid.uuid4())] = concept_value # if collection in concepts to create then add child concept to collection elif row[node['file_field_name']] not in concepts_to_create[node['arches_nodeid']].values(): for concept_value in concept: concepts_to_create[node['arches_nodeid']][str(uuid.uuid4())] = concept_value if len(non_contiguous_resource_ids) > 0: print '*'*80 for non_contiguous_resource_id in non_contiguous_resource_ids: print 'ResourceID: ' + non_contiguous_resource_id print 'ERROR: The preceding ResourceIDs are non-contiguous in your csv file. Please sort your csv file by ResourceID and try import again.' 
print '*'*80 sys.exit() def create_reference_data(new_concepts, create_collections): errors = [] candidates = Concept().get(id='00000000-0000-0000-0000-000000000006') for arches_nodeid, concepts in new_concepts.iteritems(): collectionid = str(uuid.uuid4()) topconceptid = str(uuid.uuid4()) node = Node.objects.get(nodeid=arches_nodeid) # if node.datatype is concept or concept-list create concepts and collections if node.datatype in ['concept', 'concept-list']: # create collection if create_collections = create, otherwise append to collection already assigned to node if create_collections == True: collection_legacyoid = node.name + '_' + str(node.graph_id) + '_import' # check to see that there is not already a collection for this node if node.config['rdmCollection'] != None: errors.append({'type': 'WARNING', 'message': 'A collection already exists for the {0} node. Use the add option to add concepts to this collection.'.format(node.name)}) if len(errors) > 0: self.errors += errors collection = None else: # if there is no collection assigned to this node, create one and assign it to the node try: # check to see that a collection with this legacyid does not already exist collection = Concept().get(legacyoid=collection_legacyoid) errors.append({'type': 'WARNING', 'message': 'A collection with the legacyid {0} already exists.'.format(node.name + '_' + str(node.graph_id) + '_import')}) if len(errors) > 0: self.errors += errors except: collection = Concept({ 'id': collectionid, 'legacyoid': collection_legacyoid, 'nodetype': 'Collection' }) collection.addvalue({'id': str(uuid.uuid4()), 'value': node.name + '_import', 'language': settings.LANGUAGE_CODE, 'type': 'prefLabel'}) node.config['rdmCollection'] = collectionid node.save() collection.save() else: # if create collection = add check that there is a collection associated with node, if no collection associated with node create a collection and associated with the node try: collection = 
Concept().get(id=node.config['rdmCollection']) except: collection = Concept({ 'id': collectionid, 'legacyoid': node.name + '_' + str(node.graph_id) + '_import', 'nodetype': 'Collection' }) collection.addvalue({'id': str(uuid.uuid4()), 'value': node.name + '_import', 'language': settings.LANGUAGE_CODE, 'type': 'prefLabel'}) node.config['rdmCollection'] = collectionid node.save() collection.save() if collection != None: topconcept_legacyoid = node.name + '_' + str(node.graph_id) # Check if top concept already exists, if not create it and add to candidates scheme try: topconcept = Concept().get(legacyoid=topconcept_legacyoid) except: topconcept = Concept({ 'id': topconceptid, 'legacyoid': topconcept_legacyoid, 'nodetype': 'Concept' }) topconcept.addvalue({'id': str(uuid.uuid4()), 'value': node.name + '_import', 'language': settings.LANGUAGE_CODE, 'type': 'prefLabel'}) topconcept.save() candidates.add_relation(topconcept, 'narrower') # create child concepts and relate to top concept and collection accordingly for conceptid, value in concepts.iteritems(): concept_legacyoid = value + '_' + node.name + '_' + str(node.graph_id) # check if concept already exists, if not create and add to topconcept and collection try: conceptid = [concept for concept in topconcept.get_child_concepts(topconcept.id) if concept[1] == value][0][0] concept = Concept().get(id=conceptid) except: concept = Concept({ 'id': conceptid, 'legacyoid': concept_legacyoid, 'nodetype': 'Concept' }) concept.addvalue({'id': str(uuid.uuid4()), 'value': value, 'language': settings.LANGUAGE_CODE, 'type': 'prefLabel'}) concept.save() collection.add_relation(concept, 'member') topconcept.add_relation(concept, 'narrower') #if node.datatype is domain or domain-list create options array in node.config elif node.datatype in ['domain-value', 'domain-value-list']: for domainid, value in new_concepts[arches_nodeid].iteritems(): # check if value already exists in domain if value not in [t['text'] for t in 
node.config['options']]: domainvalue = { "text": value, "selected": False, "id": domainid } node.config['options'].append(domainvalue) node.save() if create_concepts == True: create_reference_data(concepts_to_create, create_collections) # if concepts are created on import concept_lookup must be instatiated afterward concept_lookup = ConceptLookup() def cache(blank_tile): if blank_tile.data != {}: for key in blank_tile.data.keys(): if key not in blanktilecache: blanktilecache[str(key)] = blank_tile else: for nodegroup, tile in blank_tile.tiles.iteritems(): for key in tile[0].data.keys(): if key not in blanktilecache: blanktilecache[str(key)] = blank_tile def column_names_to_targetids(row, mapping, row_number): errors = [] new_row = [] if 'ADDITIONAL' in row or 'MISSING' in row: errors.append({'type': 'WARNING', 'message': 'No resource created for ResourceID {0}. Line {1} has additional or missing columns.'.format(row['ResourceID'], str(int(row_number.split('on line ')[1])))}) if len(errors) > 0: self.errors += errors for key, value in row.iteritems(): if value != '': for row in mapping['nodes']: if key.upper() == row['file_field_name'].upper(): new_row.append({row['arches_nodeid']: value}) return new_row def transform_value(datatype, value, source, nodeid): ''' Transforms values from probably string/wkt representation to specified datatype in arches. This code could probably move to somehwere where it can be accessed by other importers. 
''' request = '' if datatype != '': errors = [] datatype_instance = datatype_factory.get_instance(datatype) if datatype in ['concept', 'domain-value', 'concept-list', 'domain-value-list']: try: uuid.UUID(value) except: if datatype in ['domain-value', 'domain-value-list']: collection_id = nodeid else: collection_id = Node.objects.get(nodeid=nodeid).config['rdmCollection'] if collection_id != None: value = concept_lookup.lookup_labelid_from_label(value, collection_id) try: value = datatype_instance.transform_import_values(value, nodeid) errors = datatype_instance.validate(value, row_number, source) except Exception as e: errors.append({'type': 'ERROR', 'message': 'datatype: {0} value: {1} {2} - {3}'.format(datatype_instance.datatype_model.classname, value, source, str(e) + ' or is not a prefLabel in the given collection.')}) if len(errors) > 0: error_types = [error['type'] for error in errors] if 'ERROR' in error_types: value = None self.errors += errors else: print _('No datatype detected for {0}'.format(value)) return {'value': value, 'request': request} def get_blank_tile(source_data): if len(source_data) > 0: if source_data[0] != {}: key = str(source_data[0].keys()[0]) if key not in blanktilecache: blank_tile = Tile.get_blank_tile(key) cache(blank_tile) else: blank_tile = blanktilecache[key] else: blank_tile = None else: blank_tile = None # return deepcopy(blank_tile) return cPickle.loads(cPickle.dumps(blank_tile, -1)) def check_required_nodes(tile, parent_tile, required_nodes, all_nodes): # Check that each required node in a tile is populated. errors = [] if len(required_nodes) > 0: if bool(tile.data): for target_k, target_v in tile.data.iteritems(): if target_k in required_nodes.keys() and target_v is None: populated_tiles.pop(populated_tiles.index(parent_tile)) errors.append({'type': 'WARNING', 'message': 'The {0} node is required and must be populated in order to populate the {1} nodes. 
This data was not imported.'.format(required_nodes[target_k], ', '.join(all_nodes.filter(nodegroup_id=str(target_tile.nodegroup_id)).values_list('name', flat=True)))}) elif bool(tile.tiles): for tile_k, tile_v in tile.tiles.iteritems(): if len(tile_v) > 0: for t in tile_v: check_required_nodes(t, parent_tile, required_nodes, all_nodes) if len(errors) > 0: self.errors += errors resources = [] for row_number, row in enumerate(business_data): row_number = 'on line ' + unicode(row_number + 2) #to represent the row in a csv accounting for the header and 0 index if row['ResourceID'] != previous_row_resourceid and previous_row_resourceid is not None: save_count = save_count + 1 self.save_resource(populated_tiles, resourceinstanceid, legacyid, resources, target_resource_model, bulk, save_count) # reset values for next resource instance populated_tiles = [] resourceinstanceid = process_resourceid(row['ResourceID'], overwrite) populated_nodegroups[resourceinstanceid] = [] source_data = column_names_to_targetids(row, mapping, row_number) missing_display_nodes = [n for n in display_nodes if n not in [list(b) for b in zip(*[a.keys() for a in source_data])][0]] if len(missing_display_nodes) > 0: errors = [] for mdn in missing_display_nodes: mdn_name = all_nodes.filter(nodeid=mdn).values_list('name', flat=True)[0] errors.append({'type': 'WARNING', 'message': '{0} {1} is null or not mapped and participates in a {2} display value function.'.format(mdn_name, row_number, mapping['resource_model_name'])}) if len(errors) > 0: self.errors += errors if len(source_data) > 0: if source_data[0].keys(): try: target_resource_model = all_nodes.get(nodeid=source_data[0].keys()[0]).graph_id except: print '*'*80 print 'ERROR: No resource model found. Please make sure the resource model this business data is mapped to has been imported into Arches.' 
print '*'*80 sys.exit() target_tile = get_blank_tile(source_data) if 'TileID' in row and row['TileID'] is not None: target_tile.tileid = row['TileID'] if 'NodeGroupID' in row and row['NodeGroupID'] is not None: target_tile.nodegroupid = row['NodeGroupID'] def populate_tile(source_data, target_tile): ''' source_data = [{nodeid:value},{nodeid:value},{nodeid:value} . . .] All nodes in source_data belong to the same resource. A dictionary of nodeids would not allow for multiple values for the same nodeid. Grouping is enforced by having all grouped attributes in the same row. ''' need_new_tile = False # Set target tileid to None because this will be a new tile, a new tileid will be created on save. target_tile.tileid = uuid.uuid4() if 'TileID' in row and row['TileID'] is not None: target_tile.tileid = row['TileID'] target_tile.resourceinstance_id = resourceinstanceid # Check the cardinality of the tile and check if it has been populated. # If cardinality is one and the tile is populated the tile should not be populated again. if str(target_tile.nodegroup_id) in single_cardinality_nodegroups and 'TileiD' not in row: target_tile_cardinality = '1' else: target_tile_cardinality = 'n' if str(target_tile.nodegroup_id) not in populated_nodegroups[resourceinstanceid]: # Check if we are populating a parent tile by inspecting the target_tile.data array. if target_tile.data != {}: # Iterate through the target_tile nodes and begin populating by iterating througth source_data array. # The idea is to populate as much of the target_tile as possible, before moving on to the next target_tile. for target_key in target_tile.data.keys(): for source_tile in source_data: for source_key in source_tile.keys(): # Check for source and target key match. if source_key == target_key: if target_tile.data[source_key] == None: # If match populate target_tile node with transformed value. 
value = transform_value(node_datatypes[source_key], source_tile[source_key], row_number, source_key) target_tile.data[source_key] = value['value'] # target_tile.request = value['request'] # Delete key from source_tile so we do not populate another tile based on the same data. del source_tile[source_key] # Cleanup source_data array to remove source_tiles that are now '{}' from the code above. source_data[:] = [item for item in source_data if item != {}] # Check if we are populating a child tile(s) by inspecting the target_tiles.tiles array. elif target_tile.tiles != None: populated_child_nodegroups = [] for nodegroupid, childtile in target_tile.tiles.iteritems(): prototype_tile = childtile.pop() if str(prototype_tile.nodegroup_id) in single_cardinality_nodegroups: child_tile_cardinality = '1' else: child_tile_cardinality = 'n' def populate_child_tiles(source_data): prototype_tile_copy = cPickle.loads(cPickle.dumps(prototype_tile, -1)) tileid = row['TileID'] if 'TileID' in row else uuid.uuid4() prototype_tile_copy.tileid = tileid prototype_tile_copy.parenttile = target_tile parenttileid = row['ParentTileID'] if 'ParentTileID' in row and row['ParentTileID'] is not None else None if parenttileid is not None: prototype_tile_copy.parenttile.tileid = parenttileid prototype_tile_copy.resourceinstance_id = resourceinstanceid if str(prototype_tile_copy.nodegroup_id) not in populated_child_nodegroups: for target_key in prototype_tile_copy.data.keys(): for source_column in source_data: for source_key in source_column.keys(): if source_key == target_key: if prototype_tile_copy.data[source_key] == None: value = transform_value(node_datatypes[source_key], source_column[source_key], row_number, source_key) prototype_tile_copy.data[source_key] = value['value'] # print prototype_tile_copy.data[source_key] # print '&'*80 # target_tile.request = value['request'] del source_column[source_key] else: populate_child_tiles(source_data) if prototype_tile_copy.data != {}: if len([item for 
item in prototype_tile_copy.data.values() if item != None]) > 0: if str(prototype_tile_copy.nodegroup_id) not in populated_child_nodegroups: childtile.append(prototype_tile_copy) if prototype_tile_copy != None: if child_tile_cardinality == '1' and 'NodeGroupID' not in row: populated_child_nodegroups.append(str(prototype_tile_copy.nodegroup_id)) source_data[:] = [item for item in source_data if item != {}] populate_child_tiles(source_data) if not target_tile.is_blank(): populated_tiles.append(target_tile) if len(source_data)>0: need_new_tile = True if target_tile_cardinality == '1' and 'NodeGroupID' not in row: populated_nodegroups[resourceinstanceid].append(str(target_tile.nodegroup_id)) if need_new_tile: new_tile = get_blank_tile(source_data) if new_tile != None: populate_tile(source_data, new_tile) # mock_request_object = HttpRequest() if target_tile != None and len(source_data) > 0: populate_tile(source_data, target_tile) # Check that required nodes are populated. If not remove tile from populated_tiles array. check_required_nodes(target_tile, target_tile, required_nodes, all_nodes) previous_row_resourceid = row['ResourceID'] legacyid = row['ResourceID'] if 'legacyid' in locals(): self.save_resource(populated_tiles, resourceinstanceid, legacyid, resources, target_resource_model, bulk, save_count) if bulk: Resource.bulk_save(resources=resources) print _('%s total resource saved' % (save_count + 1)) except Exception as e: exc_type, exc_value, exc_traceback = sys.exc_info() formatted = traceback.format_exception(exc_type, exc_value, exc_traceback) if len(formatted): for message in formatted: print message finally: pass
def setUpClass(cls):
    """
    One-time fixture setup for the search test case.

    Rebuilds the search-related Elasticsearch indices, imports the
    'Search Test Model' resource graph, creates an unprivileged user with
    a nodegroup permission restriction, posts a mock concept with
    min/max-year notes, and saves four resource instances covering the
    combinations of cultural period / date / name / geometry data that
    the individual tests query against.
    """
    # Start from empty terms/concepts/search indices so tests see only
    # the fixtures created below.
    delete_terms_index()
    delete_concepts_index()
    delete_search_index()
    prepare_terms_index(create=True)
    prepare_concepts_index(create=True)
    prepare_search_index(create=True)
    cls.client = Client()
    cls.client.login(username="******", password="******")
    # Remove any pre-existing resource instances so index contents are
    # exactly the four resources created in this method.
    models.ResourceInstance.objects.all().delete()
    with open(
            os.path.join(
                "tests/fixtures/resource_graphs/Search Test Model.json"),
            "rU") as f:
        archesfile = JSONDeserializer().deserialize(f)
    ResourceGraphImporter(archesfile["graph"])
    # Node/graph ids below are fixed UUIDs taken from the fixture graph.
    cls.search_model_graphid = "d291a445-fa5f-11e6-afa8-14109fd34195"
    cls.search_model_cultural_period_nodeid = "7a182580-fa60-11e6-96d1-14109fd34195"
    cls.search_model_creation_date_nodeid = "1c1d05f5-fa60-11e6-887f-14109fd34195"
    cls.search_model_destruction_date_nodeid = "e771b8a1-65fe-11e7-9163-14109fd34195"
    cls.search_model_name_nodeid = "2fe14de3-fa61-11e6-897b-14109fd34195"
    cls.search_model_sensitive_info_nodeid = "57446fae-65ff-11e7-b63a-14109fd34195"
    cls.search_model_geom_nodeid = "3ebc6785-fa61-11e6-8c85-14109fd34195"
    # Unprivileged user: in the Guest group and explicitly denied access
    # to the destruction-date nodegroup (used by permission tests).
    cls.user = User.objects.create_user(
        "unpriviliged_user",
        "*****@*****.**",
        "test")
    cls.user.groups.add(Group.objects.get(name="Guest"))
    nodegroup = models.NodeGroup.objects.get(
        pk=cls.search_model_destruction_date_nodeid)
    assign_perm("no_access_to_nodegroup", cls.user, nodegroup)
    # Add a concept that defines a min and max date
    concept = {
        "id": "00000000-0000-0000-0000-000000000001",
        "legacyoid": "ARCHES",
        "nodetype": "ConceptScheme",
        "values": [],
        "subconcepts": [{
            "values": [
                {
                    "value": "Mock concept",
                    "language": "en-US",
                    "category": "label",
                    "type": "prefLabel",
                    "id": "",
                    "conceptid": ""
                },
                {
                    "value": "1950",
                    "language": "en-US",
                    "category": "note",
                    "type": "min_year",
                    "id": "",
                    "conceptid": ""
                },
                {
                    "value": "1980",
                    "language": "en-US",
                    "category": "note",
                    "type": "max_year",
                    "id": "",
                    "conceptid": ""
                },
            ],
            "relationshiptype": "hasTopConcept",
            "nodetype": "Concept",
            "id": "",
            "legacyoid": "",
            "subconcepts": [],
            "parentconcepts": [],
            "relatedconcepts": [],
        }],
    }
    post_data = JSONSerializer().serialize(concept)
    content_type = "application/x-www-form-urlencoded"
    # Create the concept through the view (not the ORM) so ids are
    # assigned server-side; pull them back out of the response.
    response = cls.client.post(
        reverse(
            "concept",
            kwargs={"conceptid": "00000000-0000-0000-0000-000000000001"}),
        post_data,
        content_type)
    response_json = json.loads(response.content)
    valueid = response_json["subconcepts"][0]["values"][0]["id"]
    cls.conceptid = response_json["subconcepts"][0]["id"]
    # add resource instance with only a cultural period defined
    cls.cultural_period_resource = Resource(
        graph_id=cls.search_model_graphid)
    tile = Tile(data={cls.search_model_cultural_period_nodeid: [valueid]},
                nodegroup_id=cls.search_model_cultural_period_nodeid)
    cls.cultural_period_resource.tiles.append(tile)
    cls.cultural_period_resource.save()
    # add resource instance with a creation and destruction date defined
    cls.date_resource = Resource(graph_id=cls.search_model_graphid)
    tile = Tile(data={cls.search_model_creation_date_nodeid: "1941-01-01"},
                nodegroup_id=cls.search_model_creation_date_nodeid)
    cls.date_resource.tiles.append(tile)
    tile = Tile(
        data={cls.search_model_destruction_date_nodeid: "1948-01-01"},
        nodegroup_id=cls.search_model_destruction_date_nodeid)
    cls.date_resource.tiles.append(tile)
    tile = Tile(data={cls.search_model_name_nodeid: "testing 123"},
                nodegroup_id=cls.search_model_name_nodeid)
    cls.date_resource.tiles.append(tile)
    cls.date_resource.save()
    # add resource instance with a creation date and a cultural period defined
    cls.date_and_cultural_period_resource = Resource(
        graph_id=cls.search_model_graphid)
    tile = Tile(data={cls.search_model_creation_date_nodeid: "1942-01-01"},
                nodegroup_id=cls.search_model_creation_date_nodeid)
    cls.date_and_cultural_period_resource.tiles.append(tile)
    tile = Tile(data={cls.search_model_cultural_period_nodeid: [valueid]},
                nodegroup_id=cls.search_model_cultural_period_nodeid)
    cls.date_and_cultural_period_resource.tiles.append(tile)
    cls.date_and_cultural_period_resource.save()
    # add resource instance with with no dates or periods defined
    cls.name_resource = Resource(graph_id=cls.search_model_graphid)
    tile = Tile(data={cls.search_model_name_nodeid: "some test name"},
                nodegroup_id=cls.search_model_name_nodeid)
    cls.name_resource.tiles.append(tile)
    geom = {
        "type": "FeatureCollection",
        "features": [{
            "geometry": {
                "type": "Point",
                "coordinates": [0, 0]
            },
            "type": "Feature",
            "properties": {}
        }],
    }
    tile = Tile(data={cls.search_model_geom_nodeid: geom},
                nodegroup_id=cls.search_model_geom_nodeid)
    cls.name_resource.tiles.append(tile)
    cls.name_resource.save()
    # add delay to allow for indexes to be updated
    time.sleep(1)
def import_relations(self, relations=None):
    """
    Save each row of a relations file as a ResourceXResource record.

    Each row's endpoints may be given either as resource instance UUIDs
    or as legacyids; endpoints that cannot be resolved to exactly one
    existing resource instance cause the row to be skipped and an error
    to be recorded in ``self.errors``. Accumulated errors are reported
    once at the end via ``self.report_errors()``.

    :param relations: iterable of dicts with keys 'resourceinstanceidfrom',
        'resourceinstanceidto', 'relationshiptype', 'datestarted',
        'dateended' and 'notes'. Defaults to no rows.
    """
    if relations is None:
        # Robustness fix: the previous implementation crashed with a
        # TypeError when called without relations (enumerate(None));
        # treat the default as an empty import instead.
        relations = []

    def get_resourceid_from_legacyid(legacyid):
        # A legacyid is only usable when it maps to exactly one resource.
        matches = Resource.objects.filter(legacyid=legacyid)
        if len(matches) != 1:
            return None
        return matches[0].resourceinstanceid

    # Hoisted out of the loop (it used to be re-defined per iteration).
    def validate_resourceinstanceid(resourceinstanceid, key, relation):
        # Accept either a resource instance UUID or a legacyid; return a
        # usable id, or None (after logging an error) if resolution fails.
        try:
            # Test if resourceinstanceid from the relations file is a UUID.
            newresourceinstanceid = uuid.UUID(resourceinstanceid)
            try:
                # A UUID must belong to an existing resource instance.
                Resource.objects.get(resourceinstanceid=resourceinstanceid)
            except Exception:
                newresourceinstanceid = None
        except Exception:
            # Not a UUID -- assume it is a legacyid and look it up.
            newresourceinstanceid = get_resourceid_from_legacyid(resourceinstanceid)
        # None here means either a legacyid matched zero/multiple
        # resources, or a UUID matched no resource instance.
        if newresourceinstanceid is None:
            self.errors.append(
                {
                    "type": "ERROR",
                    "message": "Relation not created, either zero or multiple resources found with legacyid: {0}".format(
                        relation[key]
                    ),
                }
            )
        return newresourceinstanceid

    for relation_count, relation in enumerate(relations):
        # +2 mirrors the source file's row numbering (header row + 1-indexing).
        relation_count = relation_count + 2
        if relation_count % 500 == 0:
            print("{0} relations saved".format(str(relation_count)))

        resourceinstancefrom = validate_resourceinstanceid(
            relation["resourceinstanceidfrom"], "resourceinstanceidfrom", relation)
        resourceinstanceto = validate_resourceinstanceid(
            relation["resourceinstanceidto"], "resourceinstanceidto", relation)

        # Normalize empty / literal "None" date strings to real nulls.
        for date_key in ("datestarted", "dateended"):
            if relation[date_key] == "" or relation[date_key] == "None":
                relation[date_key] = None

        if resourceinstancefrom is not None and resourceinstanceto is not None:
            cross_record = ResourceXResource(
                resourceinstanceidfrom=Resource(resourceinstancefrom),
                resourceinstanceidto=Resource(resourceinstanceto),
                relationshiptype=str(relation["relationshiptype"]),
                datestarted=relation["datestarted"],
                dateended=relation["dateended"],
                notes=relation["notes"],
            )
            cross_record.save()

    self.report_errors()
def get(
    self,
    request,
    graphid=None,
    resourceid=None,
    view_template="views/resource/editor.htm",
    main_script="views/resource/editor",
    nav_menu=True,
):
    """
    Render the resource editor page for a resource instance.

    If ``self.action`` is 'copy', delegates to ``self.copy``. Otherwise
    looks up the resource instance by ``resourceid`` (creating an
    unsaved placeholder Resource when it does not exist yet) and, when
    ``resourceid`` is provided, assembles the editor template context
    (graphs, datatypes, widgets, map config, cards, searchable nodes,
    nav/help entries) and renders ``view_template``. Returns 404 when
    ``resourceid`` is None.

    :param request: the current HttpRequest
    :param graphid: graph to use when the resource does not exist yet;
        overwritten by the instance's graph when it does exist
    :param resourceid: pk of the resource instance being edited
    :param view_template: template rendered on success
    :param main_script: JS entry point passed into the context
    :param nav_menu: whether the nav menu is shown
    """
    if self.action == "copy":
        return self.copy(request, resourceid)
    resource_instance_exists = False
    try:
        resource_instance = Resource.objects.get(pk=resourceid)
        resource_instance_exists = True
        # Existing instance wins: its graph overrides the graphid argument.
        graphid = resource_instance.graph_id
    except ObjectDoesNotExist:
        # New (not yet saved) resource: seed a placeholder with the
        # requested id and graph.
        resource_instance = Resource()
        resource_instance.resourceinstanceid = resourceid
        resource_instance.graph_id = graphid
    if resourceid is not None:
        # All editable resource graphs except the system-settings model.
        resource_graphs = (models.GraphModel.objects.exclude(
            pk=settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID).exclude(
                isresource=False).exclude(isactive=False))
        graph = Graph.objects.get(graphid=graphid)
        relationship_type_values = get_resource_relationship_types()
        datatypes = models.DDataType.objects.all()
        widgets = models.Widget.objects.all()
        map_layers = models.MapLayer.objects.all()
        map_markers = models.MapMarker.objects.all()
        map_sources = models.MapSource.objects.all()
        geocoding_providers = models.Geocoder.objects.all()
        required_widgets = []  # NOTE(review): assigned but never used below
        # Only offer widgets whose datatype appears somewhere in this graph.
        widget_datatypes = [v.datatype for k, v in graph.nodes.items()]
        widgets = widgets.filter(datatype__in=widget_datatypes)
        if resource_instance_exists == True:
            displayname = Resource.objects.get(pk=resourceid).displayname
            # 'undefined' is the JS-side placeholder for a missing name.
            if displayname == "undefined":
                displayname = "Unnamed Resource"
        else:
            displayname = "Unnamed Resource"
        date_nodes = models.Node.objects.filter(datatype="date", graph__isresource=True, graph__isactive=True)
        searchable_datatypes = [
            d.pk for d in models.DDataType.objects.filter(issearchable=True)
        ]
        searchable_nodes = models.Node.objects.filter(
            graph__isresource=True,
            graph__isactive=True,
            datatype__in=searchable_datatypes,
            issearchable=True)
        resource_cards = models.CardModel.objects.filter(
            graph__isresource=True, graph__isactive=True)
        context = self.get_context_data(
            main_script=main_script,
            resource_type=graph.name,
            relationship_types=relationship_type_values,
            iconclass=graph.iconclass,
            datatypes_json=JSONSerializer().serialize(
                datatypes, exclude=["iconclass", "modulename", "classname"]),
            datatypes=datatypes,
            widgets=widgets,
            date_nodes=date_nodes,
            map_layers=map_layers,
            map_markers=map_markers,
            map_sources=map_sources,
            geocoding_providers=geocoding_providers,
            widgets_json=JSONSerializer().serialize(widgets),
            resourceid=resourceid,
            resource_graphs=resource_graphs,
            graph_json=JSONSerializer().serialize(
                graph,
                exclude=[
                    "iconclass",
                    "functions",
                    "functions_x_graphs",
                    "name",
                    "description",
                    "deploymentfile",
                    "author",
                    "deploymentdate",
                    "version",
                    "isresource",
                    "isactive",
                    "iconclass",
                    "ontology",
                ],
            ),
            displayname=displayname,
            resource_cards=JSONSerializer().serialize(resource_cards,
                                                      exclude=[
                                                          "description",
                                                          "instructions",
                                                          "active", "isvisible"
                                                      ]),
            searchable_nodes=JSONSerializer().serialize(
                searchable_nodes,
                exclude=[
                    "description", "ontologyclass", "isrequired",
                    "issearchable", "istopnode"
                ]),
            saved_searches=JSONSerializer().serialize(
                settings.SAVED_SEARCHES),
            resource_instance_exists=resource_instance_exists,
            user_is_reviewer=json.dumps(
                request.user.groups.filter(
                    name="Resource Reviewer").exists()),
            userid=request.user.id,
        )
        if graph.iconclass:
            context["nav"]["icon"] = graph.iconclass
        context["nav"]["title"] = graph.name
        context["nav"]["menu"] = nav_menu
        # The system-settings resource gets its own help content.
        if resourceid == settings.RESOURCE_INSTANCE_ID:
            context["nav"]["help"] = (_("Managing System Settings"), "help/base-help.htm")
            context["help"] = "system-settings-help"
        else:
            context["nav"]["help"] = (_("Using the Resource Editor"), "help/base-help.htm")
            context["help"] = "resource-editor-help"
        return render(request, view_template, context)
    return HttpResponseNotFound()
def post(self, request, resourceid=None):
    """
    Create and/or update resource-to-resource relationships.

    Reads relationship properties (type, date range, notes) and two
    optional lists from the POST body: 'instances_to_relate[]' (new
    relations from the root resource, each checked against the graphs'
    relatable-resources config) and 'relationship_ids[]' (existing
    relations whose properties are updated in place). After saving,
    refreshes the resource_relations index and returns the (paginated)
    related resources of the root resource as JSON.

    :param request: current HttpRequest; GET params 'lang', 'start' and
        'page' control the response listing
    :param resourceid: unused here; the root resource comes from the
        POST body's 'root_resourceinstanceid'
    """
    lang = request.GET.get("lang", settings.LANGUAGE_CODE)
    se = SearchEngineFactory().create()
    res = dict(request.POST)
    # QueryDict values are lists; take the first element of each property.
    relationship_type = res["relationship_properties[relationship_type]"][0]
    datefrom = res["relationship_properties[datefrom]"][0]
    dateto = res["relationship_properties[dateto]"][0]
    # Empty date strings become real nulls.
    dateto = None if dateto == "" else dateto
    datefrom = None if datefrom == "" else datefrom
    notes = res["relationship_properties[notes]"][0]
    root_resourceinstanceid = res["root_resourceinstanceid"]
    instances_to_relate = []
    relationships_to_update = []
    if "instances_to_relate[]" in res:
        instances_to_relate = res["instances_to_relate[]"]
    if "relationship_ids[]" in res:
        relationships_to_update = res["relationship_ids[]"]

    def get_relatable_resources(graphid):
        """
        Takes the graphid of a resource, finds the graphs root node, and returns the relatable graphids
        """
        nodes = models.Node.objects.filter(graph_id=graphid)
        top_node = [node for node in nodes if node.istopnode == True][0]
        relatable_resources = [str(node.graph_id) for node in top_node.get_relatable_resources()]
        return relatable_resources

    def confirm_relationship_permitted(to_id, from_id):
        # Both graphs must declare each other relatable for the relation
        # to be allowed.
        resource_instance_to = models.ResourceInstance.objects.filter(resourceinstanceid=to_id)[0]
        resource_instance_from = models.ResourceInstance.objects.filter(resourceinstanceid=from_id)[0]
        relatable_to = get_relatable_resources(resource_instance_to.graph_id)
        relatable_from = get_relatable_resources(resource_instance_from.graph_id)
        relatable_to_is_valid = str(resource_instance_to.graph_id) in relatable_from
        relatable_from_is_valid = str(resource_instance_from.graph_id) in relatable_to
        return relatable_to_is_valid is True and relatable_from_is_valid is True

    # Create new relations from the root resource to each permitted target.
    for instanceid in instances_to_relate:
        permitted = confirm_relationship_permitted(instanceid, root_resourceinstanceid[0])
        if permitted is True:
            rr = models.ResourceXResource(
                resourceinstanceidfrom=Resource(root_resourceinstanceid[0]),
                resourceinstanceidto=Resource(instanceid),
                notes=notes,
                relationshiptype=relationship_type,
                datestarted=datefrom,
                dateended=dateto,
            )
            try:
                rr.save()
            except ModelInactiveError as e:
                message = _("Unable to save. Please verify the model status is active")
                return JSONResponse({"status": "false", "message": [_(e.title), _(str(message))]}, status=500)
        else:
            print("relationship not permitted")

    # Update the properties of the listed existing relations.
    for relationshipid in relationships_to_update:
        rr = models.ResourceXResource.objects.get(pk=relationshipid)
        rr.notes = notes
        rr.relationshiptype = relationship_type
        rr.datestarted = datefrom
        rr.dateended = dateto
        try:
            rr.save()
        except ModelInactiveError as e:
            message = _("Unable to save. Please verify the model status is active")
            return JSONResponse({"status": "false", "message": [_(e.title), _(str(message))]}, status=500)

    start = request.GET.get("start", 0)
    # Force the relations index to refresh so the response reflects the
    # writes made above.
    se.es.indices.refresh(index=se._add_prefix("resource_relations"))
    resource = Resource.objects.get(pk=root_resourceinstanceid[0])
    page = 1 if request.GET.get("page") == "" else int(request.GET.get("page", 1))
    related_resources = resource.get_related_resources(lang=lang, start=start, limit=1000, page=page)
    ret = []
    if related_resources is not None:
        ret = self.paginate_related_resources(related_resources, page, request)
    return JSONResponse(ret, indent=4)
def create_indices():
    """
    Prepare the Elasticsearch indices used by resources: the shared
    resource-relations index plus one search index per configured
    resource type in settings.RESOURCE_TYPE_CONFIGS().
    """
    Resource().prepare_resource_relations_index(create=True)
    type_configs = settings.RESOURCE_TYPE_CONFIGS().values()
    for type_config in type_configs:
        resource_type_id = type_config['resourcetypeid']
        Resource().prepare_search_index(resource_type_id, create=True)
def get(self,
        request,
        graphid=None,
        resourceid=None,
        view_template='views/resource/editor.htm',
        main_script='views/resource/editor',
        nav_menu=True):
    """
    Render the resource editor, or create a new resource instance.

    If ``self.action`` is 'copy', delegates to ``self.copy``. When
    ``graphid`` is given, creates, saves and indexes a brand-new
    resource of that graph and redirects to its editor. When
    ``resourceid`` is given, builds the editor context (graph, forms
    limited to cards the user may read, datatypes, widgets, map config,
    cards, searchable nodes) and renders ``view_template``. Returns 404
    if neither id is provided.

    :param request: the current HttpRequest
    :param graphid: graph to instantiate a new resource from
    :param resourceid: pk of an existing resource instance to edit
    :param view_template: template rendered on success
    :param main_script: JS entry point passed into the context
    :param nav_menu: whether the nav menu is shown
    """
    if self.action == 'copy':
        return self.copy(request, resourceid)

    if graphid is not None:
        # Create-and-redirect path: a new, immediately saved and indexed
        # resource of the requested graph.
        resource_instance = Resource()
        resource_instance.graph_id = graphid
        resource_instance.save(**{'request': request})
        resource_instance.index()
        return redirect('resource_editor', resourceid=resource_instance.pk)
    if resourceid is not None:
        resource_instance = models.ResourceInstance.objects.get(
            pk=resourceid)
        # All editable resource graphs except the system-settings model.
        resource_graphs = Graph.objects.exclude(
            pk=settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID).exclude(
                isresource=False).exclude(isactive=False)
        graph = Graph.objects.get(graphid=resource_instance.graph.pk)
        relationship_type_values = get_resource_relationship_types()
        form = Form(resource_instance.pk)
        datatypes = models.DDataType.objects.all()
        widgets = models.Widget.objects.all()
        map_layers = models.MapLayer.objects.all()
        map_sources = models.MapSource.objects.all()
        geocoding_providers = models.Geocoder.objects.all()
        forms = resource_instance.graph.form_set.filter(visible=True)
        forms_x_cards = models.FormXCard.objects.filter(form__in=forms)
        # Only keep forms whose card's nodegroup the user may read.
        forms_w_cards = []
        for form_x_card in forms_x_cards:
            cm = models.CardModel.objects.get(pk=form_x_card.card_id)
            if request.user.has_perm('read_nodegroup', cm.nodegroup):
                forms_w_cards.append(form_x_card.form)
        displayname = Resource.objects.get(pk=resourceid).displayname
        # 'undefined' is the JS-side placeholder for a missing name.
        if displayname == 'undefined':
            displayname = 'Unnamed Resource'
        date_nodes = models.Node.objects.filter(datatype='date', graph__isresource=True, graph__isactive=True)
        searchable_datatypes = [
            d.pk for d in models.DDataType.objects.filter(issearchable=True)
        ]
        searchable_nodes = models.Node.objects.filter(
            graph__isresource=True,
            graph__isactive=True,
            datatype__in=searchable_datatypes,
            issearchable=True)
        resource_cards = models.CardModel.objects.filter(
            graph__isresource=True, graph__isactive=True)
        context = self.get_context_data(
            main_script=main_script,
            resource_type=resource_instance.graph.name,
            relationship_types=relationship_type_values,
            iconclass=resource_instance.graph.iconclass,
            form=JSONSerializer().serialize(form),
            forms=JSONSerializer().serialize(forms_w_cards),
            datatypes_json=JSONSerializer().serialize(datatypes),
            widgets=widgets,
            date_nodes=date_nodes,
            map_layers=map_layers,
            map_sources=map_sources,
            geocoding_providers=geocoding_providers,
            widgets_json=JSONSerializer().serialize(widgets),
            resourceid=resourceid,
            resource_graphs=resource_graphs,
            graph_json=JSONSerializer().serialize(graph),
            displayname=displayname,
            resource_cards=JSONSerializer().serialize(resource_cards),
            searchable_nodes=JSONSerializer().serialize(searchable_nodes),
            saved_searches=JSONSerializer().serialize(
                settings.SAVED_SEARCHES),
        )
        if graph.iconclass:
            context['nav']['icon'] = graph.iconclass
        context['nav']['title'] = graph.name
        context['nav']['menu'] = nav_menu
        # The system-settings resource gets its own help content.
        if resourceid == settings.RESOURCE_INSTANCE_ID:
            context['nav']['help'] = (_('Managing System Settings'),
                                      'help/system-settings-help.htm')
        else:
            context['nav']['help'] = (_('Using the Resource Editor'),
                                      'help/resource-editor-help.htm')
        return render(request, view_template, context)
    return HttpResponseNotFound()
def post(self, request):
    """
    Tile CRUD dispatcher, keyed on ``self.action``.

    'update_tile': deserialize the posted tile data, create (and index)
    a new resource instance if none exists for it, save the tile inside
    a transaction (optionally accepting a provisional edit), and return
    the tile as JSON. 'reorder_tiles': persist a new sortorder for each
    posted tile the user may write. 'delete_provisional_tile': remove
    provisional edits for one or many users. Any other action: 404.
    """
    if self.action == 'update_tile':
        # NOTE(review): 'json' here shadows the stdlib json module
        # within this method; it holds the raw POSTed tile payload.
        json = request.POST.get('data', None)
        accepted_provisional = request.POST.get('accepted_provisional', None)
        if accepted_provisional != None:
            accepted_provisional_edit = JSONDeserializer().deserialize(
                accepted_provisional)
        if json != None:
            data = JSONDeserializer().deserialize(json)
            if data['resourceinstance_id'] == '':
                data['resourceinstance_id'] = uuid.uuid4()
            try:
                models.ResourceInstance.objects.get(
                    pk=data['resourceinstance_id'])
            except ObjectDoesNotExist:
                # Tile posted for a resource that does not exist yet:
                # create and index one using the nodegroup's graph.
                resource = Resource()
                graphid = models.Node.objects.filter(
                    nodegroup=data['nodegroup_id'])[0].graph_id
                resource.graph_id = graphid
                resource.save(user=request.user)
                data['resourceinstance_id'] = resource.pk
                resource.index()
            tile_id = data['tileid']
            if tile_id != None and tile_id != '':
                try:
                    old_tile = Tile.objects.get(pk=tile_id)
                    clean_resource_cache(old_tile)
                except ObjectDoesNotExist:
                    return JSONResponse(
                        {
                            'status': 'false',
                            'message': [
                                _('This tile is no longer available'),
                                _('It was likely deleted by another user')
                            ]
                        },
                        status=500)
            tile = Tile(data)
            if tile.filter_by_perm(request.user, 'write_nodegroup'):
                with transaction.atomic():
                    try:
                        if accepted_provisional == None:
                            tile.save(request=request)
                        else:
                            # NOTE(review): this inner 'is not None' check is
                            # redundant -- the else branch already implies it.
                            if accepted_provisional is not None:
                                provisional_editor = User.objects.get(
                                    pk=accepted_provisional_edit["user"])
                                tile.save(
                                    provisional_edit_log_details={
                                        "user": request.user,
                                        "action": "accept edit",
                                        "edit": accepted_provisional_edit,
                                        "provisional_editor": provisional_editor
                                    })
                        # NOTE(review): hardcoded-tileid CouchDB sync below
                        # looks like leftover debug code targeting a single
                        # tile; candidate for removal -- confirm with author.
                        if tile_id == '4345f530-aa90-48cf-b4b3-92d1185ca439':
                            import couchdb
                            import json as json_json
                            couch = couchdb.Server(settings.COUCHDB_URL)
                            for project in models.MobileSurveyModel.objects.all(
                            ):
                                db = couch['project_' + str(project.id)]
                                #tile = models.TileModel.objects.get(pk='4345f530-aa90-48cf-b4b3-92d1185ca439')
                                tile_json = json_json.loads(
                                    JSONSerializer().serialize(tile))
                                tile_json['_id'] = tile_json['tileid']
                                for row in db.view('_all_docs', include_docs=True):
                                    if 'tileid' in row.doc and tile_json[
                                            '_id'] == row.doc['_id']:
                                        tile_json['_rev'] = row.doc['_rev']
                                        db.save(tile_json)
                        # Reviewers editing on behalf of a user see the
                        # provisional values they just accepted/saved.
                        if tile.provisionaledits is not None and str(
                                request.user.id) in tile.provisionaledits:
                            tile.data = tile.provisionaledits[str(
                                request.user.id)]['value']
                    except ValidationError as e:
                        return JSONResponse(
                            {
                                'status': 'false',
                                'message': e.args
                            }, status=500)
                tile.after_update_all()
                clean_resource_cache(tile)
                update_system_settings_cache(tile)
                return JSONResponse(tile)
            else:
                return JSONResponse(
                    {
                        'status': 'false',
                        'message':
                        [_('Request Failed'),
                         _('Permission Denied')]
                    },
                    status=500)

    if self.action == 'reorder_tiles':
        json = request.body
        if json != None:
            data = JSONDeserializer().deserialize(json)
            if 'tiles' in data and len(data['tiles']) > 0:
                # Assign sortorder in the posted order, skipping tiles
                # the user cannot write.
                sortorder = 0
                with transaction.atomic():
                    for tile in data['tiles']:
                        t = Tile(tile)
                        if t.filter_by_perm(request.user, 'write_nodegroup'):
                            t.sortorder = sortorder
                            t.save(update_fields=['sortorder'], request=request)
                            sortorder = sortorder + 1
                return JSONResponse(data)

    if self.action == 'delete_provisional_tile':
        user = request.POST.get('user', None)
        tileid = request.POST.get('tileid', None)
        users = request.POST.get('users', None)
        tile = Tile.objects.get(tileid=tileid)
        # Capture provisional state before edits are deleted.
        is_provisional = tile.is_provisional()
        if tileid is not None and user is not None:
            provisionaledits = self.delete_provisional_edit(
                tile, user, reviewer=request.user)
        elif tileid is not None and users is not None:
            users = jsonparser.loads(users)
            for user in users:
                self.delete_provisional_edit(tile, user, reviewer=request.user)
        # 'delete' tells the client the whole tile was provisional-only.
        if is_provisional == True:
            return JSONResponse({'result': 'delete'})
        else:
            return JSONResponse({'result': 'success'})

    return HttpResponseNotFound()