def write_resources(self, filename, split_types=False): # if JSONL file is desired, use this code to write the resource line by # line, and also introduce multiprocessing to speed things up. restypes = [ i.entitytypeid for i in EntityTypes.objects.filter(isresource=True) ] start = time.time() total_count = 0 if self.multiprocessing is True: process_count = cpu_count() print "cpu count:", cpu_count() print "number of parallel processes:", process_count pool = Pool(cpu_count()) ## if there is only one output file, open it here if split_types is False: outfile = filename openout = open(outfile, "w") json_resources = [] for restype in restypes: if split_types is True: outfile = filename.replace("all", restype) openout = open(outfile, "w") resources = Entities.objects.filter(entitytypeid=restype) resct = len(resources) total_count += resct if resct == 0: print "Writing {} {} resources".format(resct, restype) continue print "Writing {} {} resources --> {}".format( resct, restype, outfile) if self.jsonl is True and self.multiprocessing is True: resids = [r.entityid for r in resources] for conn in connections.all(): conn.close() joined_input = [(self, r) for r in resids] for res in pool.imap(write_one_resource_wrapper, joined_input): openout.write(res + "\n") elif self.jsonl is True: for resource in resources: try: a_resource = Resource().get(resource.entityid) a_resource.form_groups = None jsonres = JSONSerializer().serialize(a_resource, separators=(',', ':')) except Exception as e: if e not in errors: errors.append(e) openout.write(jsonres + "\n") else: errors = [] for resource in resources: try: a_resource = Resource().get(resource.entityid) a_resource.form_groups = None json_resources.append(a_resource) except Exception as e: if e not in errors: errors.append(e) if len(errors) > 0: print errors[0], ':', len(errors) if split_types is True: openout.write((JSONSerializer().serialize( {'resources': json_resources}, separators=(',', ':')))) json_resources = [] if 
split_types is False: if self.jsonl is False: openout.write( (JSONSerializer().serialize({'resources': json_resources}, separators=(',', ':')))) openout.close() print "\n{} resources exported".format(total_count) print "elapsed time:", time.time() - start
def get(self,
        request,
        graphid=None,
        resourceid=None,
        view_template='views/resource/editor.htm',
        main_script='views/resource/editor',
        nav_menu=True):
    # Render the resource editor page for an existing or brand-new resource
    # instance.  Returns 404 when no resourceid is supplied; delegates to
    # self.copy() when the view was routed with action == 'copy'.
    if self.action == 'copy':
        return self.copy(request, resourceid)
    resource_instance_exists = False
    try:
        # existing instance: derive the graph from the stored resource
        resource_instance = Resource.objects.get(pk=resourceid)
        resource_instance_exists = True
        graphid = resource_instance.graph_id
    except ObjectDoesNotExist:
        # new (unsaved) instance: build a shell with the caller's graphid
        resource_instance = Resource()
        resource_instance.resourceinstanceid = resourceid
        resource_instance.graph_id = graphid
    if resourceid is not None:
        # AJAX search requests get the bare search manager template instead
        if request.is_ajax() and request.GET.get('search') == 'true':
            html = render_to_string('views/search/search-base-manager.htm',
                                    {}, request)
            return HttpResponse(html)
        # all active resource graphs except the system-settings model
        resource_graphs = models.GraphModel.objects.exclude(
            pk=settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID).exclude(
                isresource=False).exclude(isactive=False)
        graph = Graph.objects.get(graphid=graphid)
        relationship_type_values = get_resource_relationship_types()
        form = Form(resource_instance.pk)
        datatypes = models.DDataType.objects.all()
        widgets = models.Widget.objects.all()
        map_layers = models.MapLayer.objects.all()
        map_markers = models.MapMarker.objects.all()
        map_sources = models.MapSource.objects.all()
        geocoding_providers = models.Geocoder.objects.all()
        forms = graph.form_set.filter(visible=True)
        forms_x_cards = models.FormXCard.objects.filter(form__in=forms)
        forms_w_cards = []
        required_widgets = []  # NOTE(review): collected nowhere — appears unused
        # keep only forms whose card nodegroup the user may read
        for form_x_card in forms_x_cards:
            if request.user.has_perm('read_nodegroup',
                                     form_x_card.card.nodegroup):
                forms_w_cards.append(form_x_card.form)
        # narrow the widget queryset to datatypes actually used by this graph
        widget_datatypes = [v.datatype for k, v in graph.nodes.iteritems()]
        widgets = widgets.filter(datatype__in=widget_datatypes)
        if resource_instance_exists == True:
            displayname = Resource.objects.get(pk=resourceid).displayname
            # the display-name function can literally store 'undefined'
            if displayname == 'undefined':
                displayname = 'Unnamed Resource'
        else:
            displayname = 'Unnamed Resource'
        date_nodes = models.Node.objects.filter(datatype='date',
                                                graph__isresource=True,
                                                graph__isactive=True)
        searchable_datatypes = [
            d.pk for d in models.DDataType.objects.filter(issearchable=True)
        ]
        searchable_nodes = models.Node.objects.filter(
            graph__isresource=True,
            graph__isactive=True,
            datatype__in=searchable_datatypes,
            issearchable=True)
        resource_cards = models.CardModel.objects.filter(
            graph__isresource=True, graph__isactive=True)
        # everything the editor template / knockout viewmodels need
        context = self.get_context_data(
            main_script=main_script,
            resource_type=graph.name,
            relationship_types=relationship_type_values,
            iconclass=graph.iconclass,
            form=JSONSerializer().serialize(form),
            forms=JSONSerializer().serialize(forms_w_cards),
            datatypes_json=JSONSerializer().serialize(
                datatypes, exclude=['iconclass', 'modulename', 'classname']),
            datatypes=datatypes,
            widgets=widgets,
            date_nodes=date_nodes,
            map_layers=map_layers,
            map_markers=map_markers,
            map_sources=map_sources,
            geocoding_providers=geocoding_providers,
            widgets_json=JSONSerializer().serialize(widgets),
            resourceid=resourceid,
            resource_graphs=resource_graphs,
            graph_json=JSONSerializer().serialize(
                graph,
                exclude=[
                    'iconclass', 'functions', 'functions_x_graphs', 'name',
                    'description', 'deploymentfile', 'author',
                    'deploymentdate', 'version', 'isresource', 'isactive',
                    'iconclass', 'ontology'
                ]),
            displayname=displayname,
            resource_cards=JSONSerializer().serialize(resource_cards,
                                                      exclude=[
                                                          'description',
                                                          'instructions',
                                                          'active', 'isvisible'
                                                      ]),
            searchable_nodes=JSONSerializer().serialize(
                searchable_nodes,
                exclude=[
                    'description', 'ontologyclass', 'isrequired',
                    'issearchable', 'istopnode'
                ]),
            saved_searches=JSONSerializer().serialize(
                settings.SAVED_SEARCHES),
            resource_instance_exists=resource_instance_exists,
            user_is_reviewer=json.dumps(
                request.user.groups.filter(
                    name='Resource Reviewer').exists()),
            active_report_count=models.Report.objects.filter(
                graph_id=resource_instance.graph_id, active=True).count(),
            userid=request.user.id)
        # nav chrome: icon/title and the context-sensitive help panel
        if graph.iconclass:
            context['nav']['icon'] = graph.iconclass
        context['nav']['title'] = graph.name
        context['nav']['menu'] = nav_menu
        if resourceid == settings.RESOURCE_INSTANCE_ID:
            # editing the singleton system-settings resource
            context['nav']['help'] = (_('Managing System Settings'),
                                      'help/base-help.htm')
            context['help'] = 'system-settings-help'
        else:
            context['nav']['help'] = (_('Using the Resource Editor'),
                                      'help/base-help.htm')
            context['help'] = 'resource-editor-help'
        return render(request, view_template, context)
    return HttpResponseNotFound()
def import_relations(self, relations=None):
    # Create ResourceXResource rows from a sequence of relation dicts
    # (as parsed from a .relations import file).  Endpoint values may be
    # either resource-instance UUIDs or legacyids; invalid endpoints are
    # recorded in self.errors and the relation is skipped.

    def get_resourceid_from_legacyid(legacyid):
        # Resolve a legacyid to exactly one resourceinstanceid; None when
        # zero or multiple resources share that legacyid.
        ret = Resource.objects.filter(legacyid=legacyid)
        if len(ret) > 1 or len(ret) == 0:
            return None
        else:
            return ret[0].resourceinstanceid

    for relation_count, relation in enumerate(relations):
        # +2 so the progress number matches the source file's row number
        # (header row plus 1-based counting)
        relation_count = relation_count + 2
        if relation_count % 500 == 0:
            print("{0} relations saved".format(str(relation_count)))

        # NOTE(review): re-defined on every loop iteration (it closes over
        # `relation`); hoisting it would require passing `relation` in.
        def validate_resourceinstanceid(resourceinstanceid, key):
            # Test if resourceinstancefrom is a uuid it is for a resource or if it is not a uuid that get_resourceid_from_legacyid found a resourceid.
            try:
                # Test if resourceinstanceid from relations file is a UUID.
                newresourceinstanceid = uuid.UUID(resourceinstanceid)
                try:
                    # If resourceinstanceid is a UUID then test that it is associated with a resource instance
                    Resource.objects.get(resourceinstanceid=resourceinstanceid)
                except:
                    # If resourceinstanceid is not associated with a resource instance then set resourceinstanceid to None
                    newresourceinstanceid = None
            except:
                # If resourceinstanceid is not UUID then assume it's a legacyid and pass it into get_resourceid_from_legacyid function
                newresourceinstanceid = get_resourceid_from_legacyid(resourceinstanceid)

            # If resourceinstancefrom is None then either:
            # 1.) a legacyid was passed in and get_resourceid_from_legacyid could not find a resource or found multiple resources with the indicated legacyid or
            # 2.) a uuid was passed in and it is not associated with a resource instance
            if newresourceinstanceid is None:
                errors = []
                # self.errors.append({'datatype':'legacyid', 'value':relation[key], 'source':'', 'message':'either multiple resources or no resource have this legacyid\n'})
                errors.append(
                    {
                        "type": "ERROR",
                        "message": "Relation not created, either zero or multiple resources found with legacyid: {0}".format(
                            relation[key]
                        ),
                    }
                )
                if len(errors) > 0:
                    self.errors += errors
            return newresourceinstanceid

        resourceinstancefrom = validate_resourceinstanceid(
            relation["resourceinstanceidfrom"], "resourceinstanceidfrom")
        resourceinstanceto = validate_resourceinstanceid(
            relation["resourceinstanceidto"], "resourceinstanceidto")
        # normalize empty/placeholder dates to NULL
        if relation["datestarted"] == "" or relation["datestarted"] == "None":
            relation["datestarted"] = None
        if relation["dateended"] == "" or relation["dateended"] == "None":
            relation["dateended"] = None
        # only save when both endpoints resolved to real resource instances
        if resourceinstancefrom is not None and resourceinstanceto is not None:
            relation = ResourceXResource(
                resourceinstanceidfrom=Resource(resourceinstancefrom),
                resourceinstanceidto=Resource(resourceinstanceto),
                relationshiptype=str(relation["relationshiptype"]),
                datestarted=relation["datestarted"],
                dateended=relation["dateended"],
                notes=relation["notes"],
            )
            relation.save()
    self.report_errors()
def post(self, request):
    # Tile API endpoint; dispatches on self.action:
    #   'update_tile'            -- create/update a tile (and, if needed, its
    #                               parent resource instance)
    #   'reorder_tiles'          -- persist a new sortorder for a tile list
    #   'delete_provisional_tile'-- drop provisional edits for user(s)
    if self.action == 'update_tile':
        # NOTE: shadows the builtin/module name `json` with the raw payload
        json = request.POST.get('data', None)
        accepted_provisional = request.POST.get('accepted_provisional', None)
        if accepted_provisional is not None:
            accepted_provisional_edit = JSONDeserializer().deserialize(
                accepted_provisional)
        if json is not None:
            data = JSONDeserializer().deserialize(json)
            data[
                'resourceinstance_id'] = '' if 'resourceinstance_id' not in data else data[
                    'resourceinstance_id']
            if data['resourceinstance_id'] == '':
                data['resourceinstance_id'] = uuid.uuid4()
            try:
                models.ResourceInstance.objects.get(
                    pk=data['resourceinstance_id'])
            except ObjectDoesNotExist:
                # tile arrived before its resource exists: create the
                # resource shell first, deriving the graph from the nodegroup
                resource = Resource()
                graphid = models.Node.objects.filter(
                    nodegroup=data['nodegroup_id'])[0].graph_id
                resource.graph_id = graphid
                try:
                    resource.save(user=request.user)
                    data['resourceinstance_id'] = resource.pk
                    resource.index()
                except ModelInactiveError as e:
                    message = _(
                        'Unable to save. Please verify the model status is active'
                    )
                    return JSONResponse(
                        {
                            'status': 'false',
                            'message': [_(e.title), _(str(message))]
                        },
                        status=500)
            tile_id = data['tileid']
            resource_instance = models.ResourceInstance.objects.get(
                pk=data['resourceinstance_id'])
            is_active = resource_instance.graph.isactive
            if tile_id is not None and tile_id != '':
                # updating an existing tile: invalidate its cached resource
                try:
                    old_tile = Tile.objects.get(pk=tile_id)
                    clean_resource_cache(old_tile)
                except ObjectDoesNotExist as e:
                    return self.handle_save_error(
                        e, _('This tile is no longer available'),
                        _('It was likely deleted by another user'))
            tile = Tile(data)
            if tile.filter_by_perm(
                    request.user, 'write_nodegroup') and is_active is True:
                try:
                    with transaction.atomic():
                        try:
                            if accepted_provisional is None:
                                # normal save path
                                try:
                                    tile.save(request=request)
                                except TileValidationError as e:
                                    # if this was the resource's only tile,
                                    # roll the orphan resource back too
                                    resource_tiles = models.TileModel.objects.filter(
                                        resourceinstance=tile.resourceinstance)
                                    if resource_tiles.count() == 0:
                                        Resource.objects.get(
                                            pk=tile.resourceinstance_id
                                        ).delete(request.user)
                                    title = _(
                                        'Unable to save. Please verify your input is valid'
                                    )
                                    return self.handle_save_error(
                                        e, tile_id, title=title)
                                except ModelInactiveError as e:
                                    message = _(
                                        'Unable to save. Please verify the model status is active'
                                    )
                                    return JSONResponse(
                                        {
                                            'status': 'false',
                                            'message':
                                            [_(e.title), _(str(message))]
                                        },
                                        status=500)
                            else:
                                # reviewer accepting a provisional edit:
                                # record who made and who accepted it
                                if accepted_provisional is not None:
                                    provisional_editor = User.objects.get(
                                        pk=accepted_provisional_edit["user"])
                                    prov_edit_log_details = {
                                        "user": request.user,
                                        "action": "accept edit",
                                        "edit": accepted_provisional_edit,
                                        "provisional_editor":
                                        provisional_editor
                                    }
                                    tile.save(request=request,
                                              provisional_edit_log_details=
                                              prov_edit_log_details)
                            # echo the caller's own pending provisional data
                            # back so their UI shows what they submitted
                            if tile.provisionaledits is not None and str(
                                    request.user.id) in tile.provisionaledits:
                                tile.data = tile.provisionaledits[str(
                                    request.user.id)]['value']
                        except Exception as e:
                            return self.handle_save_error(e, tile_id)
                        tile.after_update_all()
                        clean_resource_cache(tile)
                        update_system_settings_cache(tile)
                except Exception as e:
                    return self.handle_save_error(e, tile_id)
                return JSONResponse(tile)
            elif is_active is False:
                response = {
                    'status': 'false',
                    'message': [
                        _('Request Failed'),
                        _('Unable to Save. Verify model status is active')
                    ]
                }
                return JSONResponse(response, status=500)
            else:
                response = {
                    'status': 'false',
                    'message': [_('Request Failed'), _('Permission Denied')]
                }
                return JSONResponse(response, status=500)
    if self.action == 'reorder_tiles':
        json = request.body
        if json is not None:
            data = JSONDeserializer().deserialize(json)
            if 'tiles' in data and len(data['tiles']) > 0:
                sortorder = 0
                with transaction.atomic():
                    for tile in data['tiles']:
                        t = Tile(tile)
                        # only tiles the user may write get a new sortorder
                        if t.filter_by_perm(request.user, 'write_nodegroup'):
                            t.sortorder = sortorder
                            t.save(update_fields=['sortorder'],
                                   request=request)
                        sortorder = sortorder + 1
                return JSONResponse(data)
    if self.action == 'delete_provisional_tile':
        user = request.POST.get('user', None)
        tileid = request.POST.get('tileid', None)
        users = request.POST.get('users', None)
        tile = Tile.objects.get(tileid=tileid)
        is_provisional = tile.is_provisional()
        if tileid is not None and user is not None:
            # single-user form
            provisionaledits = self.delete_provisional_edit(
                tile, user, request)
        elif tileid is not None and users is not None:
            # bulk form: JSON-encoded list of user ids
            users = jsonparser.loads(users)
            for user in users:
                self.delete_provisional_edit(tile, user, request)
        # 'delete' tells the client the whole tile was provisional-only
        if is_provisional == True:
            return JSONResponse({'result': 'delete'})
        else:
            return JSONResponse({'result': 'success'})
    return HttpResponseNotFound()
def resource_manager(request, resourcetypeid='', form_id='default',
                     resourceid=''):
    """Render / save / delete a resource via its data-entry form.

    GET renders the requested form; POST saves the submitted form data and
    redirects back to the editor; DELETE removes the resource, its search
    index entry, and all of its relationship documents.
    """
    # NOTE(review): if both resourceid and resourcetypeid are empty,
    # `resource` is never bound and the lines below raise NameError —
    # presumably the URLconf guarantees at least one is supplied.
    if resourceid != '':
        resource = Resource(resourceid)
    elif resourcetypeid != '':
        resource = Resource({'entitytypeid': resourcetypeid})
    if form_id == 'default':
        form_id = resource.form_groups[0]['forms'][0]['id']
    form = resource.get_form(form_id)
    if request.method == 'DELETE':
        resource.delete_index()
        se = SearchEngineFactory().create()
        # remove every relationship document before deleting the resource
        relationships = resource.get_related_resources(return_entities=False)
        for relationship in relationships:
            se.delete(index='resource_relations',
                      doc_type='all',
                      id=relationship.resourcexid)
            relationship.delete()
        resource.delete()
        return JSONResponse({'success': True})
    if request.method == 'POST':
        data = JSONDeserializer().deserialize(
            request.POST.get('formdata', {}))
        form.update(data, request.FILES)
        with transaction.atomic():
            if resourceid != '':
                resource.delete_index()
            resource.save(user=request.user)
            resource.index()
            resourceid = resource.entityid
            return redirect('resource_manager',
                            resourcetypeid=resourcetypeid,
                            form_id=form_id,
                            resourceid=resourceid)
    min_max_dates = models.Dates.objects.aggregate(Min('val'), Max('val'))
    if request.method == 'GET':
        if form != None:
            lang = request.GET.get('lang', settings.LANGUAGE_CODE)
            form.load(lang)
            return render(
                request,
                'resource-manager.htm',
                {
                    'form': form,
                    'formdata': JSONSerializer().serialize(form.data),
                    'form_template': 'views/forms/' + form_id + '.htm',
                    'form_id': form_id,
                    'resourcetypeid': resourcetypeid,
                    'resourceid': resourceid,
                    'main_script': 'resource-manager',
                    # key kept byte-identical (template may depend on it)
                    'active_page': 'ResourceManger',
                    'resource': resource,
                    'resource_name': resource.get_primary_name(),
                    'resource_type_name': resource.get_type_name(),
                    'form_groups': resource.form_groups,
                    'min_date':
                    min_max_dates['val__min'].year
                    if min_max_dates['val__min'] != None else 0,
                    # FIX: the guard previously tested val__min, crashing (or
                    # returning the wrong fallback) when the max aggregate is
                    # None while min is not, and vice versa.
                    'max_date':
                    min_max_dates['val__max'].year
                    if min_max_dates['val__max'] != None else 1,
                    'timefilterdata': JSONSerializer().serialize(
                        Concept.get_time_filter_data()),
                })
        else:
            return HttpResponseNotFound('<h1>Arches form not found.</h1>')
def setUpClass(cls):
    # One-time fixture for the search tests: rebuild the ES indices, load
    # the "Search Test Model" graph, create a concept with a min/max year
    # note, then create four resource instances exercising different
    # combinations of dates, cultural periods, names, and geometry.
    delete_terms_index()
    delete_concepts_index()
    delete_search_index()
    prepare_terms_index(create=True)
    prepare_concepts_index(create=True)
    prepare_search_index(create=True)
    cls.client = Client()
    cls.client.login(username="******", password="******")
    models.ResourceInstance.objects.all().delete()
    with open(
            os.path.join(
                "tests/fixtures/resource_graphs/Search Test Model.json"),
            "rU") as f:
        archesfile = JSONDeserializer().deserialize(f)
    ResourceGraphImporter(archesfile["graph"])
    # node/graph ids fixed by the fixture file above
    cls.search_model_graphid = "d291a445-fa5f-11e6-afa8-14109fd34195"
    cls.search_model_cultural_period_nodeid = "7a182580-fa60-11e6-96d1-14109fd34195"
    cls.search_model_creation_date_nodeid = "1c1d05f5-fa60-11e6-887f-14109fd34195"
    cls.search_model_destruction_date_nodeid = "e771b8a1-65fe-11e7-9163-14109fd34195"
    cls.search_model_name_nodeid = "2fe14de3-fa61-11e6-897b-14109fd34195"
    cls.search_model_sensitive_info_nodeid = "57446fae-65ff-11e7-b63a-14109fd34195"
    cls.search_model_geom_nodeid = "3ebc6785-fa61-11e6-8c85-14109fd34195"
    # an unprivileged user denied access to the destruction-date nodegroup,
    # for permission-filtering tests
    cls.user = User.objects.create_user(
        "unpriviliged_user", "*****@*****.**", "test")
    cls.user.groups.add(Group.objects.get(name="Guest"))
    nodegroup = models.NodeGroup.objects.get(
        pk=cls.search_model_destruction_date_nodeid)
    assign_perm("no_access_to_nodegroup", cls.user, nodegroup)
    # Add a concept that defines a min and max date
    concept = {
        "id": "00000000-0000-0000-0000-000000000001",
        "legacyoid": "ARCHES",
        "nodetype": "ConceptScheme",
        "values": [],
        "subconcepts": [{
            "values": [
                {
                    "value": "Mock concept",
                    "language": "en-US",
                    "category": "label",
                    "type": "prefLabel",
                    "id": "",
                    "conceptid": ""
                },
                {
                    "value": "1950",
                    "language": "en-US",
                    "category": "note",
                    "type": "min_year",
                    "id": "",
                    "conceptid": ""
                },
                {
                    "value": "1980",
                    "language": "en-US",
                    "category": "note",
                    "type": "max_year",
                    "id": "",
                    "conceptid": ""
                },
            ],
            "relationshiptype": "hasTopConcept",
            "nodetype": "Concept",
            "id": "",
            "legacyoid": "",
            "subconcepts": [],
            "parentconcepts": [],
            "relatedconcepts": [],
        }],
    }
    post_data = JSONSerializer().serialize(concept)
    content_type = "application/x-www-form-urlencoded"
    response = cls.client.post(
        reverse(
            "concept",
            kwargs={"conceptid": "00000000-0000-0000-0000-000000000001"}),
        post_data, content_type)
    response_json = json.loads(response.content)
    valueid = response_json["subconcepts"][0]["values"][0]["id"]
    cls.conceptid = response_json["subconcepts"][0]["id"]

    # add resource instance with only a cultural period defined
    cls.cultural_period_resource = Resource(
        graph_id=cls.search_model_graphid)
    tile = Tile(data={cls.search_model_cultural_period_nodeid: [valueid]},
                nodegroup_id=cls.search_model_cultural_period_nodeid)
    cls.cultural_period_resource.tiles.append(tile)
    cls.cultural_period_resource.save()

    # add resource instance with a creation and destruction date defined
    cls.date_resource = Resource(graph_id=cls.search_model_graphid)
    tile = Tile(data={cls.search_model_creation_date_nodeid: "1941-01-01"},
                nodegroup_id=cls.search_model_creation_date_nodeid)
    cls.date_resource.tiles.append(tile)
    tile = Tile(
        data={cls.search_model_destruction_date_nodeid: "1948-01-01"},
        nodegroup_id=cls.search_model_destruction_date_nodeid)
    cls.date_resource.tiles.append(tile)
    tile = Tile(data={cls.search_model_name_nodeid: "testing 123"},
                nodegroup_id=cls.search_model_name_nodeid)
    cls.date_resource.tiles.append(tile)
    cls.date_resource.save()

    # add resource instance with a creation date and a cultural period defined
    cls.date_and_cultural_period_resource = Resource(
        graph_id=cls.search_model_graphid)
    tile = Tile(data={cls.search_model_creation_date_nodeid: "1942-01-01"},
                nodegroup_id=cls.search_model_creation_date_nodeid)
    cls.date_and_cultural_period_resource.tiles.append(tile)
    tile = Tile(data={cls.search_model_cultural_period_nodeid: [valueid]},
                nodegroup_id=cls.search_model_cultural_period_nodeid)
    cls.date_and_cultural_period_resource.tiles.append(tile)
    cls.date_and_cultural_period_resource.save()

    # add resource instance with with no dates or periods defined
    cls.name_resource = Resource(graph_id=cls.search_model_graphid)
    tile = Tile(data={cls.search_model_name_nodeid: "some test name"},
                nodegroup_id=cls.search_model_name_nodeid)
    cls.name_resource.tiles.append(tile)
    geom = {
        "type": "FeatureCollection",
        "features": [{
            "geometry": {
                "type": "Point",
                "coordinates": [0, 0]
            },
            "type": "Feature",
            "properties": {}
        }],
    }
    tile = Tile(data={cls.search_model_geom_nodeid: geom},
                nodegroup_id=cls.search_model_geom_nodeid)
    cls.name_resource.tiles.append(tile)
    cls.name_resource.save()

    # add delay to allow for indexes to be updated
    time.sleep(1)
def create_indices():
    """Prepare the Elasticsearch indices used by resources: the shared
    resource-relations index plus one search index per configured
    resource type."""
    Resource().prepare_resource_relations_index(create=True)
    type_configs = settings.RESOURCE_TYPE_CONFIGS()
    for config in type_configs.values():
        Resource().prepare_search_index(config['resourcetypeid'],
                                        create=True)
def test_delete_of_entity(self):
    """Deleting a resource entity must remove the whole entity graph and
    every dependent row (relations, strings, numbers, domains)."""
    name_type_value = models.Values.objects.get(value='Legal')

    def build_node(entitytypeid, value, children=None):
        # one node of the entity graph, in the dict shape Resource() consumes
        return {
            "entityid": "",
            "entitytypeid": entitytypeid,
            "value": value,
            "property": "P1",
            "child_entities": [] if children is None else children,
        }

    python_object = build_node("PERSON.E1", "", [
        build_node("NAME.E1", "Alexei", [
            build_node("NAME_TYPE.E1", name_type_value.pk),
        ]),
        build_node("LOCATION.E1", "", [
            build_node("PERIOD.E1", "", [
                build_node("ADDRESS.E1", "859"),
            ]),
        ]),
    ])
    entity = Resource(python_object)
    entity.save()

    def table_counts():
        # snapshot of every table the delete is expected to shrink
        return (
            models.Entities.objects.count(),
            models.Relations.objects.count(),
            models.Strings.objects.count(),
            models.Numbers.objects.count(),
            models.Domains.objects.count(),
        )

    counts_before = table_counts()
    entity.delete()
    counts_after = table_counts()

    # the entity itself must be gone
    with self.assertRaises(models.Entities.DoesNotExist):
        Resource().get(entity.entityid)

    # entities, relations, strings, numbers, domains removed respectively
    expected_deltas = (6, 5, 1, 1, 1)
    for before, after, expected in zip(counts_before, counts_after,
                                       expected_deltas):
        self.assertEqual(before - after, expected)
def post(self, request):
    # Tile API endpoint (earlier variant); dispatches on self.action:
    # 'update_tile', 'reorder_tiles', 'delete_provisional_tile'.
    if self.action == 'update_tile':
        # NOTE: shadows the builtin/module name `json` with the raw payload
        json = request.POST.get('data', None)
        accepted_provisional = request.POST.get('accepted_provisional',
                                                None)
        if accepted_provisional != None:
            accepted_provisional_edit = JSONDeserializer().deserialize(
                accepted_provisional)
        if json != None:
            data = JSONDeserializer().deserialize(json)
            if data['resourceinstance_id'] == '':
                data['resourceinstance_id'] = uuid.uuid4()
            try:
                models.ResourceInstance.objects.get(
                    pk=data['resourceinstance_id'])
            except ObjectDoesNotExist:
                # tile arrived before its resource exists: create the
                # resource shell, deriving the graph from the nodegroup
                resource = Resource()
                graphid = models.Node.objects.filter(
                    nodegroup=data['nodegroup_id'])[0].graph_id
                resource.graph_id = graphid
                resource.save(user=request.user)
                data['resourceinstance_id'] = resource.pk
                resource.index()
            tile_id = data['tileid']
            if tile_id != None and tile_id != '':
                # updating an existing tile: invalidate its cached resource
                try:
                    old_tile = Tile.objects.get(pk=tile_id)
                    clean_resource_cache(old_tile)
                except ObjectDoesNotExist:
                    return JSONResponse(
                        {
                            'status': 'false',
                            'message': [
                                _('This tile is no longer available'),
                                _('It was likely deleted by another user')
                            ]
                        },
                        status=500)
            tile = Tile(data)
            if tile.filter_by_perm(request.user, 'write_nodegroup'):
                with transaction.atomic():
                    try:
                        if accepted_provisional == None:
                            tile.save(request=request)
                        else:
                            # reviewer accepting a provisional edit: log who
                            # made and who accepted it
                            if accepted_provisional is not None:
                                provisional_editor = User.objects.get(
                                    pk=accepted_provisional_edit["user"])
                                tile.save(
                                    provisional_edit_log_details={
                                        "user": request.user,
                                        "action": "accept edit",
                                        "edit": accepted_provisional_edit,
                                        "provisional_editor":
                                        provisional_editor
                                    })
                        # NOTE(review): leftover debug/experimental code —
                        # hard-coded tile UUID, inline couchdb import, and a
                        # full _all_docs scan per mobile-survey database.
                        # Looks like a one-off couch sync test; confirm and
                        # remove.
                        if tile_id == '4345f530-aa90-48cf-b4b3-92d1185ca439':
                            import couchdb
                            import json as json_json
                            couch = couchdb.Server(settings.COUCHDB_URL)
                            for project in models.MobileSurveyModel.objects.all(
                            ):
                                db = couch['project_' + str(project.id)]
                                #tile = models.TileModel.objects.get(pk='4345f530-aa90-48cf-b4b3-92d1185ca439')
                                tile_json = json_json.loads(
                                    JSONSerializer().serialize(tile))
                                tile_json['_id'] = tile_json['tileid']
                                for row in db.view('_all_docs',
                                                   include_docs=True):
                                    if 'tileid' in row.doc and tile_json[
                                            '_id'] == row.doc['_id']:
                                        tile_json['_rev'] = row.doc['_rev']
                                        db.save(tile_json)
                        # echo the caller's own pending provisional data back
                        # so their UI shows what they submitted
                        if tile.provisionaledits is not None and str(
                                request.user.id) in tile.provisionaledits:
                            tile.data = tile.provisionaledits[str(
                                request.user.id)]['value']
                    except ValidationError as e:
                        return JSONResponse(
                            {
                                'status': 'false',
                                'message': e.args
                            },
                            status=500)
                    tile.after_update_all()
                    clean_resource_cache(tile)
                    update_system_settings_cache(tile)
                return JSONResponse(tile)
            else:
                return JSONResponse(
                    {
                        'status': 'false',
                        'message':
                        [_('Request Failed'), _('Permission Denied')]
                    },
                    status=500)
    if self.action == 'reorder_tiles':
        json = request.body
        if json != None:
            data = JSONDeserializer().deserialize(json)
            if 'tiles' in data and len(data['tiles']) > 0:
                sortorder = 0
                with transaction.atomic():
                    for tile in data['tiles']:
                        t = Tile(tile)
                        # only tiles the user may write get a new sortorder
                        if t.filter_by_perm(request.user, 'write_nodegroup'):
                            t.sortorder = sortorder
                            t.save(update_fields=['sortorder'],
                                   request=request)
                        sortorder = sortorder + 1
                return JSONResponse(data)
    if self.action == 'delete_provisional_tile':
        user = request.POST.get('user', None)
        tileid = request.POST.get('tileid', None)
        users = request.POST.get('users', None)
        tile = Tile.objects.get(tileid=tileid)
        is_provisional = tile.is_provisional()
        if tileid is not None and user is not None:
            # single-user form
            provisionaledits = self.delete_provisional_edit(
                tile, user, reviewer=request.user)
        elif tileid is not None and users is not None:
            # bulk form: JSON-encoded list of user ids
            users = jsonparser.loads(users)
            for user in users:
                self.delete_provisional_edit(tile, user,
                                             reviewer=request.user)
        # 'delete' tells the client the whole tile was provisional-only
        if is_provisional == True:
            return JSONResponse({'result': 'delete'})
        else:
            return JSONResponse({'result': 'success'})
    return HttpResponseNotFound()
def import_business_data_without_mapping(self, business_data, reporter,
                                         overwrite="append",
                                         prevent_indexing=False):
    # Import resources (with their tile trees) from an Arches-JSON export
    # that already uses target node ids, so no field mapping is needed.
    # overwrite="overwrite" replaces rows wholesale; "append" (default)
    # updates existing rows in place when ids match.
    errors = []  # NOTE(review): appears collected nowhere in this function
    for resource in business_data["resources"]:
        if resource["resourceinstance"] is not None:
            # skip resources whose graph is not installed
            if GraphModel.objects.filter(graphid=str(
                    resource["resourceinstance"]["graph_id"])).count() > 0:
                resourceinstanceid = uuid.UUID(
                    str(resource["resourceinstance"]
                        ["resourceinstanceid"]))
                defaults = {
                    "graph_id": uuid.UUID(str(
                        resource["resourceinstance"]["graph_id"])),
                    "legacyid": resource["resourceinstance"]["legacyid"],
                }
                new_values = {
                    "resourceinstanceid": resourceinstanceid,
                    "createdtime": datetime.datetime.now()
                }
                new_values.update(defaults)
                if overwrite == "overwrite":
                    resourceinstance = Resource(**new_values)
                else:
                    # append mode: update the existing row when it exists
                    try:
                        resourceinstance = Resource.objects.get(
                            resourceinstanceid=resourceinstanceid)
                        for key, value in defaults.items():
                            setattr(resourceinstance, key, value)
                    except Resource.DoesNotExist:
                        resourceinstance = Resource(**new_values)
                if resource["tiles"] != []:
                    reporter.update_tiles(len(resource["tiles"]))

                    def update_or_create_tile(src_tile):
                        # Recursively build/refresh one tile and all of its
                        # children, appending them to resourceinstance.tiles.
                        tile = None
                        src_tile["parenttile_id"] = uuid.UUID(
                            str(src_tile["parenttile_id"])
                        ) if src_tile["parenttile_id"] else None
                        defaults = {
                            "resourceinstance": resourceinstance,
                            "parenttile_id":
                            str(src_tile["parenttile_id"])
                            if src_tile["parenttile_id"] else None,
                            "nodegroup_id":
                            str(src_tile["nodegroup_id"])
                            if src_tile["nodegroup_id"] else None,
                            "data": src_tile["data"],
                        }
                        new_values = {
                            "tileid": uuid.UUID(str(src_tile["tileid"]))
                        }
                        new_values.update(defaults)
                        if overwrite == "overwrite":
                            tile = Tile(**new_values)
                        else:
                            try:
                                tile = Tile.objects.get(tileid=uuid.UUID(
                                    str(src_tile["tileid"])))
                                for key, value in defaults.items():
                                    setattr(tile, key, value)
                            except Tile.DoesNotExist:
                                tile = Tile(**new_values)
                        if tile is not None:
                            resourceinstance.tiles.append(tile)
                            reporter.update_tiles_saved()
                        for child in src_tile["tiles"]:
                            update_or_create_tile(child)

                    # attach each tile's children, then recurse from the
                    # root tiles (those without a parent)
                    for tile in resource["tiles"]:
                        tile["tiles"] = [
                            child for child in resource["tiles"]
                            if child["parenttile_id"] == tile["tileid"]
                        ]
                    for tile in [
                            k for k in resource["tiles"]
                            if k["parenttile_id"] is None
                    ]:
                        update_or_create_tile(tile)
                # saving the resource cascades to its tiles; indexing is
                # skipped when prevent_indexing is set
                resourceinstance.save(index=(not prevent_indexing))
                reporter.update_resources_saved()
def import_business_data(self, business_data, mapping=None,
                         overwrite="append", prevent_indexing=False):
    # Import resources from business data.  Without a mapping the data is
    # assumed to already use target node ids and is delegated to
    # import_business_data_without_mapping; with a mapping, source tiles are
    # translated node-by-node into "blank" target tiles built from the
    # mapping, honoring each target nodegroup's cardinality.
    reporter = ResourceImportReporter(business_data)
    try:
        if mapping is None or mapping == "":
            self.import_business_data_without_mapping(
                business_data,
                reporter,
                overwrite=overwrite,
                prevent_indexing=prevent_indexing)
        else:
            blanktilecache = {}
            # nodegroupid -> cardinality ("1" or "n"), used to decide when a
            # nodegroup may receive more than one tile
            target_nodegroup_cardinalities = {}
            for nodegroup in JSONSerializer().serializeToPython(
                    NodeGroup.objects.all()):
                target_nodegroup_cardinalities[
                    nodegroup["nodegroupid"]] = nodegroup["cardinality"]
            for resource in business_data["resources"]:
                reporter.update_tiles(len(resource["tiles"]))
                parenttileids = []
                populated_tiles = []
                # every imported resource gets a freshly minted id
                resourceinstanceid = uuid.uuid4()
                populated_nodegroups = []
                target_resource_model = mapping["resource_model_id"]
                for tile in resource["tiles"]:
                    if tile["data"] != {}:

                        def get_tiles(tile):
                            # Group a tile with its siblings (same parent);
                            # returns None when that parent group was already
                            # processed.  Also strips empty-string values.
                            if tile["parenttile_id"] is not None:
                                if tile["parenttile_id"] not in parenttileids:
                                    parenttileids.append(
                                        tile["parenttile_id"])
                                    ret = []
                                    for sibling_tile in resource["tiles"]:
                                        if sibling_tile[
                                                "parenttile_id"] == tile[
                                                    "parenttile_id"]:
                                            ret.append(sibling_tile)
                                else:
                                    ret = None
                            else:
                                ret = [tile]
                            # deletes nodes that don't have values
                            if ret is not None:
                                for tile in ret:
                                    for key, value in tile["data"].items():
                                        if value == "":
                                            del tile["data"][key]
                            return ret

                        tiles = get_tiles(tile)
                        if tiles is not None:
                            mapped_tiles = self.replace_source_nodeid(
                                tiles, mapping)
                            blank_tile = self.get_blank_tile(
                                tiles, blanktilecache, tiles,
                                resourceinstanceid)

                            def populate_tile(sourcetilegroup, target_tile):
                                # Move values from the source tile group into
                                # target_tile (and its child tiles), spawning
                                # additional blank tiles while unconsumed
                                # source data remains and cardinality allows.
                                need_new_tile = False
                                target_tile_cardinality = target_nodegroup_cardinalities[
                                    str(target_tile.nodegroup_id)]
                                if str(target_tile.nodegroup_id
                                       ) not in populated_nodegroups:
                                    if target_tile.data != {}:
                                        # fill the target tile's own nodes,
                                        # consuming matched source values
                                        for source_tile in sourcetilegroup:
                                            for tiledata in source_tile[
                                                    "data"]:
                                                for nodeid in list(
                                                        tiledata.keys()):
                                                    if nodeid in target_tile.data:
                                                        if target_tile.data[
                                                                nodeid] is None:
                                                            target_tile.data[
                                                                nodeid] = tiledata[
                                                                    nodeid]
                                                            for key in list(
                                                                    tiledata
                                                                    .keys()):
                                                                if key == nodeid:
                                                                    del tiledata[
                                                                        nodeid]
                                            # drop fully consumed source dicts
                                            for tiledata in source_tile[
                                                    "data"]:
                                                if tiledata == {}:
                                                    source_tile[
                                                        "data"].remove(
                                                            tiledata)
                                    elif target_tile.tiles is not None:
                                        # parent tile: populate child tiles
                                        # per child-nodegroup cardinality
                                        populated_child_tiles = []
                                        populated_child_nodegroups = []
                                        for childtile in target_tile.tiles:
                                            childtile_empty = True
                                            child_tile_cardinality = target_nodegroup_cardinalities[
                                                str(childtile.nodegroup_id)]
                                            if str(
                                                    childtile.nodegroup_id
                                            ) not in populated_child_nodegroups:
                                                prototype_tile = childtile
                                                prototype_tile.tileid = None
                                                for source_tile in sourcetilegroup:
                                                    if prototype_tile.nodegroup_id not in populated_child_nodegroups:
                                                        prototype_tile_copy = deepcopy(
                                                            prototype_tile)
                                                        for data in source_tile[
                                                                "data"]:
                                                            for nodeid in list(
                                                                    data.keys()):
                                                                if nodeid in list(
                                                                        prototype_tile
                                                                        .data.keys()):
                                                                    if prototype_tile.data[
                                                                            nodeid] is None:
                                                                        prototype_tile_copy.data[
                                                                            nodeid] = data[
                                                                                nodeid]
                                                                        for key in list(
                                                                                data.keys()):
                                                                            if key == nodeid:
                                                                                del data[
                                                                                    nodeid]
                                                        if child_tile_cardinality == "1":
                                                            populated_child_nodegroups.append(
                                                                prototype_tile
                                                                .nodegroup_id)
                                                        for data in source_tile[
                                                                "data"]:
                                                            if data == {}:
                                                                source_tile[
                                                                    "data"].remove(
                                                                        data)
                                                        # discard copies that
                                                        # received no values
                                                        for key in list(
                                                                prototype_tile_copy
                                                                .data.keys()):
                                                            if prototype_tile_copy.data[
                                                                    key] is not None:
                                                                childtile_empty = False
                                                        if prototype_tile_copy.data == {} or childtile_empty:
                                                            prototype_tile_copy = None
                                                        if prototype_tile_copy is not None:
                                                            populated_child_tiles.append(
                                                                prototype_tile_copy)
                                            else:
                                                break
                                        target_tile.tiles = populated_child_tiles
                                    # NOTE(review): condition can never fire
                                    # inside `if target_tile.data:` — likely
                                    # intended as `.data or .tiles`
                                    if target_tile.data:
                                        if target_tile.data == {} and target_tile.tiles == {}:
                                            target_tile = None
                                    populated_tiles.append(target_tile)
                                    # any unconsumed source data means we
                                    # need another blank tile to recurse into
                                    for source_tile in sourcetilegroup:
                                        if source_tile["data"]:
                                            for data in source_tile["data"]:
                                                if len(data) > 0:
                                                    need_new_tile = True
                                    if need_new_tile:
                                        if self.get_blank_tile(
                                                sourcetilegroup,
                                                blanktilecache, tiles,
                                                resourceinstanceid
                                        ) is not None:
                                            populate_tile(
                                                sourcetilegroup,
                                                self.get_blank_tile(
                                                    sourcetilegroup,
                                                    blanktilecache, tiles,
                                                    resourceinstanceid),
                                            )
                                    if target_tile_cardinality == "1":
                                        populated_nodegroups.append(
                                            str(target_tile.nodegroup_id))
                                else:
                                    target_tile = None

                            if blank_tile is not None:
                                populate_tile(mapped_tiles, blank_tile)
                newresourceinstance = Resource(
                    resourceinstanceid=resourceinstanceid,
                    graph_id=target_resource_model,
                    legacyid=None,
                    createdtime=datetime.datetime.now(),
                )
                newresourceinstance.tiles = populated_tiles
                newresourceinstance.save(index=(not prevent_indexing))
                reporter.update_resources_saved()
    except (KeyError, TypeError) as e:
        print(e)
    finally:
        # always emit the import summary, even on failure
        reporter.report_results()
def handle(self, *args, **options):
    """Management command (Python 2 era): migrate CULTURAL_PERIOD data.

    For every HERITAGE_RESOURCE_GROUP.E27 entity, collect its
    CULTURAL_PERIOD.E55 entities (plus their PHASE_TYPE_ASSIGNMENT.E17
    parents and CULTURAL_PERIOD_CERTAINTY_TYPE.E55 qualifiers), delete the
    old parent branches, and re-attach the values as
    CULTURAL_SUBPERIOD.E55 / CULTURAL_SUBPERIOD_CERTAINTY_TYPE.E55 branches
    built from the resource mapping schema.
    """
    lookedat = 0   # resources inspected
    edited = 0     # resources actually rewritten
    # Mapping schema is the same for every E27, so fetch it once up front.
    schema = Resource.get_mapping_schema('HERITAGE_RESOURCE_GROUP.E27')
    for E27 in archesmodels.Entities.objects.filter(
            entitytypeid='HERITAGE_RESOURCE_GROUP.E27'):
        lookedat += 1
        resource = Resource().get(E27.entityid)
        #print "NEW RESOURCE"
        #print resource
        culturals = []
        parents = []
        certainties = []
        # Walk the flattened entity graph and bucket the nodes of interest.
        for entity in resource.flatten():
            if str(entity.entitytypeid) == 'CULTURAL_PERIOD.E55':
                #print "CULTURAL", entity
                culturals.append(entity)
                parent = entity.get_parent()
                if str(parent.entitytypeid) == 'PHASE_TYPE_ASSIGNMENT.E17':
                    parents.append(parent)
                    #print parent
            if str(entity.entitytypeid) == 'CULTURAL_PERIOD_CERTAINTY_TYPE.E55':
                #print "CERTAINTY", entity
                certainties.append(entity)
        if culturals and parents:
            edited += 1
            print '.',
            # if len(culturals) == 1:
            #     continue
            #print "Going to do something here."
            # Remove the old PHASE_TYPE_ASSIGNMENT branches before rebuilding.
            for parent in parents:
                parent._delete()
            for i, cultural in enumerate(culturals):
                allres = []
                #print cultural.value
                newres = Resource()
                newres.create_from_mapping(
                    'HERITAGE_RESOURCE_GROUP.E27',
                    schema['CULTURAL_SUBPERIOD.E55']['steps'],
                    'CULTURAL_SUBPERIOD.E55', cultural.value)
                allres.append(newres)
                # NOTE(review): assumes a certainty exists for every cultural
                # period at the same index; raises IndexError otherwise — confirm.
                certainty = certainties[i]
                #print certainty.value
                newres = Resource()
                newres.create_from_mapping(
                    'HERITAGE_RESOURCE_GROUP.E27',
                    schema['CULTURAL_SUBPERIOD_CERTAINTY_TYPE.E55']['steps'],
                    'CULTURAL_SUBPERIOD_CERTAINTY_TYPE.E55', certainty.value)
                allres.append(newres)
                # Merge the subperiod + certainty graphs into one branch and
                # graft it onto the resource.
                mapping_graph = allres[0]
                for mapping in allres[1:]:
                    mapping_graph.merge(mapping)
                #print "Mapping graph:"
                #print mapping_graph.flatten()
                resource.merge_at(mapping_graph, mapping_graph.entitytypeid)
            # NOTE(review): reconstructed indentation — save appears to happen
            # once per edited resource, after all branches are merged; confirm.
            resource.save()
            #print resource.flatten()
            #break
    print ""
    print "Looked through %s resources and edited %s" % (lookedat, edited)
def resource_list_to_entities(self, resource_list, archesjson=False): '''Takes a collection of imported resource records and saves them as arches entities''' start = time() d = datetime.datetime.now() load_id = 'LOADID:{0}-{1}-{2}-{3}-{4}-{5}'.format( d.year, d.month, d.day, d.hour, d.minute, d.microsecond ) #Should we append the timestamp to the exported filename? ret = {'successfully_saved': 0, 'failed_to_save': []} schema = None current_entitiy_type = None legacyid_to_entityid = {} errors = [] progress_interval = 250 for count, resource in enumerate(resource_list): if count >= progress_interval and count % progress_interval == 0: print count, 'of', len(resource_list), 'loaded' if archesjson == False: masterGraph = None if current_entitiy_type != resource.entitytypeid: schema = Resource.get_mapping_schema(resource.entitytypeid) master_graph = self.build_master_graph(resource, schema) self.pre_save(master_graph) try: uuid.UUID(resource.resource_id) entityid = resource.resource_id except (ValueError): entityid = '' master_graph.save(user=self.user, note=load_id, resource_uuid=entityid) master_graph.index() resource.entityid = master_graph.entityid legacyid_to_entityid[ resource.resource_id] = master_graph.entityid else: new_resource = Resource(resource) new_resource.save(user=self.user, note=load_id, resource_uuid=new_resource.entityid) try: new_resource.index() except: print 'Could not index resource. This may be because the valueid of a concept is not in the database.' 
legacyid_to_entityid[ new_resource.entityid] = new_resource.entityid ret['successfully_saved'] += 1 ret['legacyid_to_entityid'] = legacyid_to_entityid elapsed = (time() - start) print len(resource_list), 'resources loaded' if len(resource_list) > 0: print 'total time to etl = %s' % (elapsed) print 'average time per entity = %s' % (elapsed / len(resource_list)) print 'Load Identifier =', load_id print '***You can reverse this load with the following command:' print 'python manage.py packages -o remove_resources --load_id', load_id return ret
def resource_list_to_entities(self, resource_list, archesjson=False, append=False, filename='', load_id=None):
    '''Takes a collection of imported resource records and saves them as arches entities.

    Records are processed in chunks of ``progress_interval`` inside a
    database transaction each; prints progress and an ETA per chunk and
    appends a summary line to ``settings.BULK_UPLOAD_LOG_FILE``.

    Args:
        resource_list: iterable of imported resource records.
        archesjson: when True records are saved directly as Resources
            instead of being mapped through the schema.
        append: when True, de-index any pre-existing entity with the same
            UUID before saving (re-load semantics).
        filename: source file name, recorded in the bulk-upload log only.
        load_id: reuse an existing load identifier; one is minted from the
            current timestamp when None.

    Returns:
        dict with ``successfully_saved``, ``failed_to_save``, ``load_id``
        and a ``legacyid_to_entityid`` mapping.
    '''
    start = time()
    d = datetime.datetime.now()
    if load_id is None:
        load_id = 'LOADID:{0}-{1}-{2}-{3}-{4}-{5}'.format(
            d.year, d.month, d.day, d.hour, d.minute, d.microsecond
        )
    #Should we append the timestamp to the exported filename?
    ret = {
        'successfully_saved': 0,
        'failed_to_save': [],
        'load_id': load_id
    }
    schema = None
    current_entitiy_type = None
    legacyid_to_entityid = {}
    errors = []
    progress_interval = 250

    def chunks(l, n):
        """Yield successive n-sized chunks from l.
        Thanks to: https://stackoverflow.com/a/312464/3873885"""
        for i in xrange(0, len(l), n):
            yield l[i:i + n]

    elapsed = 0
    chunktimes = list()  # per-chunk wall-clock times, used for the ETA
    for m, resource_list_chunk in enumerate(
            chunks(resource_list, progress_interval)):
        startchunk = time()
        multiplier = m + 1
        # Each chunk is saved atomically: a failure rolls back the chunk.
        with transaction.atomic():
            for count, resource in enumerate(resource_list_chunk):
                real_ct = count + 1
                if archesjson == False:
                    masterGraph = None
                    # Only rebuild the mapping schema when the entity type
                    # changes between consecutive records.
                    if current_entitiy_type != resource.entitytypeid:
                        schema = Resource.get_mapping_schema(
                            resource.entitytypeid)
                        current_entitiy_type = resource.entitytypeid
                    master_graph = self.build_master_graph(resource, schema)
                    self.pre_save(master_graph)
                    try:
                        uuid.UUID(resource.resource_id)
                        entityid = resource.resource_id
                    except ValueError:
                        # resource_id is a legacy id, not a UUID
                        entityid = ''
                    if append:
                        # Re-load: drop the existing search index entry first.
                        try:
                            resource_to_delete = Resource(entityid)
                            resource_to_delete.delete_index()
                        except ObjectDoesNotExist:
                            print 'Entity ', entityid, ' does not exist. Nothing to delete'
                    try:
                        master_graph.save(user=self.user, note=load_id,
                                          resource_uuid=entityid)
                    except Exception as e:
                        print 'Could not save resource {}.\nERROR: {}'.format(
                            master_graph.entityid, e)
                    resource.entityid = master_graph.entityid
                    #new_resource = Resource().get(resource.entityid)
                    #assert new_resource == master_graph
                    try:
                        master_graph.index()
                    except Exception as e:
                        print 'Could not index resource {}.\nERROR: {}'.format(
                            resource.entityid, e)
                    legacyid_to_entityid[
                        resource.resource_id] = master_graph.entityid
                else:
                    new_resource = Resource(resource)
                    try:
                        new_resource.save(
                            user=self.user, note=load_id,
                            resource_uuid=new_resource.entityid)
                    except Exception as e:
                        print 'Could not save resource {}.\nERROR: {}'.format(
                            resource['entityid'], e)
                        # with open(resource['entityid']+".json", "wb") as f:
                        #     json.dump(resource, f, indent=1)
                        continue
                    # Re-fetch so the indexed document reflects the saved state.
                    new_resource = Resource().get(new_resource.entityid)
                    try:
                        new_resource.index()
                    except Exception as e:
                        print 'Could not index resource {}.\nERROR: {}'.format(
                            resource.entityid, e)
                    legacyid_to_entityid[
                        new_resource.entityid] = new_resource.entityid
                ret['successfully_saved'] += 1
        endchunk = time() - startchunk
        chunktimes.append(endchunk)
        chunktime_avg = sum(chunktimes) / len(chunktimes)
        # Remaining-time estimate (minutes) from the average chunk duration.
        remtime = ((len(resource_list) - (multiplier * progress_interval)) *
                   chunktime_avg / progress_interval) / 60
        if real_ct == progress_interval:
            print "{} of {} loaded in {}m. remaining time estimate: {}m".format(
                progress_interval * multiplier, len(resource_list),
                round(sum(chunktimes) / 60, 2), round(remtime, 2))
        else:
            # Final (partial) chunk: just report the running total.
            print progress_interval * multiplier + real_ct

    ret['legacyid_to_entityid'] = legacyid_to_entityid
    elapsed = (time() - start)
    print len(resource_list), 'resources loaded'
    if len(resource_list) > 0:
        print 'total time to etl = %s' % (elapsed)
        print 'average time per entity = %s' % (elapsed / len(resource_list))
        print 'Load Identifier =', load_id
        print '***You can reverse this load with the following command:'
        print 'python manage.py packages -o remove_resources --load_id', load_id
    # Append a human-readable record of this load to the bulk-upload log.
    log_msg = "\n~~~~~\n{}\nfile: {}\nresources: {}\nloadid: {}".format(
        d.strftime("%d/%m/%Y - %H:%M"), filename, len(resource_list), load_id)
    with open(settings.BULK_UPLOAD_LOG_FILE, "a") as loadlog:
        loadlog.write(log_msg)
    return ret
def setUpClass(cls):
    """Build the shared search-test fixture.

    Imports the 'Resource Test Model' graph, creates a Guest user with
    no_access on the destruction-date nodegroup, posts a concept carrying
    min/max year notes, and saves one resource instance with name,
    cultural-period, creation-date and geometry tiles.
    """
    models.ResourceInstance.objects.all().delete()
    cls.client = Client()
    cls.client.login(username="******", password="******")
    # BUGFIX: mode "rU" was removed in Python 3.11; universal newlines is
    # the default for text mode, so plain "r" is equivalent.
    with open(
            os.path.join(
                "tests/fixtures/resource_graphs/Resource Test Model.json"),
            "r") as f:
        archesfile = JSONDeserializer().deserialize(f)
    resource_graph_importer(archesfile["graph"])
    cls.search_model_graphid = "c9b37a14-17b3-11eb-a708-acde48001122"
    cls.search_model_cultural_period_nodeid = "c9b3882e-17b3-11eb-a708-acde48001122"
    cls.search_model_creation_date_nodeid = "c9b38568-17b3-11eb-a708-acde48001122"
    cls.search_model_destruction_date_nodeid = "c9b3828e-17b3-11eb-a708-acde48001122"
    cls.search_model_name_nodeid = "c9b37b7c-17b3-11eb-a708-acde48001122"
    cls.search_model_sensitive_info_nodeid = "c9b38aea-17b3-11eb-a708-acde48001122"
    cls.search_model_geom_nodeid = "c9b37f96-17b3-11eb-a708-acde48001122"

    # Test user may not see the destruction-date nodegroup.
    cls.user = User.objects.create_user("test", "*****@*****.**", "password")
    cls.user.groups.add(Group.objects.get(name="Guest"))
    nodegroup = models.NodeGroup.objects.get(
        pk=cls.search_model_destruction_date_nodeid)
    assign_perm("no_access_to_nodegroup", cls.user, nodegroup)

    # Add a concept that defines a min and max date
    concept = {
        "id": "00000000-0000-0000-0000-000000000001",
        "legacyoid": "ARCHES",
        "nodetype": "ConceptScheme",
        "values": [],
        "subconcepts": [{
            "values": [
                {
                    "value": "Mock concept",
                    "language": "en-US",
                    "category": "label",
                    "type": "prefLabel",
                    "id": "",
                    "conceptid": ""
                },
                {
                    "value": "1950",
                    "language": "en-US",
                    "category": "note",
                    "type": "min_year",
                    "id": "",
                    "conceptid": ""
                },
                {
                    "value": "1980",
                    "language": "en-US",
                    "category": "note",
                    "type": "max_year",
                    "id": "",
                    "conceptid": ""
                },
            ],
            "relationshiptype": "hasTopConcept",
            "nodetype": "Concept",
            "id": "",
            "legacyoid": "",
            "subconcepts": [],
            "parentconcepts": [],
            "relatedconcepts": [],
        }],
    }
    post_data = JSONSerializer().serialize(concept)
    content_type = "application/x-www-form-urlencoded"
    response = cls.client.post(
        reverse(
            "concept",
            kwargs={"conceptid": "00000000-0000-0000-0000-000000000001"}),
        post_data, content_type)
    response_json = json.loads(response.content)
    valueid = response_json["subconcepts"][0]["values"][0]["id"]
    cls.conceptid = response_json["subconcepts"][0]["id"]

    # Add resource with Name, Cultural Period, Creation Date and Geometry
    cls.test_resource = Resource(graph_id=cls.search_model_graphid)

    # Add Name
    tile = Tile(data={cls.search_model_name_nodeid: "Test Name 1"},
                nodegroup_id=cls.search_model_name_nodeid)
    cls.test_resource.tiles.append(tile)

    # Add Cultural Period
    tile = Tile(data={cls.search_model_cultural_period_nodeid: [valueid]},
                nodegroup_id=cls.search_model_cultural_period_nodeid)
    cls.test_resource.tiles.append(tile)

    # Add Creation Date
    tile = Tile(data={cls.search_model_creation_date_nodeid: "1941-01-01"},
                nodegroup_id=cls.search_model_creation_date_nodeid)
    cls.test_resource.tiles.append(tile)

    # Add Gometry
    cls.geom = {
        "type": "FeatureCollection",
        "features": [{
            "geometry": {
                "type": "Point",
                "coordinates": [0, 0]
            },
            "type": "Feature",
            "properties": {}
        }],
    }
    tile = Tile(data={cls.search_model_geom_nodeid: cls.geom},
                nodegroup_id=cls.search_model_geom_nodeid)
    cls.test_resource.tiles.append(tile)
    cls.test_resource.save()

    # add delay to allow for indexes to be updated
    time.sleep(1)
def setUpClass(cls):
    """Build the shared search-test fixture (legacy graph ids).

    Clears the terms/concepts and resources indexes, imports the
    'Resource Test Model' graph, creates a restricted Guest user, posts a
    date-bounded concept, and saves one resource instance with name,
    cultural-period, creation-date and geometry tiles.
    """
    se = SearchEngineFactory().create()
    se.delete_index(index='terms,concepts')
    se.delete_index(index='resources')
    cls.client = Client()
    cls.client.login(username='******', password='******')
    models.ResourceInstance.objects.all().delete()
    # BUGFIX: mode 'rU' was removed in Python 3.11; universal newlines is
    # the default for text mode, so plain 'r' is equivalent.
    with open(
            os.path.join(
                'tests/fixtures/resource_graphs/Resource Test Model.json'),
            'r') as f:
        archesfile = JSONDeserializer().deserialize(f)
    resource_graph_importer(archesfile['graph'])
    cls.search_model_graphid = 'e503a445-fa5f-11e6-afa8-14109fd34195'
    cls.search_model_cultural_period_nodeid = '7a182580-fa60-11e6-96d1-14109fd34195'
    cls.search_model_creation_date_nodeid = '1c1d05f5-fa60-11e6-887f-14109fd34195'
    cls.search_model_destruction_date_nodeid = 'e771b8a1-65fe-11e7-9163-14109fd34195'
    cls.search_model_name_nodeid = '2fe14de3-fa61-11e6-897b-14109fd34195'
    cls.search_model_sensitive_info_nodeid = '57446fae-65ff-11e7-b63a-14109fd34195'
    cls.search_model_geom_nodeid = '3ebc6785-fa61-11e6-8c85-14109fd34195'

    # Test user may not see the destruction-date nodegroup.
    cls.user = User.objects.create_user('test', '*****@*****.**', 'test')
    cls.user.save()
    cls.user.groups.add(Group.objects.get(name='Guest'))
    nodegroup = models.NodeGroup.objects.get(
        pk=cls.search_model_destruction_date_nodeid)
    assign_perm('no_access_to_nodegroup', cls.user, nodegroup)

    # Add a concept that defines a min and max date
    concept = {
        "id": "00000000-0000-0000-0000-000000000001",
        "legacyoid": "ARCHES",
        "nodetype": "ConceptScheme",
        "values": [],
        "subconcepts": [{
            "values": [{
                "value": "Mock concept",
                "language": "en-US",
                "category": "label",
                "type": "prefLabel",
                "id": "",
                "conceptid": ""
            }, {
                "value": "1950",
                "language": "en-US",
                "category": "note",
                "type": "min_year",
                "id": "",
                "conceptid": ""
            }, {
                "value": "1980",
                "language": "en-US",
                "category": "note",
                "type": "max_year",
                "id": "",
                "conceptid": ""
            }],
            "relationshiptype": "hasTopConcept",
            "nodetype": "Concept",
            "id": "",
            "legacyoid": "",
            "subconcepts": [],
            "parentconcepts": [],
            "relatedconcepts": []
        }]
    }
    post_data = JSONSerializer().serialize(concept)
    content_type = 'application/x-www-form-urlencoded'
    response = cls.client.post(
        reverse(
            'concept',
            kwargs={'conceptid': '00000000-0000-0000-0000-000000000001'}),
        post_data, content_type)
    response_json = json.loads(response.content)
    valueid = response_json['subconcepts'][0]['values'][0]['id']
    cls.conceptid = response_json['subconcepts'][0]['id']

    # Add resource with Name, Cultural Period, Creation Date and Geometry
    cls.test_resource = Resource(graph_id=cls.search_model_graphid)

    # Add Name
    tile = Tile(data={cls.search_model_name_nodeid: 'Test Name 1'},
                nodegroup_id=cls.search_model_name_nodeid)
    cls.test_resource.tiles.append(tile)

    # Add Cultural Period
    tile = Tile(data={cls.search_model_cultural_period_nodeid: [valueid]},
                nodegroup_id=cls.search_model_cultural_period_nodeid)
    cls.test_resource.tiles.append(tile)

    # Add Creation Date
    tile = Tile(data={cls.search_model_creation_date_nodeid: '1941-01-01'},
                nodegroup_id=cls.search_model_creation_date_nodeid)
    cls.test_resource.tiles.append(tile)

    # Add Gometry
    cls.geom = {
        "type": "FeatureCollection",
        "features": [{
            "geometry": {
                "type": "Point",
                "coordinates": [0, 0]
            },
            "type": "Feature",
            "properties": {}
        }]
    }
    tile = Tile(data={cls.search_model_geom_nodeid: cls.geom},
                nodegroup_id=cls.search_model_geom_nodeid)
    cls.test_resource.tiles.append(tile)
    cls.test_resource.save()

    # add delay to allow for indexes to be updated
    time.sleep(1)
def post(self, request):
    """Tile CRUD endpoint, dispatched on ``self.action``.

    Actions handled here:
      * ``update_tile`` — create/update a tile (creating its resource
        instance on the fly if needed), including accepting provisional
        edits.
      * ``reorder_tiles`` — persist a new sort order for a list of tiles.
      * ``delete_provisional_tile`` — remove provisional edits for one or
        more users from a tile.
    Returns a JSONResponse, or 404 for an unknown action.
    """
    if self.action == "update_tile":
        # NOTE(review): this local deliberately(?) shadows any module-level
        # `json` import for the rest of this branch — the raw POSTed payload.
        json = request.POST.get("data", None)
        accepted_provisional = request.POST.get("accepted_provisional", None)
        if accepted_provisional is not None:
            accepted_provisional_edit = JSONDeserializer().deserialize(
                accepted_provisional)
        if json is not None:
            data = JSONDeserializer().deserialize(json)
            data[
                "resourceinstance_id"] = "" if "resourceinstance_id" not in data else data[
                    "resourceinstance_id"]
            if data["resourceinstance_id"] == "":
                data["resourceinstance_id"] = uuid.uuid4()
            try:
                models.ResourceInstance.objects.get(
                    pk=data["resourceinstance_id"])
            except ObjectDoesNotExist:
                # Resource instance doesn't exist yet: create and index it
                # before saving the tile against it.
                try:
                    resource = Resource(
                        uuid.UUID(str(data["resourceinstance_id"])))
                except ValueError:
                    resource = Resource()
                graphid = models.Node.objects.filter(
                    nodegroup=data["nodegroup_id"])[0].graph_id
                resource.graph_id = graphid
                try:
                    resource.save(user=request.user)
                    data["resourceinstance_id"] = resource.pk
                    resource.index()
                except ModelInactiveError as e:
                    message = _(
                        "Unable to save. Please verify the model status is active"
                    )
                    return JSONResponse(
                        {
                            "status": "false",
                            "message": [_(e.title), _(str(message))]
                        },
                        status=500)
            tile_id = data["tileid"]
            resource_instance = models.ResourceInstance.objects.get(
                pk=data["resourceinstance_id"])
            is_active = resource_instance.graph.isactive
            if tile_id is not None and tile_id != "":
                # Verify the tile still exists (it may have been deleted by
                # another user between page load and save).
                try:
                    old_tile = Tile.objects.get(pk=tile_id)
                except ObjectDoesNotExist as e:
                    return self.handle_save_error(
                        e, _("This tile is no longer available"),
                        _("It was likely deleted by another user"))
            tile = Tile(data)
            if tile.filter_by_perm(
                    request.user, "write_nodegroup") and is_active is True:
                try:
                    with transaction.atomic():
                        try:
                            if accepted_provisional is None:
                                try:
                                    tile.save(request=request)
                                except TileValidationError as e:
                                    # If the failed tile was the resource's
                                    # only one, remove the now-empty resource.
                                    resource_tiles = models.TileModel.objects.filter(
                                        resourceinstance=tile.resourceinstance)
                                    if resource_tiles.count() == 0:
                                        Resource.objects.get(
                                            pk=tile.resourceinstance_id
                                        ).delete(request.user)
                                    title = _(
                                        "Unable to save. Please verify your input is valid"
                                    )
                                    return self.handle_save_error(
                                        e, tile_id, title=title)
                                except ModelInactiveError as e:
                                    message = _(
                                        "Unable to save. Please verify the model status is active"
                                    )
                                    return JSONResponse(
                                        {
                                            "status": "false",
                                            "message":
                                            [_(e.title), _(str(message))]
                                        },
                                        status=500)
                            else:
                                # Accepting a provisional edit: log who made
                                # it and who accepted it.
                                if accepted_provisional is not None:
                                    provisional_editor = User.objects.get(
                                        pk=accepted_provisional_edit["user"])
                                    prov_edit_log_details = {
                                        "user": request.user,
                                        "action": "accept edit",
                                        "edit": accepted_provisional_edit,
                                        "provisional_editor": provisional_editor,
                                    }
                                    tile.save(request=request,
                                              provisional_edit_log_details=
                                              prov_edit_log_details)
                                    # Reflect the current user's pending edit
                                    # back in the response payload.
                                    if tile.provisionaledits is not None and str(
                                            request.user.id
                                    ) in tile.provisionaledits:
                                        tile.data = tile.provisionaledits[str(
                                            request.user.id)]["value"]
                        except Exception as e:
                            return self.handle_save_error(e, tile_id)
                        tile.after_update_all()
                        update_system_settings_cache(tile)
                except Exception as e:
                    return self.handle_save_error(e, tile_id)
                return JSONResponse(tile)
            elif is_active is False:
                response = {
                    "status": "false",
                    "message": [
                        _("Request Failed"),
                        _("Unable to Save. Verify model status is active")
                    ]
                }
                return JSONResponse(response, status=500)
            else:
                return JSONErrorResponse(_("Request Failed"),
                                         _("Permission Denied"))

    if self.action == "reorder_tiles":
        json = request.body
        if json is not None:
            data = JSONDeserializer().deserialize(json)
            if "tiles" in data and len(data["tiles"]) > 0:
                sortorder = 0
                with transaction.atomic():
                    for tile in data["tiles"]:
                        t = Tile(tile)
                        # Only reorder tiles the user may write to.
                        if t.filter_by_perm(request.user, "write_nodegroup"):
                            t.sortorder = sortorder
                            t.save(update_fields=["sortorder"],
                                   request=request)
                            sortorder = sortorder + 1
                return JSONResponse(data)

    if self.action == "delete_provisional_tile":
        user = request.POST.get("user", None)
        tileid = request.POST.get("tileid", None)
        users = request.POST.get("users", None)
        tile = Tile.objects.get(tileid=tileid)
        # Capture provisional state BEFORE deleting edits, to decide the
        # response below.
        is_provisional = tile.is_provisional()
        if tileid is not None and user is not None:
            provisionaledits = self.delete_provisional_edit(
                tile, user, request)
        elif tileid is not None and users is not None:
            users = jsonparser.loads(users)
            for user in users:
                self.delete_provisional_edit(tile, user, request)
        if is_provisional == True:
            return JSONResponse({"result": "delete"})
        else:
            return JSONResponse({"result": "success"})

    return HttpResponseNotFound()
def create_indexes(resource_types=('PERSON.E1',)):
    """Create the search indexes needed for resources.

    Always prepares the resource-relations index, then a search index per
    entity type.

    Args:
        resource_types: iterable of entity type ids to build search indexes
            for. Defaults to the single 'PERSON.E1' type that was previously
            hard-coded, so existing zero-argument callers are unchanged.
    """
    Resource().prepare_resource_relations_index(create=True)
    for resource_type in resource_types:
        Resource().prepare_search_index(resource_type, create=True)
def get(
    self,
    request,
    graphid=None,
    resourceid=None,
    view_template="views/resource/editor.htm",
    main_script="views/resource/editor",
    nav_menu=True,
):
    """Render the resource editor page for an existing or new instance.

    If ``self.action`` is "copy", delegates to ``self.copy``. Otherwise
    builds the editor template context (graph, widgets, map config,
    searchable nodes, cards, etc.) for ``resourceid`` and renders
    ``view_template``. Returns 404 when ``resourceid`` is None.
    """
    if self.action == "copy":
        return self.copy(request, resourceid)

    # Resolve the instance; when it doesn't exist yet, stub one out so the
    # editor can create it (graphid then comes from the URL, not the DB).
    resource_instance_exists = False
    try:
        resource_instance = Resource.objects.get(pk=resourceid)
        resource_instance_exists = True
        graphid = resource_instance.graph_id
    except ObjectDoesNotExist:
        resource_instance = Resource()
        resource_instance.resourceinstanceid = resourceid
        resource_instance.graph_id = graphid

    if resourceid is not None:
        # All active resource models except the system-settings model.
        resource_graphs = (models.GraphModel.objects.exclude(
            pk=settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID).exclude(
                isresource=False).exclude(isactive=False))
        graph = Graph.objects.get(graphid=graphid)
        relationship_type_values = get_resource_relationship_types()
        datatypes = models.DDataType.objects.all()
        widgets = models.Widget.objects.all()
        map_layers = models.MapLayer.objects.all()
        map_markers = models.MapMarker.objects.all()
        map_sources = models.MapSource.objects.all()
        geocoding_providers = models.Geocoder.objects.all()
        required_widgets = []

        # Only ship widgets whose datatype actually occurs in this graph.
        widget_datatypes = [v.datatype for k, v in graph.nodes.items()]
        widgets = widgets.filter(datatype__in=widget_datatypes)

        if resource_instance_exists == True:
            displayname = Resource.objects.get(pk=resourceid).displayname
            # "undefined" leaks in from the client for unnamed resources.
            if displayname == "undefined":
                displayname = "Unnamed Resource"
        else:
            displayname = "Unnamed Resource"

        date_nodes = models.Node.objects.filter(datatype="date",
                                                graph__isresource=True,
                                                graph__isactive=True)
        searchable_datatypes = [
            d.pk for d in models.DDataType.objects.filter(issearchable=True)
        ]
        searchable_nodes = models.Node.objects.filter(
            graph__isresource=True,
            graph__isactive=True,
            datatype__in=searchable_datatypes,
            issearchable=True)
        resource_cards = models.CardModel.objects.filter(
            graph__isresource=True, graph__isactive=True)

        context = self.get_context_data(
            main_script=main_script,
            resource_type=graph.name,
            relationship_types=relationship_type_values,
            iconclass=graph.iconclass,
            datatypes_json=JSONSerializer().serialize(
                datatypes, exclude=["iconclass", "modulename", "classname"]),
            datatypes=datatypes,
            widgets=widgets,
            date_nodes=date_nodes,
            map_layers=map_layers,
            map_markers=map_markers,
            map_sources=map_sources,
            geocoding_providers=geocoding_providers,
            widgets_json=JSONSerializer().serialize(widgets),
            resourceid=resourceid,
            resource_graphs=resource_graphs,
            graph_json=JSONSerializer().serialize(
                graph,
                exclude=[
                    "iconclass",
                    "functions",
                    "functions_x_graphs",
                    "name",
                    "description",
                    "deploymentfile",
                    "author",
                    "deploymentdate",
                    "version",
                    "isresource",
                    "isactive",
                    "iconclass",
                    "ontology",
                ],
            ),
            displayname=displayname,
            resource_cards=JSONSerializer().serialize(resource_cards,
                                                      exclude=[
                                                          "description",
                                                          "instructions",
                                                          "active", "isvisible"
                                                      ]),
            searchable_nodes=JSONSerializer().serialize(
                searchable_nodes,
                exclude=[
                    "description", "ontologyclass", "isrequired",
                    "issearchable", "istopnode"
                ]),
            saved_searches=JSONSerializer().serialize(
                settings.SAVED_SEARCHES),
            resource_instance_exists=resource_instance_exists,
            user_is_reviewer=json.dumps(
                request.user.groups.filter(
                    name="Resource Reviewer").exists()),
            userid=request.user.id,
        )

        # Navigation chrome + context-sensitive help panel.
        if graph.iconclass:
            context["nav"]["icon"] = graph.iconclass
        context["nav"]["title"] = graph.name
        context["nav"]["menu"] = nav_menu
        if resourceid == settings.RESOURCE_INSTANCE_ID:
            context["nav"]["help"] = (_("Managing System Settings"),
                                      "help/base-help.htm")
            context["help"] = "system-settings-help"
        else:
            context["nav"]["help"] = (_("Using the Resource Editor"),
                                      "help/base-help.htm")
            context["help"] = "resource-editor-help"

        return render(request, view_template, context)

    return HttpResponseNotFound()
def resource_manager(request, resourcetypeid='', form_id='default', resourceid=''):
    """View for creating, editing and deleting a legacy (v3) resource.

    GET renders the requested form for the resource; POST saves form data
    and re-indexes; DELETE removes the resource, its relations and its
    index entries.
    """
    # NOTE(review): if both resourceid and resourcetypeid are empty,
    # `resource` is never bound and the code below raises NameError (500).
    if resourceid != '':
        resource = Resource(resourceid)
    elif resourcetypeid != '':
        resource = Resource({'entitytypeid': resourcetypeid})

    if form_id == 'default':
        form_id = resource.form_groups[0]['forms'][0]['id']

    form = resource.get_form(form_id)

    if request.method == 'DELETE':
        # Remove search index, all relations (both directions), then the
        # resource itself.
        resource.delete_index()
        se = SearchEngineFactory().create()
        realtionships = resource.get_related_resources(return_entities=False)
        for realtionship in realtionships:
            se.delete(index='resource_relations', doc_type='all',
                      id=realtionship.resourcexid)
            realtionship.delete()
        resource.delete()
        return JSONResponse({'success': True})

    if request.method == 'POST':
        data = JSONDeserializer().deserialize(request.POST.get('formdata', {}))
        form.set_user(request.user)
        form.update(data, request.FILES)

        with transaction.atomic():
            # Drop the stale index entry before saving an existing resource.
            if resourceid != '':
                resource.delete_index()
            resource.save(user=request.user)
            resource.index()
            resourceid = resource.entityid
            if request.is_ajax():
                return JSONResponse({
                    "url": reverse('resource_manager',
                                   kwargs={
                                       'resourcetypeid': resourcetypeid,
                                       'form_id': form_id,
                                       'resourceid': resourceid
                                   })
                })
            return redirect('resource_manager',
                            resourcetypeid=resourcetypeid,
                            form_id=form_id,
                            resourceid=resourceid)

    min_max_dates = models.Dates.objects.aggregate(Min('val'), Max('val'))

    if request.method == 'GET':
        if form != None:
            ## geom will be a geojson FeatureCollection or 'null'
            geom = JSONSerializer().serialize(resource.get_geom())
            lang = request.GET.get('lang', request.LANGUAGE_CODE)
            form.load(lang)
            return render_to_response('resource-manager.htm', {
                'form': form,
                'formdata': JSONSerializer().serialize(form.data),
                'form_template': 'views/forms/' + form_id + '.htm',
                'form_id': form_id,
                'resourcetypeid': resourcetypeid,
                'resourceid': resourceid,
                'main_script': 'resource-manager',
                'active_page': 'ResourceManger',
                'resource': resource,
                'resource_name': resource.get_primary_name(),
                'resource_type_name': resource.get_type_name(),
                'form_groups': resource.form_groups,
                'min_date': min_max_dates['val__min'].year
                    if min_max_dates['val__min'] != None else 0,
                # BUGFIX: max_date previously guarded on val__min (copy-paste),
                # raising AttributeError when only val__max was None.
                'max_date': min_max_dates['val__max'].year
                    if min_max_dates['val__max'] != None else 1,
                'timefilterdata': JSONSerializer().serialize(
                    Concept.get_time_filter_data()),
                'resource_icon': settings.RESOURCE_TYPE_CONFIGS()[
                    resourcetypeid]['icon_class'],
                'resource_geom': geom,
                'child_resource': 'HERITAGE_FEATURE.E24'
                    if resourcetypeid == 'HERITAGE_RESOURCE_GROUP.E27'
                    else 'HERITAGE_COMPONENT.B2'
            }, context_instance=RequestContext(request))
        else:
            return HttpResponseNotFound('<h1>Arches form not found.</h1>')
def read_resource(self, data, use_ids=False, resourceid=None, graphid=None):
    """Parse one or more JSON-LD documents into Resource instances.

    Args:
        data: a JSON-LD document or a list of them.
        use_ids: when True, resource ids are taken from each document's
            @id (forced True when more than one document is supplied).
        resourceid: explicit pk for the (single) resource being read.
        graphid: graph to align against; required unless a graph tree was
            already processed on this reader.

    Raises:
        Exception: when no graphid is available or a required @id is
            missing/malformed.
        ValueError: when a document's top-level class does not match the
            model's.
    """
    if graphid is None and self.graphtree is None:
        raise Exception("No graphid supplied to read_resource")
    elif self.graphtree is None:
        self.graphtree = self.process_graph(graphid)

    # Ensure we've reset from any previous call
    self.errors = {}
    self.idcache = {}
    self.resources = []
    self.resource = None
    self.use_ids = use_ids
    if not isinstance(data, list):
        data = [data]

    # Force use_ids if there is more than one record being passed in
    if len(data) > 1:
        self.use_ids = True

    # Maybe calculate sort order for this node's tiles.
    # BUGFIX: was a bare `except:`; only the missing-setting case should
    # fall back to False.
    try:
        self.shouldSortTiles = settings.JSON_LD_SORT
    except AttributeError:
        self.shouldSortTiles = False

    for jsonld_document in data:
        jsonld_document = expand(jsonld_document)[0]

        # Possibly bail very early
        if jsonld_document["@type"][0] != self.graphtree["class"]:
            raise ValueError(
                "Instance does not have same top level class as model")

        if self.use_ids:
            resourceinstanceid = self.get_resource_id(
                jsonld_document["@id"])
            if resourceinstanceid is None:
                # Single source of truth for the error text (was duplicated).
                msg = "The @id of the resource was not supplied, was null or URI was not correctly formatted"
                self.logger.error(msg)
                raise Exception(msg)
            self.logger.debug(
                "Using resource instance ID found: {0}".format(
                    resourceinstanceid))
        else:
            self.logger.debug(
                "`use_ids` setting is set to False, ignoring @id from the data if any"
            )

        self.resource = Resource()
        if resourceid is not None:
            self.resource.pk = resourceid
        self.resource.graph_id = graphid
        self.resources.append(self.resource)

        ### --- Process Instance ---
        # now walk the instance and align to the tree
        if "@id" in jsonld_document:
            result = {"data": [jsonld_document["@id"]]}
        else:
            result = {"data": [None]}
        self.root_json_document = jsonld_document
        self.data_walk(jsonld_document, self.graphtree, result)
def setUpClass(cls):
    """Build the time-filter search test fixture.

    Clears the strings/resource indexes, imports the 'Search Test Model'
    graph, posts a concept with min/max year notes, and saves three
    resources: one with only a cultural period, one with a creation date
    plus a name, and one with only a name.
    """
    se = SearchEngineFactory().create()
    se.delete_index(index='strings')
    se.delete_index(index='resource')
    cls.client = Client()
    cls.client.login(username='******', password='******')
    models.ResourceInstance.objects.all().delete()
    # BUGFIX: mode 'rU' was removed in Python 3.11; universal newlines is
    # the default for text mode, so plain 'r' is equivalent.
    with open(os.path.join('tests/fixtures/resource_graphs/Search Test Model.json'), 'r') as f:
        archesfile = JSONDeserializer().deserialize(f)
    ResourceGraphImporter(archesfile['graph'])
    cls.search_model_graphid = 'e503a445-fa5f-11e6-afa8-14109fd34195'
    cls.search_model_cultural_period_nodeid = '7a182580-fa60-11e6-96d1-14109fd34195'
    cls.search_model_creation_date_nodeid = '1c1d05f5-fa60-11e6-887f-14109fd34195'
    cls.search_model_name_nodeid = '2fe14de3-fa61-11e6-897b-14109fd34195'

    # Add a concept that defines a min and max date
    concept = {
        "id": "00000000-0000-0000-0000-000000000001",
        "legacyoid": "ARCHES",
        "nodetype": "ConceptScheme",
        "values": [],
        "subconcepts": [
            {
                "values": [
                    {
                        "value": "ANP TEST",
                        "language": "en-US",
                        "category": "label",
                        "type": "prefLabel",
                        "id": "",
                        "conceptid": ""
                    },
                    {
                        "value": "1950",
                        "language": "en-US",
                        "category": "note",
                        "type": "min_year",
                        "id": "",
                        "conceptid": ""
                    },
                    {
                        "value": "1980",
                        "language": "en-US",
                        "category": "note",
                        "type": "max_year",
                        "id": "",
                        "conceptid": ""
                    }
                ],
                "relationshiptype": "hasTopConcept",
                "nodetype": "Concept",
                "id": "",
                "legacyoid": "",
                "subconcepts": [],
                "parentconcepts": [],
                "relatedconcepts": []
            }
        ]
    }

    post_data = JSONSerializer().serialize(concept)
    content_type = 'application/x-www-form-urlencoded'
    response = cls.client.post(
        reverse('concept',
                kwargs={'conceptid': '00000000-0000-0000-0000-000000000001'}),
        post_data, content_type)
    response_json = json.loads(response.content)
    valueid = response_json['subconcepts'][0]['values'][0]['id']

    # add resource instance with only a cultural period defined
    cls.cultural_period_resource = Resource(graph_id=cls.search_model_graphid)
    tile = Tile(data={cls.search_model_cultural_period_nodeid: [valueid]},
                nodegroup_id=cls.search_model_cultural_period_nodeid)
    cls.cultural_period_resource.tiles.append(tile)
    cls.cultural_period_resource.save()

    # add resource instance with only a creation date defined
    cls.date_resource = Resource(graph_id=cls.search_model_graphid)
    tile = Tile(data={cls.search_model_creation_date_nodeid: '1941-01-01'},
                nodegroup_id=cls.search_model_creation_date_nodeid)
    cls.date_resource.tiles.append(tile)
    tile = Tile(data={cls.search_model_name_nodeid: 'testing 123'},
                nodegroup_id=cls.search_model_name_nodeid)
    cls.date_resource.tiles.append(tile)
    cls.date_resource.save()

    # add resource instance with with no dates or periods defined
    cls.name_resource = Resource(graph_id=cls.search_model_graphid)
    tile = Tile(data={cls.search_model_name_nodeid: 'some test name'},
                nodegroup_id=cls.search_model_name_nodeid)
    cls.name_resource.tiles.append(tile)
    cls.name_resource.save()

    # add delay to allow for indexes to be updated
    time.sleep(1)
def make_instance_public(self, resourceinstanceid, graphid=None):
    """Remove all instance-level permissions from a resource instance.

    Args:
        resourceinstanceid: pk of the instance to open up.
        graphid: optional graph id; looked up from the stored instance
            when not supplied.

    Returns:
        The result of ``self.get_instance_permissions`` for the instance.
    """
    res = Resource(resourceinstanceid)
    if graphid:
        res.graph_id = graphid
    else:
        # Graph not supplied — read it off the persisted instance.
        stored = models.ResourceInstance.objects.get(pk=resourceinstanceid)
        res.graph_id = str(stored.graph_id)
    res.remove_resource_instance_permissions()
    return self.get_instance_permissions(res)
def get(self, request, graphid=None, resourceid=None, view_template='views/resource/editor.htm', main_script='views/resource/editor', nav_menu=True):
    """
    Render the resource editor.

    Three entry modes, checked in order:
    - ``self.action == 'copy'``: delegate to ``self.copy``.
    - ``graphid`` given: create and index a brand-new resource instance
      of that graph, then redirect to its editor page.
    - ``resourceid`` given: render the full editor context for that
      existing resource.
    Falls through to 404 if neither id is supplied.
    """
    if self.action == 'copy':
        return self.copy(request, resourceid)
    if graphid is not None:
        # create a fresh, empty resource of the requested model and
        # send the browser to its editor URL
        resource_instance = Resource()
        resource_instance.graph_id = graphid
        resource_instance.save(**{'request': request})
        resource_instance.index()
        return redirect('resource_editor', resourceid=resource_instance.pk)
    if resourceid is not None:
        resource_instance = models.ResourceInstance.objects.get(pk=resourceid)
        # all active resource models except the system-settings model,
        # offered for "switch model" style UI affordances
        resource_graphs = Graph.objects.exclude(pk=settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID).exclude(isresource=False).exclude(isactive=False)
        graph = Graph.objects.get(graphid=resource_instance.graph.pk)
        relationship_type_values = get_resource_relationship_types()
        form = Form(resource_instance.pk)
        datatypes = models.DDataType.objects.all()
        widgets = models.Widget.objects.all()
        map_layers = models.MapLayer.objects.all()
        map_sources = models.MapSource.objects.all()
        geocoding_providers = models.Geocoder.objects.all()
        # only forms whose cards the user may read are shown
        forms = resource_instance.graph.form_set.filter(visible=True)
        forms_x_cards = models.FormXCard.objects.filter(form__in=forms)
        forms_w_cards = []
        for form_x_card in forms_x_cards:
            cm = models.CardModel.objects.get(pk=form_x_card.card_id)
            if request.user.has_perm('read_nodegroup', cm.nodegroup):
                forms_w_cards.append(form_x_card.form)
        # 'undefined' is what the JS layer stores when no name node is set
        displayname = Resource.objects.get(pk=resourceid).displayname
        if displayname == 'undefined':
            displayname = 'Unnamed Resource'
        date_nodes = models.Node.objects.filter(datatype='date', graph__isresource=True, graph__isactive=True)
        searchable_datatypes = [d.pk for d in models.DDataType.objects.filter(issearchable=True)]
        searchable_nodes = models.Node.objects.filter(graph__isresource=True, graph__isactive=True, datatype__in=searchable_datatypes, issearchable=True)
        resource_cards = models.CardModel.objects.filter(graph__isresource=True, graph__isactive=True)
        context = self.get_context_data(
            main_script=main_script,
            resource_type=resource_instance.graph.name,
            relationship_types=relationship_type_values,
            iconclass=resource_instance.graph.iconclass,
            form=JSONSerializer().serialize(form),
            forms=JSONSerializer().serialize(forms_w_cards),
            datatypes_json=JSONSerializer().serialize(datatypes),
            widgets=widgets,
            date_nodes=date_nodes,
            map_layers=map_layers,
            map_sources=map_sources,
            geocoding_providers=geocoding_providers,
            widgets_json=JSONSerializer().serialize(widgets),
            resourceid=resourceid,
            resource_graphs=resource_graphs,
            graph_json=JSONSerializer().serialize(graph),
            displayname=displayname,
            resource_cards=JSONSerializer().serialize(resource_cards),
            searchable_nodes=JSONSerializer().serialize(searchable_nodes),
            saved_searches=JSONSerializer().serialize(settings.SAVED_SEARCHES),
        )
        # navigation chrome: icon/title from the graph, contextual help
        # depending on whether this is the system-settings instance
        if graph.iconclass:
            context['nav']['icon'] = graph.iconclass
        context['nav']['title'] = graph.name
        context['nav']['menu'] = nav_menu
        if resourceid == settings.RESOURCE_INSTANCE_ID:
            context['nav']['help'] = (_('Managing System Settings'), 'help/system-settings-help.htm')
        else:
            context['nav']['help'] = (_('Using the Resource Editor'), 'help/resource-editor-help.htm')
        return render(request, view_template, context)
    return HttpResponseNotFound()
def post(self, request, resourceid=None):
    """
    Create and/or update resource-to-resource relationships, then
    return a paginated listing of the root resource's relations.

    Reads bracketed form keys (``relationship_properties[...]``,
    ``instances_to_relate[]``, ``relationship_ids[]``) from POST.
    NOTE(review): Python 2 code (print statement); a Python 3 variant
    of this handler exists elsewhere in this file.
    """
    lang = request.GET.get('lang', settings.LANGUAGE_CODE)
    se = SearchEngineFactory().create()
    res = dict(request.POST)
    # each form value arrives as a single-element list
    relationship_type = res['relationship_properties[relationship_type]'][0]
    datefrom = res['relationship_properties[datefrom]'][0]
    dateto = res['relationship_properties[dateto]'][0]
    # empty strings mean "no date" and must be stored as NULL
    dateto = None if dateto == '' else dateto
    datefrom = None if datefrom == '' else datefrom
    notes = res['relationship_properties[notes]'][0]
    root_resourceinstanceid = res['root_resourceinstanceid']
    instances_to_relate = []
    relationships_to_update = []
    if 'instances_to_relate[]' in res:
        instances_to_relate = res['instances_to_relate[]']
    if 'relationship_ids[]' in res:
        relationships_to_update = res['relationship_ids[]']

    def get_relatable_resources(graphid):
        """
        Takes the graphid of a resource, finds the graphs root node, and returns the relatable graphids
        """
        nodes = models.Node.objects.filter(graph_id=graphid)
        top_node = [node for node in nodes if node.istopnode == True][0]
        relatable_resources = [str(node.graph_id) for node in top_node.get_relatable_resources()]
        return relatable_resources

    def confirm_relationship_permitted(to_id, from_id):
        # a relationship is only permitted when each side's graph lists
        # the other side's graph as relatable
        resource_instance_to = models.ResourceInstance.objects.filter(resourceinstanceid=to_id)[0]
        resource_instance_from = models.ResourceInstance.objects.filter(resourceinstanceid=from_id)[0]
        relatable_to = get_relatable_resources(resource_instance_to.graph_id)
        relatable_from = get_relatable_resources(resource_instance_from.graph_id)
        relatable_to_is_valid = str(resource_instance_to.graph_id) in relatable_from
        relatable_from_is_valid = str(resource_instance_from.graph_id) in relatable_to
        return (relatable_to_is_valid == True and relatable_from_is_valid == True)

    # create new relationships from the root resource to each target
    for instanceid in instances_to_relate:
        permitted = confirm_relationship_permitted(instanceid, root_resourceinstanceid[0])
        if permitted == True:
            rr = models.ResourceXResource(
                resourceinstanceidfrom=Resource(root_resourceinstanceid[0]),
                resourceinstanceidto=Resource(instanceid),
                notes=notes,
                relationshiptype=relationship_type,
                datestarted=datefrom,
                dateended=dateto)
            rr.save()
        else:
            # NOTE(review): silently dropped on the console only; the
            # client is not told the relationship was rejected
            print 'relationship not permitted'

    # update properties on already-existing relationships
    for relationshipid in relationships_to_update:
        rr = models.ResourceXResource.objects.get(pk=relationshipid)
        rr.notes = notes
        rr.relationshiptype = relationship_type
        rr.datestarted = datefrom
        rr.dateended = dateto
        rr.save()

    start = request.GET.get('start', 0)
    # force the relations index to refresh so the response reflects
    # the writes made above
    se.es.indices.refresh(index=se._add_prefix("resource_relations"))
    resource = Resource.objects.get(pk=root_resourceinstanceid[0])
    page = 1 if request.GET.get('page') == '' else int(request.GET.get('page', 1))
    related_resources = resource.get_related_resources(lang=lang, start=start, limit=1000, page=page)
    ret = []
    if related_resources is not None:
        ret = self.paginate_related_resources(related_resources, page, request)
    return JSONResponse(ret, indent=4)
def migrate(settings=None):
    """
    Migrate legacy resource graphs according to ``*_altered_nodes.csv``
    mapping files found in ``settings.ADDITIONAL_RESOURCE_GRAPH_LOCATIONS``.

    For every (old group root, new group root) pair the CSV defines, each
    matching entity subgraph is rebuilt under the new schema and merged
    back into its resource, which is then trimmed and saved. Errors are
    collected and reported at the end.
    NOTE(review): Python 2 script; destructive — it rewrites and saves
    resources in place.
    """
    if not settings:
        # allow callers (e.g. tests) to inject a settings module
        from django.conf import settings
    suffix = '_altered_nodes.csv'
    errors = []
    for path in settings.ADDITIONAL_RESOURCE_GRAPH_LOCATIONS:
        if os.path.exists(path):
            print '\nLOADING NODE MIGRATION INFO (%s)' % (path)
            print '--------------'
            for f in listdir(path):
                if isfile(join(path, f)) and f.endswith(suffix):
                    path_to_file = join(path, f)
                    # strip the 18-char '_altered_nodes.csv' suffix
                    basepath = path_to_file[:-18]
                    name = basepath.split(os.sep)[-1]  # NOTE(review): unused
                    migrations = get_list_dict(basepath + '_altered_nodes.csv', ['OLDENTITYTYPEID', 'NEWENTITYTYPEID', 'GROUPROOTNODEOLD', 'GROUPROOTNODENEW'])
                    # Identify nodes which must be migrated
                    resource_entity_type = 'HERITAGE_RESOURCE_GROUP.E27'
                    mapping_schema = Entity.get_mapping_schema(resource_entity_type)
                    # group migrations by groupRootNodeNew
                    # NOTE(review): itertools.groupby only groups adjacent
                    # rows — assumes the CSV is pre-sorted by these keys
                    groups = groupby(migrations, lambda x: (x['GROUPROOTNODEOLD'], x['GROUPROOTNODENEW']))
                    for group_root_node_ids, group_migrations in groups:
                        # Convert group_migrations to a list as we need to iterate it multiple times
                        group_migrations_list = []
                        for group_migration in group_migrations:
                            group_migrations_list.append(group_migration)
                        group_root_node_id = group_root_node_ids[0]
                        new_group_root_node_id = group_root_node_ids[1]
                        # Find all entities with the old group root node
                        group_root_entities = models.Entities.objects.filter(entitytypeid=group_root_node_id)
                        print "ENTITIES COUNT: ", group_root_entities.count()
                        for group_root_entity_model in group_root_entities.iterator():
                            # Create a new subgraph for each of the migration steps, then merge them together at the group root node
                            # get full resource graph for the root entity
                            try:
                                group_root_entity = Entity(group_root_entity_model.pk)
                            except:
                                # NOTE(review): bare except — swallows all
                                # errors loading the entity, including bugs
                                print "Faulty group entity's ID %s and entitytype %s" % (group_root_entity_model.pk, group_root_entity_model.entitytypeid)
                                continue
                            new_group_root_entity = Entity().create_from_mapping(resource_entity_type, mapping_schema[new_group_root_node_id]['steps'], new_group_root_node_id, '')
                            if group_migrations_list[0]['NEWENTITYTYPEID'] != new_group_root_node_id:
                                # create a node for the new group root
                                group_root_is_new_data_node = False
                            else:
                                group_root_is_new_data_node = True
                            # get the root resource graph for this entity
                            resource_model = get_resource_for_entity(group_root_entity, resource_entity_type)
                            if not resource_model:
                                continue
                            resource = Resource().get(resource_model.entityid)
                            for group_migration in group_migrations_list:
                                # get individual entities to be migrated in the source group
                                old_entities = group_root_entity.find_entities_by_type_id(group_migration['OLDENTITYTYPEID'])
                                for old_entity in old_entities:
                                    date_on = False
                                    # Create the corresponding entity in the new schema
                                    new_entity = Entity()
                                    # Disturbance dates need to be mapped to different nodes depending on the value of the now obsolete DISTURBANCE_DATE_TYPE.E55
                                    if group_migration['OLDENTITYTYPEID'] in ['DISTURBANCE_DATE_END.E49', 'DISTURBANCE_DATE_START.E49']:
                                        date_type_node = group_root_entity.find_entities_by_type_id('DISTURBANCE_DATE_TYPE.E55')
                                        if date_type_node:
                                            if date_type_node[0].label == 'Occurred before':
                                                new_entity_type_id = 'DISTURBANCE_DATE_OCCURRED_BEFORE.E61'
                                            elif date_type_node[0].label == 'Occurred on':
                                                if group_migration['OLDENTITYTYPEID'] == 'DISTURBANCE_DATE_START.E49':
                                                    # start date of an "occurred on" pair is dropped
                                                    date_on = True
                                                else:
                                                    new_entity_type_id = 'DISTURBANCE_DATE_OCCURRED_ON.E61'
                                            else:
                                                new_entity_type_id = group_migration['NEWENTITYTYPEID']
                                        else:
                                            new_entity_type_id = group_migration['NEWENTITYTYPEID']
                                    # NOTE(review): if OLDENTITYTYPEID is not a
                                    # disturbance date, new_entity_type_id keeps
                                    # its value from a previous loop iteration —
                                    # presumably set by the non-date branch; verify
                                    old_value = old_entity.value
                                    if old_entity.businesstablename == 'domains':
                                        # in some cases we move from domains to strings.
                                        newEntityType = models.EntityTypes.objects.get(entitytypeid=new_entity_type_id)
                                        if newEntityType.businesstablename == 'strings':
                                            old_value = old_entity.label
                                    if not date_on:
                                        new_entity.create_from_mapping(resource_entity_type, mapping_schema[new_entity_type_id]['steps'], new_entity_type_id, old_value)
                                        # In some cases a newly created data node is the new group root. In this case we should discard the previously created new group root and use this one instead.
                                        if new_group_root_node_id == new_entity_type_id:
                                            new_group_root_entity = new_entity
                                            group_root_is_new_data_node = True
                                        # UNUSED
                                        # # If there is a node to be inserted, do it here
                                        # # if 'INSERT_NODE_RULE' in group_migration:
                                        # #     entityttypeid_to_insert = group_migration['INSERT_NODE_RULE'][1][1]
                                        # #     value_to_insert = group_migration['INSERT_NODE_RULE'][1][2]
                                        # #
                                        # #     inserted_entity = Entity()
                                        # #     inserted_entity.create_from_mapping(resource_entity_type, mapping_schema[entityttypeid_to_insert]['steps'], entityttypeid_to_insert, value_to_insert)
                                        # #
                                        # #     new_entity.merge(inserted_entity)

                                        # If there is a node in common with the existing node further down the chain than the group root node, merge there
                                        # follow links back from the parent
                                        shouldnt_merge_with_group_root = group_root_is_new_data_node and new_group_root_node_id == new_entity_type_id
                                        if not shouldnt_merge_with_group_root:
                                            has_merged = False
                                            # walk the mapping steps from leaf to root
                                            # to find the deepest mergeable node
                                            reversed_steps = mapping_schema[new_entity_type_id]['steps'][::-1]
                                            for step in reversed_steps:
                                                # find the entitytypedomain in the new_group_root_entity
                                                if not has_merged:
                                                    mergeable_nodes = new_group_root_entity.find_entities_by_type_id(step['entitytypedomain'])
                                                    if len(mergeable_nodes) > 0:
                                                        new_group_root_entity.merge_at(new_entity, step['entitytypedomain'])
                                                        has_merged = True
                                                        new_entity = None
                                                        # gc.collect()
                                            if not has_merged:
                                                logging.warning("Unable to merge newly created entity")
                            # merge the new group root entity into the resource
                            resource.merge_at(new_group_root_entity, resource_entity_type)
                            logging.warning("SAVING RESOURCE, %s", resource)
                            # save the resource
                            resource.trim()
                            try:
                                resource._save()
                                resource = None
                            except Exception as e:
                                logging.warning("Error saving resource")
                                logging.warning(e)
                                errors.append("Error saving %s. Error was %s" % (resource, e))
                            # release entity graphs to keep memory bounded
                            group_root_entity.clear()
                            group_root_entity = None
                            new_group_root_entity.clear()
                            new_group_root_entity = None
                            # end for group root
                            # resource.index()
                            # logging.warning("SAVED RESOURCE, %s", resource)
    # truncate the error log, then report any accumulated errors
    utils.write_to_file(os.path.join(settings.PACKAGE_ROOT, 'logs', 'migration_errors.txt'), '')
    if len(errors) > 0:
        # utils.write_to_file(os.path.join(settings.PACKAGE_ROOT, 'logs', 'migration_errors.txt'), '\n'.join(errors))
        print "\n\nERROR: There were errors migrating some resources. See below"
        print errors
def post(self, request):
    """
    Tile endpoint dispatcher, switching on ``self.action``:

    - ``update_tile``: create/update a tile (creating its resource
      instance on the fly if needed), optionally accepting a pending
      provisional edit; returns the saved tile or a JSON error.
    - ``reorder_tiles``: persist a new sortorder for a list of tiles.
    - ``delete_provisional_tile``: remove provisional edits for one
      user or a list of users.
    Falls through to 404 for unknown actions.
    """
    if self.action == 'update_tile':
        # NOTE: shadows the stdlib ``json`` module name with the raw payload
        json = request.POST.get('data', None)
        accepted_provisional = request.POST.get('accepted_provisional', None)
        if accepted_provisional != None:
            accepted_provisional_edit = JSONDeserializer().deserialize(accepted_provisional)
        if json != None:
            data = JSONDeserializer().deserialize(json)
            if data['resourceinstance_id'] == '':
                data['resourceinstance_id'] = uuid.uuid4()
            try:
                models.ResourceInstance.objects.get(pk=data['resourceinstance_id'])
            except ObjectDoesNotExist:
                # the tile references a resource that doesn't exist yet:
                # create it, inferring the graph from the tile's nodegroup
                resource = Resource()
                graphid = models.Node.objects.filter(nodegroup=data['nodegroup_id'])[0].graph_id
                resource.graph_id = graphid
                resource.save(user=request.user)
                data['resourceinstance_id'] = resource.pk
                resource.index()
            tile_id = data['tileid']
            if tile_id != None and tile_id != '':
                try:
                    # invalidate caches for the pre-edit state of the tile
                    old_tile = Tile.objects.get(pk=tile_id)
                    clean_resource_cache(old_tile)
                except ObjectDoesNotExist:
                    return JSONResponse(
                        {
                            'status': 'false',
                            'message': [
                                _('This tile is no longer available'),
                                _('It was likely deleted by another user')
                            ]
                        }, status=500)
            tile = Tile(data)
            if tile.filter_by_perm(request.user, 'write_nodegroup'):
                with transaction.atomic():
                    try:
                        if accepted_provisional == None:
                            # normal save path
                            try:
                                tile.save(request=request)
                            except TileValidationError as e:
                                # if validation left the resource with no
                                # tiles at all, remove the orphan resource
                                resource_tiles = models.TileModel.objects.filter(resourceinstance=tile.resourceinstance)
                                if resource_tiles.count() == 0:
                                    Resource.objects.get(pk=tile.resourceinstance_id).delete(request.user, 'test')
                                return JSONResponse(
                                    {
                                        'status': 'false',
                                        'message': [
                                            e.message,
                                            _('Unable to Save. Please verify your input is valid')
                                        ]
                                    }, status=500)
                            except Exception as e:
                                message = "Unable to save. A {0} has occurred. Arguments: {1!r}".format(type(e).__name__, e.args)
                                return JSONResponse(
                                    {
                                        'status': 'false',
                                        'message': [
                                            message,
                                            _('Please contact your system administrator')
                                        ]
                                    }, status=500)
                        else:
                            # a reviewer is accepting a provisional edit
                            # NOTE(review): this inner check is redundant —
                            # we are in the else of ``accepted_provisional
                            # == None`` so it is always true here
                            if accepted_provisional is not None:
                                provisional_editor = User.objects.get(pk=accepted_provisional_edit["user"])
                                tile.save(
                                    provisional_edit_log_details={
                                        "user": request.user,
                                        "action": "accept edit",
                                        "edit": accepted_provisional_edit,
                                        "provisional_editor": provisional_editor
                                    })
                            # reflect the user's own pending edit in the response
                            if tile.provisionaledits is not None and str(request.user.id) in tile.provisionaledits:
                                tile.data = tile.provisionaledits[str(request.user.id)]['value']
                    except ValidationError as e:
                        return JSONResponse(
                            {
                                'status': 'false',
                                'message': e.args
                            }, status=500)
                    except Exception as e:
                        # last-resort handler: log full context, return 500
                        exception_title = 'Saving tile failed'
                        exception_message = str(e)
                        if hasattr(e, 'message') and e.message:
                            exception_message += "({0})".format(e.message)
                        logger.error(exception_title + ''' [Tile id: {tile_id}] \
[Exception message: {message}] \
[Exception trace: {trace}]'''.format(
                            tile_id=tile_id,
                            message=exception_message,
                            trace=traceback.format_exc()))
                        return JSONResponse(
                            {
                                'status': 'false',
                                'message': [
                                    _(exception_title),
                                    _(str(exception_message))
                                ]
                            }, status=500)
                tile.after_update_all()
                clean_resource_cache(tile)
                update_system_settings_cache(tile)
                return JSONResponse(tile)
            else:
                return JSONResponse(
                    {
                        'status': 'false',
                        'message': [_('Request Failed'), _('Permission Denied')]
                    }, status=500)

    if self.action == 'reorder_tiles':
        json = request.body
        if json != None:
            data = JSONDeserializer().deserialize(json)
            if 'tiles' in data and len(data['tiles']) > 0:
                sortorder = 0
                with transaction.atomic():
                    for tile in data['tiles']:
                        t = Tile(tile)
                        # only tiles the user may write get renumbered
                        if t.filter_by_perm(request.user, 'write_nodegroup'):
                            t.sortorder = sortorder
                            t.save(update_fields=['sortorder'], request=request)
                            sortorder = sortorder + 1
                return JSONResponse(data)

    if self.action == 'delete_provisional_tile':
        user = request.POST.get('user', None)
        tileid = request.POST.get('tileid', None)
        users = request.POST.get('users', None)
        tile = Tile.objects.get(tileid=tileid)
        is_provisional = tile.is_provisional()
        if tileid is not None and user is not None:
            # single-user deletion
            provisionaledits = self.delete_provisional_edit(tile, user, reviewer=request.user)
        elif tileid is not None and users is not None:
            # bulk deletion for a JSON-encoded list of users
            users = jsonparser.loads(users)
            for user in users:
                self.delete_provisional_edit(tile, user, reviewer=request.user)
        if is_provisional == True:
            return JSONResponse({'result': 'delete'})
        else:
            return JSONResponse({'result': 'success'})
    return HttpResponseNotFound()
def post(self, request, graphid=None): try: body = json.loads(request.body) file_data = body['file_data'] column_name_to_node_data_map = body['column_name_to_node_data_map'] nodegroup_data = {} for node_data in column_name_to_node_data_map.values(): nodegroup_id = node_data.get('nodegroup_id') if nodegroup_id: if not nodegroup_data.get(nodegroup_id): nodegroup_data[nodegroup_id] = [] nodegroup_data[nodegroup_id].append(node_data['node_id']) for file_datum in file_data: for row_data in file_datum['data']: resource_instance = Resource(graph_id=graphid) resource_instance.save() parsed_data = row_data['parsed_data'] tile_data = {} for nodegroup_id in nodegroup_data.keys(): if not tile_data.get(nodegroup_id): tile_data[nodegroup_id] = {} for node_id in nodegroup_data[nodegroup_id]: tile_data[nodegroup_id][node_id] = parsed_data.get( node_id) for nodegroup_id in tile_data.keys(): tile = TileProxyModel( data=tile_data[nodegroup_id], resourceinstance=resource_instance, nodegroup_id=nodegroup_id, # nodegroup_id = 'f7c974a0-29f4-11eb-8487-aae9fe8789ac', # Related Observations ) tile.save() file_datum['created_resources'][row_data['row_id']] = { 'resourceinstance_id': str(resource_instance.pk), 'row_id': row_data['row_id'], 'tile_data': tile_data, } return JSONResponse({ 'file_data': file_data, }, status=200) except Exception as e: if settings.DEBUG is True: exc_type, exc_value, exc_traceback = sys.exc_info() formatted = traceback.format_exception(exc_type, exc_value, exc_traceback) if len(formatted): for message in formatted: print(message) return JSONResponse( {"error": "resource data could not be saved: %s" % e}, status=500, reason=e)
def post(self, request, resourceid=None):
    """
    Create and/or update resource-to-resource relationships, then
    return a paginated listing of the root resource's relations.

    Reads bracketed form keys (``relationship_properties[...]``,
    ``instances_to_relate[]``, ``relationship_ids[]``) from POST.
    Saves that hit an inactive model return a 500 JSON error.
    """
    lang = request.GET.get("lang", settings.LANGUAGE_CODE)
    se = SearchEngineFactory().create()
    res = dict(request.POST)
    # each form value arrives as a single-element list
    relationship_type = res["relationship_properties[relationship_type]"][0]
    datefrom = res["relationship_properties[datefrom]"][0]
    dateto = res["relationship_properties[dateto]"][0]
    # empty strings mean "no date" and must be stored as NULL
    dateto = None if dateto == "" else dateto
    datefrom = None if datefrom == "" else datefrom
    notes = res["relationship_properties[notes]"][0]
    root_resourceinstanceid = res["root_resourceinstanceid"]
    instances_to_relate = []
    relationships_to_update = []
    if "instances_to_relate[]" in res:
        instances_to_relate = res["instances_to_relate[]"]
    if "relationship_ids[]" in res:
        relationships_to_update = res["relationship_ids[]"]

    def get_relatable_resources(graphid):
        """
        Takes the graphid of a resource, finds the graphs root node, and returns the relatable graphids
        """
        nodes = models.Node.objects.filter(graph_id=graphid)
        top_node = [node for node in nodes if node.istopnode == True][0]
        relatable_resources = [str(node.graph_id) for node in top_node.get_relatable_resources()]
        return relatable_resources

    def confirm_relationship_permitted(to_id, from_id):
        # a relationship is only permitted when each side's graph lists
        # the other side's graph as relatable
        resource_instance_to = models.ResourceInstance.objects.filter(resourceinstanceid=to_id)[0]
        resource_instance_from = models.ResourceInstance.objects.filter(resourceinstanceid=from_id)[0]
        relatable_to = get_relatable_resources(resource_instance_to.graph_id)
        relatable_from = get_relatable_resources(resource_instance_from.graph_id)
        relatable_to_is_valid = str(resource_instance_to.graph_id) in relatable_from
        relatable_from_is_valid = str(resource_instance_from.graph_id) in relatable_to
        return relatable_to_is_valid is True and relatable_from_is_valid is True

    # create new relationships from the root resource to each target
    for instanceid in instances_to_relate:
        permitted = confirm_relationship_permitted(instanceid, root_resourceinstanceid[0])
        if permitted is True:
            rr = models.ResourceXResource(
                resourceinstanceidfrom=Resource(root_resourceinstanceid[0]),
                resourceinstanceidto=Resource(instanceid),
                notes=notes,
                relationshiptype=relationship_type,
                datestarted=datefrom,
                dateended=dateto,
            )
            try:
                rr.save()
            except ModelInactiveError as e:
                message = _("Unable to save. Please verify the model status is active")
                return JSONResponse({"status": "false", "message": [_(e.title), _(str(message))]}, status=500)
        else:
            # NOTE(review): console-only; the client is not told the
            # relationship was rejected
            print("relationship not permitted")

    # update properties on already-existing relationships
    for relationshipid in relationships_to_update:
        rr = models.ResourceXResource.objects.get(pk=relationshipid)
        rr.notes = notes
        rr.relationshiptype = relationship_type
        rr.datestarted = datefrom
        rr.dateended = dateto
        try:
            rr.save()
        except ModelInactiveError as e:
            message = _("Unable to save. Please verify the model status is active")
            return JSONResponse({"status": "false", "message": [_(e.title), _(str(message))]}, status=500)

    start = request.GET.get("start", 0)
    # force the relations index to refresh so the response reflects
    # the writes made above
    se.es.indices.refresh(index=se._add_prefix("resource_relations"))
    resource = Resource.objects.get(pk=root_resourceinstanceid[0])
    page = 1 if request.GET.get("page") == "" else int(request.GET.get("page", 1))
    related_resources = resource.get_related_resources(lang=lang, start=start, limit=1000, page=page)
    ret = []
    if related_resources is not None:
        ret = self.paginate_related_resources(related_resources, page, request)
    return JSONResponse(ret, indent=4)
def post(self, request):
    """
    Tile endpoint dispatcher, switching on ``self.action``:

    - ``update_tile``: create/update a tile, creating its resource
      instance on the fly if needed.
    - ``reorder_tiles``: persist a new sortorder for a list of tiles.
    - ``delete_provisional_tile``: remove provisional edits.
    Falls through to 404 for unknown actions.

    NOTE(review): contains what appears to be leftover debug code — a
    hard-coded tile UUID triggering a CouchDB sync — flagged inline.
    """
    if self.action == 'update_tile':
        # NOTE: shadows the stdlib ``json`` module name with the raw payload
        json = request.POST.get('data', None)
        if json != None:
            data = JSONDeserializer().deserialize(json)
            try:
                models.ResourceInstance.objects.get(pk=data['resourceinstance_id'])
            except ObjectDoesNotExist:
                # the tile references a resource that doesn't exist yet:
                # create it with the client-supplied id, inferring the
                # graph from the tile's nodegroup
                resource = Resource()
                resource.resourceinstanceid = data['resourceinstance_id']
                graphid = models.Node.objects.filter(nodegroup=data['nodegroup_id'])[0].graph_id
                resource.graph_id = graphid
                resource.save(user=request.user)
                resource.index()
            tile_id = data['tileid']
            if tile_id != None and tile_id != '':
                # invalidate caches for the pre-edit state of the tile
                old_tile = Tile.objects.get(pk=tile_id)
                clean_resource_cache(old_tile)
            tile = Tile(data)
            if tile.filter_by_perm(request.user, 'write_nodegroup'):
                with transaction.atomic():
                    try:
                        tile.save(request=request)
                        # NOTE(review): debug leftover — hard-coded tile id
                        # forcing a push of this one tile into every mobile
                        # survey CouchDB database; should not ship as-is
                        if tile_id == '4345f530-aa90-48cf-b4b3-92d1185ca439':
                            import couchdb
                            import json as json_json
                            couch = couchdb.Server(settings.COUCHDB_URL)
                            for project in models.MobileSurveyModel.objects.all():
                                db = couch['project_' + str(project.id)]
                                #tile = models.TileModel.objects.get(pk='4345f530-aa90-48cf-b4b3-92d1185ca439')
                                tile_json = json_json.loads(JSONSerializer().serialize(tile))
                                tile_json['_id'] = tile_json['tileid']
                                # carry over the current couch revision so
                                # the save is an update, not a conflict
                                for row in db.view('_all_docs', include_docs=True):
                                    if 'tileid' in row.doc and tile_json['_id'] == row.doc['_id']:
                                        tile_json['_rev'] = row.doc['_rev']
                                db.save(tile_json)
                    except ValidationError as e:
                        return JSONResponse(
                            {
                                'status': 'false',
                                'message': e.args
                            }, status=500)
                tile.after_update_all()
                clean_resource_cache(tile)
                update_system_settings_cache(tile)
                return JSONResponse(tile)
            else:
                return JSONResponse(
                    {
                        'status': 'false',
                        'message': [_('Request Failed'), _('Permission Denied')]
                    }, status=500)

    if self.action == 'reorder_tiles':
        json = request.body
        if json != None:
            data = JSONDeserializer().deserialize(json)
            if 'tiles' in data and len(data['tiles']) > 0:
                sortorder = 0
                with transaction.atomic():
                    for tile in data['tiles']:
                        t = Tile(tile)
                        # only tiles the user may write get renumbered
                        if t.filter_by_perm(request.user, 'write_nodegroup'):
                            t.sortorder = sortorder
                            t.save(update_fields=['sortorder'], request=request)
                            sortorder = sortorder + 1
                return JSONResponse(data)

    if self.action == 'delete_provisional_tile':
        data = request.POST
        if 'tileid' in data:
            # single-tile deletion
            provisionaledits = self.delete_provisional_edit(data, request)
            return JSONResponse(provisionaledits)
        else:
            # bulk deletion via a JSON-encoded 'payload' of edits
            payload = data.get('payload', None)
            if payload is not None:
                edits = jsonparser.loads(payload)
                for edit in edits['edits']:
                    provisionaledits = self.delete_provisional_edit(edit, request)
            return JSONResponse({'result': 'success'})
    return HttpResponseNotFound()