def add_project_json_ld(self):
    """Adds project-specific descriptions and editorial status to the JSON-LD object."""
    project = self.item_gen_cache.get_project_model_object(self.manifest.uuid)
    if not isinstance(project, Project):
        # no project record found; nothing to add
        return None
    lang_obj = Languages()
    # short description and full abstract, as localized value objects
    self.json_ld['description'] = lang_obj.make_json_ld_value_obj(
        project.short_des, project.sm_localized_json)
    self.json_ld[ItemKeys.PREDICATES_DCTERMS_ABSTRACT] = lang_obj.make_json_ld_value_obj(
        project.content, project.lg_localized_json)
    if not isinstance(project.edit_status, int):
        # no numeric editorial status recorded
        return None
    # add editorial status assertions
    parts_json_ld = PartsJsonLD()
    self.json_ld = parts_json_ld.addto_predicate_list(
        self.json_ld,
        'bibo:status',
        'oc-gen:edit-level-' + str(project.edit_status),
        'uri')
    # map the numeric edit level to a bibo review-status URI
    if project.edit_status == 0:
        review_status = 'bibo:status/forthcoming'
    elif 1 <= project.edit_status <= 2:
        review_status = 'bibo:status/nonPeerReviewed'
    else:
        review_status = 'bibo:status/peerReviewed'
    self.json_ld = parts_json_ld.addto_predicate_list(
        self.json_ld, 'bibo:status', review_status, 'uri')
def add_json_ld_direct_assertion(self, json_ld, assertion):
    """Adds JSON-LD for an assertion made directly to the item,
    not part of an observation.

    :param dict json_ld: the item JSON-LD dict being built
    :param assertion: an Assertion model object
    :return dict: json_ld, possibly with the new assertion added
    """
    if assertion.predicate_uuid not in self.NO_OBS_ASSERTION_PREDS:
        # not one of the predicates that describes the item directly
        return json_ld
    act_obj = None
    if assertion.object_type == 'xsd:string':
        # look for the string uuid in the dict of string objects we already
        # got from the database
        if assertion.object_uuid in self.string_obj_dict:
            string_obj = self.string_obj_dict[assertion.object_uuid]
            lang_obj = Languages()
            act_obj = lang_obj.make_json_ld_value_obj(
                string_obj.content, string_obj.localized_json)
        else:
            act_obj = 'string content missing'
    elif assertion.object_type == 'xsd:date':
        act_obj = assertion.data_date.date().isoformat()
    elif assertion.object_type == 'xsd:integer':
        try:
            act_obj = int(float(assertion.data_num))
        except (TypeError, ValueError):
            # data_num missing or not coercible to an integer
            act_obj = None
    elif assertion.object_type in self.NUMERIC_OBJECT_TYPES:
        act_obj = assertion.data_num
    else:
        # the object is something identified by a URI, not a literal.
        # NOTE(fix): the original referenced an undefined name
        # `parts_json_ld` and then `entity` instead of `ent`, which would
        # raise NameError on this branch. Instantiate PartsJsonLD (as the
        # sibling methods do) and use the looked-up `ent`.
        parts_json_ld = PartsJsonLD()
        ent = parts_json_ld.get_new_object_item_entity(
            assertion.object_uuid, assertion.object_type)
        if ent is not False:
            act_obj = ent.uri
    if act_obj is not None:
        json_ld[assertion.predicate_uuid] = act_obj
    return json_ld
def add_translation(self, string_uuid, language, script, trans_text):
    """Adds a translation (or default-language edit) to a stored string."""
    text = str(trans_text).strip()
    try:
        str_obj = OCstring.objects.get(uuid=string_uuid)
    except OCstring.DoesNotExist:
        # no string record to translate
        return False
    if language == Languages.DEFAULT_LANGUAGE:
        # editing the default language goes straight to the main content
        str_obj.content = text
        str_obj.save()
    else:
        # another language: store under a language-script key in the
        # localization JSON
        lan_obj = Languages()
        loc_key = lan_obj.get_language_script_key(language, script)
        str_obj.localized_json = lan_obj.modify_localization_json(
            str_obj.localized_json, loc_key, text)
        str_obj.save()
    return True
def add_document_json_ld(self):
    """Adds document-specific (HTML content) information to the JSON-LD object."""
    try:
        document = OCdocument.objects.get(uuid=self.manifest.uuid)
    except OCdocument.DoesNotExist:
        # no document record for this manifest item; nothing to add
        return None
    # store the document content as a localized value object under rdf:HTML
    lan_obj = Languages()
    self.json_ld[ItemKeys.PREDICATES_RDF_HTML] = lan_obj.make_json_ld_value_obj(
        document.content, document.localized_json)
def get_comments(self):
    """Gets rdfs:comment annotations for the entity (or vocabulary)."""
    if not isinstance(self.uri, str):
        return None
    # all identifier variants for this entity's URI
    subject_variants = LinkEquivalence().get_identifier_list_variants(self.uri)
    # all identifier variants for rdfs:comment
    comment_preds = LinkEquivalence().get_identifier_list_variants('rdfs:comment')
    annos = LinkAnnotation.objects\
        .filter(subject__in=subject_variants,
                predicate_uri__in=comment_preds)[:1]
    if len(annos) > 0:
        # keep the raw (possibly localized) comment and its default-language text
        self.comment = annos[0].obj_extra
        lang = Languages()
        self.default_comment = lang.get_default_value_str(self.comment)
def __init__(self, id_href=True):
    """Builds the standard JSON-LD @context: namespace prefixes, term
    aliases, and language-container terms.

    NOTE(review): id_href is accepted but not used in this initializer;
    kept for interface compatibility.
    """
    # for geo_json_context
    self.geo_json_context = self.GEO_JSON_CONTEXT_URI
    context = LastUpdatedOrderedDict()
    # namespace prefixes, in the order they should appear in output
    prefixes = (
        ('rdf', 'http://www.w3.org/1999/02/22-rdf-syntax-ns#'),
        ('rdfs', 'http://www.w3.org/2000/01/rdf-schema#'),
        ('xsd', 'http://www.w3.org/2001/XMLSchema#'),
        ('skos', 'http://www.w3.org/2004/02/skos/core#'),
        ('owl', 'http://www.w3.org/2002/07/owl#'),
        ('dc-terms', 'http://purl.org/dc/terms/'),
        ('dcmi', 'http://dublincore.org/documents/dcmi-terms/'),
        ('bibo', 'http://purl.org/ontology/bibo/'),
        ('foaf', 'http://xmlns.com/foaf/0.1/'),
        ('cidoc-crm', 'http://erlangen-crm.org/current/'),
        ('dcat', 'http://www.w3.org/ns/dcat#'),
        ('geojson', 'https://purl.org/geojson/vocab#'),
        ('cc', 'http://creativecommons.org/ns#'),
        ('nmo', 'http://nomisma.org/ontology#'),
        ('oc-gen', 'http://opencontext.org/vocabularies/oc-general/'),
        ('oc-pred', 'http://opencontext.org/predicates/'),
    )
    for prefix, namespace_uri in prefixes:
        context[prefix] = namespace_uri
    context['@language'] = Languages().DEFAULT_LANGUAGE
    # shorthand term aliases
    context['id'] = '@id'
    context['label'] = 'rdfs:label'
    context['uuid'] = 'dc-terms:identifier'
    context['slug'] = 'oc-gen:slug'
    context['type'] = '@type'
    context['category'] = {'@id': 'oc-gen:category', '@type': '@id'}
    context['owl:sameAs'] = {'@type': '@id'}
    # terms whose values are keyed by language
    context['skos:altLabel'] = {'@container': '@language'}
    context['xsd:string'] = {'@container': '@language'}
    context['description'] = {'@id': 'dc-terms:description', '@container': '@language'}
    # every configured text-content predicate is also a language container
    for pred in settings.TEXT_CONTENT_PREDICATES:
        if pred not in context:
            context[pred] = {'@container': '@language'}
    self.context = context
def add_json_ld_direct_assertion(self, json_ld, assertion):
    """Adds JSON-LD for an assertion made directly to the item,
    not part of an observation.

    :param dict json_ld: the item JSON-LD dict being built
    :param assertion: an Assertion model object
    :return dict: json_ld, possibly with the new assertion added
    """
    if assertion.predicate_uuid not in self.NO_OBS_ASSERTION_PREDS:
        # not one of the predicates that describes the item directly
        return json_ld
    act_obj = None
    if assertion.object_type == 'xsd:string':
        # look for the string uuid in the dict of string objects we already
        # got from the database
        if assertion.object_uuid in self.string_obj_dict:
            string_obj = self.string_obj_dict[assertion.object_uuid]
            lang_obj = Languages()
            act_obj = lang_obj.make_json_ld_value_obj(
                string_obj.content, string_obj.localized_json)
        else:
            act_obj = 'string content missing'
    elif assertion.object_type == 'xsd:date':
        act_obj = assertion.data_date.date().isoformat()
    elif assertion.object_type == 'xsd:integer':
        try:
            act_obj = int(float(assertion.data_num))
        except (TypeError, ValueError):
            # data_num missing or not coercible to an integer
            act_obj = None
    elif assertion.object_type in self.NUMERIC_OBJECT_TYPES:
        act_obj = assertion.data_num
    else:
        # the object is something identified by a URI, not a literal.
        # NOTE(fix): the original referenced an undefined name
        # `parts_json_ld` and then `entity` instead of `ent`, which would
        # raise NameError on this branch. Instantiate PartsJsonLD (as the
        # sibling methods do) and use the looked-up `ent`.
        parts_json_ld = PartsJsonLD()
        ent = parts_json_ld.get_new_object_item_entity(
            assertion.object_uuid, assertion.object_type)
        if ent is not False:
            act_obj = ent.uri
    if act_obj is not None:
        json_ld[assertion.predicate_uuid] = act_obj
    return json_ld
def add_translation(self, string_uuid, language, script, trans_text):
    """Adds a translation (or default-language edit) to a stored string.

    Returns True on success, False when the string record does not exist.
    """
    cleaned = str(trans_text).strip()
    try:
        str_obj = OCstring.objects.get(uuid=string_uuid)
    except OCstring.DoesNotExist:
        str_obj = None
    if str_obj is None:
        # nothing to update
        return False
    if language != Languages.DEFAULT_LANGUAGE:
        # editing in another language, so save to the localization JSON
        lan_obj = Languages()
        loc_key = lan_obj.get_language_script_key(language, script)
        str_obj.localized_json = lan_obj.modify_localization_json(
            str_obj.localized_json, loc_key, cleaned)
    else:
        # default language edits replace the main content
        str_obj.content = cleaned
    str_obj.save()
    return True
def save_entity_comments(self):
    """Saves rdfs:comment annotations for entities of the loaded vocabulary.

    Reads rdfs:comment triples from self.graph and stores each comment as
    a LinkAnnotation on the matching LinkEntity, keyed by the default
    language. Only runs when both a parsed graph and a vocabulary URI are
    set; when self.replace_old is set, pre-existing comment annotations
    are deleted first.
    """
    if self.graph is not False and self.vocabulary_uri is not False:
        lequiv = LinkEquivalence()
        # get all the identifier variants of rdfs:comment
        comment_uris = lequiv.get_identifier_list_variants('rdfs:comment')
        # now get all the entities from this vocabulary (that may be the
        # subject of a comment)
        # NOTE(review): this passes a QuerySet of LinkEntity model objects
        # (not URI strings) to get_identifier_list_variants — confirm that
        # method accepts model objects as well as strings.
        raw_subject_uris = LinkEntity.objects.filter(
            vocab_uri=self.vocabulary_uri)
        lequiv = LinkEquivalence()
        subject_uris = lequiv.get_identifier_list_variants(
            raw_subject_uris)
        if self.replace_old:
            # delete the old comments before saving the new ones
            LinkAnnotation.objects\
                .filter(subject__in=subject_uris,
                        predicate_uri__in=comment_uris)\
                .delete()
        for s, p, o in self.graph.triples((None, RDFS.comment, None)):
            subject_uri = s.__str__()  # the URI of the subject as a string
            comment = o.__str__()  # the comment from the object as a string
            # update the entity's comment
            link_ent = False
            try:
                link_ent = LinkEntity.objects.get(uri=subject_uri)
            except LinkEntity.DoesNotExist:
                # the comment's subject is not a known entity; skip it
                link_ent = False
            if link_ent is not False:
                lang = Languages()
                newr = LinkAnnotation()
                # make the subject a prefixed URI if common
                newr.subject = URImanagement.prefix_common_uri(subject_uri)
                newr.subject_type = 'uri'
                newr.project_uuid = '0'
                newr.source_id = self.vocabulary_uri
                newr.predicate_uri = 'rdfs:comment'
                # store the comment text keyed by the default language
                newr.obj_extra = {}
                newr.obj_extra[lang.DEFAULT_LANGUAGE] = comment
                newr.save()
def infer_assertions_for_item_json_ld(self, json_ld):
    """Makes a list of inferred linked-data assertions from item JSON-LD.

    Walks every active observation, maps each observation predicate to its
    linked-data (LOD) equivalents, and gathers literal values, LOD
    object equivalents, and Open Context objects under each unique LOD
    predicate.

    :param dict json_ld: the item's JSON-LD representation
    :return list: assertion dicts, each carrying 'type', 'ld_objects',
        'oc_objects', and 'literals'
    """
    lang_obj = Languages()
    inferred_assertions = []
    if not isinstance(json_ld, dict):
        return inferred_assertions
    if not ItemKeys.PREDICATES_OCGEN_HASOBS in json_ld:
        # no observations at all, so nothing to infer
        return inferred_assertions
    unique_pred_assertions = LastUpdatedOrderedDict()
    for obs_dict in json_ld[ItemKeys.PREDICATES_OCGEN_HASOBS]:
        # Get the status of the observation, defaulting to 'active'. If
        # active, then it's OK to infer assertions, otherwise skip the
        # observation.
        obs_status = obs_dict.get(ItemKeys.PREDICATES_OCGEN_OBSTATUS, 'active')
        if obs_status != 'active':
            # Skip this observation. It's there but has a deprecated
            # status.
            continue
        for obs_pred_key, obj_values in obs_dict.items():
            if obs_pred_key in self.LINKDATA_OBS_PREDS_SKIP:
                # Skip this obs_pred_key, it is a general description of
                # the observation and has no linked assertions to infer.
                continue
            obs_pred_info = self.lookup_predicate(obs_pred_key)
            # NOTE(review): the datatype is looked up before the
            # obs_pred_info falsy-check below — presumably
            # get_predicate_datatype_for_graph_obj tolerates a missing
            # predicate; confirm.
            pred_data_type = self.get_predicate_datatype_for_graph_obj(
                obs_pred_info)
            if not obs_pred_info:
                continue
            equiv_pred_objs = self.get_equivalent_objects(obs_pred_info)
            if not equiv_pred_objs:
                # No linked data equivalence for the obs_pred_key
                # so continue, skipping the rest.
                continue
            # Start with a None assertion.
            assertion = None
            # Iterate through all the equivalent predicate objects.
            for equiv_pred_obj in equiv_pred_objs:
                equiv_pred_uri = self.get_id_from_g_obj(equiv_pred_obj)
                # Inferred assertions will have unique LOD predicates, with
                # one or more values. The unique_pred_assertions dict makes
                # sure the LOD predicates are used only once.
                if not equiv_pred_uri in unique_pred_assertions:
                    assertion = equiv_pred_obj
                    assertion['type'] = pred_data_type
                    assertion['ld_objects'] = LastUpdatedOrderedDict()
                    assertion['oc_objects'] = LastUpdatedOrderedDict()
                    assertion['literals'] = []
                    unique_pred_assertions[equiv_pred_uri] = assertion
                assertion = unique_pred_assertions[equiv_pred_uri]
                if assertion and equiv_pred_uri:
                    # we have a LOD equivalent property
                    if not isinstance(obj_values, list):
                        obj_values = [obj_values]
                    for obj_val in obj_values:
                        literal_val = None
                        if not isinstance(obj_val, dict):
                            # the object of the assertion is not a dict, so
                            # it must be a literal
                            literal_val = obj_val
                            if obj_val not in assertion['literals']:
                                assertion['literals'].append(obj_val)
                        elif 'xsd:string' in obj_val:
                            # localized string object; flatten to one string
                            literal_val = lang_obj.get_all_value_str(
                                obj_val['xsd:string'])
                            if literal_val and literal_val not in assertion['literals']:
                                assertion['literals'].append(literal_val)
                        if literal_val is None:
                            # Not a literal. Add any linked data
                            # equivalences by looking for this type in the
                            # graph list.
                            obj_val = self.lookup_type_by_type_obj(obj_val)
                            obj_uri = self.get_id_from_g_obj(obj_val)
                            equiv_obj_objs = self.get_equivalent_objects(
                                obj_val)
                            if len(equiv_obj_objs):
                                # We have LD equivalents for the object value
                                for equiv_obj_obj in equiv_obj_objs:
                                    equiv_obj_uri = self.get_id_from_g_obj(
                                        equiv_obj_obj)
                                    if not biological_taxonomy_validation(
                                        equiv_pred_uri, equiv_obj_uri):
                                        # This object URI does not belong to
                                        # this predicate URI.
                                        continue
                                    assertion['ld_objects'][equiv_obj_uri] = equiv_obj_obj
                            elif obj_uri:
                                # We don't have LD equivalents for the object
                                # value, so add to the oc_objects
                                assertion['oc_objects'][obj_uri] = obj_val
                            unique_pred_assertions[equiv_pred_uri] = assertion
    for pred_key, assertion in unique_pred_assertions.items():
        inferred_assertions.append(assertion)
    return inferred_assertions
def make_graph(self):
    """Makes a graph of VoID dataset assertions for the void file.

    Asserts Open Context itself as a void:Dataset, adds web-data
    assertions, then lists each public project as a void:subset with its
    own dataset metadata (title, description, data dump URI, license,
    publication dates).
    """
    # NOTE(review): lang_obj appears unused in this method — confirm
    # before removing.
    lang_obj = Languages()
    # get a list of project manifest + projects objects
    # these are filtered for publicly available projects only
    pprojs = PelagiosProjects()
    pprojs.request = self.request
    self.man_proj_objs = pprojs.get_projects()
    # first make assertions about Open Context
    oc_projs_uri = settings.CANONICAL_HOST + '/projects/'
    self.make_add_triple(oc_projs_uri,
                         RDF.type,
                         self.make_full_uri('void', 'Dataset'))
    self.make_add_triple(oc_projs_uri,
                         self.make_full_uri('dcterms', 'title'),
                         None,
                         settings.CANONICAL_SITENAME)
    self.make_add_triple(oc_projs_uri,
                         self.make_full_uri('dcterms', 'description'),
                         None,
                         settings.HOST_TAGLINE)
    self.make_add_triple(oc_projs_uri,
                         self.make_full_uri('foaf', 'homepage'),
                         settings.CANONICAL_HOST)
    # now add assertions about Web data and Open Context
    self.make_add_web_dataset_assertions()
    # now add the projects as subsets of data
    for proj_dict in self.man_proj_objs:
        man = proj_dict['man']
        uri = URImanagement.make_oc_uri(man.uuid, man.item_type)
        self.make_add_triple(oc_projs_uri,
                             self.make_full_uri('void', 'subset'),
                             uri)
    # now add assertions about each project, esp. the datadump uri
    for proj_dict in self.man_proj_objs:
        man = proj_dict['man']
        proj = proj_dict['proj']
        uri = URImanagement.make_oc_uri(man.uuid, man.item_type)
        data_uri = self.base_uri + man.uuid + self.data_dump_extension
        self.make_add_triple(uri,
                             RDF.type,
                             self.make_full_uri('void', 'Dataset'))
        self.make_add_triple(uri,
                             self.make_full_uri('void', 'dataDump'),
                             data_uri)
        """
        self.make_add_triple(uri,
                             self.make_full_uri('foaf', 'homepage'),
                             uri)
        """
        self.make_add_triple(uri,
                             self.make_full_uri('dcterms', 'publisher'),
                             None,
                             settings.CANONICAL_SITENAME)
        # license is asserted on the data dump URI, not the project URI
        self.make_add_triple(data_uri,
                             self.make_full_uri('dcterms', 'license'),
                             self.OA_LICENSE)
        self.make_add_triple(uri,
                             self.make_full_uri('dcterms', 'title'),
                             None,
                             man.label)
        self.make_add_triple(uri,
                             self.make_full_uri('dcterms', 'description'),
                             None,
                             proj.short_des)
        if man.published is not None:
            self.make_add_triple(uri,
                                 self.make_full_uri('dcterms', 'issued'),
                                 None,
                                 man.published.date().isoformat())
        if man.revised is not None:
            self.make_add_triple(uri,
                                 self.make_full_uri('dcterms', 'modified'),
                                 None,
                                 man.revised.date().isoformat())
def add_json_ld_assertion_predicate_objects(self, act_obs, assertion):
    """Adds value objects for an assertion predicate to an observation dict.

    Literal objects (strings, dates, numbers) are appended to a list
    keyed by the predicate's slug URI; URI-identified objects are added
    via PartsJsonLD.addto_predicate_list instead.

    :param dict act_obs: the observation JSON-LD dict being built
    :param assertion: an Assertion model object
    :return dict: act_obs with the assertion's object value(s) added
    """
    # we've already looked up objects from the manifest
    parts_json_ld = PartsJsonLD()
    parts_json_ld.proj_context_json_ld = self.proj_context_json_ld
    parts_json_ld.manifest_obj_dict = self.manifest_obj_dict
    if assertion.object_type == 'persons':
        # add a stable ID to person items, but only if they are ORCID IDs
        parts_json_ld.stable_id_predicate = ItemKeys.PREDICATES_FOAF_PRIMARYTOPICOF
        parts_json_ld.stable_id_prefix_limit = StableIdentifer.ID_TYPE_PREFIXES[
            'orcid']
    pred_obj = self.get_pred_obj_by_uuid(parts_json_ld,
                                         assertion.predicate_uuid)
    pred_slug_uri = pred_obj['slug_uri']
    if isinstance(pred_slug_uri, str):
        if pred_slug_uri in act_obs:
            act_obj_list = act_obs[pred_slug_uri]
        else:
            act_obj_list = []
        act_obj = None
        add_literal_object = True
        if assertion.object_type == 'xsd:string':
            # look for the string uuid in the dict of string objects we
            # already got from the database
            if assertion.object_uuid in self.string_obj_dict:
                act_obj = LastUpdatedOrderedDict()
                act_obj['id'] = '#string-' + str(assertion.object_uuid)
                string_obj = self.string_obj_dict[assertion.object_uuid]
                lang_obj = Languages()
                act_obj['xsd:string'] = lang_obj.make_json_ld_value_obj(
                    string_obj.content, string_obj.localized_json)
            else:
                act_obj = 'string content missing'
        elif assertion.object_type == 'xsd:date':
            act_obj = assertion.data_date.date().isoformat()
        elif assertion.object_type == 'xsd:integer':
            try:
                act_obj = int(float(assertion.data_num))
            except:
                # data_num could not be coerced to an integer
                act_obj = None
        elif assertion.object_type in self.NUMERIC_OBJECT_TYPES:
            act_obj = assertion.data_num
        else:
            # the object is something identified by a URI, not a literal,
            # so we're using a function in parts_json_ld to add the
            # URI-identified object as a dict that has some useful
            # information: {id, label, slug, sometimes class}
            add_literal_object = False
            # some assertions use predicates equiv. to DC-Terms creators
            # or contributors
            self.add_assertion_dc_authors(pred_obj,
                                          assertion.object_uuid)
            if self.assertion_hashes:
                # we need to add the assertion hash identifier so as to be
                # able to identify assertions for editing purposes
                act_obs = parts_json_ld.addto_predicate_list(
                    act_obs,
                    pred_slug_uri,
                    assertion.object_uuid,
                    assertion.object_type,
                    False,
                    assertion.hash_id)
            else:
                # normal default assertion creation, without identification
                # of the assertion's hash ID
                act_obs = parts_json_ld.addto_predicate_list(
                    act_obs,
                    pred_slug_uri,
                    assertion.object_uuid,
                    assertion.object_type)
        if act_obj is not None and add_literal_object:
            if self.assertion_hashes:
                # we need to add the assertion hash identifier so as to be
                # able to identify assertions for editing purposes
                if not isinstance(act_obj, dict):
                    # wrap the bare literal so the hash_id can ride along
                    literal = act_obj
                    act_obj = LastUpdatedOrderedDict()
                    act_obj['literal'] = literal
                act_obj['hash_id'] = assertion.hash_id
            act_obj_list.append(act_obj)
        if len(act_obj_list) > 0 and add_literal_object:
            # only add a list of literal objects if they are literal objects :)
            act_obs[pred_slug_uri] = act_obj_list
    return act_obs
def infer_assertions_for_item_json_ld(self, json_ld):
    """Makes a list of inferred linked-data assertions from item JSON-LD.

    Walks every active observation, maps each observation predicate to
    its first linked-data (LOD) equivalent, and gathers literal values,
    LOD object equivalents, and Open Context objects under each unique
    LOD predicate.

    :param dict json_ld: the item's JSON-LD representation
    :return list: assertion dicts, each carrying 'type', 'ld_objects',
        'oc_objects', and 'literals'
    """
    lang_obj = Languages()
    inferred_assertions = []
    if not isinstance(json_ld, dict):
        return inferred_assertions
    if not ItemKeys.PREDICATES_OCGEN_HASOBS in json_ld:
        # no observations at all, so nothing to infer
        return inferred_assertions
    unique_pred_assertions = LastUpdatedOrderedDict()
    for obs_dict in json_ld[ItemKeys.PREDICATES_OCGEN_HASOBS]:
        # Get the status of the observation, defaulting to 'active'. If
        # active, then it's OK to infer assertions, otherwise skip the
        # observation.
        obs_status = obs_dict.get(ItemKeys.PREDICATES_OCGEN_OBSTATUS, 'active')
        if obs_status != 'active':
            # Skip this observation. It's there but has a deprecated
            # status.
            continue
        for obs_pred_key, obj_values in obs_dict.items():
            if obs_pred_key in self.LINKDATA_OBS_PREDS_SKIP:
                # Skip this obs_pred_key, it is a general description of
                # the observation and has no linked assertions to infer.
                continue
            obs_pred_info = self.lookup_predicate(obs_pred_key)
            # NOTE(review): unlike the sibling implementation, there is no
            # falsy-check on obs_pred_info here — presumably
            # get_predicate_datatype_for_graph_obj and
            # get_equivalent_objects tolerate a missing predicate; confirm.
            pred_data_type = self.get_predicate_datatype_for_graph_obj(obs_pred_info)
            equiv_pred_objs = self.get_equivalent_objects(obs_pred_info)
            if not equiv_pred_objs:
                # No linked data equivalence for the obs_pred_key
                # so continue, skipping the rest.
                continue
            # Start with a None assertion.
            assertion = None
            # We're only going to use the first equivalent of a predicate,
            # otherwise this gets too complicated.
            equiv_pred_obj = equiv_pred_objs[0]
            equiv_pred_uri = self.get_id_from_g_obj(equiv_pred_obj)
            # Inferred assertions will have unique LOD predicates, with
            # one or more values. The unique_pred_assertions dict makes
            # sure the LOD predicates are used only once.
            if not equiv_pred_uri in unique_pred_assertions:
                assertion = equiv_pred_obj
                assertion['type'] = pred_data_type
                assertion['ld_objects'] = LastUpdatedOrderedDict()
                assertion['oc_objects'] = LastUpdatedOrderedDict()
                assertion['literals'] = []
                unique_pred_assertions[equiv_pred_uri] = assertion
            assertion = unique_pred_assertions[equiv_pred_uri]
            if assertion and equiv_pred_uri:
                # we have a LOD equivalent property
                if not isinstance(obj_values, list):
                    obj_values = [obj_values]
                for obj_val in obj_values:
                    literal_val = None
                    if not isinstance(obj_val, dict):
                        # the object of the assertion is not a dict, so it
                        # must be a literal
                        literal_val = obj_val
                        if obj_val not in assertion['literals']:
                            assertion['literals'].append(obj_val)
                    elif 'xsd:string' in obj_val:
                        # localized string object; flatten to one string
                        literal_val = lang_obj.get_all_value_str(obj_val['xsd:string'])
                        if literal_val and literal_val not in assertion['literals']:
                            assertion['literals'].append(literal_val)
                    if literal_val is None:
                        # Not a literal. Add any linked data equivalences
                        # by looking for this type in the graph list.
                        obj_val = self.lookup_type_by_type_obj(obj_val)
                        obj_uri = self.get_id_from_g_obj(obj_val)
                        equiv_obj_objs = self.get_equivalent_objects(obj_val)
                        if len(equiv_obj_objs):
                            # We have LD equivalents for the object value
                            for equiv_obj_obj in equiv_obj_objs:
                                equiv_obj_uri = self.get_id_from_g_obj(equiv_obj_obj)
                                assertion['ld_objects'][equiv_obj_uri] = equiv_obj_obj
                        elif obj_uri:
                            # We don't have LD equivalents for the object
                            # value, so add to the oc_objects
                            assertion['oc_objects'][obj_uri] = obj_val
                        unique_pred_assertions[equiv_pred_uri] = assertion
    for pred_key, assertion in unique_pred_assertions.items():
        inferred_assertions.append(assertion)
    return inferred_assertions
def update_string_content(self, content, content_type='content', post_data={}):
    """Updates the main string content of an item (project, document,
    table abstract, or a predicate/type skos:note).

    Handles both default-language edits (written to the content fields)
    and translations (written to localization JSON under a
    language-script key taken from post_data).

    NOTE(review): post_data={} is a mutable default argument; it is only
    read here, but a None default would be safer.

    :param str content: the new (HTML-allowed) content text
    :param str content_type: 'content', 'short_des', etc.
    :param dict post_data: request data; may carry 'language' and 'script'
    :return dict: self.response describing the change
    """
    content = content.strip()
    # check if valid HTML, but allow invalid (only note the problem)
    html_ok = self.valid_as_html(content)
    if html_ok:
        note = ''
    else:
        note = self.errors['html']
    if self.manifest is not False:
        # check for translation!
        if 'language' in post_data:
            language = post_data['language']
        else:
            language = Languages.DEFAULT_LANGUAGE
        if 'script' in post_data:
            script = post_data['script']
        else:
            script = None
        if language != Languages.DEFAULT_LANGUAGE:
            # editing another language, not the default
            lan_obj = Languages()
            localize_key = lan_obj.get_language_script_key(language, script)
        else:
            # localize_key False means: edit the default-language content
            localize_key = False
        if self.manifest.item_type == 'projects':
            try:
                cobj = Project.objects.get(uuid=self.manifest.uuid)
                if localize_key is not False:
                    # translation: store under the localization key
                    if content_type == 'short_des':
                        cobj.sm_localized_json = lan_obj.modify_localization_json(cobj.sm_localized_json,
                                                                                  localize_key,
                                                                                  content)
                    else:
                        cobj.lg_localized_json = lan_obj.modify_localization_json(cobj.lg_localized_json,
                                                                                  localize_key,
                                                                                  content)
                else:
                    # default language: write the plain fields
                    if content_type == 'short_des':
                        cobj.short_des = content
                    else:
                        cobj.content = content
                cobj.save()
                ok = True
            except Project.DoesNotExist:
                self.errors['uuid'] = self.manifest.uuid + ' not in projects'
                ok = False
        elif self.manifest.item_type == 'tables':
            ex_id = ExpTableIdentifiers()
            ex_id.make_all_identifiers(self.manifest.uuid)
            try:
                cobj = ExpTable.objects.get(table_id=ex_id.table_id)
                if localize_key is not False:
                    # translation: store under the localization key
                    if content_type == 'short_des':
                        cobj.sm_localized_json = lan_obj.modify_localization_json(cobj.sm_localized_json,
                                                                                  localize_key,
                                                                                  content)
                    else:
                        cobj.lg_localized_json = lan_obj.modify_localization_json(cobj.lg_localized_json,
                                                                                  localize_key,
                                                                                  content)
                else:
                    # default language: write the plain fields
                    if content_type == 'short_des':
                        cobj.short_des = content
                    else:
                        cobj.abstract = content
                cobj.save()
                ok = True
            except ExpTable.DoesNotExist:
                self.errors['uuid'] = ex_id.table_id + ' not in tables'
                ok = False
        elif self.manifest.item_type == 'documents' and content_type == 'content':
            try:
                cobj = OCdocument.objects.get(uuid=self.manifest.uuid)
                if localize_key is not False:
                    # translation: store under the localization key
                    cobj.localized_json = lan_obj.modify_localization_json(cobj.localized_json,
                                                                           localize_key,
                                                                           content)
                else:
                    cobj.content = content
                cobj.save()
                ok = True
            except OCdocument.DoesNotExist:
                self.errors['uuid'] = self.manifest.uuid + ' not in documents'
                ok = False
        elif self.manifest.item_type == 'predicates' or self.manifest.item_type == 'types':
            # make a skos note to document a predicate or type
            ok = True
            string_uuid = None
            old_notes = Assertion.objects\
                .filter(uuid=self.manifest.uuid,
                        predicate_uuid='skos:note')
            for old_note in old_notes:
                string_uuid = old_note.object_uuid
                if localize_key is False:
                    # only delete if this is not a translation!
                    old_note.delete()
            if localize_key is not False and string_uuid is not None:
                # OK, we're just adding a translation
                act_string = False
                try:
                    act_string = OCstring.objects.get(uuid=string_uuid)
                except OCstring.DoesNotExist:
                    act_string = False
                    string_uuid = None
                if act_string is not False:
                    # update the localization JSON with the content
                    act_string.localized_json = lan_obj.modify_localization_json(act_string.localized_json,
                                                                                 localize_key,
                                                                                 content)
                    act_string.save()
            if localize_key is False:
                # this is for changing SKOS notes in cases where we're not
                # adding a translation
                if string_uuid is not None:
                    string_used = Assertion.objects\
                        .filter(project_uuid=self.manifest.project_uuid,
                                object_uuid=string_uuid)[:1]
                    if len(string_used) > 0:
                        # the string is used elsewhere, so we can't just use
                        # that string uuid
                        string_uuid = None
                    else:
                        # put the new content in the string that is not in
                        # use for other items
                        act_string = False
                        try:
                            act_string = OCstring.objects.get(uuid=string_uuid)
                        except OCstring.DoesNotExist:
                            act_string = False
                            string_uuid = None
                        if act_string is not False:
                            # save the content in the string to overwrite it
                            act_string.content = content
                            act_string.save()
                if string_uuid is None:
                    # we don't have a string_uuid to overwrite; make a new
                    # string record
                    str_man = StringManagement()
                    str_man.project_uuid = self.manifest.project_uuid
                    str_man.source_id = 'web-form'
                    str_obj = str_man.get_make_string(str(content))
                    string_uuid = str_obj.uuid
                # now make the assertion
                # NOTE(review): `uuid = self.manifest.uuid` also binds a
                # stray local `uuid` name — confirm nothing depends on it.
                new_ass = Assertion()
                new_ass.uuid = uuid = self.manifest.uuid
                new_ass.subject_type = self.manifest.item_type
                new_ass.project_uuid = self.manifest.project_uuid
                new_ass.source_id = 'web-form'
                new_ass.obs_node = '#obs-1'
                new_ass.obs_num = 1
                new_ass.sort = 1
                new_ass.visibility = 1
                new_ass.predicate_uuid = 'skos:note'
                new_ass.object_type = 'xsd:string'
                new_ass.object_uuid = string_uuid
                new_ass.save()
        else:
            ok = False
        if ok:
            # now clear the cache, a change was made
            self.clear_caches()
        self.response = {'action': 'update-string-content',
                         'ok': ok,
                         'change': {'prop': content_type,
                                    'new': content,
                                    'old': '[Old content]',
                                    'note': note}}
    return self.response
def update_label(self, label, post_data):
    """Updates an item's label. Generally straightforward except for subjects.

    Default-language edits also propagate the label into the item-type
    specific model (Project, ExpTable, Person) or regenerate context
    paths (subjects); other-language edits only update the manifest's
    localization JSON.

    :param str label: the new label text
    :param dict post_data: request data; may carry 'language', 'script',
        'reindex', and person name parts
    :return dict: self.response describing the change
    """
    ok = True
    note = ''
    old_label = self.manifest.label
    if 'language' in post_data:
        language = post_data['language']
    else:
        language = Languages.DEFAULT_LANGUAGE
    if 'script' in post_data:
        script = post_data['script']
    else:
        script = None
    if language != Languages.DEFAULT_LANGUAGE:
        # editing another language, not the default
        lan_obj = Languages()
        key = lan_obj.get_language_script_key(language, script)
        self.manifest.localized_json = lan_obj.modify_localization_json(self.manifest.localized_json,
                                                                        key,
                                                                        label)
        self.manifest.save()
        self.manifest.revised_save()
    else:
        # editing the default language
        self.manifest.label = label
        self.manifest.save()
        self.manifest.revised_save()
        # only do additional label changes in default language
        if self.manifest.item_type == 'projects':
            try:
                cobj = Project.objects.get(uuid=self.manifest.uuid)
                cobj.label = label
                cobj.save()
                ok = True
            except Project.DoesNotExist:
                self.errors['uuid'] = self.manifest.uuid + ' not in projects'
                ok = False
        elif self.manifest.item_type == 'subjects':
            # we need to adjust context paths for this subject + its children
            subj_gen = SubjectGeneration()
            subj_gen.generate_save_context_path_from_uuid(self.manifest.uuid)
            note = str(subj_gen.changes) + ' items affected'
        elif self.manifest.item_type == 'tables':
            ex_id = ExpTableIdentifiers()
            ex_id.make_all_identifiers(self.manifest.uuid)
            try:
                cobj = ExpTable.objects.get(table_id=ex_id.table_id)
                cobj.label = label
                cobj.save()
                ok = True
            except ExpTable.DoesNotExist:
                self.errors['uuid'] = ex_id.table_id + ' not in tables'
                ok = False
        elif self.manifest.item_type == 'persons':
            # we need to adjust the person's combined name
            try:
                cobj = Person.objects.get(uuid=self.manifest.uuid)
                cobj.combined_name = label
                if 'given_name' in post_data:
                    cobj.given_name = post_data['given_name']
                if 'surname' in post_data:
                    cobj.surname = post_data['surname']
                if 'initials' in post_data:
                    cobj.initials = post_data['initials']
                if 'mid_init' in post_data:
                    cobj.mid_init = post_data['mid_init']
                cobj.save()
                ok = True
            except Person.DoesNotExist:
                self.errors['uuid'] = self.manifest.uuid + ' not in persons'
                ok = False
    # now reindex for solr, including child items impacted by the changes
    if self.manifest.item_type != 'tables' and self.edit_status > 0:
        if 'reindex' in post_data:
            sri = SolrReIndex()
            sri.reindex_related(self.manifest.uuid)
    if ok:
        # now clear the cache, a change was made
        self.clear_caches()
    self.response = {'action': 'update-label',
                     'ok': ok,
                     'change': {'prop': 'label',
                                'new': label,
                                'old': old_label,
                                'note': note}}
    return self.response
def add_json_ld_assertion_predicate_objects(self, act_obs, assertion):
    """Adds value objects for an assertion predicate to an observation dict.

    Literal objects (strings, dates, numbers) are appended to a list
    keyed by the predicate's slug URI; URI-identified objects are added
    via PartsJsonLD.addto_predicate_list instead.

    :param dict act_obs: the observation JSON-LD dict being built
    :param assertion: an Assertion model object
    :return dict: act_obs with the assertion's object value(s) added
    """
    # we've already looked up objects from the manifest
    parts_json_ld = PartsJsonLD()
    parts_json_ld.proj_context_json_ld = self.proj_context_json_ld
    parts_json_ld.manifest_obj_dict = self.manifest_obj_dict
    if assertion.object_type == 'persons':
        # add a stable ID to person items, but only if they are ORCID IDs
        parts_json_ld.stable_id_predicate = ItemKeys.PREDICATES_FOAF_PRIMARYTOPICOF
        parts_json_ld.stable_id_prefix_limit = StableIdentifer.ID_TYPE_PREFIXES['orcid']
    pred_obj = self.get_pred_obj_by_uuid(parts_json_ld,
                                         assertion.predicate_uuid)
    pred_slug_uri = pred_obj['slug_uri']
    if isinstance(pred_slug_uri, str):
        if pred_slug_uri in act_obs:
            act_obj_list = act_obs[pred_slug_uri]
        else:
            act_obj_list = []
        act_obj = None
        add_literal_object = True
        if assertion.object_type == 'xsd:string':
            # look for the string uuid in the dict of string objects we
            # already got from the database
            if assertion.object_uuid in self.string_obj_dict:
                act_obj = LastUpdatedOrderedDict()
                act_obj['id'] = '#string-' + str(assertion.object_uuid)
                string_obj = self.string_obj_dict[assertion.object_uuid]
                lang_obj = Languages()
                act_obj['xsd:string'] = lang_obj.make_json_ld_value_obj(string_obj.content,
                                                                        string_obj.localized_json)
            else:
                act_obj = 'string content missing'
        elif assertion.object_type == 'xsd:date':
            act_obj = assertion.data_date.date().isoformat()
        elif assertion.object_type == 'xsd:integer':
            try:
                act_obj = int(float(assertion.data_num))
            except:
                # data_num could not be coerced to an integer
                act_obj = None
        elif assertion.object_type in self.NUMERIC_OBJECT_TYPES:
            act_obj = assertion.data_num
        else:
            # the object is something identified by a URI, not a literal,
            # so we're using a function in parts_json_ld to add the
            # URI-identified object as a dict that has some useful
            # information: {id, label, slug, sometimes class}
            add_literal_object = False
            # some assertions use predicates equiv. to DC-Terms creators
            # or contributors
            self.add_assertion_dc_authors(pred_obj,
                                          assertion.object_uuid)
            if self.assertion_hashes:
                # we need to add the assertion hash identifier so as to be
                # able to identify assertions for editing purposes
                act_obs = parts_json_ld.addto_predicate_list(act_obs,
                                                             pred_slug_uri,
                                                             assertion.object_uuid,
                                                             assertion.object_type,
                                                             False,
                                                             assertion.hash_id)
            else:
                # normal default assertion creation, without identification
                # of the assertion's hash ID
                act_obs = parts_json_ld.addto_predicate_list(act_obs,
                                                             pred_slug_uri,
                                                             assertion.object_uuid,
                                                             assertion.object_type)
        if act_obj is not None and add_literal_object:
            if self.assertion_hashes:
                # we need to add the assertion hash identifier so as to be
                # able to identify assertions for editing purposes
                if not isinstance(act_obj, dict):
                    # wrap the bare literal so the hash_id can ride along
                    literal = act_obj
                    act_obj = LastUpdatedOrderedDict()
                    act_obj['literal'] = literal
                act_obj['hash_id'] = assertion.hash_id
            act_obj_list.append(act_obj)
        if len(act_obj_list) > 0 and add_literal_object:
            # only add a list of literal objects if they are literal objects :)
            act_obs[pred_slug_uri] = act_obj_list
    return act_obs