def _fill_metadata_db_model(self, metadata: Metadata):
    """Copy the parsed ISO metadata values onto a Metadata db record and persist it.

    Args:
        metadata (Metadata): The old/empty object to be filled
    Returns:
        metadata (Metadata): The same object, saved with all values set
    """
    metadata.identifier = self.file_identifier
    metadata.abstract = self.abstract
    metadata.access_constraints = self.access_constraints

    # Of all exterior extent polygons, the one covering the largest area
    # becomes the bounding geometry.
    if self.polygonal_extent_exterior:
        metadata.bounding_geometry = max(
            self.polygonal_extent_exterior, key=lambda polygon: polygon.area
        )

    try:
        metadata.contact = Organization.objects.get_or_create(
            organization_name=self.responsible_party,
            email=self.contact_email,
        )[0]
    except MultipleObjectsReturned:
        # okay, we need to create a unique organization
        # "unique" since it will only be identified using organization_name and email
        metadata.contact = Organization.objects.get_or_create(
            organization_name="{}#1".format(self.responsible_party),
            email=self.contact_email,
        )[0]

    metadata.is_inspire_conform = self.inspire_interoperability
    metadata.metadata_url = self.uri
    metadata.last_remote_change = self.last_change_date
    metadata.spatial_res_type = self.spatial_res_type
    metadata.spatial_res_value = self.spatial_res_val

    # A missing title gets a placeholder; note it is stored back on self as well.
    if self.title is None:
        self.title = "BROKEN"
    metadata.title = self.title
    metadata.origin = self.origin
    metadata.is_broken = self.is_broken
    metadata.save()

    # Persist related legal reports (and their dates) plus standalone legal dates.
    # The m2m .add() calls need the metadata row to exist, hence the save() above.
    for legal_report in self.legal_reports:
        legal_report.date.save()
        legal_report.save()
        metadata.legal_reports.add(legal_report)
    for legal_date in self.legal_dates:
        legal_date.save()
        metadata.legal_dates.add(legal_date)
    metadata.save()
    return metadata
def _persist_metadata(self, md_data_entry: dict):
    """Create or update a real Metadata model record from one parsed data entry.

    Skips entries whose remote date stamp is unchanged, otherwise writes all
    fields, keywords, formats, categories and links inside a transaction and
    records parent/child relations for later linking.

    Args:
        md_data_entry (dict): Parsed harvest data for a single record; must
            contain at least "id", "keywords", "formats", "categories" and
            "parent_id" (bracket access below), other keys are optional.
    Returns:
        None: despite the original docstring, no value is returned — the
        persisted object is only kept in self.parent_child_map if applicable.
    """
    _id = md_data_entry["id"]
    # Remove this id from the set of metadata which shall be deleted in the end.
    try:
        self.deleted_metadata.remove(_id)
    except KeyError:
        pass
    try:
        md = Metadata.objects.get(identifier=_id, )
        is_new = False
        if md.last_remote_change == md_data_entry["date_stamp"]:
            # Record exists and has not changed remotely — nothing to do here!
            return
    except ObjectDoesNotExist:
        md = Metadata(identifier=_id)
        is_new = True
    # Optional fields fall back to None via .get(); note that "keywords",
    # "formats", "categories" and "parent_id" further down use bracket access
    # and would raise KeyError if missing — NOTE(review): inconsistent, confirm
    # upstream parser always sets them.
    md.access_constraints = md_data_entry.get("access_constraints", None)
    md.created_by = self.harvesting_group
    md.origin = ResourceOriginEnum.CATALOGUE.value
    md.last_remote_change = md_data_entry.get("date_stamp", None)
    md.title = md_data_entry.get("title", None)
    md.contact = md_data_entry.get("contact", None)
    md.language_code = md_data_entry.get("language_code", None)
    md.metadata_type = md_data_entry.get("metadata_type", None)
    md.abstract = md_data_entry.get("abstract", None)
    md.bounding_geometry = md_data_entry.get("bounding_geometry", None)
    # Default; overwritten by the MimeType queryset inside the try below.
    formats = md_data_entry.get("formats", [])
    md.is_active = True
    md.capabilities_original_uri = md_data_entry.get(
        "capabilities_original_url", None)
    try:
        # Improve speed for keyword get-create by fetching (filter) all
        # existing ones and only performing get_or_create on the ones that do
        # not exist yet. Speed up by ~50% for large amounts of data.
        existing_kws = Keyword.objects.filter(
            keyword__in=md_data_entry["keywords"])
        existing_kws = [kw.keyword for kw in existing_kws]
        new_kws = [
            kw for kw in md_data_entry["keywords"] if kw not in existing_kws
        ]
        # Side-effect-only comprehension: creates the missing Keyword rows.
        [Keyword.objects.get_or_create(keyword=kw)[0] for kw in new_kws]
        kws = Keyword.objects.filter(keyword__in=md_data_entry["keywords"])
        # Same filter-then-create strategy for MimeTypes.
        existing_formats = MimeType.objects.filter(
            mime_type__in=md_data_entry["formats"])
        existing_formats = [
            _format.mime_type for _format in existing_formats
        ]
        new_formats = [
            _format for _format in md_data_entry["formats"]
            if _format not in existing_formats
        ]
        # Side-effect-only comprehension: creates the missing MimeType rows.
        [
            MimeType.objects.get_or_create(mime_type=_format)[0]
            for _format in new_formats
        ]
        formats = MimeType.objects.filter(
            mime_type__in=md_data_entry["formats"])
        # All writes below happen atomically — either the record with all its
        # relations is persisted, or nothing is.
        with transaction.atomic():
            if len(md_data_entry["categories"]) > 0:
                # Case-insensitive OR-match on the English category titles.
                q = Q()
                for cat in md_data_entry["categories"]:
                    q |= Q(title_EN__iexact=cat)
                categories = Category.objects.filter(q)
            else:
                categories = []
            # Persist each harvested link as a GenericUrl attached to md.
            for link in md_data_entry.get("links", []):
                url = link.get("link", None)
                if url is None:
                    continue
                generic_url = GenericUrl()
                generic_url.description = "[HARVESTED URL] \n{}".format(
                    link.get("description", ""))
                generic_url.method = "Get"
                generic_url.url = url
                generic_url.save()
                md.additional_urls.add(generic_url)
            md.save(add_monitoring=False)
            md.keywords.add(*kws)
            md.categories.add(*categories)
            md.formats.add(*formats)
            # To reduce runtime, we only create a new MetadataRelation if we
            # are sure there hasn't already been one. Using get_or_create
            # increases runtime on existing metadata too much!
            if is_new:
                md.add_metadata_relation(
                    to_metadata=self.metadata,
                    relation_type=MetadataRelationEnum.HARVESTED_THROUGH.
                    value,
                    origin=ResourceOriginEnum.CATALOGUE.value)
            parent_id = md_data_entry["parent_id"]
            # Add the found parent_id to the parent_child map!
            if parent_id is not None:
                if self.parent_child_map.get(parent_id, None) is None:
                    self.parent_child_map[parent_id] = [md]
                else:
                    self.parent_child_map[parent_id].append(md)
    except (IntegrityError, DataError) as e:
        # Database rejected the record — log and move on with the harvest run.
        csw_logger.error(
            CSW_ERROR_LOG_TEMPLATE.format(md.identifier,
                                          self.metadata.title, e))