def done(self, form_list, **kwargs):
    """ Iterates over all forms and fills the Metadata/Dataset records accordingly

    Args:
        form_list (FormList): An iterable list of forms
        kwargs:
    Returns:

    """
    # Create instances
    self.metadata = Metadata()
    self.metadata.metadata_type = MetadataEnum.DATASET.value
    self.metadata.is_active = True

    self.dataset = Dataset()
    self.dataset.is_active = True
    self.dataset.metadata_standard_name = "ISO 19115 Geographic information - Metadata"
    self.dataset.metadata_standard_version = "ISO 19115:2003(E)"

    # Pre-save objects to be able to add M2M relations
    self.metadata.save()

    # BUG FIX: the identifier only exists after the first save (it mirrors the
    # db id), so md_identifier_code must be copied AFTERWARDS. The original
    # copied it before the save, leaving it permanently None.
    self.metadata.identifier = self.metadata.id
    self.dataset.md_identifier_code = self.metadata.identifier

    self.dataset.metadata = self.metadata
    self.dataset.save()

    self.metadata.metadata_url = reverse("resource:get-dataset-metadata", args=(self.dataset.id,))

    return super().done(form_list=form_list, **kwargs)
def post(self):
    """Create a new Dataset record from the JSON request body.

    Validates the payload, rejects duplicate names, resolves the creating
    user by email, and persists the new dataset.

    Returns:
        tuple: (payload, HTTP status) - the serialized dataset with 201 on
        success, otherwise an error payload with 400/404.
    """
    dataset_dict = request.get_json()
    if not dataset_dict:
        return {'message': 'No input data provided.'}, 400

    # BUG FIX: validate the schema BEFORE dereferencing keys such as 'name'
    # and 'email'; the original raised KeyError (HTTP 500) on payloads that
    # were missing required fields.
    errors = dataset_schema.validate(dataset_dict)
    if errors:
        return errors, 400

    existing_dataset = Dataset.query.filter_by(
        name=dataset_dict['name']).first()
    if existing_dataset:
        return {'message': 'Dataset named **{}** already exists.'
                .format(dataset_dict['name'])}, 400

    user = OrgPerson.query.filter_by(email=dataset_dict['email']).first()
    if not user:
        # BUG FIX: the original returned no status code here, which Flask
        # treats as HTTP 200 even though the lookup failed.
        return {'message': '{} not found.'.format(dataset_dict['email'])}, 404

    dataset = Dataset(name=dataset_dict['name'],
                      fields=dataset_dict['fields'],
                      creator=user)
    if 'description' in dataset_dict:
        dataset.description = dataset_dict['description']

    try:
        db.session.add(dataset)
        db.session.commit()
    except SQLAlchemyError as e:
        # Roll back the failed transaction and surface the db error
        db.session.rollback()
        return {'error': str(e)}, 400

    return dataset_schema.dump(dataset), 201
def _fill_metadata_dataset_identification_form(data: dict, metadata: Metadata, dataset: Dataset, user: MrMapUser):
    """ Transfers cleaned identification form data onto the Metadata/Dataset records

    Args:
        data (dict): Cleaned form data
        metadata (Metadata): The metadata record
        dataset (Dataset): The dataset record
        user (MrMapUser): The performing user
    Returns:

    """
    metadata.title = data.get("title", None)
    metadata.abstract = data.get("abstract", None)
    metadata.created = data.get("date_stamp", None)
    metadata.created_by = data.get("created_by", None)

    dataset.language_code = data.get("language_code", None)
    dataset.character_set_code = data.get("character_set_code", None)
    dataset.date_stamp = data.get("date_stamp", None)

    # Rebuild the reference system M2M relation from the form selection
    metadata.reference_system.clear()
    for reference_system in data.get("reference_system", []):
        metadata.reference_system.add(reference_system)

    # Editor-created relations are dropped and recreated on every submit
    MetadataRelation.objects.filter(
        to_metadata=metadata,
        origin=ResourceOriginEnum.EDITOR.value
    ).delete()
    for related_object in data.get("additional_related_objects", []):
        related_object.add_metadata_relation(
            to_metadata=metadata,
            relation_type=MetadataRelationEnum.DESCRIBES.value,
            internal=True,
            origin=ResourceOriginEnum.EDITOR.value
        )
def _fill_metadata_dataset_quality_form(data: dict, metadata: Metadata, dataset: Dataset, user: MrMapUser): """ Fills form data into Metadata/Dataset records Args: data (dict): Cleaned form data metadata (dict): The metadata record dataset (dict): The dataset record user: The performing user Returns: """ dataset.update_frequency_code = data.get("maintenance_and_update_frequency", None) dataset.lineage_statement = data.get("lineage_statement", None)
def to_db_model(self, type=MetadataEnum.DATASET.value, created_by: MrMapGroup = None):
    """ Get corresponding metadata object from database or create it if not found!

    Args:
        type (str): The metadata type used when a new record must be created
        created_by (MrMapGroup): The group that owns newly created records
    Returns:
         metadata (Metadata): A db model Metadata object
    """
    update = False
    new = False

    # try to find the object by uuid and uri. If not existing yet, create a new record
    try:
        metadata = Metadata.objects.get(identifier=self.file_identifier, metadata_url=self.uri)
        # check if the parsed metadata might be newer
        # make sure both date time objects will be comparable
        persisted_change = metadata.last_remote_change.replace(tzinfo=utc)
        new_change = self.last_change_date.replace(tzinfo=utc)
        if persisted_change > new_change:
            # Nothing to do here
            return metadata
        update = True
    except ObjectDoesNotExist:
        # object does not seem to exist -> create it!
        metadata = Metadata()
        metadata.metadata_type = type
        if metadata.is_dataset_metadata:
            metadata.dataset = Dataset()
            metadata.dataset.created_by = created_by
        metadata.created_by = created_by
        new = True

    if update or new:
        # In case of a dataset, we need to fill the information into the dataset object
        if metadata.is_dataset_metadata:
            metadata.dataset = self._fill_dataset_db_model(metadata.dataset)
        metadata = self._fill_metadata_db_model(metadata)
        metadata.save()
        # BUG FIX: only save the related dataset when one exists; the original
        # called metadata.dataset.save() unconditionally, which fails for
        # non-dataset metadata types where no Dataset was ever attached.
        if metadata.is_dataset_metadata:
            metadata.dataset.save()

        orig_document = Document.objects.get_or_create(
            metadata=metadata,
            document_type=DocumentEnum.METADATA.value,
            is_original=True,
        )[0]
        orig_document.content = self.raw_metadata
        orig_document.save()

        if update:
            # Drop stale keywords before re-adding the parsed ones
            metadata.keywords.clear()
        # NOTE(review): the collapsed original is ambiguous about whether the
        # keyword loop ran only on update; adding keywords for new records as
        # well matches the surrounding intent - confirm against history.
        for kw in self.keywords:
            keyword = Keyword.objects.get_or_create(keyword=kw)[0]
            metadata.keywords.add(keyword)

    return metadata
def _fill_form_list(form_list, metadata: Metadata, dataset: Dataset, user: MrMapUser):
    """ Applies the changes of every wizard form to the metadata/dataset records

    Args:
        form_list: The list of forms
        metadata: The metadata record
        dataset: The dataset record
        user: The performing user
    Returns:

    """
    # Dispatch each form class name to its dedicated fill routine
    function_map = {
        "DatasetIdentificationForm": DatasetWizard._fill_metadata_dataset_identification_form,
        "DatasetResponsiblePartyForm": DatasetWizard._fill_metadata_dataset_responsible_party_form,
        "DatasetClassificationForm": DatasetWizard._fill_metadata_dataset_classification_form,
        "DatasetSpatialExtentForm": DatasetWizard._fill_metadata_dataset_spatial_extent_form,
        "DatasetLicenseConstraintsForm": DatasetWizard._fill_metadata_dataset_licence_form,
        "DatasetQualityForm": DatasetWizard._fill_metadata_dataset_quality_form,
    }
    for form in form_list:
        handler = function_map[type(form).__name__]
        handler(form.cleaned_data, metadata, dataset, user)

    dataset.save()
    metadata.is_custom = True
    metadata.save()

    # Update the customized (non-original) metadata document, or create it
    # when none exists yet.
    try:
        doc = Document.objects.get(
            metadata__id=metadata.id,
            document_type=DocumentEnum.METADATA.value,
            is_original=False,
        )
        doc.is_active = metadata.is_active
        DatasetWizard._overwrite_dataset_document(metadata, doc)
    except ObjectDoesNotExist:
        DatasetWizard._create_dataset_document(metadata)
class NewDatasetWizard(PermissionRequiredMixin, DatasetWizard):
    """Wizard view for creating a brand-new dataset metadata record."""

    permission_required = PermissionEnum.CAN_ADD_DATASET_METADATA.value
    raise_exception = True
    permission_denied_message = NO_PERMISSION

    def __init__(self, *args, **kwargs):
        super().__init__(
            action_url=reverse('editor:dataset-metadata-wizard-new', ),
            title=_(format_html('<b>Add New Dataset</b>')),
            *args,
            **kwargs
        )

    def get_form_kwargs(self, step=None):
        # Every step's form receives the current request
        return {'request': self.request}

    def done(self, form_list, **kwargs):
        """ Iterates over all forms and fills the Metadata/Dataset records accordingly

        Args:
            form_list (FormList): An iterable list of forms
            kwargs:
        Returns:

        """
        # Create instances
        self.metadata = Metadata()
        self.metadata.metadata_type = MetadataEnum.DATASET.value
        self.metadata.is_active = True

        self.dataset = Dataset()
        self.dataset.is_active = True
        self.dataset.metadata_standard_name = "ISO 19115 Geographic information - Metadata"
        self.dataset.metadata_standard_version = "ISO 19115:2003(E)"

        # Pre-save objects to be able to add M2M relations
        self.metadata.save()

        # BUG FIX: the identifier only exists after the first save (it mirrors
        # the db id), so md_identifier_code must be copied AFTERWARDS. The
        # original copied it before the save, leaving it permanently None.
        self.metadata.identifier = self.metadata.id
        self.dataset.md_identifier_code = self.metadata.identifier

        self.dataset.metadata = self.metadata
        self.dataset.save()

        self.metadata.metadata_url = reverse("resource:get-dataset-metadata", args=(self.dataset.id,))

        return super().done(form_list=form_list, **kwargs)
def _fill_dataset_db_model(self, dataset: Dataset): """ Fills a Dataset db record from the ISOMetadata data Args: dataset (Dataset): The old/empty object Returns: dataset (Dataset): The dataset object """ dataset.language_code = self.language dataset.character_set_code = self.character_set_code or "utf8" dataset.hierarchy_level_code = self.hierarchy_level dataset.update_frequency_code = self.update_frequency dataset.update_frequency_code = self.update_frequency dataset.legal_restriction_code = None dataset.date_stamp = self.date_stamp dataset.metadata_standard_name = self.md_standard_name dataset.metadata_standard_version = self.md_standard_version dataset.reference_system_code = self.ref_system dataset.reference_system_version = self.ref_system_version dataset.reference_system_authority_title = self.ref_system_authority dataset.md_identifier_code = self.file_identifier dataset.distribution_function_code = self.distribution_function or "dataset" dataset.lineage_statement = self.lineage dataset.legal_restriction_other_constraints = self.access_constraints dataset.use_limitation = self.use_limitation return dataset
def _create_dataset_from_md_metadata(self, md_metadata: Element, metadata: Metadata) -> Dataset:
    """ Creates a Dataset record from xml data

    Args:
        md_metadata (Element): The xml element which holds the data
        metadata (Metadata): The related metadata element
    Returns:
        dataset (Dataset): The dataset record
    """
    # Build each xpath expression once; several are reused for both the text
    # content and the codeList attribute of the same element.
    ns = GENERIC_NAMESPACE_TEMPLATE.format
    language_xpath = ".//" + ns("language") + "/" + ns("LanguageCode")
    charset_xpath = ".//" + ns("characterSet") + "/" + ns("MD_CharacterSetCode")
    date_stamp_xpath = ".//" + ns("dateStamp") + "/" + ns("Date")
    std_name_xpath = ".//" + ns("metadataStandardName") + "/" + ns("CharacterString")
    std_version_xpath = ".//" + ns("metadataStandardVersion") + "/" + ns("CharacterString")
    frequency_xpath = ".//" + ns("MD_MaintenanceFrequencyCode")
    use_limitation_xpath = ".//" + ns("useLimitation") + "/" + ns("CharacterString")
    lineage_xpath = ".//" + ns("statement") + "/" + ns("CharacterString")

    dataset = Dataset()
    dataset.language_code = metadata.language_code
    dataset.language_code_list_url = xml_helper.try_get_attribute_from_xml_element(
        md_metadata, "codeList", language_xpath)
    dataset.character_set_code = xml_helper.try_get_text_from_xml_element(
        md_metadata, charset_xpath)
    dataset.character_set_code_list_url = xml_helper.try_get_attribute_from_xml_element(
        md_metadata, "codeList", charset_xpath)
    dataset.date_stamp = xml_helper.try_get_text_from_xml_element(
        md_metadata, date_stamp_xpath)
    dataset.metadata_standard_name = xml_helper.try_get_text_from_xml_element(
        md_metadata, std_name_xpath)
    dataset.metadata_standard_version = xml_helper.try_get_text_from_xml_element(
        md_metadata, std_version_xpath)
    dataset.update_frequency_code = xml_helper.try_get_text_from_xml_element(
        md_metadata, frequency_xpath)
    dataset.update_frequency_code_list_url = xml_helper.try_get_attribute_from_xml_element(
        md_metadata, "codeList", frequency_xpath)
    dataset.use_limitation = xml_helper.try_get_text_from_xml_element(
        md_metadata, use_limitation_xpath)
    dataset.lineage_statement = xml_helper.try_get_text_from_xml_element(
        md_metadata, lineage_xpath)
    return dataset