def test_update_from_dict(self):
    """Test update_from_dict method."""
    good_data = {
        'start_time': '20140714_060955',
        'finish_time': '20140714_061255',
        'hazard_layer': 'path/to/hazard/layer',
        'exposure_layer': 'path/to/exposure/layer',
        'impact_function_id': 'IF_id',
        'impact_function_version': '2.1',
        'host_name': 'my_computer',
        'user': '******',
        'qgis_version': '2.4',
        'gdal_version': '1.9.1',
        'qt_version': '4.5',
        'pyqt_version': '5.1',
        'os': 'ubuntu 12.04',
        'inasafe_version': '2.1',
        'exposure_pixel_size': '0.1',
        'hazard_pixel_size': '0.2',
        'impact_pixel_size': '0.1',
        'actual_extent': [0, 1, 2, 2],
        'requested_extent': [0, 1, 2, 2],
        'actual_extent_crs': 'EPSG: 4326',
        'requested_extent_crs': 'EPSG: 4326',
        'parameter': {},
    }
    metadata = ExposureSummaryLayerMetadata('random_layer_id')
    provenance = Provenance()
    provenance.append_step(
        'Title 1', 'Description of step 1', '2015-06-25T13:14:24.508974')
    provenance.append_step(
        'Title 2', 'Description of step 2', '2015-06-25T13:14:24.508980')
    provenance.append_if_provenance_step(
        'Title 3',
        'Description of step 3',
        '2015-06-25T13:14:24.508984',
        data=good_data)
    keywords = {
        'layer_purpose': 'impact_layer',
        'layer_geometry': 'raster',
        'if_provenance': provenance,
    }
    metadata.update_from_dict(keywords)

    self.assertEqual(metadata.layer_purpose, 'impact_layer')
    self.assertEqual(metadata.layer_geometry, 'raster')
    self.assertNotEqual(metadata.layer_mode, 'raster')
    self.assertEqual(len(metadata.provenance.steps), 3)
    self.assertEqual(metadata.provenance.get(2), provenance.get(2))
    self.assertEqual(metadata.provenance.get(2).user, 'my_user')

def test_append_step(self):
    """Test append_step method."""
    provenance = Provenance()
    title0 = 'Calculated first random impact'
    description0 = 'In this step we calculated a first random impact'
    provenance.append_step(title0, description0)
    title1 = 'Calculated second random impact'
    description1 = 'In this step we calculated a second random impact'
    provenance.append_step(title1, description1)
    title2 = 'Calculated third random impact'
    description2 = 'In this step we calculated a third random impact'
    provenance.append_step(title2, description2)

    self.assertEqual(provenance.count, 3)
    self.assertEqual(provenance.get(1).title, title1)
    self.assertEqual(provenance.last.title, title2)

def test_update_from_dict(self):
    """Test update_from_dict method."""
    good_data = {
        'start_time': '20140714_060955',
        'finish_time': '20140714_061255',
        'hazard_layer': 'path/to/hazard/layer',
        'exposure_layer': 'path/to/exposure/layer',
        'impact_function_id': 'IF_id',
        'impact_function_version': '2.1',
        'host_name': 'my_computer',
        'user': '******',
        'qgis_version': '2.4',
        'gdal_version': '1.9.1',
        'qt_version': '4.5',
        'pyqt_version': '5.1',
        'os': 'ubuntu 12.04',
        'inasafe_version': '2.1',
        'exposure_pixel_size': '0.1',
        'hazard_pixel_size': '0.2',
        'impact_pixel_size': '0.1',
        'actual_extent': [0, 1, 2, 2],
        'requested_extent': [0, 1, 2, 2],
        'actual_extent_crs': 'EPSG: 4326',
        'requested_extent_crs': 'EPSG: 4326',
        'parameter': {},
    }
    metadata = OutputLayerMetadata('random_layer_id')
    provenance = Provenance()
    provenance.append_step(
        'Title 1', 'Description of step 1', '2015-06-25T13:14:24.508974')
    provenance.append_step(
        'Title 2', 'Description of step 2', '2015-06-25T13:14:24.508980')
    provenance.append_if_provenance_step(
        'Title 3',
        'Description of step 3',
        '2015-06-25T13:14:24.508984',
        data=good_data)
    keywords = {
        'layer_purpose': 'impact_layer',
        'layer_geometry': 'raster',
        'if_provenance': provenance,
    }
    metadata.update_from_dict(keywords)

    self.assertEqual(metadata.layer_purpose, 'impact_layer')
    self.assertEqual(metadata.layer_geometry, 'raster')
    self.assertNotEqual(metadata.layer_mode, 'raster')
    self.assertEqual(len(metadata.provenance.steps), 3)
    self.assertEqual(metadata.provenance.get(2), provenance.get(2))
    self.assertEqual(metadata.provenance.get(2).user, 'my_user')

def __init__(self, layer_uri, xml_uri=None, json_uri=None):
    """Constructor.

    :param layer_uri: uri of the layer the metadata describes
    :type layer_uri: str
    :param xml_uri: uri of an xml file to use
    :type xml_uri: str
    :param json_uri: uri of a json file to use
    :type json_uri: str
    """
    # Initialise members
    # private members
    self._provenance = Provenance()
    # public members
    self.summary_data = None

    # initialize base class
    super(ImpactLayerMetadata, self).__init__(layer_uri, xml_uri, json_uri)

def test_append_step(self):
    """Test append_step method with a data payload."""
    provenance = Provenance()
    title0 = 'Calculated first random impact'
    description0 = 'In this step we calculated a first random impact'
    provenance.append_step(title0, description0)
    title1 = 'Calculated second random impact'
    description1 = 'In this step we calculated a second random impact'
    provenance.append_step(title1, description1)
    title2 = 'Calculated third random impact'
    description2 = 'In this step we calculated a third random impact'
    provenance.append_step(title2, description2)
    title3 = 'Calculated fourth random impact'
    description3 = 'In this step we calculated a fourth random impact'
    data = {
        'start_time': '20140714_060955',
        'finish_time': '20140714_061255',
        'hazard_layer': 'path/to/hazard/layer',
        'exposure_layer': 'path/to/exposure/layer',
        'impact_function_id': 'IF_id',
        'impact_function_version': '2.1',
        'host_name': 'my_computer',
        'user': '******',
        'qgis_version': '2.4',
        'gdal_version': '1.9.1',
        'qt_version': '4.5',
        'pyqt_version': '5.1',
        'os': 'ubuntu 12.04',
        'inasafe_version': '2.1',
        'exposure_pixel_size': '0.1',
        'hazard_pixel_size': '0.2',
        'impact_pixel_size': '0.1',
        'analysis_extent': [0, 1, 2, 2],
        'parameter': {}
    }
    provenance.append_step(title3, description3, data=data)

    self.assertEqual(provenance.count, 4)
    self.assertEqual(provenance.get(1).title, title1)
    self.assertEqual(provenance.last.title, title3)
    self.assertEqual(provenance.last.data(), data)

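# The two tests above exercise Provenance purely through its public surface:
# append_step() / append_if_provenance_step(), the steps list, count, get(index),
# last, and a step's title and data() accessors. The classes below are NOT the
# InaSAFE implementation; they are a minimal, hypothetical sketch written only
# to make the behaviour those tests expect explicit. Names _StepSketch and
# _ProvenanceSketch are invented for this illustration.
from datetime import datetime


class _StepSketch(object):
    """Hypothetical minimal provenance step: title, description, time, data."""

    def __init__(self, title, description, timestamp=None, data=None):
        self.title = title
        self.description = description
        self.time = timestamp or datetime.now()
        self._data = data or {}

    def data(self):
        """Return the dictionary attached to this step."""
        return self._data


class _ProvenanceSketch(object):
    """Hypothetical minimal container mirroring the API used in the tests."""

    def __init__(self):
        self.steps = []

    def append_step(self, title, description, timestamp=None, data=None):
        """Append a step and return its time, as the metadata classes expect."""
        self.steps.append(_StepSketch(title, description, timestamp, data))
        return self.steps[-1].time

    def append_if_provenance_step(
            self, title, description, timestamp=None, data=None):
        """In this sketch an IF step is just a plain step carrying data."""
        return self.append_step(title, description, timestamp, data)

    @property
    def count(self):
        return len(self.steps)

    def get(self, index):
        return self.steps[index]

    @property
    def last(self):
        return self.steps[-1]
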
def __init__(self): """Base class constructor. All derived classes should normally call this constructor e.g.:: def __init__(self): super(FloodImpactFunction, self).__init__() """ # User who runs this self._user = getpass.getuser().replace(' ', '_') # The host that runs this self._host_name = gethostname() # Requested extent to use self._requested_extent = None # Requested extent's CRS self._requested_extent_crs = QgsCoordinateReferenceSystem('EPSG:4326') # The current viewport extent of the map canvas self._viewport_extent = None # Actual extent to use - Read Only # For 'old-style' IF we do some manipulation to the requested extent self._actual_extent = None # Actual extent's CRS - Read Only self._actual_extent_crs = QgsCoordinateReferenceSystem('EPSG:4326') # set this to a gui call back / web callback etc as needed. self._callback = self.console_progress_callback # Set the default parameters self._parameters = self._metadata.parameters() # Layer representing hazard e.g. flood self._hazard = None # Layer representing people / infrastructure that are exposed self._exposure = None # Layer used for aggregating results by area / district self._aggregation = None # The best extents to use for the assessment self._clip_parameters = None # Clip features that extend beyond the extents. self._clip_hard = False # Show intermediate layers. self._show_intermediate_layers = False # Force memory. self._force_memory = False # Layer produced by the impact function self._impact = None # The question of the impact function self._question = None # Post analysis Result dictionary (suitable to conversion to json etc.) self._tabulated_impact = None # Style information for the impact layer - at some point we should # formalise this into a more natural model # ABC's will normally set this property. self._impact_style = None # The target field for vector impact layer self._target_field = 'safe_ag' # The string to mark not affected value in the vector impact layer self._not_affected_value = 'Not Affected' # Store provenances self._provenances = Provenance() # Start time self._start_time = None self.provenance.append_step('Initialize Impact Function', 'Impact function is being initialized')
class ImpactLayerMetadata(BaseMetadata):
    """
    Metadata class for impact layers

    if you need to add a standard XML property that only applies to this
    subclass, do it this way. @property and @propname.setter will be
    generated automatically

    _standard_properties = {
        'TESTprop': (
            'gmd:identificationInfo/'
            'gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/'
            'gco:CharacterString')
    }
    from safe.metadata.utils import merge_dictionaries
    _standard_properties = merge_dictionaries(
        BaseMetadata._standard_properties, _standard_properties)

    .. versionadded:: 3.2
    """

    # remember to add an attribute or a setter property with the same name
    # these are properties that need special getters and setters thus are
    # not put in the standard_properties
    _standard_properties = {
        'elapsed_time': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/elapsed_time/'
            'gco:Integer'),
        'hazard_title': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/hazard_title/'
            'gco:CharacterString'),
        'postprocessing_report': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/postprocessing_report/'
            'gco:CharacterString'),
        'exposure_title': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/exposure_title/'
            'gco:CharacterString'),
        'legend_title': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/legend_title/'
            'gco:CharacterString'),
        'legend_notes': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/legend_notes/'
            'gco:CharacterString'),
        'exposure_source': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/exposure_source/'
            'gco:CharacterString'),
        'map_title': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/map_title/'
            'gco:CharacterString'),
        'legend_units': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/legend_units/'
            'gco:CharacterString'),
        'impact_summary': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/impact_summary/'
            'gco:CharacterString'),
        'user': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/user/'
            'gco:CharacterString'),
        'host_name': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/host_name/'
            'gco:CharacterString'),
        'time_stamp': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/time_stamp/'
            'gco:CharacterString'),
        'hazard_source': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/hazard_source/'
            'gco:CharacterString'),
        'target_field': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/target_field/'
            'gco:CharacterString'),
        'impact_table': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/impact_table/'
            'gco:CharacterString'),
        'statistics_classes': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/statistics_classes/'
            'gco:List'),
        'statistics_type': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/statistics_type/'
            'gco:CharacterString'),
    }
    _standard_properties = merge_dictionaries(
        BaseMetadata._standard_properties, _standard_properties)

    _special_properties = {
        'provenance': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe_provenance')
    }

    def __init__(self, layer_uri, xml_uri=None, json_uri=None):
        """Constructor.

        :param layer_uri: uri of the layer the metadata describes
        :type layer_uri: str
        :param xml_uri: uri of an xml file to use
        :type xml_uri: str
        :param json_uri: uri of a json file to use
        :type json_uri: str
        """
        # Initialise members
        # private members
        self._provenance = Provenance()
        # public members
        self.summary_data = None

        # initialize base class
        super(ImpactLayerMetadata, self).__init__(
            layer_uri, xml_uri, json_uri)

    @property
    def dict(self):
        """Call the overridden method and add provenance and summary data.

        :return: dictionary representation of the metadata
        :rtype: dict
        """
        metadata = super(ImpactLayerMetadata, self).dict
        metadata['provenance'] = self.provenance
        metadata['summary_data'] = self.summary_data
        return metadata

    @property
    def json(self):
        """json representation of the metadata.

        :return: json representation of the metadata
        :rtype: str
        """
        metadata = self.dict
        metadata['provenance'] = self.provenance.dict
        json_dumps = json.dumps(
            metadata,
            indent=2,
            sort_keys=True,
            separators=(',', ': '),
            cls=MetadataEncoder)
        if not json_dumps.endswith('\n'):
            json_dumps += '\n'
        return json_dumps

    def read_json(self):
        """Read metadata from json and set all the found properties.

        :return: the read metadata
        :rtype: dict
        """
        with reading_ancillary_files(self):
            metadata = super(ImpactLayerMetadata, self).read_json()
            if 'provenance' in metadata:
                for provenance_step in metadata['provenance']:
                    try:
                        title = provenance_step['title']
                        if 'IF Provenance' in title:
                            self.append_if_provenance_step(
                                provenance_step['title'],
                                provenance_step['description'],
                                provenance_step['time'],
                                provenance_step['data']
                            )
                        else:
                            self.append_provenance_step(
                                provenance_step['title'],
                                provenance_step['description'],
                                provenance_step['time'],
                            )
                    except KeyError:
                        # we want to get as much as we can without raising
                        # errors
                        pass
            if 'summary_data' in metadata:
                self.summary_data = metadata['summary_data']
        return metadata

    @property
    def xml(self):
        """xml representation of the metadata.

        :return: xml representation of the metadata
        :rtype: ElementTree.Element
        """
        root = super(ImpactLayerMetadata, self).xml

        provenance_path = self._special_properties['provenance']
        provenance_element = root.find(provenance_path, XML_NS)

        # find the provenance parent tag
        if provenance_element is not None:
            # there is already a provenance tag so we remove it
            provenance_parent = provenance_element.getparent()
            provenance_parent.remove(provenance_element)
        else:
            # find the parent using the provenance path minus one level
            provenance_parent = '/'.join(provenance_path.split('/')[:-1])
            provenance_parent = root.find(provenance_parent, XML_NS)

        # generate the provenance xml element
        provenance_element = ElementTree.fromstring(self.provenance.xml)
        provenance_parent.append(provenance_element)

        return prettify_xml(ElementTree.tostring(root))

    def read_xml(self):
        """Read metadata from xml and set all the found properties.

        :return: the root element of the xml
        :rtype: ElementTree.Element
        """
        with reading_ancillary_files(self):
            root = super(ImpactLayerMetadata, self).read_xml()
            if root is not None:
                self._read_provenance_from_xml(root)
        return root

    def _read_provenance_from_xml(self, root):
        """Read metadata provenance from xml.

        :param root: container in which we search
        :type root: ElementTree.Element
        """
        path = self._special_properties['provenance']
        provenance = root.find(path, XML_NS)
        for step in provenance.iter('provenance_step'):
            title = step.find('title').text
            description = step.find('description').text
            timestamp = step.get('timestamp')
            if 'IF Provenance' in title:
                data = {}
                from safe.metadata.provenance import IFProvenanceStep
                keys = IFProvenanceStep.impact_functions_fields
                for key in keys:
                    value = step.find(key)
                    if value is not None:
                        data[key] = value.text
                    else:
                        data[key] = ''
                self.append_if_provenance_step(
                    title, description, timestamp, data)
            else:
                self.append_provenance_step(title, description, timestamp)

    @property
    def provenance(self):
        """Get the provenance elements of the metadata.

        There is no setter as provenance can only grow.
        Use append_provenance_step to add steps.

        :return: The provenance element
        :rtype: Provenance
        """
        return self._provenance

    def append_provenance_step(self, title, description, timestamp=None):
        """Add a step to the provenance of the metadata.

        :param title: The title of the step.
        :type title: str
        :param description: The content of the step.
        :type description: str
        :param timestamp: The time of the step.
        :type timestamp: datetime, str
        """
        step_time = self._provenance.append_step(
            title, description, timestamp)
        if step_time > self.last_update:
            self.last_update = step_time

    def append_if_provenance_step(
            self, title, description, timestamp=None, data=None):
        """Add an IF provenance step to the provenance of the metadata.

        :param title: The title of the step.
        :type title: str
        :param description: The content of the step.
        :type description: str
        :param timestamp: The time of the step.
        :type timestamp: datetime, str
        :param data: The data of the step.
        :type data: dict
        """
        step_time = self._provenance.append_if_provenance_step(
            title, description, timestamp, data)
        if step_time > self.last_update:
            self.last_update = step_time

    def update_from_dict(self, keywords):
        """Update metadata values from a keywords dictionary.

        :param keywords: a dictionary of metadata keywords
        :type keywords: dict
        """
        super(ImpactLayerMetadata, self).update_from_dict(keywords)

        if 'if_provenance' in keywords.keys():
            if_provenance = keywords['if_provenance']
            for provenance_step in if_provenance:
                self.provenance.append_provenance_step(provenance_step)

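# Usage sketch for the ImpactLayerMetadata class above; a minimal illustration,
# not part of the test suite. It assumes an InaSAFE 3.x environment where
# ImpactLayerMetadata is importable from safe.metadata; the layer URI and step
# texts below are placeholders, not taken from a real analysis.
from safe.metadata import ImpactLayerMetadata

metadata = ImpactLayerMetadata('/tmp/impact_layer.shp')
metadata.append_provenance_step(
    'Clipped layers',
    'Hazard and exposure layers clipped to the analysis extent')
metadata.append_provenance_step(
    'Ran impact function',
    'Impact calculated on the clipped layers')
# provenance is folded into the dict / json representations defined above
json_string = metadata.json
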
class ImpactLayerMetadata(BaseMetadata):
    """
    Metadata class for impact layers

    if you need to add a standard XML property that only applies to this
    subclass, do it this way. @property and @propname.setter will be
    generated automatically

    _standard_properties = {
        'TESTprop': (
            'gmd:identificationInfo/'
            'gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/'
            'gco:CharacterString')
    }
    from safe.metadata.utils import merge_dictionaries
    _standard_properties = merge_dictionaries(
        BaseMetadata._standard_properties, _standard_properties)

    .. versionadded:: 3.2
    """

    # remember to add an attribute or a setter property with the same name
    # these are properties that need special getters and setters thus are
    # not put in the standard_properties
    _standard_properties = {
        'elapsed_time': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/elapsed_time/'
            'gco:Integer'),
        'hazard_title': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/hazard_title/'
            'gco:CharacterString'),
        'postprocessing_report': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/postprocessing_report/'
            'gco:CharacterString'),
        'exposure_title': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/exposure_title/'
            'gco:CharacterString'),
        'legend_title': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/legend_title/'
            'gco:CharacterString'),
        'legend_notes': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/legend_notes/'
            'gco:CharacterString'),
        'exposure_source': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/exposure_source/'
            'gco:CharacterString'),
        'map_title': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/map_title/'
            'gco:CharacterString'),
        'legend_units': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/legend_units/'
            'gco:CharacterString'),
        'impact_summary': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/impact_summary/'
            'gco:CharacterString'),
        'user': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/user/'
            'gco:CharacterString'),
        'host_name': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/host_name/'
            'gco:CharacterString'),
        'time_stamp': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/time_stamp/'
            'gco:CharacterString'),
        'hazard_source': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/hazard_source/'
            'gco:CharacterString'),
        'target_field': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/target_field/'
            'gco:CharacterString'),
        'impact_table': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe/impact_table/'
            'gco:CharacterString'),
    }
    _standard_properties = merge_dictionaries(
        BaseMetadata._standard_properties, _standard_properties)

    _special_properties = {
        'provenance': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe_provenance')
    }

    def __init__(self, layer_uri, xml_uri=None, json_uri=None):
        """Constructor.

        :param layer_uri: uri of the layer the metadata describes
        :type layer_uri: str
        :param xml_uri: uri of an xml file to use
        :type xml_uri: str
        :param json_uri: uri of a json file to use
        :type json_uri: str
        """
        # Initialise members
        # private members
        self._provenance = Provenance()
        # public members
        self.summary_data = None

        # initialize base class
        super(ImpactLayerMetadata, self).__init__(
            layer_uri, xml_uri, json_uri)

    @property
    def dict(self):
        """Call the overridden method and add provenance and summary data.

        :return: dictionary representation of the metadata
        :rtype: dict
        """
        metadata = super(ImpactLayerMetadata, self).dict
        metadata['provenance'] = self.provenance
        metadata['summary_data'] = self.summary_data
        return metadata

    @property
    def json(self):
        """json representation of the metadata.

        :return: json representation of the metadata
        :rtype: str
        """
        metadata = self.dict
        metadata['provenance'] = self.provenance.dict
        json_dumps = json.dumps(
            metadata,
            indent=2,
            sort_keys=True,
            separators=(',', ': '),
            cls=MetadataEncoder)
        if not json_dumps.endswith('\n'):
            json_dumps += '\n'
        return json_dumps

    def read_json(self):
        """Read metadata from json and set all the found properties.

        :return: the read metadata
        :rtype: dict
        """
        with reading_ancillary_files(self):
            metadata = super(ImpactLayerMetadata, self).read_json()
            if 'provenance' in metadata:
                for provenance_step in metadata['provenance']:
                    try:
                        title = provenance_step['title']
                        if 'IF Provenance' in title:
                            self.append_if_provenance_step(
                                provenance_step['title'],
                                provenance_step['description'],
                                provenance_step['time'],
                                provenance_step['data']
                            )
                        else:
                            self.append_provenance_step(
                                provenance_step['title'],
                                provenance_step['description'],
                                provenance_step['time'],
                            )
                    except KeyError:
                        # we want to get as much as we can without raising
                        # errors
                        pass
            if 'summary_data' in metadata:
                self.summary_data = metadata['summary_data']
        return metadata

    @property
    def xml(self):
        """xml representation of the metadata.

        :return: xml representation of the metadata
        :rtype: ElementTree.Element
        """
        root = super(ImpactLayerMetadata, self).xml

        provenance_path = self._special_properties['provenance']
        provenance_element = root.find(provenance_path, XML_NS)

        # find the provenance parent tag
        if provenance_element is not None:
            # there is already a provenance tag so we remove it
            provenance_parent = provenance_element.getparent()
            provenance_parent.remove(provenance_element)
        else:
            # find the parent using the provenance path minus one level
            provenance_parent = '/'.join(provenance_path.split('/')[:-1])
            provenance_parent = root.find(provenance_parent, XML_NS)

        # generate the provenance xml element
        provenance_element = ElementTree.fromstring(self.provenance.xml)
        provenance_parent.append(provenance_element)

        return prettify_xml(ElementTree.tostring(root))

    def read_xml(self):
        """Read metadata from xml and set all the found properties.

        :return: the root element of the xml
        :rtype: ElementTree.Element
        """
        with reading_ancillary_files(self):
            root = super(ImpactLayerMetadata, self).read_xml()
            if root is not None:
                self._read_provenance_from_xml(root)
        return root

    def _read_provenance_from_xml(self, root):
        """Read metadata provenance from xml.

        :param root: container in which we search
        :type root: ElementTree.Element
        """
        path = self._special_properties['provenance']
        provenance = root.find(path, XML_NS)
        for step in provenance.iter('provenance_step'):
            title = step.find('title').text
            description = step.find('description').text
            timestamp = step.get('timestamp')
            if 'IF Provenance' in title:
                data = {}
                from safe.metadata.provenance import IFProvenanceStep
                keys = IFProvenanceStep.impact_functions_fields
                for key in keys:
                    value = step.find(key)
                    if value is not None:
                        data[key] = value.text
                    else:
                        data[key] = ''
                self.append_if_provenance_step(
                    title, description, timestamp, data)
            else:
                self.append_provenance_step(title, description, timestamp)

    @property
    def provenance(self):
        """Get the provenance elements of the metadata.

        There is no setter as provenance can only grow.
        Use append_provenance_step to add steps.

        :return: The provenance element
        :rtype: Provenance
        """
        return self._provenance

    def append_provenance_step(self, title, description, timestamp=None):
        """Add a step to the provenance of the metadata.

        :param title: The title of the step.
        :type title: str
        :param description: The content of the step.
        :type description: str
        :param timestamp: The time of the step.
        :type timestamp: datetime, str
        """
        step_time = self._provenance.append_step(
            title, description, timestamp)
        if step_time > self.last_update:
            self.last_update = step_time

    def append_if_provenance_step(
            self, title, description, timestamp=None, data=None):
        """Add an IF provenance step to the provenance of the metadata.

        :param title: The title of the step.
        :type title: str
        :param description: The content of the step.
        :type description: str
        :param timestamp: The time of the step.
        :type timestamp: datetime, str
        :param data: The data of the step.
        :type data: dict
        """
        step_time = self._provenance.append_if_provenance_step(
            title, description, timestamp, data)
        if step_time > self.last_update:
            self.last_update = step_time

    def update_from_dict(self, keywords):
        """Update metadata values from a keywords dictionary.

        :param keywords: a dictionary of metadata keywords
        :type keywords: dict
        """
        super(ImpactLayerMetadata, self).update_from_dict(keywords)

        if 'if_provenance' in keywords.keys():
            if_provenance = keywords['if_provenance']
            for provenance_step in if_provenance:
                self.provenance.append_provenance_step(provenance_step)

class ImpactLayerMetadata(BaseMetadata):
    """
    Metadata class for impact layers

    if you need to add a standard XML property that only applies to this
    subclass, do it this way. @property and @propname.setter will be
    generated automatically

    _standard_properties = {
        'TESTprop': (
            'gmd:identificationInfo/'
            'gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/'
            'gco:CharacterString')
    }
    from safe.metadata.utils import merge_dictionaries
    _standard_properties = merge_dictionaries(
        BaseMetadata._standard_properties, _standard_properties)

    .. versionadded:: 3.2
    """

    # remember to add an attribute or a setter property with the same name
    # these are properties that need special getters and setters thus are
    # not put in the standard_properties
    _special_properties = {
        'provenance': (
            'gmd:identificationInfo/gmd:MD_DataIdentification/'
            'gmd:supplementalInformation/inasafe_provenance')
    }

    def __init__(self, layer_uri, xml_uri=None, json_uri=None):
        """Constructor.

        :param layer_uri: uri of the layer the metadata describes
        :type layer_uri: str
        :param xml_uri: uri of an xml file to use
        :type xml_uri: str
        :param json_uri: uri of a json file to use
        :type json_uri: str
        """
        # Initialise members
        # private members
        self._provenance = Provenance()
        # public members
        self.summary_data = None

        # initialize base class
        super(ImpactLayerMetadata, self).__init__(
            layer_uri, xml_uri, json_uri)

    @property
    def dict(self):
        """Call the overridden method and add provenance and summary data.

        :return: dictionary representation of the metadata
        :rtype: dict
        """
        metadata = super(ImpactLayerMetadata, self).dict
        metadata['provenance'] = self.provenance
        metadata['summary_data'] = self.summary_data
        return metadata

    @property
    def json(self):
        """json representation of the metadata.

        :return: json representation of the metadata
        :rtype: str
        """
        metadata = self.dict
        metadata['provenance'] = self.provenance.dict
        return json.dumps(metadata, indent=2, sort_keys=True)

    def read_json(self):
        """Read metadata from json and set all the found properties.

        :return: the read metadata
        :rtype: dict
        """
        with reading_ancillary_files(self):
            metadata = super(ImpactLayerMetadata, self).read_json()
            if 'provenance' in metadata:
                for provenance_step in metadata['provenance']:
                    try:
                        self.append_provenance_step(
                            provenance_step['title'],
                            provenance_step['description'],
                            provenance_step['time'],
                        )
                    except KeyError:
                        # we want to get as much as we can without raising
                        # errors
                        pass
            if 'summary_data' in metadata:
                self.summary_data = metadata['summary_data']
        return metadata

    @property
    def xml(self):
        """xml representation of the metadata.

        :return: xml representation of the metadata
        :rtype: ElementTree.Element
        """
        root = super(ImpactLayerMetadata, self).xml

        provenance_path = self._special_properties['provenance']
        provenance_element = root.find(provenance_path, XML_NS)

        # find the provenance parent tag
        if provenance_element is not None:
            # there is already a provenance tag so we remove it
            provenance_parent = provenance_element.getparent()
            provenance_parent.remove(provenance_element)
        else:
            # find the parent using the provenance path minus one level
            provenance_parent = '/'.join(provenance_path.split('/')[:-1])
            provenance_parent = root.find(provenance_parent, XML_NS)

        # generate the provenance xml element
        provenance_element = ElementTree.fromstring(self.provenance.xml)
        provenance_parent.append(provenance_element)

        return prettify_xml(ElementTree.tostring(root))

    def read_xml(self):
        """Read metadata from xml and set all the found properties.

        :return: the root element of the xml
        :rtype: ElementTree.Element
        """
        with reading_ancillary_files(self):
            root = super(ImpactLayerMetadata, self).read_xml()
            if root is not None:
                self._read_provenance_from_xml(root)
        return root

    def _read_provenance_from_xml(self, root):
        """Read metadata provenance from xml.

        :param root: container in which we search
        :type root: ElementTree.Element
        """
        path = self._special_properties['provenance']
        provenance = root.find(path, XML_NS)
        for step in provenance.iter('provenance_step'):
            title = step.find('title').text
            description = step.find('description').text
            timestamp = step.get('timestamp')
            self.append_provenance_step(title, description, timestamp)

    @property
    def provenance(self):
        """Get the provenance elements of the metadata.

        There is no setter as provenance can only grow.
        Use append_provenance_step to add steps.

        :return: The provenance element
        :rtype: Provenance
        """
        return self._provenance

    def append_provenance_step(self, title, description, timestamp=None):
        """Add a step to the provenance of the metadata.

        :param title: the title of the step
        :type title: str
        :param description: the content of the step
        :type description: str
        :param timestamp: the time of the step
        :type timestamp: datetime
        """
        step_time = self._provenance.append_step(
            title, description, timestamp)
        if step_time > self.last_update:
            self.last_update = step_time
