def write_iso19115_metadata(layer_uri, keywords):
    """Create a metadata object from a layer path and keywords dictionary.

    :param layer_uri: Uri to layer.
    :type layer_uri: str

    :param keywords: Dictionary of keywords.
    :type keywords: dict

    :return: The metadata object that was written.
    """
    if 'layer_purpose' in keywords.keys():
        if keywords['layer_purpose'] == 'exposure':
            metadata = ExposureLayerMetadata(layer_uri)
        elif keywords['layer_purpose'] == 'hazard':
            metadata = HazardLayerMetadata(layer_uri)
        elif keywords['layer_purpose'] == 'aggregation':
            metadata = AggregationLayerMetadata(layer_uri)
        elif keywords['layer_purpose'] == 'impact':
            metadata = ImpactLayerMetadata(layer_uri)
        else:
            metadata = GenericLayerMetadata(layer_uri)
    else:
        metadata = GenericLayerMetadata(layer_uri)

    metadata.update_from_dict(keywords)
    # Always set keyword_version to the latest one.
    metadata.update_from_dict({'keyword_version': inasafe_keyword_version})

    if metadata.layer_is_file_based:
        xml_file_path = os.path.splitext(layer_uri)[0] + '.xml'
        metadata.write_to_file(xml_file_path)
    else:
        metadata.write_to_db()

    return metadata
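# Usage sketch (illustrative only): writing keywords for a hazard layer.
# 'flood.shp' is a hypothetical file-based layer path; for file-based
# layers the metadata is written to a sidecar 'flood.xml' next to the
# layer, and the returned object is the matching *LayerMetadata instance.
def _example_write_keywords():
    keywords = {
        'layer_purpose': 'hazard',
        'title': 'Flood depth',
    }
    return write_iso19115_metadata('flood.shp', keywords)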
def test_standard_properties(self):
    metadata = AggregationLayerMetadata(unique_filename())
    with self.assertRaises(KeyError):
        metadata.get_property('non_existing_key')

    # from BaseMetadata
    metadata.get_property('organisation')
    # from AggregationLayerMetadata
    metadata.get_property('aggregation_attribute')
    metadata.get_property('adult_ratio_attribute')
    metadata.get_property('adult_ratio_default')
    metadata.get_property('elderly_ratio_attribute')
    metadata.get_property('elderly_ratio_default')
    metadata.get_property('female_ratio_attribute')
    metadata.get_property('female_ratio_default')
    metadata.get_property('youth_ratio_attribute')
    metadata.get_property('youth_ratio_default')
def read_iso19115_metadata(layer_uri, keyword=None):
    """Retrieve keywords from a metadata object.

    :param layer_uri: Uri to the layer.
    :type layer_uri: str

    :param keyword: If given, only this keyword's value is returned.
    :type keyword: str

    :return: Dictionary of keywords, or the value of the requested keyword.
    """
    xml_uri = os.path.splitext(layer_uri)[0] + '.xml'
    if not os.path.exists(xml_uri):
        xml_uri = None
    if not xml_uri and os.path.exists(layer_uri):
        message = 'Layer based file but no xml file.\n'
        message += 'Layer path: %s.' % layer_uri
        raise NoKeywordsFoundError(message)

    metadata = GenericLayerMetadata(layer_uri, xml_uri)
    if metadata.layer_purpose == 'exposure':
        metadata = ExposureLayerMetadata(layer_uri, xml_uri)
    elif metadata.layer_purpose == 'hazard':
        metadata = HazardLayerMetadata(layer_uri, xml_uri)
    elif metadata.layer_purpose == 'aggregation':
        metadata = AggregationLayerMetadata(layer_uri, xml_uri)
    elif metadata.layer_purpose == 'impact':
        metadata = ImpactLayerMetadata(layer_uri, xml_uri)

    # Gather all non-None keyword values to verify a keyword_version exists.
    keywords = {
        x[0]: x[1]['value']
        for x in metadata.dict['properties'].iteritems()
        if x[1]['value'] is not None}
    if 'keyword_version' not in keywords.keys() and xml_uri:
        message = 'No keyword version found. Metadata xml file is invalid.\n'
        message += 'Layer uri: %s\n' % layer_uri
        message += 'Keywords file: %s\n' % os.path.exists(
            os.path.splitext(layer_uri)[0] + '.xml')
        message += 'keywords:\n'
        for k, v in keywords.iteritems():
            message += '%s: %s\n' % (k, v)
        raise MetadataReadError(message)

    # Rebuild the keywords: the attributes listed in `included` are kept
    # even when their value is None, everything else only when not None.
    keywords = {}
    temp_keywords = {
        x[0]: x[1]['value']
        for x in metadata.dict['properties'].iteritems()}
    included = [
        'aggregation attribute',
        'female ratio attribute',
        'youth ratio attribute',
        'adult ratio attribute',
        'elderly ratio attribute',
    ]
    for key in temp_keywords.iterkeys():
        if key in included:
            keywords[key] = temp_keywords[key]
        else:
            if temp_keywords[key] is not None:
                keywords[key] = temp_keywords[key]

    if keyword:
        try:
            return keywords[keyword]
        except KeyError:
            message = 'Keyword with key %s is not found.\n' % keyword
            message += 'Layer path: %s' % layer_uri
            raise KeywordNotFoundError(message)

    if isinstance(metadata, ImpactLayerMetadata):
        keywords['if_provenance'] = metadata.provenance

    return keywords
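# Usage sketch (illustrative only): reading keywords back from the sidecar
# xml written above. 'flood.shp' is a hypothetical layer path. Without the
# keyword argument the full keywords dictionary is returned; with it, only
# that keyword's value. Missing xml raises NoKeywordsFoundError and an
# unknown keyword raises KeywordNotFoundError.
def _example_read_keywords():
    all_keywords = read_iso19115_metadata('flood.shp')
    purpose = read_iso19115_metadata('flood.shp', keyword='layer_purpose')
    return all_keywords, purpose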