def remove_attrib(self, attribute):
    """Delete *attribute* from this object's attribute mapping.

    Missing attributes are logged rather than raised, so removal is
    best-effort. Returns the remaining attributes via get_attrib().
    """
    try:
        self.attributes.pop(attribute)
    except KeyError:
        logwrite("Attribute %s does not exist" % attribute)
    return self.get_attrib()
def __init__(self, feature_class=None, feature_layer=None, metadata_file=None,
             items=None, temp_folder=metadata_temp_folder):
    """Set up a metadata editor for a feature class or a feature layer.

    Exports the dataset's metadata to a temporary XML file, parses it, and
    builds the individual metadata item accessors (abstract, purpose, tags,
    title).

    :param feature_class: path to a feature class (mutually exclusive with
        feature_layer)
    :param feature_layer: a feature layer (mutually exclusive with
        feature_class)
    :param metadata_file: an existing metadata XML file to edit directly
    :param items: optional list of pre-built metadata items to manage
    :param temp_folder: folder for temporary export files; created with
        tempfile.mkdtemp when not provided
    :raises ValueError: if both feature_class and feature_layer are given
    """
    # Fix: the original default was items=list(), a mutable default shared
    # across every instance, so items leaked from one editor into the next.
    if items is None:
        items = []
    self.items = items
    self.metadata_file = metadata_file
    self.elements = xml.etree.ElementTree.ElementTree()
    self.feature_class = feature_class
    self.feature_layer = feature_layer
    self.temp_folder = temp_folder
    self.created_temp_folder = False

    if self.feature_class and self.feature_layer:
        raise ValueError("MetadataEditor can only use either feature_class or feature_layer - do not provide both")

    if not self.temp_folder:
        self.temp_folder = tempfile.mkdtemp("arcpy_metadata")
        self.created_temp_folder = True

    if self.feature_layer:
        # if we are using a feature layer, we'll turn it into an in_memory
        # feature class for the remainder
        logwrite("Copying layer to a feature class")
        self.feature_class = arcpy.CreateScratchName("pisces_metadata_temp", "", "", arcpy.env.workspace)
        arcpy.CopyFeatures_management(self.feature_layer, self.feature_class)  # copy the features over

    if self.feature_class:
        # for both, we want to export the metadata out to the temporary location
        metadata_filename = arcpy.CreateScratchName("pisces", "metadata", "xml", self.temp_folder)
        self.metadata_file = os.path.join(self.temp_folder, metadata_filename)
        logwrite("Exporting metadata to temporary file %s" % self.metadata_file)
        arcpy.ExportMetadata_conversion(self.feature_class, translation_file, self.metadata_file)

    self.elements.parse(self.metadata_file)

    # create these all after the parsing happens so that if they have any
    # self initialization, they can correctly perform it
    self.abstract = MetadataAbstract(parent=self)
    self.purpose = MetadataPurpose(parent=self)
    self.tags = MetadataTags(parent=self)
    self.title = MetadataTitle(parent=self)

    self.items.extend([self.abstract, self.purpose, self.tags, self.title])

    if items:
        self.initialize_items()
def save(self):
    """Flush every metadata item into the XML tree, write the tree to disk,
    and, for non-filesystem workspaces, import the result onto the dataset."""
    logwrite("Saving metadata", True)

    for entry in self.items:
        entry._write()

    # the write overwrites the exported XML file in place
    self.elements.write(self.metadata_file)

    if self._workspace_type != 'FileSystem':
        arcpy.ImportMetadata_conversion(self.metadata_file, "FROM_ARCGIS",
                                        self.dataset,
                                        Enable_automatic_updates=True)
def cleanup(self, delete_created_fc=False):
    """Best-effort removal of the temporary metadata XML and its XSLT log.

    Only applies to non-filesystem workspaces, where the metadata was
    exported to a temporary file; filesystem datasets are edited in place.

    :param delete_created_fc: accepted for interface compatibility; not used
        in this variant
    """
    try:
        logwrite("cleaning up from metadata operation")
        if self._workspace_type != 'FileSystem':
            if os.path.exists(self.metadata_file):
                os.remove(self.metadata_file)

            # the metadata export leaves a transform log next to the XML file
            xsl_extras = self.metadata_file + "_xslttransfor.log"
            if os.path.exists(xsl_extras):
                os.remove(xsl_extras)
    except Exception:  # fix: bare except also swallowed SystemExit/KeyboardInterrupt
        logwarning("Unable to remove temporary metadata files")
def save(self, to_feature_class=True):
    """Write every metadata item into the XML tree, flush the tree to disk,
    and optionally push the result back onto the feature class/layer."""
    logwrite("Saving metadata", True)

    for metadata_item in self.items:
        metadata_item._write()

    self.elements.write(self.metadata_file)  # overwrites itself

    if to_feature_class and self.feature_class:
        # Enable_automatic_updates stays False: automatic updates would undo
        # some of what we set, and we just exported off this dataset anyway
        arcpy.ImportMetadata_conversion(self.metadata_file, "FROM_FGDC",
                                        self.feature_class,
                                        Enable_automatic_updates=False)

    if self.feature_layer:
        # we started with a feature layer, so recreate it with the new metadata
        arcpy.Delete_management(self.feature_layer)  # delete the existing feature layer
        arcpy.MakeFeatureLayer_management(self.feature_class, self.feature_layer)
def save(self, Enable_automatic_updates=False):
    """Write the element tree to disk and re-import it onto the dataset.

    NOTE(review): items are only printed here, never written via
    item._write() — looks like leftover debugging; confirm before relying
    on this save() variant.

    :param Enable_automatic_updates: forwarded to ImportMetadata_conversion
    """
    logwrite("Saving metadata", True)

    for item in self.items:
        try:
            print(item.value)
        except AttributeError:  # fix: bare except; only a missing .value is expected here
            print(item)

    self.elements.write(self.metadata_file)  # overwrites itself

    if self._workspace_type != 'FileSystem':
        arcpy.ImportMetadata_conversion(self.metadata_file, "FROM_ARCGIS",
                                        self.dataset,
                                        Enable_automatic_updates=Enable_automatic_updates)
def cleanup(self, delete_created_fc=False):
    """Best-effort removal of temporary metadata artifacts.

    Removes the exported XML, the XSLT transform log, optionally the scratch
    feature class created from a feature layer, and the temp folder when this
    editor created it.

    :param delete_created_fc: when True and a feature layer was exported to a
        scratch feature class, delete that feature class too
    """
    try:
        logwrite("cleaning up from metadata operation")  # fix: "cleaninup up" typo
        if os.path.exists(self.metadata_file):
            os.remove(self.metadata_file)

        # the metadata export leaves a transform log next to the XML file
        xsl_extras = self.metadata_file + "_xslttransfor.log"
        if os.path.exists(xsl_extras):
            os.remove(xsl_extras)

        if self.feature_layer and delete_created_fc:
            # for people who passed in a feature layer: delete the exported feature class
            arcpy.Delete_management(self.feature_class)

        if self.created_temp_folder:
            # fix: temp_folder is a directory (tempfile.mkdtemp); os.remove()
            # always raised OSError on it, so the folder was never cleaned up
            os.rmdir(self.temp_folder)
    except Exception:  # fix: bare except also swallowed SystemExit/KeyboardInterrupt
        logwarning("Unable to remove temporary metadata files")
def save(self, Enable_automatic_updates=False):
    """Write the element tree to disk and re-import it onto the dataset.

    NOTE(review): items are only printed here, never written via
    item._write() — looks like leftover debugging; confirm before relying
    on this save() variant.

    :param Enable_automatic_updates: forwarded to ImportMetadata_conversion
    """
    logwrite("Saving metadata", True)

    for item in self.items:
        try:
            print(item.value)
        except AttributeError:  # fix: bare except; only a missing .value is expected here
            print(item)

    # fix: removed the commented-out item._write() loop (dead code)
    self.elements.write(self.metadata_file)  # overwrites itself

    if self._workspace_type != 'FileSystem':
        arcpy.ImportMetadata_conversion(self.metadata_file, "FROM_ARCGIS",
                                        self.dataset,
                                        Enable_automatic_updates=Enable_automatic_updates)
def user_entry_data(self, *function):
    """Collect user-entered fields for a work-log entry and persist them.

    Builds a named tuple from the positional values, converts it to an
    OrderedDict, appends it to a local list, and hands that list to
    logwrite() for writing to the database. Prints a confirmation on
    success, or an apology when logwrite raises ValueError.

    :param function: positional values in the order employee_name, date,
        project_name, duration, optional_notes (matching the namedtuple
        fields below)
    :return: None
    """
    # entries collected for this call; consumed by logwrite()
    datalist = []

    useri = collections.namedtuple('useri', [
        'employee_name', 'date', 'project_name', 'duration', 'optional_notes'
    ])
    user_data = useri(*function)

    # user_data appended to datalist as an OrderedDict
    datalist.append(user_data._asdict())
    print(datalist)

    # pass the collected data to logwrite; confirm on success, otherwise
    # report that the entry was not saved
    try:
        logwrite(datalist)
        # confirmation of entry creation
        clear()
        print('\n Thankyou your entry has been created')
        # fix: removed a stray "'\n'" string expression that had no effect
    except ValueError:
        print('I am sorry but the entry was not saved')
def save(self, overwrite=True, Enable_automatic_updates=False):
    """Write the element tree to disk, copy it to a uniquely named XML file,
    and optionally import the metadata back onto the dataset.

    NOTE(review): items are only printed here, never written via
    item._write() — looks like leftover debugging; confirm before relying
    on this save() variant.

    :param overwrite: when True (and the workspace is not a plain
        filesystem), import the metadata back onto the dataset
    :param Enable_automatic_updates: forwarded to ImportMetadata_conversion
    :return: path of the copied XML file
    """
    logwrite("Saving metadata", True)

    for item in self.items:
        try:
            print(item.value)
        except AttributeError:  # fix: bare except; only a missing .value is expected here
            print(item)

    # fix: removed the commented-out item._write() loop (dead code)
    self.elements.write(self.metadata_file)  # overwrites itself

    # copy to a unique name so repeated saves don't clobber earlier exports
    out_xml_path = os.path.join(os.path.dirname(self.metadata_file),
                                str(uuid.uuid4()) + '.xml')
    shutil.copy(self.metadata_file, out_xml_path)

    if self._workspace_type != 'FileSystem' and overwrite:
        arcpy.ImportMetadata_conversion(self.metadata_file, "FROM_ARCGIS",
                                        self.dataset,
                                        Enable_automatic_updates=Enable_automatic_updates)

    return out_xml_path
def save(self, to_feature_class=True):
    """Flush all metadata items into the XML tree and write it out; then,
    when requested, import the metadata back onto the feature class and
    rebuild the originating feature layer if there was one."""
    logwrite("Saving metadata", True)

    for item in self.items:
        item._write()

    # overwrite the exported XML with the updated tree
    self.elements.write(self.metadata_file)

    save_to_fc = to_feature_class and self.feature_class
    if save_to_fc:
        # keep Enable_automatic_updates off: automatic updates would undo some
        # of what we set, and this XML was just exported from the dataset anyway
        arcpy.ImportMetadata_conversion(
            self.metadata_file,
            "FROM_FGDC",
            self.feature_class,
            Enable_automatic_updates=False
        )

    if self.feature_layer:
        # recreate the layer from the temporary feature class, which now
        # carries the new metadata
        arcpy.Delete_management(self.feature_layer)
        arcpy.MakeFeatureLayer_management(self.feature_class, self.feature_layer)
def __init__(self, feature_class=None, feature_layer=None, metadata_file=None,
             items=None, temp_folder=metadata_temp_folder):
    """Set up a metadata editor for a feature class or a feature layer.

    Exports the dataset's metadata to a temporary XML file, parses it, and
    builds the individual metadata item accessors (abstract, purpose, tags,
    title).

    :param feature_class: path to a feature class (mutually exclusive with
        feature_layer)
    :param feature_layer: a feature layer (mutually exclusive with
        feature_class)
    :param metadata_file: an existing metadata XML file to edit directly
    :param items: optional list of pre-built metadata items to manage
    :param temp_folder: folder for temporary export files; created with
        tempfile.mkdtemp when not provided
    :raises ValueError: if both feature_class and feature_layer are given
    """
    # Fix: the original default was items=list(), a mutable default shared
    # across every instance, so items leaked from one editor into the next.
    if items is None:
        items = []
    self.items = items
    self.metadata_file = metadata_file
    self.elements = xml.etree.ElementTree.ElementTree()
    self.feature_class = feature_class
    self.feature_layer = feature_layer
    self.temp_folder = temp_folder
    self.created_temp_folder = False

    if self.feature_class and self.feature_layer:
        raise ValueError("MetadataEditor can only use either feature_class or feature_layer - do not provide both")

    if not self.temp_folder:
        self.temp_folder = tempfile.mkdtemp("arcpy_metadata")
        self.created_temp_folder = True

    if self.feature_layer:
        # if we are using a feature layer, we'll turn it into an in_memory
        # feature class for the remainder
        logwrite("Copying layer to a feature class")
        self.feature_class = arcpy.CreateScratchName("pisces_metadata_temp", "", "", arcpy.env.workspace)
        arcpy.CopyFeatures_management(self.feature_layer, self.feature_class)  # copy the features over

    if self.feature_class:
        # for both, we want to export the metadata out to the temporary location
        metadata_filename = arcpy.CreateScratchName("pisces", "metadata", "xml", self.temp_folder)
        self.metadata_file = os.path.join(self.temp_folder, metadata_filename)
        logwrite("Exporting metadata to temporary file %s" % self.metadata_file)
        arcpy.ExportMetadata_conversion(self.feature_class, translation_file, self.metadata_file)

    self.elements.parse(self.metadata_file)

    # create these all after the parsing happens so that if they have any
    # self initialization, they can correctly perform it
    self.abstract = MetadataAbstract(parent=self)
    self.purpose = MetadataPurpose(parent=self)
    self.tags = MetadataTags(parent=self)
    self.title = MetadataTitle(parent=self)

    self.items.extend([self.abstract, self.purpose, self.tags, self.title])

    if items:
        self.initialize_items()
def __init__(self, dataset=None, metadata_file=None, items=None,
             temp_folder=metadata_temp_folder):
    """Prepare the metadata editor for a dataset or a standalone XML file.

    Filesystem datasets (shapefiles, rasters) keep metadata in a sidecar XML
    file that is edited in place; geodatabase datasets have their metadata
    exported to a temporary XML via XSLT, edited, and imported back on save().

    :param dataset: path to the dataset whose metadata will be edited
    :param metadata_file: a metadata XML file to edit directly
    :param items: optional list of pre-built metadata items to manage
    :param temp_folder: folder for temporary export files
    :raises TypeError: if the dataset's data type is not supported
    """
    # Fix: the original default was items=list(), a mutable default shared
    # across every instance, so items leaked from one editor into the next.
    if items is None:
        items = []
    self.items = items
    self.metadata_file = metadata_file
    self.elements = xml.etree.ElementTree.ElementTree()
    self.temp_folder = temp_folder
    self.dataset = dataset

    self._gdb_datasets = ["FeatureClass", "Table", "RasterDataset", "RasterCatalog", "MosaicDataset"]
    self._simple_datasets = ["ShapeFile", "RasterDataset"]
    self._layers = ["FeatureLayer", "Layer"]

    if self.dataset:  # export the metadata out to the temporary location
        self.data_type = self.get_datatype()

        # for layers get the underlying dataset and start over
        if self.data_type in self._layers:
            desc = arcpy.Describe(self.dataset)
            self.data_type = desc.dataElement.dataType
            self.dataset = desc.dataElement.catalogPath

        self._workspace = self.get_workspace()
        self._workspace_type = self.get_workspace_type()

        # Datasets in a filesystem workspace have metadata attached as an XML
        # file we can write to directly
        if self._workspace_type == 'FileSystem':
            if self.data_type in self._simple_datasets:
                xml_file = self.dataset + ".xml"

                # if no XML file exists, create one with the most basic
                # metadata element in it
                if not os.path.exists(xml_file):
                    with open(xml_file, "w") as f:
                        f.write('<metadata xml:lang="en"></metadata>')

                self.metadata_file = xml_file
            else:
                raise TypeError("Datatype is not supported")

        # Metadata for GDB datasets is stored inside the GDB itself: export it
        # to a temporary file, modify it, then import it back on save()
        else:
            if self.data_type in self._gdb_datasets:
                metadata_filename = os.path.basename(self.dataset) + ".xml"
                self.metadata_file = os.path.join(self.temp_folder, metadata_filename)
                if os.path.exists(self.metadata_file):
                    os.remove(self.metadata_file)
                logwrite("Exporting metadata to temporary file %s" % self.metadata_file)
                arcpy.XSLTransform_conversion(self.dataset, xslt, self.metadata_file)
            else:
                raise TypeError("Datatype is not supported")

    self.elements.parse(self.metadata_file)

    # create these all after the parsing happens so that if they have any
    # self initialization, they can correctly perform it
    self.title = MetadataTitle(parent=self)
    self.abstract = MetadataAbstract(parent=self)
    self.purpose = MetadataPurpose(parent=self)
    self.tags = MetadataTags(parent=self)
    self.place_keywords = MetadataPlaceKeywords(parent=self)
    self.extent_description = MetadataExtentDescription(parent=self)
    self.temporal_extent_description = MetadataTemporalExtentDescription(parent=self)
    self.temporal_extent_instance = MetadataTemporalExtentInstance(parent=self)
    self.temporal_extent_start = MetadataTemporalExtentStart(parent=self)
    self.temporal_extent_end = MetadataTemporalExtentEnd(parent=self)
    self.min_scale = MetadataMinScale(parent=self)
    self.max_scale = MetadataMaxScale(parent=self)
    self.last_update = MetadataLastUpdate(parent=self)
    self.update_frequency = MetadataUpdateFrequency(parent=self)
    self.update_frequency_description = MetadataUpdateFrequencyDescription(parent=self)
    self.credits = MetadataCredits(parent=self)
    self.citation = MetadataCitation(parent=self)
    self.limitation = MetadataLimitation(parent=self)
    self.source = MetadataSource(parent=self)
    self.points_of_contact = MetadataPointsOfContact(parent=self)
    self.maintenance_contacts = MetadataMaintenanceContacts(parent=self)
    self.citation_contacts = MetadataCitationContacts(parent=self)
    self.language = MetadataDataLanguage(parent=self)
    self.metadata_language = MetadataMDLanguage(parent=self)
    self.locals = MetadataLocals(parent=self)

    self.items.extend([self.title, self.abstract, self.purpose, self.tags,
                       self.place_keywords, self.extent_description,
                       self.temporal_extent_description, self.temporal_extent_instance,
                       self.temporal_extent_start, self.temporal_extent_end,
                       self.min_scale, self.max_scale, self.last_update,
                       self.update_frequency, self.update_frequency_description,
                       self.credits, self.citation, self.limitation, self.source,
                       self.points_of_contact, self.maintenance_contacts,
                       self.citation_contacts, self.language,
                       self.metadata_language, self.locals])

    if items:
        self.initialize_items()
def __init__(self, dataset=None, metadata_file=None, items=None, temp_folder=metadata_temp_folder):
    """Prepare the metadata editor for a dataset or a standalone XML file.

    Filesystem datasets (shapefiles, rasters) keep metadata in a sidecar XML
    file edited in place; geodatabase datasets have their metadata exported
    to a temporary XML via XSLT, edited, and imported back later. After
    parsing, one attribute pair (public value + private "_name" item) is
    created per entry in the module-level ``elements`` configuration dict.

    :param dataset: path to the dataset whose metadata will be edited
    :param metadata_file: a metadata XML file to edit directly
    :param items: optional list of pre-built metadata items to manage
    :param temp_folder: folder for temporary export files
    :raises TypeError: if the dataset's data type is not supported
    """
    # items=None avoids the shared-mutable-default pitfall
    if items is None:
        items = list()
    self.items = items
    self.dataset = dataset
    self.metadata_file = metadata_file
    self.elements = xml.etree.ElementTree.ElementTree()
    self.temp_folder = temp_folder

    # dataset categories that determine how metadata is reached
    self._gdb_datasets = ["FeatureClass", "Table", "RasterDataset", "RasterCatalog", "MosaicDataset"]
    self._simple_datasets = ["ShapeFile", "RasterDataset"]
    self._layers = ["FeatureLayer", "Layer"]

    if self.dataset:  # for both, we want to export the metadata out
        # export the metadata to the temporary location
        self.data_type = self.get_datatype()

        # for layers get the underlying dataset and start over
        if self.data_type in self._layers:
            desc = arcpy.Describe(self.dataset)
            self.data_type = desc.dataElement.dataType
            self.dataset = desc.dataElement.catalogPath

        self._workspace = self.get_workspace()
        self._workspace_type = self.get_workspace_type()

        # Datasets in Filesystem have metadata attached as XML file
        # we can directly write to it
        if self._workspace_type == 'FileSystem':
            if self.data_type in self._simple_datasets:
                xml_file = self.dataset + ".xml"
                # if no XML file exists create one and add most basic metadata item to it
                if not os.path.exists(xml_file):
                    with open(xml_file, "w") as f:
                        f.write('<metadata xml:lang="en"></metadata>')
                self.metadata_file = xml_file
            else:
                raise TypeError("Datatype is not supported")
        # Metadata for GDB datasets are stored inside the GDB itself.
        # We need to first export them to a temporary file, modify them
        # and then import them back
        else:
            if self.data_type in self._gdb_datasets:
                metadata_filename = os.path.basename(self.dataset) + ".xml"
                self.metadata_file = os.path.join(self.temp_folder, metadata_filename)
                if os.path.exists(self.metadata_file):
                    os.remove(self.metadata_file)
                logwrite("Exporting metadata to temporary file %s" % self.metadata_file)
                arcpy.XSLTransform_conversion(self.dataset, xslt, self.metadata_file)
            else:
                raise TypeError("Datatype is not supported")

    self.elements.parse(self.metadata_file)

    # create these all after the parsing happens so that if they have any
    # self initialization, they can correctly perform it.
    # For every configured element, build a private "_name" item object and
    # a public "name" attribute holding its (stripped) current value.
    for name in elements.keys():
        setattr(self, "_%s" % name, None)

        if elements[name]['type'] in ["string", "date", "integer", "float"]:
            setattr(self, "_{}".format(name), MetadataItem(elements[name]['path'], name, self))
            if self.__dict__["_{}".format(name)].value is not None:
                setattr(self, name, self.__dict__["_{}".format(name)].value.strip())
            else:
                setattr(self, name, self.__dict__["_{}".format(name)].value)

        elif elements[name]['type'] == "list":
            setattr(self, "_{}".format(name), MetadataList(elements[name]["tagname"], elements[name]['path'], name, self))
            setattr(self, name, self.__dict__["_{}".format(name)].value)

        elif elements[name]['type'] == "language":
            setattr(self, "_{}".format(name), MetadataLanguage(elements[name]['path'], name, self))
            if self.__dict__["_{}".format(name)].value is not None:
                setattr(self, name, self.__dict__["_{}".format(name)].value.strip())
            else:
                setattr(self, name, self.__dict__["_{}".format(name)].value)

        elif elements[name]['type'] == "local":
            # note: "local" stores the item object directly under the public name
            setattr(self, name, MetadataLocals(elements[name]['path'], name, self))

        elif elements[name]['type'] == "contact":
            setattr(self, "_{}".format(name), MetadataContact(elements[name]['path'], name, self))
            setattr(self, name, self.__dict__["_{}".format(name)])

        # NOTE(review): this tests the element's CONFIG DICT (elements[name])
        # against __dict__ keys and formats it into an attribute name — it
        # looks like it should use `name` instead, which would make the test
        # always-true for items created above; confirm against upstream before
        # changing, since as written the append likely never runs.
        if elements[name] in self.__dict__.keys():
            self.items.append(getattr(self, "_{}".format(elements[name])))

    if items:
        self.initialize_items()