Exemplo n.º 1
0
def fixObjXML(objName, objType, objLoc, domMR, fdDataSourceValues=None):
    """Rebuild the FGDC metadata XML file for one geodatabase object.

    Exports the object's ArcGIS metadata, merges it into a deep copy of the
    master-record DOM (entity-attribute info, pruned source info, spatial
    info, title/supplemental info) and writes the result to a per-object
    '<gdb>_<objName>-metadata.xml' file in wksp.

    objName -- object name, used for messages and output file naming
    objType -- object type string; 'Feature dataset' objects prune source
               info against the caller-supplied fdDataSourceValues
    objLoc  -- catalog path of the object whose metadata is exported
    domMR   -- master-record DOM; not modified (a deep copy is edited)
    fdDataSourceValues -- data-source values of the enclosing feature
               dataset; defaults to an empty list
    Returns the dataSourceValues discovered by updateTableDom.
    """
    # Bug fix: a mutable default list is shared across calls; use None as
    # the sentinel and create a fresh list per call instead.
    if fdDataSourceValues is None:
        fdDataSourceValues = []
    addMsgAndPrint('  '+objName)
    arcXMLfile = wksp+'/'+objName+'.xml'
    testAndDelete(arcXMLfile)
    arcpy.ExportMetadata_conversion(objLoc,translator,arcXMLfile)
    with open(arcXMLfile) as xml:
       arcXML = parse(xml)

    dom = copy.deepcopy(domMR)

    # updateTableDom updates entity-attribute info, also returns dataSourceValues
    dom, dataSourceValues = updateTableDom(dom,objLoc,logFile)

    # Fix: '!=' replaces the long-deprecated '<>' operator (same behavior).
    if objType != 'Feature dataset':
        # delete unused dataqual/lineage/srcinfo branches
        dom = pruneSrcInfo(dom,dataSourceValues)
    else:
        dom = pruneSrcInfo(dom,fdDataSourceValues)

    # add spdoinfo and spref from arcXML
    dom = replaceSpatialStuff(dom, arcXML)

    # redo title and supplinfo
    dom = replaceTitleSupplinf(objType,objName,gdb,dom)
    domName = gdb[:-4]+'_'+objName+'-metadata.xml'
    writeDomToFile(wksp,dom,domName)
    # keep the intermediate ArcGIS export only when debugging
    if not debug:
        os.remove(arcXMLfile)

    return dataSourceValues
def purgeGeoprocessingFGDC(table,metadataFile):
    """Strip ArcGIS-internal (geoprocessing-history) metadata from a table
    by round-tripping it through the FGDC format.

    The table's metadata is exported to metadataFile with the module-level
    FGDC translator, the table's internal metadata is cleared, and the
    FGDC file is imported back in.  metadataFile is left on disk.
    """
    addMsgAndPrint('  exporting metadata to '+metadataFile)
    arcpy.ExportMetadata_conversion(table,translator,metadataFile)
    addMsgAndPrint('  clearing internal metadata')
    # ClearMetadata_usgs is a USGS toolbox tool, not core arcpy
    arcpy.ClearMetadata_usgs(table)
    addMsgAndPrint('  importing metadata from '+metadataFile)
    arcpy.ImportMetadata_conversion (metadataFile,"FROM_FGDC",table)
def export_xml():
    #set the output path for export metadata function
    OUTPUT = os.path.join(base_path, record, "final_XMLs")

    #get a list of all the XMLs
    files = arcpy.ListFiles("*.xml")
    print files
    raw_input()

    #loop through XMLs and export the metadata for each to the final_XMLs directory
    for f in files:

        if f[len(f) - 7:len(f) - 4] == 'shp':
            filePath = os.path.join(OUTPUT, f[:-8] + '.xml')
        elif f[len(f) - 7:len(f) - 4] == 'txt':
            pass
        else:
            filePath = os.path.join(OUTPUT, f)

        print filePath

        statinfo = os.stat(os.path.join(base_path, record, record_path, f))

        print f, '=', statinfo.st_size

        if statinfo.st_size == 0:
            continue

        if os.path.exists(filePath):
            print f, 'already exists.  Deleting now.'
            os.remove(filePath)

        print 'Trying to export XML for: ', f
        arcpy.ExportMetadata_conversion(
            f, TRANSLATOR, os.path.join(OUTPUT, f[:f.find(".")] + ".xml"))
 def generate_layer_summaries(directory_path, input, summary_text):
     """Export a layer's metadata XML and append a summary to its <purpose>.

     The XML is written under directory_path/file_item/lyr_item.xml (the
     names come from module-level file_item / lyr_item), then every
     <purpose> element gets the summary text appended and the file is
     rewritten in place.
     """
     print('Generating xmls for {}'.format(lyr_item))
     xml_path = os.path.join(directory_path, file_item, lyr_item + ".xml")
     arcpy.ExportMetadata_conversion(input, translator, xml_path)
     doc = ET.parse(xml_path)
     for purpose_node in doc.getroot().iter("purpose"):
         purpose_node.text = purpose_node.text + "\n\n" + "Layer: {} Lots. {}".format(file_item, summary_text)
     doc.write(xml_path)
Exemplo n.º 5
0
 def export_metadata(fc, desired_dict):
     """Export fc's metadata from the SDE and record its publication date.

     The XML is written to temp_path/metadata/<fc>.xml; the text of each
     <pubdate> element found is stored in desired_dict[fc] (so the last
     one wins when several are present).
     """
     out_xml = os.path.join(temp_path, 'metadata', '{}.xml'.format(fc))
     arcpy.ExportMetadata_conversion(os.path.join(sde_path, fc), translator, out_xml)
     doc = ET.parse(out_xml)
     for pubdate in doc.getroot().iter('pubdate'):
         desired_dict[fc] = pubdate.text
Exemplo n.º 6
0
    def __init__(self,
                 feature_class=None,
                 feature_layer=None,
                 metadata_file=None,
                 items=None,
                 temp_folder=metadata_temp_folder):
        """Set up the metadata editor for a feature class, layer, or XML file.

        feature_class / feature_layer -- mutually exclusive data sources;
            a feature layer is first copied to a scratch feature class.
        metadata_file -- existing metadata XML to edit; set automatically
            when a feature class or layer is supplied.
        items -- optional seed list of metadata items; the standard
            abstract/purpose/tags/title items are appended to it (a list
            passed by the caller is extended in place, as before).
        temp_folder -- scratch folder for exported metadata; a fresh temp
            directory is created when falsy.
        """
        # Bug fix: the original default `items=list()` is evaluated once at
        # definition time and shared by every instance created without the
        # argument, so items accumulated across instances.  Use None as the
        # sentinel and build a fresh list per call.
        self.items = items if items is not None else []
        self.metadata_file = metadata_file
        self.elements = xml.etree.ElementTree.ElementTree()
        self.feature_class = feature_class
        self.feature_layer = feature_layer
        self.temp_folder = temp_folder
        self.created_temp_folder = False

        if self.feature_class and self.feature_layer:
            raise ValueError(
                "MetadataEditor can only use either feature_class or feature_layer - do not provide both"
            )

        if not self.temp_folder:
            self.temp_folder = tempfile.mkdtemp("arcpy_metadata")
            self.created_temp_folder = True

        if self.feature_layer:  # if we are using a feature layer, we'll turn  it into an in_memory feature class for the remainder
            logwrite("Copying layer to a feature class")
            self.feature_class = arcpy.CreateScratchName(
                "pisces_metadata_temp", "", "", arcpy.env.workspace)
            arcpy.CopyFeatures_management(
                self.feature_layer,
                self.feature_class)  # copy the features over

        if self.feature_class:  # for both, we want to export the metadata out
            # export the metadata to the temporary location
            metadata_filename = arcpy.CreateScratchName(
                "pisces", "metadata", "xml", self.temp_folder)
            self.metadata_file = os.path.join(self.temp_folder,
                                              metadata_filename)
            logwrite("Exporting metadata to temporary file %s" %
                     self.metadata_file)
            arcpy.ExportMetadata_conversion(self.feature_class,
                                            translation_file,
                                            self.metadata_file)

        self.elements.parse(self.metadata_file)

        # create these all after the parsing happens so that if they have any self initialization, they can correctly perform it
        self.abstract = MetadataAbstract(parent=self)
        self.purpose = MetadataPurpose(parent=self)
        self.tags = MetadataTags(parent=self)
        self.title = MetadataTitle(parent=self)

        self.items.extend([self.abstract, self.purpose, self.tags, self.title])

        # only caller-supplied (non-empty) item lists trigger initialization,
        # exactly as with the old empty-list default
        if items:
            self.initialize_items()
Exemplo n.º 7
0
def update_layer_metadata(sdeFC, props, srcType):
    """Append retired-L-Drive and Livelink links to an SDE feature class's
    abstract via an FGDC metadata export/modify/import round trip.

    sdeFC -- path to the SDE feature class to update
    props -- mapping with at least "Data Source" and "Livelink Link" keys
    srcType -- accepted but not used in this function body
    """

    #TEMP_DIR = r"C:\Users\kdb086\Documents\ArcGIS\metadata-files"
    TEMP_DIR = tempfile.gettempdir()
    metadataFile = os.path.join(TEMP_DIR,
                                os.path.basename(sdeFC) + '-metadata.xml')
    #migrationText = " *** Migrated from the L Drive (%s)" % props["Data Source"]
    migrationText = "<b>Retired L Drive Path: </b> %s" % props["Data Source"]
    livelinkText = "<b>Livelink Path: </b>"
    if props["Livelink Link"]:
        # turn the Livelink URL into an HTML anchor appended to the label
        #livelinkText = 'Click <a href="' + props["Livelink Link"] + '">here</a> to go to Livelink'
        livelinkText = livelinkText + ' <a href="' + props[
            "Livelink Link"] + '">' + props["Livelink Link"] + '</a>'
    else:
        print('%-60s%s' % (" ", "??? no Livelink Link found"))

    # ExportMetadata will not overwrite, so remove any stale export first
    if os.path.exists(metadataFile):
        os.remove(metadataFile)

    # A- export the metadata from the SDE feature class
    # print 'exporting the metadata of %s to %s' % (sdeFC, metadataFile)
    arcpy.ExportMetadata_conversion(Source_Metadata=sdeFC,
                                    Translator=METADATA_TRANSLATOR,
                                    Output_File=metadataFile)

    # B- modify metadata
    # print 'modifying the metadata file [%s]' % (metadataFile)
    tree = ET.parse(metadataFile)
    root = tree.getroot()
    idinfo = root.find('idinfo')
    dspt = idinfo.find('descript')
    # B1- make sure idinfo/descript/abstract exists before editing it
    if dspt is None:
        dspt = ET.SubElement(idinfo, 'descript')
        ET.SubElement(dspt, 'abstract')
    else:
        abstract = dspt.find('abstract')
        if abstract is None:
            ET.SubElement(dspt, 'abstract')
    # B2- modify the element text
    abstract = dspt.find('abstract')
    if abstract.text is None:
        # use dot in place of empty space to force CDATA to be recognisable
        abstract.text = ". "
    # NOTE(review): ET.CDATA is an lxml.etree feature, not stdlib
    # xml.etree.ElementTree -- presumably ET is lxml.etree here; confirm
    # against this module's imports.
    abstract.text = ET.CDATA("%s<p/><p>%s</p><p>%s</p>" %
                             (abstract.text, livelinkText, migrationText))

    tree.write(metadataFile)

    # C- import the modified metadata back to the SDE feature class
    # print 'importing the metadata file [%s] to %s' % (metadataFile, sdeFC)
    arcpy.ImportMetadata_conversion(Source_Metadata=metadataFile,
                                    Import_Type="FROM_FGDC",
                                    Target_Metadata=sdeFC,
                                    Enable_automatic_updates="ENABLED")
def export_xml():
    #set the output path for export metadata function
    OUTPUT = os.path.join(base_path,record,"final_XMLs")

    #get a list of all the XMLs
    if record_path == "converted\GISfiles":
        files = []
        for dirpath,dirnames,filenames in os.walk(os.path.join(base_path,record,record_path)):
            if dirpath.endswith("shapefiles"):
                for f in filenames:
                    if fnmatch.fnmatch(f,"*.xml"):
                        files.append(os.path.join(dirpath,f))
    else:
         files = glob.glob(os.path.join(arcpy.env.workspace,"*.shp"))


    #loop through XMLs and export the metadata for each to the final_XMLs directory
    for f in files:

        if os.path.splitext(f)[1] == '.shp':
            if os.path.isabs(f) == False:
                filePath = os.path.join(OUTPUT,
                    os.path.split(os.path.splitext(f)[0])[1],'.xml')
            else:
                filePath = os.path.join(OUTPUT,os.path.split(f)[1])
        elif f[len(f)-7:len(f)-4] == 'txt':
            pass
        else:
            if os.path.isabs(f) == False:
                filePath = os.path.join(OUTPUT,
                    os.path.split(os.path.splitext(f)[0])[1],'.xml')
            else:
                filePath = os.path.join(OUTPUT,os.path.split(f)[1])

        print filePath

        statinfo = os.stat(os.path.join(base_path,record,record_path,f))

        print f, '=', statinfo.st_size

        if statinfo.st_size == 0:
            continue

        if os.path.exists(filePath):
            print f, 'already exists.  Deleting now.'
            os.remove(filePath)

        print 'Trying to export XML for: ', f
        arcpy.ExportMetadata_conversion(f,
            TRANSLATOR,
            os.path.join(OUTPUT, os.path.splitext(os.path.split(f)[1])[0]+ ".xml"))
Exemplo n.º 9
0
def export_xml(input_path):

    for root, dirs, files in os.walk(input_path):
        for f in files:
            if f.endswith('shp'):
                inFile = os.path.join(root, f)
                outFile = os.path.join(outputDir, f[:-4] + '.xml')
                print outFile
                if os.path.isfile(outFile):
                    print 'Removing', outFile
                    os.remove(outFile)

                print 'Trying to export XML for: ', f
                arcpy.ExportMetadata_conversion(inFile, translator, outFile)
Exemplo n.º 10
0
def metadata(item):
    """
	Get feature class metadata as XML. Uses METADATA_TRANSLATOR environment
	variable for metadata schema
	"""
    output_file = './tmp/' + item + '.xml'
    # ExportMetadata refuses to overwrite, so clear any stale export first.
    try:
        os.remove(output_file)
    except OSError:
        pass

    arcpy.ExportMetadata_conversion(item, METADATA_TRANSLATOR, output_file)
    # Convert the exported XML to a dict and hand it back as JSON.
    with open(output_file) as handle:
        parsed = xmltodict.parse(handle.read())
    return jsonify(parsed)
 def layer_meta_archive(input, clip_val, clip_text):
     """Archive a MapPLUTO layer: save a .lyr file, export its metadata XML
     alongside it, and apply the standard symbology.

     input -- source feature class / layer name (note: shadows the builtin)
     clip_val -- suffix distinguishing the clipped/unclipped variants
     clip_text -- human-readable label used in file names and messages
     Relies on module-level prod_version, m_arch_path, release_date_text,
     translator and layer_symb_path.
     """
     print("Archiving MapPLUTO {}".format(clip_text))
     print("Creating in-memory layer.")
     arcpy.MakeFeatureLayer_management(input, input + prod_version + clip_val)
     print("Saving layer to appropriate path.")
     # NOTE(review): "_UNLCIPPED" looks like a typo for "_UNCLIPPED", but it
     # is used consistently in all three calls below -- confirm against the
     # actual dataset names before changing it.
     arcpy.SaveToLayerFile_management(input + prod_version + clip_val, os.path.join(m_arch_path,
                                                                                    input.replace("_UNLCIPPED", "")  + " " + prod_version + " " +
                                                                                    release_date_text + " - {}".format(clip_text)))
     print("Exporting metadata xmls to appropriate path")
     arcpy.ExportMetadata_conversion(input, translator, os.path.join(m_arch_path,
                                                                     input.replace("_UNLCIPPED", "") + " " + prod_version + " " + release_date_text
                                                                      + " - {}.lyr.xml".format(clip_text)))
     print("Applying appropriate symbology from previous export")
     arcpy.ApplySymbologyFromLayer_management(os.path.join(m_arch_path,
                                                           input.replace("_UNLCIPPED", "") + " " + prod_version + " " + release_date_text
                                                           + " - {}.lyr".format(clip_text)), layer_symb_path)
Exemplo n.º 12
0
def update_sde_metadata(sdeFC, srcFC):

    TEMP_DIR = tempfile.gettempdir()
    metadataFile = os.path.join(TEMP_DIR,
                                os.path.basename(sdeFC) + '-metadata.xml')
    migrationText = "*** Migrated from the L Drive (%s)" % srcFC

    if os.path.exists(metadataFile):
        os.remove(metadataFile)

    # A- export the medata from SDE feature class
    print 'exporting the metadata of %s to %s' % (sdeFC, metadataFile)
    arcpy.ExportMetadata_conversion(Source_Metadata=sdeFC,
                                    Translator=METADATA_TRANSLATOR,
                                    Output_File=metadataFile)

    # B- modify metadata
    print 'modifying the metadata file [%s]' % (metadataFile)
    tree = ET.parse(metadataFile)
    root = tree.getroot()
    idinfo = root.find('idinfo')
    dspt = idinfo.find('descript')
    # B1- add the element
    if dspt is None:
        dspt = ET.SubElement(idinfo, 'descript')
        ET.SubElement(dspt, 'abstract')
    else:
        abstract = dspt.find('abstract')
        if abstract is None:
            ET.SubElement(dspt, 'abstract')
    # B2- modify the element text
    abstract = dspt.find('abstract')
    if abstract.text is None:
        abstract.text = migrationText
    elif abstract.text.find(migrationText) == -1:
        abstract.text = abstract.text + migrationText

    tree.write(metadataFile)

    # C- import the modified metadata back to SDE feature class
    print 'importing the metadata file [%s] to %s' % (metadataFile, sdeFC)
    arcpy.ImportMetadata_conversion(Source_Metadata=metadataFile,
                                    Import_Type="FROM_FGDC",
                                    Target_Metadata=sdeFC,
                                    Enable_automatic_updates="ENABLED")

    print 'The metadata of %s is updated' % sdeFC
Exemplo n.º 13
0
def export_xml(input_path):
    outDir = os.path.join(input_path, 'export_fgdc')
    if not os.path.exists(outDir):
        os.makedirs(outDir)

    for root, dirs, files in os.walk(input_path):
        for f in files:
            if f.endswith('shp'):
                inFile = os.path.join(root, f)
                outFile = os.path.join(outDir,(f[:-4]+'.xml'))
                ws = os.path.join(root)
                if os.path.isfile(outFile):
                    print 'Removing', outFile
                    os.remove(outFile)

                print 'Trying to export XML for: ', f
                arcpy.ExportMetadata_conversion(inFile, translator, outFile)
Exemplo n.º 14
0
def export_metadata():
    """Exports the feature class metadata to an xml file

    The staging feature class's metadata is exported through the FGDC
    translator; if the configured XSLT (args.metadata_xslt) exists it is
    applied to scrub the export and the cleaned copy is re-imported into
    the FGDB, otherwise the raw export is renamed and used as-is.  The
    final XML is published to the download folder.

    Returns:
        None
    """

    folder = 'metadata'
    name = get_dataset_filename()

    # Create a metadata folder in the temp directory if it does not exist
    temp_working_folder = os.path.join(temp_workspace, folder)
    create_folder(temp_working_folder, True)

    # Set the destination of the metadata export
    source = staging_feature_class
    raw_metadata_export = os.path.join(temp_working_folder, name + "_raw.xml")

    # Export the metadata with the desktop install's FGDC translator
    arcpy.env.workspace = temp_working_folder
    installDir = arcpy.GetInstallInfo("desktop")["InstallDir"]
    translator = installDir + "Metadata/Translator/ARCGIS2FGDC.xml"
    arcpy.ExportMetadata_conversion(source, translator, raw_metadata_export)

    # Process: XSLT Transformation to remove any sensitive info or format
    destination = os.path.join(temp_working_folder, name + ".xml")
    if os.path.exists(args.metadata_xslt):
        logger.info("Applying metadata XSLT: " + args.metadata_xslt)
        arcpy.XSLTransform_conversion(raw_metadata_export, args.metadata_xslt,
                                      destination, "")

        # Reimport the clean metadata into the FGDB
        logger.debug("Reimporting metadata to file geodatabase " + destination)
        arcpy.MetadataImporter_conversion(destination, staging_feature_class)
    else:
        # If no transformation exists, just rename and publish the raw metadata
        # Fix: Logger.warn is a deprecated alias of Logger.warning
        logger.warning("Metadata XSLT not found")
        os.rename(raw_metadata_export, destination)

    # Publish the metadata to the download folder
    publish_file(temp_working_folder, name + ".xml", "metadata")
Exemplo n.º 15
0
# Derive workspace and master-record XML paths from the input geodatabase
inGdb = os.path.abspath(inGdb)
wksp = os.path.dirname(inGdb)
xmlGdb = inGdb[:-4]+'-metadata.xml'
mrXML = xmlGdb

dataSources = os.path.join(inGdb, 'DataSources')
addMsgAndPrint('  DataSources = '+dataSources)

# export the geodatabase-level master metadata record
if debug:
    addMsgAndPrint('  inGdb = '+inGdb)
    addMsgAndPrint('  translator = '+translator)
    addMsgAndPrint('  mrXML = '+mrXML)
# ExportMetadata will not overwrite an existing file
if os.path.exists(mrXML):
    os.remove(mrXML)
arcpy.ExportMetadata_conversion(inGdb,translator,mrXML)
addMsgAndPrint('  Metadata for '+os.path.basename(inGdb)+' exported to file ')
addMsgAndPrint('    '+mrXML)

# parse mrXML to DOM; abort the tool on failure
try:
    with open(mrXML) as xml:
        domMR = parse(xml)
    addMsgAndPrint('  Master record parsed successfully')
except Exception:  # narrowed from a bare except; still logs before aborting
    addMsgAndPrint(arcpy.GetMessages())
    addMsgAndPrint('Failed to parse '+mrXML)
    # Bug fix: the sys.exit() that used to follow this raise was unreachable.
    raise arcpy.ExecuteError
Exemplo n.º 16
0
def mainFunction(
    geodatabase, outputFile
):  # Get parameters from ArcGIS Desktop tool by separating by comma e.g. (var1 is 1st parameter,var2 is 2nd parameter,var3 is 3rd parameter)
    """Append the abstract and purpose of every dataset in a geodatabase to a text file.

    geodatabase -- workspace whose feature datasets, stand-alone feature
        classes and tables are scanned
    outputFile -- text file the per-dataset metadata summaries are appended to
    """
    try:
        # Logging
        if (enableLogging == "true"):
            # Setup logging
            logger, logMessage = setLogging(logFile)
            # Log start of process
            logger.info("Process started.")

        # --------------------------------------- Start of code --------------------------------------- #

        # Set the workspace
        arcpy.env.workspace = geodatabase

        # Set the directory for the translator
        installDirectory = arcpy.GetInstallInfo("desktop")["InstallDir"]
        # Use the FGDC to get into clean xml format
        translator = installDirectory + "Metadata/Translator/ARCGIS2FGDC.xml"

        # Get a list of the feature datasets in the database
        featureDatasetList = arcpy.ListDatasets("", "Feature")

        # Collect the feature classes inside every feature dataset
        datasetList = []
        for featureDataset in featureDatasetList:
            # Bug fix: the original reassigned datasetList on each pass, so
            # only the LAST feature dataset's contents survived; accumulate.
            datasetList = datasetList + arcpy.ListFeatureClasses("", "", featureDataset)

        # Get a list of the stand-alone feature classes and add to list
        datasetList = datasetList + arcpy.ListFeatureClasses()

        # Get a list of the tables and add to list
        datasetList = datasetList + arcpy.ListTables()

        # Go through the datasets in the list
        for dataset in datasetList:
            arcpy.AddMessage("Exporting metadata for " + dataset + "...")
            # Logging
            if (enableLogging == "true"):
                logger.info("Exporting metadata for " + dataset + "...")

            # Export the metadata for the dataset to a scratch XML file
            arcpy.ExportMetadata_conversion(
                dataset, translator,
                os.path.join(arcpy.env.scratchFolder, "Metadata.xml"))

            # Parse the exported XML
            tree = ET.ElementTree(
                file=os.path.join(arcpy.env.scratchFolder, "Metadata.xml"))
            root = tree.getroot()

            # Defaults used when the metadata lacks the elements
            abstract = "No Abstract"
            purpose = "No Purpose"
            # Look at the metadata
            description = root.find("idinfo/descript")
            # Bug fix: truth-testing an Element is deprecated, and an empty
            # <descript> is falsy -- compare against None explicitly.
            if description is not None:
                # Look at the description xml element
                for child in description:
                    # Get abstract
                    if (child.tag.lower() == "abstract"):
                        abstract = child.text
                    # Get purpose
                    if (child.tag.lower() == "purpose"):
                        purpose = child.text

            # Elements may exist but carry no text
            if (abstract is None):
                abstract = "No Abstract"
            if (purpose is None):
                purpose = "No Purpose"

            # Append this dataset's summary to the output text file
            with open(outputFile, "a") as f:
                f.write("Dataset - " + str(dataset) + "\n")
                f.write("Abstract - " + str(abstract) + "\n")
                f.write("Purpose - " + str(purpose) + "\n")
                f.write("--------------------" + "\n")
                # (redundant f.close() removed; the with-block closes f)

        # --------------------------------------- End of code --------------------------------------- #

        # If called from gp tool return the arcpy parameter
        # NOTE(review): `output` is never assigned in this function -- it is
        # presumably a module-level variable; verify before relying on it.
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                arcpy.SetParameterAsText(1, output)
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        pass
    # If arcpy error
    except arcpy.ExecuteError:
        # Build and show the error message
        errorMessage = arcpy.GetMessages(2)
        arcpy.AddError(errorMessage)
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
    # If python error
    except Exception as e:
        # Build the error message by joining all exception args with spaces
        errorMessage = " ".join(str(arg) for arg in e.args)
        arcpy.AddError(errorMessage)
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
Exemplo n.º 17
0
# =============================================================================
# Example inputs:
# gdb = r"C:\Users\stevenconnorg\Documents\knight-federal-solutions\CIP_DataReview\archive\ANG_Peoria  - Copy\Non_Network_CIP\ANG_Peoria_CIP.gdb"
# translator = r"C:\Program Files (x86)\ArcGIS\Desktop10.6\Metadata\Translator\ARCGIS2FGDC.xml"
# outDir = r"C:\Users\stevenconnorg\Documents\knight-federal-solutions\CIP_DataReview\archive\ANG_Peoria  - Copy\Non_Network_CIP\METADATA"
# =============================================================================
# Export FGDC metadata for every feature dataset and feature class in gdb
# to individual XML files in outDir.
arcpy.env.workspace = gdb

FDSs = arcpy.ListDatasets()
arcpy.AddMessage(FDSs)

if not FDSs:
    # No feature datasets: export the stand-alone feature classes only.
    # Bug fix: ListFeatureClasses takes a wild_card as its first argument,
    # not a workspace -- passing the gdb path as a wildcard matched nothing.
    FCs = arcpy.ListFeatureClasses()
    for fc in FCs:
        outFile = outDir+"/"+fc+".xml"
        if arcpy.Exists(outFile):
            arcpy.Delete_management(outFile)
        arcpy.ExportMetadata_conversion(fc,Translator = translator,Output_File = outFile)
else:
    for fds in FDSs:
        # One XML for the feature dataset itself...
        outFile = outDir+"/"+fds+".xml"
        if arcpy.Exists(outFile):
            arcpy.Delete_management(outFile)
        arcpy.ExportMetadata_conversion(fds,Translator = translator,Output_File = outFile)
        # ...and one per feature class it contains.
        FCs = arcpy.ListFeatureClasses(feature_dataset = fds)
        for fc in FCs:
            outFile = outDir+"/"+fds+"_"+fc+".xml"
            if arcpy.Exists(outFile):
                arcpy.Delete_management(outFile)
            arcpy.ExportMetadata_conversion(fc,Translator = translator,Output_File = outFile)
Exemplo n.º 18
0
    # NOTE(review): fragment -- this indented assignment belongs to a loop
    # that is not visible in this excerpt; it strips markup tags from `item`.
    pubdate = re.sub(r'<.*?>', '', item)
print pubdate

#Make sde Connection ("####" is a redacted connection-file path)
sde_conn = "####"
arcpy.env.workspace = sde_conn
#fcList = arcpy.ListFeatureClasses()
#for fc in fcList:
#    print fc

#Create xml file of metadata using the ISO 19139 translator
dir = arcpy.GetInstallInfo("desktop")["InstallDir"]
translator = dir + "Metadata/Translator/ArcGIS2ISO19139.xml"
date = time.strftime('%Y%m%d%I%M%S')
xmlFile = "#####" + date + ".xml"
arcpy.ExportMetadata_conversion(sde_conn, translator, xmlFile)

#Parse the xml file and pull the first <purpose> element's text
myxml = minidom.parse(xmlFile)
purpose = myxml.getElementsByTagName("purpose")[0]
document = purpose.getElementsByTagName(
    "gco:CharacterString")[0].firstChild.data
# Extract 8-digit date strings from the purpose text; the loop leaves
# metaDate holding the LAST match (it is reset on every iteration).
date = re.findall(r'\d\d\d\d\d\d\d\d', document)
for item in date:
    metaDate = ''
    metaDate += item
print metaDate

#If data is updated print "Data up-to-date"
#If data is outdated send email
# NOTE(review): the body of this `if` is missing from this excerpt.
if metaDate == pubdate:
Exemplo n.º 19
0
# Paths for the GeoMaterialDict table, the log, and the metadata XML files
gmDict = os.path.join(inGdb, 'GeoMaterialDict')
logFileName = inGdb + '-metadataLog.txt'
xmlFileMR = gdb + '-MR.xml'
xmlFileGdb = gdb + '.xml'

# export the GeologicMap master metadata record
fXML = workDir + '/' + gdb + '.xml'
addMsgAndPrint('fXML = ' + fXML)
# ExportMetadata will not overwrite an existing file
if os.path.exists(fXML):
    os.remove(fXML)
gdbObj = inGdb + '/GeologicMap'
if debug:
    addMsgAndPrint('  gdbObj = ' + gdbObj)
    addMsgAndPrint('  translator = ' + translator)
    addMsgAndPrint('  fXML = ' + fXML)
arcpy.ExportMetadata_conversion(gdbObj, translator, fXML)

addMsgAndPrint('  Metadata for GeologicMap exported to file ')
addMsgAndPrint('    ' + fXML)

# parse xml to DOM; abort the tool on failure
try:
    domMR = xml.dom.minidom.parse(fXML)
    addMsgAndPrint('  Master record parsed successfully')
    # the exported file is only needed transiently unless debugging
    if not debug: os.remove(fXML)
except Exception:  # narrowed from a bare except; still logs before aborting
    addMsgAndPrint(arcpy.GetMessages())
    addMsgAndPrint('Failed to parse ' + fXML)
    # Bug fix: the sys.exit() that used to follow this raise was unreachable.
    raise arcpy.ExecuteError
    # Export metadata to BytesProduction directory using old MapPLUTO Production source -------------------------------
    # NOTE(review): fragment -- the enclosing function/scope is not visible
    # in this excerpt.
    print("Beginning requisite metadata file copy")
    Arcdir = arcpy.GetInstallInfo("desktop")["InstallDir"]
    translator = Arcdir + "Metadata/Translator/ARCGIS2FGDC.xml"

    # Source SDE paths (from config) and destination meta folder
    sde_prod_clip_meta = config.get('PATHS', 'MapPLUTO_SDE_Clipped_Path')
    sde_prod_unclip_meta = config.get('PATHS', 'MapPLUTO_SDE_Unclipped_Path')
    m_bytes_prod_meta = os.path.join(temp_env, prod_version, 'meta')

    # Export the clipped metadata only if it is not already present
    if os.path.exists(os.path.join(m_bytes_prod_meta, 'PLUTOmeta.xml')):
        print("PLUTO meta already exists in BytesProduction. Skipping")
    else:
        print("Exporting MapPLUTO clipped metadata files to BytesProduction meta folders")
        arcpy.ExportMetadata_conversion(sde_prod_clip_meta,
                                        translator,
                                        os.path.join(m_bytes_prod_meta, 'PLUTOmeta.xml'))

    # Same again for the unclipped variant
    if os.path.exists(os.path.join(m_bytes_prod_meta, 'PLUTOmeta_unclip.xml')):
        print("PLUTO unclipped meta already exists in BytesProduction. Skipping")
    else:
        print("Exporting MapPLUTO unclipped metadata files to BytesProduction meta folder")
        arcpy.ExportMetadata_conversion(sde_prod_unclip_meta,
                                        translator,
                                        os.path.join(m_bytes_prod_meta, 'PLUTOmeta_unclip.xml'))

    # Modify existing xml fields to update metadata ------------------------------------------------------------------

    print(m_bytes_prod_meta)

    def replace_xml_content(input):
Exemplo n.º 21
0
 def distribute_layer_metadata(in_path, out_path):
     """Export a layer file's FGDC metadata next to its distribution copy.

     The exported XML is written to out_path with '.lyr' replaced by
     '.lyr.xml', using the desktop install's ARCGIS2FGDC translator.
     """
     install_dir = arcpy.GetInstallInfo("desktop")["InstallDir"]
     fgdc_translator = install_dir + "Metadata/Translator/ARCGIS2FGDC.xml"
     print("Exporting xml file on M: drive - {}".format(in_path))
     xml_target = out_path.replace('.lyr', '.lyr.xml')
     arcpy.ExportMetadata_conversion(in_path, fgdc_translator, xml_target)
        # NOTE(review): fragment -- part of a per-feature-class QA loop whose
        # header is not visible in this excerpt.
        # Per-feature-class QA counters
        perCorrect = 0
        numElements = 0
        pieCorrect = 0
        pieIncorrect = 0
        piePartial = 0

        # increment the number of feature classes processed
        numFC = numFC + 1
        print(numFC)

        # variables
        # NOTE(review): "\T" and "\X" are not escape sequences so these
        # literals survive intact, but raw strings would be safer here.
        translator = "C:\\Program Files (x86)\\ArcGIS\\Desktop10.4\\Metadata\\Translator\\ARCGIS2FGDC.xml"
        output_xml = "C:\Temp" + "\XML_" + fc + ".xml"

        # Process: Export Metadata
        arcpy.ExportMetadata_conversion(fc, translator, output_xml)

        # Get the ElementTree object from the XML file; elements are then
        # accessed via CSDGM XPath expressions
        tree = ElementTree()
        tree.parse(output_xml)

        # Put the Feature Class name at top of error file (HTML output)
        errorFile.write("<br>")
        errorFile.write("<b>" + fc + ":" + "</b>")
        errorFile.write("<br>")
        errorFile.write("<br>")

        # CSDGM path to the dataset title; counts as one checked element
        title = tree.find("idinfo/citation/citeinfo/title")
        numElements = numElements + 1
        if title is not None:
            print "Title = " + title.text
Exemplo n.º 23
0
import os.path

#Get the record and path to XMLs
record = raw_input("Enter record number: ")
record_path = raw_input("Enter path to shapefiles: ")

#Static vars
TRANSLATOR = "C:\\Program Files\\ArcGIS\\Desktop10.2\\Metadata\\Translator\\ARCGIS2FGDC.xml"
base_path = "D:\\drive\\Map Library Projects\\MGS\\Records"

#set workspace
arcpy.env.workspace = os.path.join(base_path, record, record_path)
ws = arcpy.env.workspace

#create final_XMLs dir if it doesn't already exist
if os.path.exists(os.path.join(base_path, record, "final_XMLs")) == False:
    arcpy.CreateFolder_management(os.path.join(base_path, record),
                                  "final_XMLs")

#set the output path for export metadata function
OUTPUT = os.path.join(base_path, record, "final_XMLs")

#get a list of all the XMLs
files = arcpy.ListFiles("*.xml")
print files

#loop through XMLs and export the metadata for each to the final_XMLs directory
for f in files:
    arcpy.ExportMetadata_conversion(
        f, TRANSLATOR, os.path.join(OUTPUT, f[:f.find(".")] + ".xml"))
Exemplo n.º 24
0
    # NOTE(review): fragment of a larger try-block; StartTime, sde_path,
    # m_drive_path, sde_xml_dict, translator and log are defined above this
    # excerpt.
    # Collect the existing metadata XML files on the M drive.
    # NOTE(review): xml_list is built but not used in this excerpt —
    # presumably consumed later; verify against the full script.
    xml_list = []

    for xml in os.listdir(m_drive_path):
        if xml.endswith(".xml"):
            xml_list.append(xml)

    arcpy.env.workspace = m_drive_path
    arcpy.env.overwriteOutput = True

    # sde_xml_dict maps an output XML filename to the SDE feature-class name
    # (appended to the GISPROD.SDE. prefix below).  Export each one,
    # continuing past individual failures.
    for xml in sde_xml_dict.keys():
        try:
            print("Exporting metadata from {} to {}".format(sde_path + r"\GISPROD.SDE.{}".format(sde_xml_dict[xml]),
                                                            os.path.join(m_drive_path, xml)))
            arcpy.ExportMetadata_conversion(sde_path + r"\GISPROD.SDE.{}".format(sde_xml_dict[xml]),
                                            translator,
                                            os.path.join(m_drive_path, xml))
        except Exception as e:
            print(e)

    # Reconnect SDE users

    arcpy.AcceptConnections(sde_path, True)

    # Log the total runtime (seconds precision) and close the log file.
    EndTime = datetime.datetime.now().replace(microsecond=0)
    print("Script runtime: {}".format(EndTime - StartTime))

    log.write(str(StartTime) + "\t" + str(EndTime) + "\t" + str(EndTime - StartTime) + "\n")
    log.close()

except:
Exemplo n.º 25
0
    def _review_metadata(self, feature_class):
        """Export a feature class's metadata and grade its completeness.

        Exports metadata for *feature_class* (using the translator stored in
        ``self._metadata_translator``) to a scratch XML file, then checks six
        key ISO 19139 elements: title, summary (purpose), constraints
        (supplemental information), last-updated date, update frequency and
        point of contact.  Sets ``self._hasMetadata`` to "Yes" when all six
        are present, "Partial" when at least one is, and "No" otherwise.
        The scratch XML file is removed afterwards.

        Fix: the original applied a stray unary ``+`` to several ``hasattr()``
        results (``and + hasattr(...)``); ``+True`` is still truthy so
        behavior was unaffected, but the typo is removed here.
        """
        xml_output_path = arcpy.env.scratchWorkspace + "\\featureClass.xml"

        # Remove any stale export left over from a previous run.
        if os.path.isfile(xml_output_path):
            os.remove(xml_output_path)

        arcpy.ExportMetadata_conversion(feature_class, self._metadata_translator, xml_output_path)

        tree = ElementTree()
        with open(xml_output_path, 'r') as xml_file:
            tree.parse(xml_file)
        root = tree.getroot()

        print("Reviewing Metadata")

        # ISO 19139 namespace prefixes used by the exported document; all six
        # elements live under identificationInfo/MD_DataIdentification.
        GMD = "{http://www.isotc211.org/2005/gmd}"
        GCO = "{http://www.isotc211.org/2005/gco}"
        IDENT = GMD + "identificationInfo/" + GMD + "MD_DataIdentification/"

        title = root.find(IDENT + GMD + "citation/" + GMD + "CI_Citation/" +
                          GMD + "title/" + GCO + "CharacterString")

        summary = root.find(IDENT + GMD + "purpose/" + GCO + "CharacterString")

        constraints = root.find(IDENT + GMD + "supplementalInformation/" +
                                GCO + "CharacterString")

        last_updated = root.find(IDENT + GMD + "citation/" + GMD + "CI_Citation/" +
                                 GMD + "date/" + GMD + "CI_Date/" + GMD + "date/" +
                                 GCO + "DateTime")

        update_frequency = root.find(IDENT + GMD + "resourceMaintenance/" +
                                     GMD + "MD_MaintenanceInformation/" +
                                     GMD + "maintenanceAndUpdateFrequency/" +
                                     GMD + "MD_MaintenanceFrequencyCode")

        point_of_contact = root.find(IDENT + GMD + "pointOfContact/" +
                                     GMD + "CI_ResponsibleParty/" +
                                     GMD + "individualName/" +
                                     GCO + "CharacterString")

        # find() returns None for a missing element, so each hasattr() check
        # is simply "was this element found" (Elements always expose .text
        # and .attrib).
        checks = [
            hasattr(title, "text"),
            hasattr(summary, "text"),
            hasattr(constraints, "text"),
            hasattr(last_updated, "text"),
            hasattr(update_frequency, "attrib"),
            hasattr(point_of_contact, "text"),
        ]

        if all(checks):
            self._hasMetadata = "Yes"
        elif any(checks):
            self._hasMetadata = "Partial"
        else:
            self._hasMetadata = "No"

        os.remove(xml_output_path)
Exemplo n.º 26
0
    # NOTE(review): fragment of a larger function; make_note, item_list and
    # scratch_path are defined above this excerpt, and the ISO_ns dict below
    # is truncated at the end of it.
    # Build paths to the ISO 19139 and CSDGM (FGDC) metadata translators
    # shipped with the ArcGIS install.
    make_note("Getting metadata translators...")
    install_dir = arcpy.GetInstallInfo()["InstallDir"]
    translator_ISO = install_dir + "Metadata\\Translator\\ARCGIS2ISO19139.xml"
    translator_CSDGM = install_dir + "Metadata\\Translator\\ARCGIS2FGDC.xml"

    #ANALYZE EACH ITEM
    for i in item_list:
        make_note("Starting on " + i + "...")
        #MAKE GUID-BASED FILENAMES FOR THE TEMPORARY METADATA-FILES (XML)
        # GUID-based names avoid collisions in the shared scratch folder.
        a_GUID = str(uuid.uuid4())
        temp_filename_ISO = a_GUID + "_ISO.xml"
        temp_filename_CSDGM = a_GUID + "_CSDGM.xml"
        #EXPORT ITEM'S METADATA TO TEMPORARY METADATA-FILES (XML)
        make_note("Exporting metadata to temporary metadata-files...")
        #(ISO)
        arcpy.ExportMetadata_conversion(
            i, translator_ISO, os.path.join(scratch_path, temp_filename_ISO))
        #(CSDGM)
        arcpy.ExportMetadata_conversion(
            i, translator_CSDGM, os.path.join(scratch_path,
                                              temp_filename_CSDGM))
        #GET XML ROOT-ELEMENTS AND NAMESPACES (ISO ONLY)
        make_note("Getting metadata-XML root-elements and namespaces...")
        #(ISO)
        ISO_tree = ET.parse(os.path.join(scratch_path, temp_filename_ISO))
        ISO_root = ISO_tree.getroot()
        # Namespace map for ElementTree find() calls against the ISO document.
        ISO_ns = {
            "gmd": "http://www.isotc211.org/2005/gmd",
            "gco": "http://www.isotc211.org/2005/gco",
            "gts": "http://www.isotc211.org/2005/gts",
            "srv": "http://www.isotc211.org/2005/srv",
            "gml": "http://www.opengis.net/gml",
def mainFunction(geodatabase,outputFolder): # Get parameters from ArcGIS Desktop tool by seperating by comma e.g. (var1 is 1st parameter,var2 is 2nd parameter,var3 is 3rd parameter)  
    """Document a geodatabase to a set of CSV files in outputFolder.

    Writes one summary CSV each for datasets, relationship classes and
    domains, plus a per-dataset field-listing CSV and a per-domain
    coded-values CSV.  Dataset/relationship descriptions are pulled from the
    "purpose" element of the FGDC metadata exported for each item.

    NOTE(review): the three summary CSV file handles opened below are never
    explicitly closed; relies on interpreter cleanup.  'wb' open mode for
    csv.writer is Python-2 specific.
    NOTE(review): depends on module-level globals defined elsewhere in the
    file: output, enableLogging, logger, logMessage, sendErrorEmail,
    sendEmail — confirm against the full script.
    """
    try:
        # --------------------------------------- Start of code --------------------------------------- #

        # Get a list of the feature classes and tables in the database
        arcpy.env.workspace = geodatabase
        featureClassList = arcpy.ListFeatureClasses()
        tableList = arcpy.ListTables()
        datasetList = featureClassList + tableList
        
        # Describe the workspace
        descWorkspace = arcpy.Describe(geodatabase)
        workspaceType = descWorkspace.workspaceType

        # Create the CSV files and setup headers
        datasetsCSVFile = open(os.path.join(outputFolder, descWorkspace.baseName + "_Datasets.csv"), 'wb')
        relationshipClassesCSVFile = open(os.path.join(outputFolder, descWorkspace.baseName + "_RelationshipClasses.csv"), 'wb')
        domainsCSVFile = open(os.path.join(outputFolder, descWorkspace.baseName + "_Domains.csv"), 'wb')

        datasetsWriter = csv.writer(datasetsCSVFile, delimiter=",")
        relationshipClassesWriter = csv.writer(relationshipClassesCSVFile, delimiter=",")
        domainsWriter = csv.writer(domainsCSVFile, delimiter=",")
    
        # Add in header information   
        datasetsHeaderRow = ["Name","Description","Dataset Type","Geometry","Spatial Reference","Versioned","Archived"]
        relationshipClassesHeaderRow = ["Name","Description","Cardinality","Origin Feature Class","Destination Feature Class"]
        domainsHeaderRow = ["Name","Description","Domain Type", "Field Type"]
        singleDatasetHeaderRow = ["Field Name","Field Alias Name","Type","Domain","Is Nullable","Is Editable","Length"]
        singleDomainHeaderRow = ["Code","Description"]
        datasetsWriter.writerow(datasetsHeaderRow)
        relationshipClassesWriter.writerow(relationshipClassesHeaderRow)
        domainsWriter.writerow(domainsHeaderRow)
                
        # For each dataset
        for dataset in datasetList:
            # Describe the dataset
            descDataset = arcpy.Describe(dataset)
            datasetName = descDataset.name
            dataType = descDataset.dataType

            arcpy.AddMessage("Documenting dataset - " + datasetName + "...")
            
            # Tables have no geometry or spatial reference.
            if (dataType.lower() == "featureclass"):
                shapeType = descDataset.shapeType
            else:
                shapeType = "Non-Spatial"
                
            if (dataType.lower() == "featureclass"):
                spatialReference = descDataset.spatialReference.name
            else:
                spatialReference = "Non-Spatial"
                
            # Versioning/archiving only apply where the dataset supports it.
            if (descDataset.canVersion == True):
                versionedEnabled = descDataset.isVersioned
            else:
                versionedEnabled = "False"
                
            if (descDataset.canVersion == True):
                archiveEnabled = descDataset.isArchived
            else:
                archiveEnabled = "False"

            # Set the directory for the translator
            installDirectory = arcpy.GetInstallInfo("desktop")["InstallDir"]
            # Use the FGDC to get into clean xml format
            translator = installDirectory + "Metadata\Translator\ARCGIS2FGDC.xml"
            # Export the metadata for the dataset
            arcpy.ExportMetadata_conversion(dataset, translator, os.path.join(arcpy.env.scratchFolder, "Metadata.xml"))
            # Convert file to xml
            tree = ET.ElementTree(file=os.path.join(arcpy.env.scratchFolder, "Metadata.xml"))   
            # Import and reference the xml file
            root = tree.getroot()

            datasetDescription = "No Description"
            # Look at the metadata
            description = root.find("idinfo/descript")
            # If there are description values
            # NOTE(review): Element truthiness here tests for child elements;
            # an empty <descript> is treated as absent.
            if description:
                # Look at the description xml element
                for child in root.find("idinfo/descript"):
                    # Get purpose
                    if (child.tag.lower() == "purpose"):
                        datasetDescription = child.text                

            # If any variables are none
            if (datasetDescription is None):
                datasetDescription = "No Description"
                
            # Write in dataset information
            row = [datasetName,datasetDescription,dataType,shapeType,spatialReference,versionedEnabled,archiveEnabled]
            datasetsWriter.writerow(row)

            # One CSV per dataset listing all its fields.
            with open(os.path.join(outputFolder, "Dataset_" + datasetName + ".csv"), 'wb') as file:
                singleDatasetWriter = csv.writer(file, delimiter=",")                                    
                singleDatasetWriter.writerow(singleDatasetHeaderRow)
                
                # Get a list of fields
                fields = arcpy.ListFields(dataset)
                
                # Iterate through the list of fields
                for field in fields:
                    fieldInfo = [field.name,field.aliasName,field.type,field.domain,field.isNullable,field.editable,field.length]
                    singleDatasetWriter.writerow(fieldInfo)

        # Get a list of domains on the geodatabase
        geodatabaseDomains = arcpy.da.ListDomains(geodatabase)      
        # For each of the domains
        for domain in geodatabaseDomains:
            domainName = domain.name
            domainDescription = domain.description
            domainType = domain.domainType
            domainFieldType = domain.type
            codedValues = domain.codedValues
            arcpy.AddMessage("Documenting domain - " + domainName + "...")            

            # Write in domain information
            row = [domainName,domainDescription,domainType,domainFieldType]
            domainsWriter.writerow(row)

            # One CSV per domain listing its coded values.
            with open(os.path.join(outputFolder, "Domain_" + domainName + ".csv"), 'wb') as file:
                singleDomainWriter = csv.writer(file, delimiter=",")                                    
                singleDomainWriter.writerow(singleDomainHeaderRow)

                for codedValue in codedValues:
                    domainValue = codedValue
                    domainDescription = codedValues[codedValue]
                    domainInfo = [domainValue,domainDescription]
                    singleDomainWriter.writerow(domainInfo)

        # Get a list of relationship classes in the geodatabase
        # For each relationship class
        for dataset in arcpy.Describe(geodatabase).children:
            if dataset.datatype == "RelationshipClass":    
                rcName = dataset.name
                rcCardinality = dataset.cardinality
                rcOriginClassNames = dataset.originClassNames[0]
                rcDestinationClassNames = dataset.destinationClassNames[0]

                # Set the directory for the translator
                installDirectory = arcpy.GetInstallInfo("desktop")["InstallDir"]
                # Use the FGDC to get into clean xml format
                translator = installDirectory + "Metadata\Translator\ARCGIS2FGDC.xml"
                # Export the metadata for the dataset
                arcpy.ExportMetadata_conversion(dataset.catalogPath, translator, os.path.join(arcpy.env.scratchFolder, "Metadata.xml"))
                # Convert file to xml
                tree = ET.ElementTree(file=os.path.join(arcpy.env.scratchFolder, "Metadata.xml"))   
                # Import and reference the xml file
                root = tree.getroot()

                rcDescription = "No Description"
                # Look at the metadata
                description = root.find("idinfo/descript")
                # If there are description values
                if description:
                    # Look at the description xml element
                    for child in root.find("idinfo/descript"):
                        # Get purpose
                        if (child.tag.lower() == "purpose"):
                            rcDescription = child.text                

                # If any variables are none
                if (rcDescription is None):
                    rcDescription = "No Description"

                arcpy.AddMessage("Documenting relationship class - " + rcName + "...")  
                
                # Write in relationship class information
                row = [rcName,rcDescription,rcCardinality,rcOriginClassNames,rcDestinationClassNames]
                relationshipClassesWriter.writerow(row)
                
        # --------------------------------------- End of code --------------------------------------- #  
            
        # If called from gp tool return the arcpy parameter   
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                arcpy.SetParameterAsText(1, output)
        # Otherwise return the result          
        else:
            # Return the output if there is any
            if output:
                return output      
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file            
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        pass
    # If arcpy error
    except arcpy.ExecuteError:           
        # Build and show the error message
        errorMessage = arcpy.GetMessages(2)   
        arcpy.AddError(errorMessage)           
        # Logging
        if (enableLogging == "true"):
            # Log error          
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")            
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
    # If python error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message
        # (Python-2 unicode handling: join all exception args as UTF-8.)
        for i in range(len(e.args)):
            if (i == 0):
                errorMessage = unicode(e.args[i]).encode('utf-8')
            else:
                errorMessage = errorMessage + " " + unicode(e.args[i]).encode('utf-8')
        arcpy.AddError(errorMessage)              
        # Logging
        if (enableLogging == "true"):
            # Log error            
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")            
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)            
Exemplo n.º 28
0
OUTPUTDIR = os.path.abspath(r"Output")
print OUTPUTDIR
BASEGDBDIR = r"\\ontodar\URBIS\HSIP_GOLD_2015\Data\Infrastructure"

#set local variables
dir = arcpy.GetInstallInfo("desktop")["InstallDir"]
translator = dir + r"Metadata\Translator\ARCGIS2FGDC.xml"

# find all of the gdbs
for root, dirs, files in os.walk(BASEGDBDIR):
    for d in dirs:
        if d.endswith(".gdb"):
            gdboutputdir = op.join(OUTPUTDIR, d.split('.')[0])
            if not op.exists(gdboutputdir):
                os.mkdir(gdboutputdir)
            arcpy.env.workspace = op.join(root, d)
            print "Processing", op.join(root, d)
            for fds in arcpy.ListDatasets('', 'feature') + ['']:
                for fc in arcpy.ListFeatureClasses('', '', fds):
                    print "."
                    if not op.exists(
                            op.join(gdboutputdir, "{0}.xml".format(fc))):
                        print fc
                        print translator
                        print op.join(gdboutputdir, "{0}.xml".format(fc))
                        arcpy.ExportMetadata_conversion(
                            fc, translator,
                            op.join(gdboutputdir, "{0}.xml".format(fc)))

# In[ ]:
    # NOTE(review): notebook-cell fragment of a larger script; sde_pops_path,
    # current_meta_dir, config and current_csv_dir are defined above this
    # excerpt, and the final loop body is truncated below it.
    # Transfer metadata from Current SDE Production feature class

    # Build paths to the FGDC translator and the cleanup stylesheets shipped
    # with ArcGIS Desktop.
    print("Setting arcdir")
    Arcdir = arcpy.GetInstallInfo("desktop")["InstallDir"]
    translator = Arcdir + "Metadata/Translator/ARCGIS2FGDC.xml"
    remove_geoprocess_xslt = Arcdir + "Metadata/Stylesheets/gpTools/remove geoprocessing history.xslt"
    remove_lcl_storage_xslt = Arcdir + "Metadata/Stylesheets/gpTools/remove local storage info.xslt"
    print("Arcdir set")

    # Export last cycle's POPS metadata, upgrade it to the ArcGIS format, and
    # strip the geoprocessing history in place.
    print("Exporting previous cycle's POPS metadata")
    arcpy.env.workspace = current_meta_dir
    arcpy.env.overwriteOutput = True
    if arcpy.Exists(sde_pops_path):
        arcpy.ExportMetadata_conversion(
            sde_pops_path, translator,
            os.path.join(current_meta_dir, "nyc_pops_meta.xml"))
        print("Previous cycle's POPS metadata exported")
        arcpy.UpgradeMetadata_conversion(
            os.path.join(current_meta_dir, 'nyc_pops_meta.xml'),
            'FGDC_TO_ARCGIS')
        arcpy.XSLTransform_conversion(
            os.path.join(current_meta_dir, 'nyc_pops_meta.xml'),
            remove_geoprocess_xslt,
            os.path.join(current_meta_dir, 'nyc_pops_meta.xml'))

    # Convert csv to shapefile

    spatial_ref = config.get('PATHS', 'spatial_ref')

    # NOTE(review): loop variable shadows any imported ``csv`` module name;
    # body continues beyond this excerpt.
    for csv in os.listdir(current_csv_dir):
Exemplo n.º 30
0
 def copy_modify_fc(fc, gdb_path):
     """Copy a dataset from a file gdb into SDE and refresh its metadata.

     Copies *fc* (feature class or table) from *gdb_path* into sde_path as
     "CSCL_<fc>", exports its metadata, strips geoprocessing history via an
     XSLT, removes the unmaintained publication date, appends the download
     date to the purpose text, then re-imports and upgrades the metadata on
     the SDE copy.

     Relies on names from the enclosing scope: sde_path, translator,
     stylesheet, metadata_path, today_longform.

     NOTE(review): the FeatureClass and Table branches below are identical
     except for the copy tool used — candidate for a shared helper.
     """
     arcpy.env.workspace = gdb_path
     arcpy.env.overwriteOutput = True
     desc = arcpy.Describe(fc)
     if hasattr(desc, "dataType"):
         print("Data set Data Type - {}".format(desc.dataType))
         if desc.dataType == "FeatureClass":
             print("Copying {} to SDE".format(fc))
             arcpy.env.workspace = sde_path
             arcpy.env.overwriteOutput = True
             arcpy.FeatureClassToFeatureClass_conversion(os.path.join(gdb_path, fc), sde_path, "CSCL_{}".format(fc))
             print("{} copy complete".format(fc))
             # Export FGDC metadata from the freshly copied SDE dataset.
             arcpy.ExportMetadata_conversion(os.path.join(sde_path, "CSCL_{}".format(fc)),
                                             translator,
                                             os.path.join(metadata_path, "{}.xml".format(fc)))
             print("Exporting metadata with geoprocessing history removed")
             arcpy.XSLTransform_conversion(os.path.join(metadata_path, "{}.xml".format(fc)),
                                           stylesheet,
                                           os.path.join(metadata_path, "{}_xslt.xml".format(fc)))
             print("Metadata exported")
             tree = ET.parse(os.path.join(metadata_path, "{}_xslt.xml".format(fc)))
             root = tree.getroot()
             print("Removing Publication Date since it is not currently maintained")
             for citeinfo in root.iter("citeinfo"):
                 for pubdate in citeinfo.findall("pubdate"):
                     citeinfo.remove(pubdate)
             print("Publication Date removed")
             print("Appending download date to metadata description")
             for descrip in root.iter("purpose"):
                 descrip.text = descrip.text + " Dataset Last Downloaded: {}".format(today_longform)
             tree.write(os.path.join(metadata_path, "{}_xslt_moded.xml".format(fc)))
             print("Download date appended to metadata description")
             print("Importing altered metadata to SDE")
             arcpy.MetadataImporter_conversion(os.path.join(metadata_path, "{}_xslt_moded.xml".format(fc)),
                                               os.path.join(sde_path, "CSCL_{}".format(fc)))
             print("Metadata imported")
             arcpy.UpgradeMetadata_conversion(os.path.join(sde_path, "CSCL_{}".format(fc)), "FGDC_TO_ARCGIS")
             print("Metadata upgraded")
         if desc.dataType == "Table":
             print("Copying {} to SDE".format(fc))
             arcpy.env.workspace = sde_path
             arcpy.env.overwriteOutput = True
             arcpy.TableToTable_conversion(os.path.join(gdb_path, fc), sde_path, "CSCL_{}".format(fc))
             print("{} copy complete".format(fc))
             # Same metadata pipeline as the FeatureClass branch above.
             arcpy.ExportMetadata_conversion(os.path.join(sde_path, "CSCL_{}".format(fc)),
                                             translator,
                                             os.path.join(metadata_path, "{}.xml".format(fc)))
             print("Exporting metadata with geoprocessing history removed")
             arcpy.XSLTransform_conversion(os.path.join(metadata_path, "{}.xml".format(fc)),
                                           stylesheet,
                                           os.path.join(metadata_path, "{}_xslt.xml".format(fc)))
             print("Metadata exported")
             tree = ET.parse(os.path.join(metadata_path, "{}_xslt.xml".format(fc)))
             root = tree.getroot()
             print("Removing Publication Date since it is not currently maintained")
             for citeinfo in root.iter("citeinfo"):
                 for pubdate in citeinfo.findall("pubdate"):
                     citeinfo.remove(pubdate)
             print("Publication Date removed")
             print("Appending download date to metadata description")
             for descrip in root.iter("purpose"):
                 descrip.text = descrip.text + " Dataset Last Downloaded: {}".format(today_longform)
             tree.write(os.path.join(metadata_path, "{}_xslt_moded.xml".format(fc)))
             print("Download date appended to metadata description")
             print("Importing altered metadata to SDE")
             arcpy.MetadataImporter_conversion(os.path.join(metadata_path, "{}_xslt_moded.xml".format(fc)),
                                               os.path.join(sde_path, "CSCL_{}".format(fc)))
             print("Metadata imported")
             arcpy.UpgradeMetadata_conversion(os.path.join(sde_path, "CSCL_{}".format(fc)), "FGDC_TO_ARCGIS")
             print("Metadata upgraded")