Example #1
def veg_metadata(path, template, place_dict, SITE):
    bname = os.path.split(path)[1]
    words = bname.split('_')
    if words[0] == 'protected':
        PROTECTED = True
        SLR = words[1]
        YEAR = words[2]
        TYPE = "Protected Vegetation"
    else:
        PROTECTED = False
        SLR = words[0]
        YEAR = words[1]
        TYPE = "Vegetation"
    arcpy.MetadataImporter_conversion(template, path)
    try:
        tree = ET.parse(path + '.xml')
    except IOError:
        print "File read error for ", path
        return -1
    root = tree.getroot()
    set_place_kwords(root, SITE)
    set_title(root, SITE, SLR, YEAR, TYPE)
    set_temp_kwords(root, YEAR)
    if PROTECTED:
        set_protected(root)
    try:
        tree.write(path + '.xml')
    except IOError:
        print "File write error for ", path
        return -1
    return 1
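A hypothetical call to the function above, assuming the basename convention its split implies (an optional 'protected_' prefix, then SLR, then year); every path and name here is a placeholder:

# Hypothetical usage sketch (placeholder paths/names):
veg_metadata(r'C:\output\protected_1ft_2050_veg',  # basename -> protected, 1ft, 2050
             r'C:\templates\veg_template.xml',     # metadata template to import
             place_keyword_dict,                   # e.g. from getKeywords(...)
             'Example Site')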
Example #2
def GetMetadataElementTree(dataset):
    """Creates and returns an ElementTree object from the specified
    dataset's metadata"""
    xmlfile = CreateDummyXMLFile()
    arcpy.MetadataImporter_conversion(dataset, xmlfile)
    tree = ElementTree()
    tree.parse(xmlfile)
    os.remove(xmlfile)
    return tree
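Example #2 depends on a CreateDummyXMLFile helper that is not shown; a minimal sketch, following the temporary-file pattern used in Example #3:

import tempfile

def CreateDummyXMLFile():
    # Write a stub metadata document to a temp file and return its path
    # (a sketch of the assumed helper, not the original implementation).
    with tempfile.NamedTemporaryFile(mode='w', suffix='.xml',
                                     delete=False) as fout:
        fout.write('<metadata />')
    return fout.name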
Example #3
 def load(self, path):
     if os.path.isfile(path):
         tmpPath = path
     else:
         with tempfile.NamedTemporaryFile(mode='w',
                                          suffix='.xml',
                                          delete=False) as fout:
             fout.write('<metadata />')
             tmpPath = fout.name
         import arcpy
         arcpy.MetadataImporter_conversion(path, tmpPath)
     self.load_from_xml(tmpPath)
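How the loader might be called, assuming it is a method of a metadata wrapper class that also defines load_from_xml() (the class name below is a placeholder):

md = MetadataWrapper()                 # hypothetical wrapper class
md.load(r'C:\data\record.xml')         # existing XML file: parsed directly
md.load(r'C:\data\city.gdb\Parcels')   # dataset: metadata copied to a temp XML first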
Example #4
    def export_featureclass(input_path, output_name):
        print("Upgrading downloaded metadata to ArcGIS standard")
        arcpy.UpgradeMetadata_conversion(input_path, 'FGDC_TO_ARCGIS')
        print("Downloaded metadata upgraded to ArcGIS standard")
        print("Overwriting original metadata with DCP standard")
        arcpy.MetadataImporter_conversion(
            os.path.join(template_path, '{}.xml'.format(output_name)),
            input_path)
        print("Original metadata overwritten")
        tree = ET.parse('{}.xml'.format(input_path))
        root = tree.getroot()
        for title in root.iter('title'):
            title.text = output_name.replace('_', ' ')
        for pubdate_fgdc in root.iter('pubdate'):
            pubdate_fgdc.text = last_update_date_str
        for descrip_fgdc in root.iter('abstract'):
            descrip_fgdc.text += ' Dataset last updated: {}. Dataset last downloaded: {}'.format(
                last_update_date_meta, today)

        print("Writing updated metadata to {}".format(input_path))
        tree.write('{}.xml'.format(input_path))
        print("Metadata update complete for {}".format(input_path))
        print("Upgrading metadata format for {}".format(input_path))
        arcpy.UpgradeMetadata_conversion(input_path, 'FGDC_TO_ARCGIS')
        print("Metadata format upgraded for {}".format(input_path))
        print("Exporting shapefile to SDE PROD as {}".format(output_name))
        arcpy.FeatureClassToFeatureClass_conversion(input_path, sde_path,
                                                    output_name)
        print("Removing local storage info")
        arcpy.XSLTransform_conversion(
            os.path.join(sde_path, output_name), xslt_storage,
            os.path.join(zip_dir_path, '{}_storage.xml'.format(input_path)))
        arcpy.XSLTransform_conversion(
            os.path.join(zip_dir_path, '{}_storage.xml'.format(input_path)),
            xslt_geoprocess,
            os.path.join(zip_dir_path, '{}_geoprocess.xml'.format(input_path)))
        print("Importing final metadata to {}".format(output_name))
        arcpy.MetadataImporter_conversion(
            os.path.join(zip_dir_path, "{}_geoprocess.xml".format(input_path)),
            os.path.join(sde_path, output_name))
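The function above references several module-level names defined elsewhere in its script; plausible placeholder definitions (values are illustrative only):

import datetime

template_path = r'C:\metadata\templates'      # folder of DCP template XMLs
zip_dir_path = r'C:\temp\downloads'           # scratch folder for XML output
sde_path = r'C:\connections\SDE_PROD.sde'     # target SDE workspace
xslt_storage = r'C:\xslt\remove_local_storage.xslt'
xslt_geoprocess = r'C:\xslt\remove_gp_history.xslt'
today = datetime.date.today().strftime('%Y-%m-%d')
last_update_date_str = '20240101'             # last update of the source data
last_update_date_meta = 'January 1, 2024'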
Example #5
def RemoveUnwantedMetadata(fgdb, fc):

    # set environments & variables
    arcpy.env.workspace = fgdb
    # the full path of feature class is passed, and we need the base name
    featureClass = os.path.basename(fc)
    # the below construction makes it installation independent
    installDir = arcpy.GetInstallInfo()['InstallDir']
    ssPath1 = 'Metadata\\Stylesheets\\gpTools\\remove geoprocessing history.xslt'
    ssPath2 = 'Metadata\\Stylesheets\\gpTools\\remove local storage info.xslt'

    xsltPath1 = os.path.join(installDir, ssPath1)
    xsltPath2 = os.path.join(installDir, ssPath2)

    # delete the output directory if it already exists so you don't have to overwrite
    outXml = 'c:\\XML_out'
    if os.path.exists(outXml):
        shutil.rmtree(outXml)
    os.mkdir(outXml)

    # Remove geoprocessing history and local machine name/paths

    # output xml for removing gp history
    nameXml1 = outXml + os.sep + str(featureClass) + ".xml"
    # output xml for removing local storage info
    nameXml2 = outXml + os.sep + str(featureClass) + "2.xml"

    try:
        arcpy.XSLTransform_conversion(featureClass, xsltPath1, nameXml1, "")
        arcpy.MetadataImporter_conversion(nameXml1, featureClass)
        arcpy.XSLTransform_conversion(featureClass, xsltPath2, nameXml2, "")
        arcpy.MetadataImporter_conversion(nameXml2, featureClass)
        return (True, "Succeeded")
    except Exception as e:
        error = "Failed: metadata removal for " + str(fc) + " " + str(e)
        return (False, error)
    finally:
        # clean up
        shutil.rmtree(outXml)
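A hypothetical call that checks the (success, message) tuple the function returns; paths are placeholders:

ok, msg = RemoveUnwantedMetadata(r'C:\data\city.gdb',
                                 r'C:\data\city.gdb\Parcels')
if not ok:
    print(msg)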
Example #6
 def save(self, path=None):
     if path is None: path = self.datasetPath
     if os.path.isfile(path) or (os.path.isdir(os.path.dirname(path))
                                 and path.endswith('.xml')):
         tmpPath = path
     else:
         with tempfile.NamedTemporaryFile(mode='w',
                                          suffix='.xml',
                                          delete=False) as fout:
             tmpPath = fout.name
     self.save_to_xml(tmpPath)
     if not os.path.isfile(path):
         import arcpy
         arcpy.MetadataImporter_conversion(tmpPath, path)
Example #7
 def export_reduced_featureclass(input_path, output_name):
     print("Exporting Building Footprint - BIN only feature class to SDE PROD")
     if arcpy.Exists(input_path):
         print("Adding requisite fields to output feature class")
         fms = arcpy.FieldMappings()
         fm = arcpy.FieldMap()
         fm.addInputField(input_path, "BIN")
         fms.addFieldMap(fm)
         print("Requisite fields added to output feature class")
         print("Exporting reduced NYC Building Footprint Polygon feature class on SDE PROD")
         arcpy.FeatureClassToFeatureClass_conversion(input_path, sde_path, output_name, field_mapping=fms)
         print("Adding Index to BIN field")
         arcpy.AddIndex_management(input_path, ['BIN'], 'BIN_Index')
         print("Reduced NYC Building Footprint Polygon feature class exported to SDE PROD")
         arcpy.MetadataImporter_conversion(os.path.join(sde_path, 'NYC_Building_Footprints_Poly'),
                                           os.path.join(sde_path, output_name))
Example #8
def export_metadata():
    """Exports the feature class metadata to an xml file
    
    Returns:
        None
    """

    folder = 'metadata'
    name = get_dataset_filename()

    # Create a metadata folder in the temp directory if it does not exist
    temp_working_folder = os.path.join(temp_workspace, folder)
    create_folder(temp_working_folder, True)

    # Set the destination of the metadata export
    source = staging_feature_class
    raw_metadata_export = os.path.join(temp_working_folder, name + "_raw.xml")

    # Export the metadata
    arcpy.env.workspace = temp_working_folder
    installDir = arcpy.GetInstallInfo("desktop")["InstallDir"]
    translator = installDir + "Metadata/Translator/ARCGIS2FGDC.xml"
    arcpy.ExportMetadata_conversion(source, translator, raw_metadata_export)

    # Process: XSLT Transformation to remove any sensitive info or format
    destination = os.path.join(temp_working_folder, name + ".xml")
    if os.path.exists(args.metadata_xslt):
        logger.info("Applying metadata XSLT: " + args.metadata_xslt)
        arcpy.XSLTransform_conversion(raw_metadata_export, args.metadata_xslt,
                                      destination, "")

        # Reimport the clean metadata into the FGDB
        logger.debug("Reimporting metadata to file geodatabase " + destination)
        arcpy.MetadataImporter_conversion(destination, staging_feature_class)
    else:
        # If no transformation exists, just rename and publish the raw metadata
        logger.warn("Metadata XSLT not found")
        os.rename(raw_metadata_export, destination)

    # Publish the metadata to the download folder
    publish_file(temp_working_folder, name + ".xml", "metadata")
Example #9
def probability_metadata(path, template, place_dict, SITE):
    bname = os.path.split(path)[1]
    words = bname.split('_')
    SLR = words[0]
    YEAR = words[1]
    TYPE = "Monte Carlo Probability"
    arcpy.MetadataImporter_conversion(template, path)
    try:
        tree = ET.parse(path + '.xml')
    except IOError:
        print "File read error for ", path
        return -1
    root = tree.getroot()
    set_place_kwords(root, SITE)
    set_title(root, SITE, SLR, YEAR, TYPE)
    set_temp_kwords(root, YEAR)
    try:
        tree.write(path + '.xml')
    except IOError:
        print "File write error for ", path
        return -1
    return 1
Example #10
# =============================================================================
# importType = arcpy.GetParameterAsText(2) # "FROM_ARCGIS"
# autoUpdate = arcpy.GetParameterAsText(3) # "ENABLED"
#
#
# targetGDB = r"C:\Users\stevenconnorg\Documents\knight-federal-solutions\AF_Installation_Feedback\DataPackage\NEW_CIP\GeoBASE_3101_CIP_FINAL_20180502.gdb"
# sourceGDB = r"C:\Users\stevenconnorg\Documents\knight-federal-solutions\AF_Installation_Feedback\DataPackage\NEW_CIP\GeoBASE_3101_CIP_FINAL_with_metadata.gdb"
# importType = "FROM_ARCGIS"
# autoUpdate = "ENABLED"
#
#
# =============================================================================
#image = r"C:\Users\stevenconnorg\Documents\knight-federal-solutions\AF_Installation_Feedback\DataPackage\thumbnail.
arcpy.env.workspace = targetGDB
FDSs = arcpy.ListDatasets()
arcpy.MetadataImporter_conversion(source=sourceGDB, target=targetGDB)

if not FDSs:
    FCs = arcpy.ListFeatureClasses()
    if not FCs:
        arcpy.AddMessage("No feature classes found in " +
                         os.path.join(sourceGDB) + " to import!")
    else:
        for fc in FCs:
            if arcpy.Exists(os.path.join(sourceGDB, fc)):
                arcpy.AddMessage("Importing " + os.path.join(sourceGDB, fc) +
                                 " to " + fc)
                arcpy.MetadataImporter_conversion(
                    source=os.path.join(sourceGDB, fc),
                    target=os.path.join(targetGDB, fc))
                arcpy.SynchronizeMetadata_conversion(
                    source=os.path.join(targetGDB, fc), synctype="ALWAYS")
Example #11
def UpdateFCMetadata(fcPathName, metaConnStr):
    # Set connection strings and get other settings from configuration file
    parser = ConfigParser.SafeConfigParser()
    parser.read('LibMgr.ini')
    disclaimerFile = parser.get('Metadata', 'disclaimerFile')
    idCredit = parser.get('Metadata', 'idCredit')
    constraint_useLimit = parser.get('Metadata', 'constraint_useLimit')
    organization = parser.get('Metadata', 'organization')
    timeperd_current = parser.get('Metadata', 'timeperd_current')
    addrtype = parser.get('Metadata', 'addrtype')
    address = parser.get('Metadata', 'address')
    city = parser.get('Metadata', 'city')
    state = parser.get('Metadata', 'state')
    zip = parser.get('Metadata', 'zip')
    country = parser.get('Metadata', 'country')
    phone = parser.get('Metadata', 'phone')
    librarian = parser.get('Metadata', 'librarian')
    thumbnailsPath = parser.get('Metadata', 'thumbnailsPath')

    num_elements = 0
    featName = fcPathName.split('.')[-1]
    conn = odbcconn(metaConnStr)
    metaqry = 'SELECT [FULL_NAME],[COVER_NAME],[ABSTRACT],[UPDATEDATE],[OWNERNAME]' +\
                    ',[PATH],[METAACCESS],[ONMAINT],[MAINTFREQ],[KNOWNERROR],[LINEAGE]' +\
                    ',[DOMAIN],[RECTIFIED],[MAINTORG],[MAINTDESC],[LIBINPUT],[SOURCNAME]' +\
                    ',[SOURCCONTACT],[SOURCDOCNAME],[SOURCDATE],[SOURCSCALE],[SOURCFORMAT]' +\
                    ',[SOUR2NAME],[SOUR2CONTACT],[SOUR2DOCNAME],[SOUR2DATE],[SOUR2SCALE]' +\
                    ',[SOUR2FORMAT],[ONMG],[MGLAYERNAME],[MGSCALELOW],[MGSCALEHIGH] ' +\
                    'FROM [dbo].[metadata] WHERE [COVER_NAME] = \'' + featName + '\''
    df_FCMeta = readsqlqry(
        metaqry, conn)  # pandas 0.19.1 load query result to pandas dataframe
    df_row = df_FCMeta.iloc[0]

    qry = 'SELECT [FieldName] AS \'ndx\',[FieldName],[Description] FROM [dbo].[master_metafield] WHERE [CoverName] = \'' + featName + '\''
    df_fieldMeta = readsqlqry(
        qry, conn,
        index_col='ndx')  # pandas 0.19.1 load query result to pandas dataframe

    arcpy.env.overwriteOutput = True

    #    install location
    dir = arcpy.GetInstallInfo('desktop')['InstallDir']

    #    stylesheet to use
    copy_xslt = os.path.join(dir, 'Metadata', 'Stylesheets', 'gpTools',
                             'exact copy of.xslt')

    #    temporary XML file
    xmlfile = arcpy.CreateScratchName('.xml',
                                      workspace=arcpy.env.scratchFolder)

    # export xml
    arcpy.XSLTransform_conversion(fcPathName, copy_xslt, xmlfile, '')

    # read in XML
    tree = ET.parse(xmlfile)
    root = tree.getroot()

    # build the supplemental info string
    sSuppInfo = BuildMetadataSupString(df_row)

    # get the dataIdInfo element
    dataIdInfoEl = root.find('dataIdInfo')

    # dataIdInfo purpose element
    subEl = ET.SubElement(dataIdInfoEl, 'idPurp')
    subEl.text = df_row['FULL_NAME']
    num_elements += 1

    # dataIdInfo abstract element
    subEl = ET.SubElement(dataIdInfoEl, 'idAbs')
    subEl.text = df_row['ABSTRACT'] + sSuppInfo
    num_elements += 1

    # dataIdInfo access constraint element
    subEl = ET.SubElement(dataIdInfoEl, 'accconst')
    subEl.text = df_row['METAACCESS']
    num_elements += 1

    # dataIdInfo credit element
    subEl = ET.SubElement(dataIdInfoEl, 'idCredit')
    subEl.text = idCredit
    num_elements += 1

    # dataIdInfo maintenance frequency element
    subEl = ET.SubElement(dataIdInfoEl, 'resMaint')
    subEl = ET.SubElement(subEl, 'usrDefFreq')
    subEl = ET.SubElement(subEl, 'duration')
    subEl.text = df_row['MAINTFREQ']
    num_elements += 1

    # dataIdInfo use limit element
    subEl = ET.SubElement(dataIdInfoEl, 'resConst')
    subEl = ET.SubElement(subEl, 'Consts')
    subEl = ET.SubElement(subEl, 'useLimit')
    subEl.text = constraint_useLimit
    num_elements += 1

    # dataIdInfo keyword elements obtained from FULL_NAME
    searchKeysEl = ET.SubElement(dataIdInfoEl, 'searchKeys')
    keywords = df_row['FULL_NAME'].split(' ')
    for keyword in keywords:
        newKeyEl = ET.SubElement(searchKeysEl, 'keyword')
        newKeyEl.text = keyword
        num_elements += 1

    # create the idInfo element
    idInfoEl = ET.SubElement(root, 'idInfo')

    # idinfo use constraint element
    with open(disclaimerFile,
              'r') as file:  # read the disclaimer text file to a string
        disclaimer = file.read()
    subEl = ET.SubElement(idInfoEl, 'useconst')
    subEl.text = disclaimer
    num_elements += 1

    # idinfo citation onlink element
    # remove the server name portion of the path
    path = df_row['PATH'].split('\\')[2]
    pathRoot = '\\\\' + path + '\\'
    onlink = df_row['PATH'].replace(pathRoot, '')

    subEl = ET.SubElement(idInfoEl, 'citation')
    citeinfoEl = ET.SubElement(subEl, 'citeinfo')
    subEl = ET.SubElement(subEl, 'onlink')
    subEl.text = onlink
    num_elements += 1

    # idinfo citation origin element
    subEl = ET.SubElement(citeinfoEl, 'origin')
    subEl.text = organization
    num_elements += 1

    # idinfo citation pubdate element
    subEl = ET.SubElement(citeinfoEl, 'pubdate')
    subEl.text = datetime.datetime.now().strftime("%B %d, %Y")
    num_elements += 1

    # create the idInfo timeperd element
    timeperdEl = ET.SubElement(idInfoEl, 'timeperd')

    # idinfo timeperd update date comment element
    subEl = ET.SubElement(timeperdEl, 'current')
    subEl.text = timeperd_current
    num_elements += 1

    # idinfo timeperd update date element
    subEl = ET.SubElement(timeperdEl, 'timeinfo')
    subEl = ET.SubElement(subEl, 'sngdate')
    subEl = ET.SubElement(subEl, 'caldate')
    subEl.text = df_row['UPDATEDATE']
    num_elements += 1

    # create the idInfo descript element
    descriptEl = ET.SubElement(idInfoEl, 'descript')

    # idinfo descript abstract element
    subEl = ET.SubElement(descriptEl, 'abstract')
    subEl.text = df_row['ABSTRACT']
    num_elements += 1

    # idinfo descript purpose element
    subEl = ET.SubElement(descriptEl, 'purpose')
    subEl.text = df_row['FULL_NAME']
    num_elements += 1

    # idinfo descript supplinf element
    subEl = ET.SubElement(descriptEl, 'supplinf')
    subEl.text = sSuppInfo
    num_elements += 1

    # idinfo keywords themekey element
    subEl = ET.SubElement(idInfoEl, 'keywords')
    subEl = ET.SubElement(subEl, 'theme')
    subEl = ET.SubElement(subEl, 'themekey')
    subEl.text = df_row['FULL_NAME']
    num_elements += 1

    # create the idInfo point of contact elements
    subEl = ET.SubElement(idInfoEl, 'ptcontac')
    cntinfoEl = ET.SubElement(subEl, 'cntinfo')
    cntperpEl = ET.SubElement(cntinfoEl, 'cntperp')
    cntaddrEl = ET.SubElement(cntinfoEl, 'cntaddr')

    # idinfo point of contact person element
    subEl = ET.SubElement(cntperpEl, 'cntper')
    subEl.text = df_row['OWNERNAME']
    num_elements += 1

    # idinfo point of contact organization element
    subEl = ET.SubElement(cntperpEl, 'cntorg')
    subEl.text = organization
    num_elements += 1

    # idinfo point of contact address type element
    subEl = ET.SubElement(cntaddrEl, 'addrtype')
    subEl.text = addrtype
    num_elements += 1

    # idinfo point of contact address element
    subEl = ET.SubElement(cntaddrEl, 'address')
    subEl.text = address
    num_elements += 1

    # idinfo point of contact city element
    subEl = ET.SubElement(cntaddrEl, 'city')
    subEl.text = city
    num_elements += 1

    # idinfo point of contact state element
    subEl = ET.SubElement(cntaddrEl, 'state')
    subEl.text = state
    num_elements += 1

    # idinfo point of contact zip element
    subEl = ET.SubElement(cntaddrEl, 'postal')
    subEl.text = zip
    num_elements += 1

    # idinfo point of contact country element
    subEl = ET.SubElement(cntaddrEl, 'country')
    subEl.text = country
    num_elements += 1

    # idinfo point of contact phone element
    subEl = ET.SubElement(cntinfoEl, 'cntvoice')
    subEl.text = phone
    num_elements += 1

    # create the metainfo point of contact elements
    metainfoEl = ET.SubElement(root, 'metainfo')
    subEl = ET.SubElement(metainfoEl, 'metc')
    cntinfoEl = ET.SubElement(subEl, 'cntinfo')
    cntorgpEl = ET.SubElement(cntinfoEl, 'cntorgp')
    cntaddrEl = ET.SubElement(cntinfoEl, 'cntaddr')

    # metainfo point of contact person element
    subEl = ET.SubElement(cntorgpEl, 'cntper')
    subEl.text = librarian
    num_elements += 1

    # metainfo point of contact organization element
    subEl = ET.SubElement(cntorgpEl, 'cntorg')
    subEl.text = df_row['OWNERNAME'] + '\n' + organization
    num_elements += 1

    # metainfo point of contact address type element
    subEl = ET.SubElement(cntaddrEl, 'addrtype')
    subEl.text = addrtype
    num_elements += 1

    # metainfo point of contact address element
    subEl = ET.SubElement(cntaddrEl, 'address')
    subEl.text = address
    num_elements += 1

    # metainfo point of contact city element
    subEl = ET.SubElement(cntaddrEl, 'city')
    subEl.text = city
    num_elements += 1

    # metainfo point of contact state element
    subEl = ET.SubElement(cntaddrEl, 'state')
    subEl.text = state
    num_elements += 1

    # metainfo point of contact zip element
    subEl = ET.SubElement(cntaddrEl, 'postal')
    subEl.text = zip
    num_elements += 1

    # metainfo point of contact country element
    subEl = ET.SubElement(cntaddrEl, 'country')
    subEl.text = country
    num_elements += 1

    # metainfo point of contact phone element
    subEl = ET.SubElement(cntinfoEl, 'cntvoice')
    subEl.text = phone
    num_elements += 1

    # idinfo maintenance status element
    statusEl = ET.SubElement(idInfoEl, 'status')
    subEl = ET.SubElement(statusEl, 'progress')
    if df_row['ONMAINT'] and df_row['ONMAINT'].upper() == 'Y':
        subEl.text = 'Maintained'
        num_elements += 1
    else:
        subEl.text = 'Not Maintained'
        num_elements += 1

    # idinfo maintenance frequency element
    subEl = ET.SubElement(statusEl, 'update')
    subEl.text = df_row['MAINTFREQ']
    num_elements += 1

    # add descriptions from library metadata table master_metafields to the feature class fields
    attrEls = root.findall('eainfo/detailed/attr')
    for attrEl in attrEls:  # iterate feature class fields
        lablEl = attrEl.find('attrlabl')  # find the attribute name element
        if lablEl is not None:  # for unknown reason, the root.findall sometimes gets attributes that are empty
            fldname = lablEl.text
            try:
                descrip = df_fieldMeta.loc[fldname][
                    'Description']  # get the field description from the dataframe
            except Exception as e:
                #print('\tNo description for field ' + fldname)
                pass  # ignore error returned by dataframe loc. Field not in field metadata table.
            else:
                subEl = ET.SubElement(attrEl,
                                      'attrdef')  #field description element
                subEl.text = descrip
                num_elements += 1
                subEl = ET.SubElement(
                    attrEl, 'attrdefs')  #field description source element
                subEl.text = 'Pima County'
                num_elements += 1

    # set metadata thumbnail
    jpgFile = thumbnailsPath + '/' + featName + '.jpg'
    if os.path.exists(jpgFile):
        with open(jpgFile, "rb") as img_file:
            strEncoded = base64.b64encode(img_file.read())

        attrib = {'EsriPropertyType': 'PictureX'}
        subEl = ET.SubElement(root, 'Binary')
        subEl = ET.SubElement(subEl, 'Thumbnail')
        subEl = ET.SubElement(subEl, 'Data', attrib)
        subEl.text = strEncoded
        num_elements += 1

    if num_elements > 0:
        # save modifications to XML
        try:
            tree.write(xmlfile)
            arcpy.MetadataImporter_conversion(xmlfile, fcPathName)
        except Exception as e:
            print(e)
    else:
        print('No changes to save')
Example #12
        x.text = production_date
    tree.write(
        os.path.join(current_meta_dir,
                     "nyc_pops_{}meta.xml".format(production_date)))

    # Remove original metadata xml

    arcpy.Delete_management(os.path.join(current_meta_dir,
                                         'nyc_pops_meta.xml'))

    # Update Bytes shapefile metadata

    for shp in os.listdir(current_shp_dir):
        if shp.endswith('.shp') and 'nycpops' in shp:
            arcpy.MetadataImporter_conversion(
                os.path.join(current_meta_dir,
                             "nyc_pops_{}meta.xml".format(production_date)),
                os.path.join(current_shp_dir, shp))

    # Transfer generated shapefile to SDE Production with updated metadata

    for f in os.listdir(current_shp_dir):
        if f.endswith('.shp'):
            print("Copying {} to SDE Production".format(f))
            if arcpy.Exists(sde_pops_path
                            ) and not arcpy.Exists(sde_pops_replacement_path):
                print(
                    "DCP_MN_POPS already exists on PROD. Copying {} to SDE PROD as DCP_MN_POPS1."
                    .format(f))
                arcpy.FeatureClassToFeatureClass_conversion(
                    os.path.join(current_shp_dir, f), sde_path, 'DCP_MN_POPS1')
            if arcpy.Exists(sde_pops_path) and arcpy.Exists(
Example #13
 def copy_modify_fc(fc, gdb_path):
     arcpy.env.workspace = gdb_path
     arcpy.env.overwriteOutput = True
     desc = arcpy.Describe(fc)
     if hasattr(desc, "dataType"):
         print("Data set Data Type - {}".format(desc.dataType))
         if desc.dataType == "FeatureClass":
             print("Copying {} to SDE".format(fc))
             arcpy.env.workspace = sde_path
             arcpy.env.overwriteOutput = True
             arcpy.FeatureClassToFeatureClass_conversion(os.path.join(gdb_path, fc), sde_path, "CSCL_{}".format(fc))
             print("{} copy complete".format(fc))
             arcpy.ExportMetadata_conversion(os.path.join(sde_path, "CSCL_{}".format(fc)),
                                             translator,
                                             os.path.join(metadata_path, "{}.xml".format(fc)))
             print("Exporting metadata with geoprocessing history removed")
             arcpy.XSLTransform_conversion(os.path.join(metadata_path, "{}.xml".format(fc)),
                                           stylesheet,
                                           os.path.join(metadata_path, "{}_xslt.xml".format(fc)))
             print("Metadata exported")
             tree = ET.parse(os.path.join(metadata_path, "{}_xslt.xml".format(fc)))
             root = tree.getroot()
             print("Removing Publication Date since it is not currently maintained")
             for citeinfo in root.iter("citeinfo"):
                 for pubdate in citeinfo.findall("pubdate"):
                     citeinfo.remove(pubdate)
             print("Publication Date removed")
             print("Appending download date to metadata description")
             for descrip in root.iter("purpose"):
                 descrip.text = descrip.text + " Dataset Last Downloaded: {}".format(today_longform)
             tree.write(os.path.join(metadata_path, "{}_xslt_moded.xml".format(fc)))
             print("Download date appended to metadata description")
             print("Importing altered metadata to SDE")
             arcpy.MetadataImporter_conversion(os.path.join(metadata_path, "{}_xslt_moded.xml".format(fc)),
                                               os.path.join(sde_path, "CSCL_{}".format(fc)))
             print("Metadata imported")
             arcpy.UpgradeMetadata_conversion(os.path.join(sde_path, "CSCL_{}".format(fc)), "FGDC_TO_ARCGIS")
             print("Metadata upgraded")
         if desc.dataType == "Table":
             print("Copying {} to SDE".format(fc))
             arcpy.env.workspace = sde_path
             arcpy.env.overwriteOutput = True
             arcpy.TableToTable_conversion(os.path.join(gdb_path, fc), sde_path, "CSCL_{}".format(fc))
             print("{} copy complete".format(fc))
             arcpy.ExportMetadata_conversion(os.path.join(sde_path, "CSCL_{}".format(fc)),
                                             translator,
                                             os.path.join(metadata_path, "{}.xml".format(fc)))
             print("Exporting metadata with geoprocessing history removed")
             arcpy.XSLTransform_conversion(os.path.join(metadata_path, "{}.xml".format(fc)),
                                           stylesheet,
                                           os.path.join(metadata_path, "{}_xslt.xml".format(fc)))
             print("Metadata exported")
             tree = ET.parse(os.path.join(metadata_path, "{}_xslt.xml".format(fc)))
             root = tree.getroot()
             print("Removing Publication Date since it is not currently maintained")
             for citeinfo in root.iter("citeinfo"):
                 for pubdate in citeinfo.findall("pubdate"):
                     citeinfo.remove(pubdate)
             print("Publication Date removed")
             print("Appending download date to metadata description")
             for descrip in root.iter("purpose"):
                 descrip.text = descrip.text + " Dataset Last Downloaded: {}".format(today_longform)
             tree.write(os.path.join(metadata_path, "{}_xslt_moded.xml".format(fc)))
             print("Download date appended to metadata description")
             print("Importing altered metadata to SDE")
             arcpy.MetadataImporter_conversion(os.path.join(metadata_path, "{}_xslt_moded.xml".format(fc)),
                                               os.path.join(sde_path, "CSCL_{}".format(fc)))
             print("Metadata imported")
             arcpy.UpgradeMetadata_conversion(os.path.join(sde_path, "CSCL_{}".format(fc)), "FGDC_TO_ARCGIS")
             print("Metadata upgraded")
Example #14
sql = '''select owner, table_name from layers order by owner, table_name'''
query = conn.execute(sql)

for x in query:
    fc = str(x[0]) + '.' + str(x[1])
    print(fc)
    acct = x[0]
    conn_file = 'Database Connections' + os.sep + 'GIS_' + str(acct) + '.sde'
    print(conn_file)
    try:
        nameXml1 = outXML + os.sep + x[1] + '1.xml'
        nameXml2 = outXML + os.sep + x[1] + '2.xml'
        nameXml3 = outXML + os.sep + x[1] + '3.xml'
        arcpy.XSLTransform_conversion(conn_file + os.sep + x[1], sspath1,
                                      nameXml1, '')
        arcpy.MetadataImporter_conversion(nameXml1, conn_file + os.sep + x[1])
        arcpy.XSLTransform_conversion(conn_file + os.sep + x[1], sspath2,
                                      nameXml2, '')
        arcpy.MetadataImporter_conversion(nameXml2, conn_file + os.sep + x[1])
        arcpy.XSLTransform_conversion(conn_file + os.sep + x[1], sspath3,
                                      nameXml3, '')
        arcpy.MetadataImporter_conversion(nameXml3, conn_file + os.sep + x[1])
        print('complete')
    except Exception as e:
        print('Failed metadata update for {}: {}'.format(fc, e))
        continue

shutil.rmtree(outXML)

total = datetime.now() - start
Example #15
    for element in iterator:
        new_sub_element = ET.SubElement(element, 'keyword')
        new_sub_element.text = "Protected"
        new_sub_element = ET.SubElement(element, 'keyword')
        new_sub_element.text = "Developed Land"
    iterator = root.getiterator('themeKeys')
    for element in iterator:
        new_sub_element = ET.SubElement(element, 'keyword')
        new_sub_element.text = "Protect Developed Land"


#######################################################################################################################
stime = time.clock()

place_keyword_dict = getKeywords(place_name_path)
arcpy.MetadataImporter_conversion(template_path, path)

tree = ET.parse(path + '.xml')
root = tree.getroot()
iterator = root.getiterator('placeKeys')
for element in iterator:
    for sub_element in element.findall('keyword'):
        print("{} {}".format(sub_element.tag, sub_element.text))
set_place_kwords(root, SITE)  # set_place_kwords() function call
print("again")

iterator = root.getiterator('placeKeys')
for element in iterator:
    for sub_element in element.findall('keyword'):
        print("{} {}".format(sub_element.tag, sub_element.text))
Example #16
    def export_featureclass(input_path, output_name, modified_path):
        '''
        Index BIN field, create new double field called PLUTO_BBL populate new field with BBL values with conditionals
        accounting for incorrect BBL values (either short or missing) and reorder output tables to include new field
        within the previous standard
        '''
        print("Creating PLUTO BBL field")
        arcpy.AddField_management(input_path, 'PLUTO_BBL', 'DOUBLE')
        print("PLUTO BBL field created")
        cursor = arcpy.da.UpdateCursor(input_path, ['BASE_BBL', 'MPLUTO_BBL', 'PLUTO_BBL', 'BIN'])
        for row in cursor:
            print("Parsing BASE_BBLS: {} and MPLUTO_BBLS: {}".format(row[0], row[1]))
            if len(row[0]) != 10:
                error_list.add("Short BBL. BASE_BBL = {} ; MPLUTO_BBL = {} ; BIN = {}".format(row[0], row[1], row[3]))
            if row[1].isspace():
                if row[0].isspace():
                    error_list.add("Missing BBL. BIN = {}".format(row[3]))
                    continue
                if not row[0].isspace() and r"`" not in row[0]:
                    row[2] = float(row[0])
                    cursor.updateRow(row)
                    continue
                '''
                Case where ` character is included in BBL value. Interim value of 1 to replace this character until I can confirm with Matt.
                This would create the correct BBL based on DOB Property Profile Overview lookup information
                '''
                if r"`" in row[0]:
                    new_bbl = row[0].replace("`", "1")
                    row[2] = float(new_bbl)
                    cursor.updateRow(row)
                    continue
                else:
                    row[2] = float(row[0])
                    cursor.updateRow(row)
                    continue

            if row[1] == r'':
                if row[0] == r'':
                    error_list.add("Missing BBL. BIN = {}".format(row[3]))
                if r"`" in row[0]:
                    new_bbl = row[0].replace("`", "1")
                    row[2] = float(new_bbl)
                    cursor.updateRow(row)
                    continue
                else:
                    row[2] = float(row[0])
                    cursor.updateRow(row)
                    continue
            else:
                row[2] = float(row[1])
                cursor.updateRow(row)
                continue

        print("Creating field map in order to re-order the export tables")
        fieldMap = arcpy.FieldMappings()
        fieldMap.addTable(input_path)
        newFieldMap = arcpy.FieldMappings()
        print("Field mapping created")

        print("Adding fields to new field map")
        newFieldMap.addFieldMap(fieldMap.getFieldMap(fieldMap.findFieldMapIndex('NAME')))
        newFieldMap.addFieldMap(fieldMap.getFieldMap(fieldMap.findFieldMapIndex('BIN')))
        newFieldMap.addFieldMap(fieldMap.getFieldMap(fieldMap.findFieldMapIndex('CNSTRCT_YR')))
        newFieldMap.addFieldMap(fieldMap.getFieldMap(fieldMap.findFieldMapIndex('LSTMODDATE')))
        newFieldMap.addFieldMap(fieldMap.getFieldMap(fieldMap.findFieldMapIndex('LSTSTATYPE')))
        newFieldMap.addFieldMap(fieldMap.getFieldMap(fieldMap.findFieldMapIndex('DOITT_ID')))
        newFieldMap.addFieldMap(fieldMap.getFieldMap(fieldMap.findFieldMapIndex('HEIGHTROOF')))
        newFieldMap.addFieldMap(fieldMap.getFieldMap(fieldMap.findFieldMapIndex('FEAT_CODE')))
        newFieldMap.addFieldMap(fieldMap.getFieldMap(fieldMap.findFieldMapIndex('GROUNDELEV')))
        newFieldMap.addFieldMap(fieldMap.getFieldMap(fieldMap.findFieldMapIndex('BASE_BBL')))
        newFieldMap.addFieldMap(fieldMap.getFieldMap(fieldMap.findFieldMapIndex('MPLUTO_BBL')))
        newFieldMap.addFieldMap(fieldMap.getFieldMap(fieldMap.findFieldMapIndex('PLUTO_BBL')))
        newFieldMap.addFieldMap(fieldMap.getFieldMap(fieldMap.findFieldMapIndex('GEOMSOURCE')))
        print("All fields added to field map")

        print("Exporting as modified shapefile in temporary directory")
        arcpy.FeatureClassToFeatureClass_conversion(in_features=input_path,
                                                    out_path=zip_dir_path,
                                                    out_name=modified_path.split('.')[0],
                                                    field_mapping=newFieldMap)
        print("Modified shapefile exported")

        print("Upgrading downloaded metadata to ArcGIS standard")
        arcpy.env.workspace = zip_dir_path
        arcpy.env.overwriteOutput = True
        metadata_path = os.path.join(zip_dir_path, modified_path)
        arcpy.UpgradeMetadata_conversion(metadata_path, 'FGDC_TO_ARCGIS')
        print("Downloaded metadata upgraded to ArcGIS standard")
        print("Overwriting original metadata with DCP standard")
        arcpy.MetadataImporter_conversion(os.path.join(template_path, '{}.xml'.format(output_name)),
                                          metadata_path)
        print("Original metadata overwritten")
        tree = ET.parse('{}.xml'.format(metadata_path))
        root = tree.getroot()
        for title in root.iter('title'):
            title.text = output_name.replace('_', ' ')
        for pubdate_fgdc in root.iter('pubdate'):
            pubdate_fgdc.text = last_update_date_str
        for descrip_fgdc in root.iter('abstract'):
            descrip_fgdc.text += ' Dataset last updated: {}. Dataset last downloaded: {}'.format(
                last_update_date_meta, today)

        print("Writing updated metadata to {}".format(metadata_path))
        tree.write('{}.xml'.format(metadata_path))
        print("Metadata update complete for {}".format(metadata_path))
        print("Upgrading metadata format for {}".format(metadata_path))
        arcpy.UpgradeMetadata_conversion(metadata_path, 'FGDC_TO_ARCGIS')
        print("Metadata format upgraded for {}".format(metadata_path))

        arcpy.env.workspace = sde_path
        arcpy.env.overwriteOutput = True

        print("Exporting shapefile to SDE PROD as {}".format(output_name))
        arcpy.FeatureClassToFeatureClass_conversion(metadata_path, sde_path, output_name)
        print("Removing local storage info")
        print("Adding index to BIN field")
        arcpy.AddIndex_management(os.path.join(sde_path, output_name), ['BIN'], 'BIN_Index')
        print("Index added to BIN field")
        print("Adding index to PLUTO_BBL field")
        arcpy.AddIndex_management(os.path.join(sde_path, output_name), ['PLUTO_BBL'], 'PLUTO_BBL_Index')
        print("Index added to PLUTO_BBL field")
        arcpy.XSLTransform_conversion(os.path.join(sde_path, output_name),
                                      xslt_storage,
                                      os.path.join(zip_dir_path, '{}_storage.xml'.format(modified_path.split('.')[0])))
        arcpy.XSLTransform_conversion(os.path.join(zip_dir_path, '{}_storage.xml'.format(modified_path.split('.')[0])),
                                      xslt_geoprocess,
                                      os.path.join(zip_dir_path, '{}_geoprocess.xml'.format(modified_path.split('.')[0])))
        print("Importing final metadata to {}".format(output_name))
        arcpy.MetadataImporter_conversion(os.path.join(zip_dir_path, "{}_geoprocess.xml".format(modified_path.split('.')[0])),
                                          os.path.join(sde_path, output_name))
Example #17
arcpy.env.workspace = gdb
FDSs = arcpy.ListDatasets()

if not FDSs:
    print "not fds"
    FCs = arcpy.ListFeatureClasses()
    if not FCs:
        for fc in FCs:
            inFile = glob.glob(inMetadataDir + "/" + fc + "*")
            if not inFile:
                arcpy.AddMessage("No metadata found for " + fc +
                                 "...skipping!")
            else:
                arcpy.AddMessage("Importing " + os.path.basename(inFile[0]) +
                                 " to " + fc)
                arcpy.MetadataImporter_conversion(source=inFile[0],
                                                  target=os.path.join(gdb, fc))
                arcpy.SynchronizeMetadata_conversion(source=os.path.join(
                    gdb, fc),
                                                     synctype="ALWAYS")
                #arcpy.ImportMetadata_conversion(Source_Metadata = inFile[0], Import_Type=importType, Target_Metadata = os.path.join(gdb,fc), Enable_automatic_updates=autoUpdate)

else:
    for fds in FDSs:
        print(fds)
        inFile = glob.glob(inMetadataDir + "/" + fds + ".xml")
        if not inFile:
            arcpy.AddMessage("No metadata found for " + fds + "...skipping!")
        else:
            arcpy.AddMessage("Importing " + os.path.basename(inFile[0]) +
                             " to " + fds)
            arcpy.MetadataImporter_conversion(source=inFile[0],
                                              target=os.path.join(gdb, fds))
            arcpy.SynchronizeMetadata_conversion(source=os.path.join(gdb, fds),
                                                 synctype="ALWAYS")
Example #18
def set_protected(xml_root):
    iterator = xml_root.getiterator('searchKeys')
    for element in iterator:
        new_sub_element = ET.SubElement(element, 'keyword')
        new_sub_element.text = "Protected"
        new_sub_element = ET.SubElement(element, 'keyword')
        new_sub_element.text = "Developed Land"
    iterator = xml_root.getiterator('themeKeys')
    for element in iterator:
        new_sub_element = ET.SubElement(element, 'keyword')
        new_sub_element.text = "Protect Developed Land"
###############################################    Main   #############################################################
place_keyword_dict = getKeywords(place_name_path)
###################### veg #################################
path = os.path.join(ws, file_list[0])
arcpy.MetadataImporter_conversion(veg_template_path, path)
try:
    tree = ET.parse(path + '.xml')
except IOError:
    print("File read error for " + path)
root = tree.getroot()
set_place_kwords(root, SITE)
set_title(root, SITE, "Vegetation", "Input", "2008")
try:
    tree.write(path + '.xml')
except IOError:
    print("File write error for " + path)
###################### elevation ############################
path = os.path.join(ws, file_list[1])
arcpy.MetadataImporter_conversion(elev_template_path, path)
try:
    tree = ET.parse(path + '.xml')
except IOError:
    print("File read error for " + path)
Example #19
def write_metadata(input_items, template_xml, xslt_file, summary, description,
                   tags, data_credits, use_constraints, overwrite,
                   token_header):
    """Writes metadata."""
    updated = 0
    errors = 0
    skipped = 0
    global processed_count

    for item in input_items:
        try:
            id = item[1]
            path = item[0]
            # Temporary XML file
            temp_xml = tempfile.NamedTemporaryFile(suffix='.xml',
                                                   delete=True).name

            # Export xml
            try:
                arcpy.XSLTransform_conversion(path, xslt_file, temp_xml)
            except arcpy.ExecuteError:
                src_xml = os.path.join(
                    arcpy.Describe(path).path,
                    '{0}.xml'.format(os.path.basename(path)))
                shutil.copyfile(template_xml, src_xml)
                arcpy.XSLTransform_conversion(src_xml, xslt_file, temp_xml)

            # Read in XML
            tree = eTree.parse(temp_xml)
            root = tree.getroot()
            changes = 0

            # ISO allows many dataIdInfo groups; ArcGIS generally supports only one.
            data_id_elements = root.findall(".//dataIdInfo")
            if not data_id_elements:
                data_id_elements = [eTree.SubElement(root, 'dataIdInfo')]

            for data_id_element in data_id_elements:

                # Write summary.
                summary_element = root.findall(".//idPurp")
                if not summary_element:
                    summary_element = eTree.SubElement(data_id_element,
                                                       'idPurp')
                    summary_element.text = summary
                    changes += 1
                else:
                    for element in summary_element:
                        if summary and (overwrite or element.text is None):
                            element.text = summary
                            changes += 1

                # Write description.
                description_element = root.findall(".//idAbs")
                if not description_element:
                    description_element = eTree.SubElement(
                        data_id_element, 'idAbs')
                    description_element.text = description
                    changes += 1
                else:
                    for element in description_element:
                        if description and (overwrite or element.text is None):
                            element.text = description
                            changes += 1

                # Write tags.
                tags = task_utils.get_unique_strings(tags)
                search_keys = root.findall(".//searchKeys")
                if not search_keys:
                    search_element = eTree.SubElement(data_id_element,
                                                      'searchKeys')
                    for tag in tags:
                        new_tag = eTree.SubElement(search_element, "keyword")
                        new_tag.text = tag
                        changes += 1
                elif not overwrite:
                    # Still add any new tags.
                    for search_element in search_keys:
                        if tags:
                            for tag in tags:
                                if tag.lower() not in [
                                        se.text.lower() for se in
                                        search_element.findall('.//keyword')
                                ]:
                                    new_tag = eTree.SubElement(
                                        search_element, "keyword")
                                    new_tag.text = tag
                                    changes += 1
                else:
                    if tags:
                        for search_element in search_keys:
                            [
                                search_element.remove(e)
                                for e in search_element.findall('.//keyword')
                            ]
                            for tag in tags:
                                new_tag = eTree.SubElement(
                                    search_element, "keyword")
                                new_tag.text = tag
                                changes += 1

                # Write credits.
                credits_element = root.findall(".//idCredit")
                if not credits_element:
                    credits_element = eTree.SubElement(data_id_element,
                                                       'idCredit')
                    credits_element.text = data_credits
                    changes += 1
                else:
                    for element in credits_element:
                        if data_credits and (overwrite
                                             or element.text is None):
                            element.text = data_credits
                            changes += 1

                # Write use constraints.
                res_constraints = root.findall(".//resConst")
                if not res_constraints:
                    res_constraint_element = eTree.SubElement(
                        data_id_element, 'resConst')
                    const_element = eTree.SubElement(res_constraint_element,
                                                     'Consts')
                    new_constraint = eTree.SubElement(const_element,
                                                      'useLimit')
                    new_constraint.text = use_constraints
                    changes += 1
                elif not overwrite:
                    constraint_elements = root.findall('.//Consts')
                    for element in constraint_elements:
                        if use_constraints:
                            new_constraint = eTree.SubElement(
                                element, 'useLimit')
                            new_constraint.text = use_constraints
                            changes += 1
                else:
                    if use_constraints:
                        constraint_elements = root.findall('.//Consts')
                        if constraint_elements:
                            [
                                constraint_elements[0].remove(e) for e in
                                constraint_elements[0].findall('.//useLimit')
                            ]
                            new_constraint = eTree.SubElement(
                                constraint_elements[0], 'useLimit')
                            new_constraint.text = use_constraints
                            changes += 1

            if changes > 0:
                # Save modifications to the temporary XML file.
                tree.write(temp_xml)
                # Import the XML file to the item; existing metadata is replaced.
                arcpy.MetadataImporter_conversion(temp_xml, path)
                status_writer.send_percent(
                    processed_count / result_count,
                    _('Metadata updated for: {0}').format(path),
                    'write_metadata')
                processed_count += 1

                try:
                    index_item(id, token_header)
                except (IndexError, urllib2.HTTPError, urllib2.URLError) as e:
                    status_writer.send_status(e.message)
                    pass
                updated += 1
            else:
                processed_count += 1
                status_writer.send_percent(
                    processed_count / result_count,
                    _('No metadata changes for: {0}').format(path),
                    'write_metadata')
                skipped_reasons[path] = _(
                    'No metadata changes for: {0}').format(path)
                skipped += 1
        except Exception as ex:
            processed_count += 1
            status_writer.send_percent(processed_count / result_count,
                                       _('Skipped: {0}').format(path),
                                       'write_metadata')
            status_writer.send_status(_('FAIL: {0}').format(repr(ex)))
            errors_reasons[path] = repr(ex)
            errors += 1
            pass

    return updated, errors, skipped
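A sketch of how write_metadata might be invoked; input_items pairs each dataset path with a catalog id, the remaining arguments mirror the signature above, and the function additionally relies on module globals (status_writer, result_count, etc.) defined elsewhere. All values here are placeholders:

items = [(r'C:\data\city.gdb\Parcels', 'item-001')]
updated, errors, skipped = write_metadata(
    items, r'C:\templates\blank.xml', r'C:\xslt\exact_copy_of.xslt',
    'Parcel boundaries', 'City parcel polygons.', ['parcels', 'cadastral'],
    'City GIS', 'For planning use only.', True, {})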
arcpy.env.workspace = r"J:\David Jarvis\JAMNSP Jamaica NSP\ArcGIS\Metadata\metadata_import_process\Jamaica_NSP_Master_Geodatabase_metadata_import_test.gdb"
xmlPath = r"J:\David Jarvis\JAMNSP Jamaica NSP\ArcGIS\Metadata\metadata_import_process\4 Metadata_updated_xml_file"

fcs = arcpy.ListFeatureClasses()

XMLMissingFiles = []
UpdatedFiles = []

for fc in fcs[0:50]:
    fc_split = fc.split()
    fc_name = fc_split[-1]
    sourceXML = str(xmlPath) + "\\" + str(fc_name) + ".xml"

    if os.path.exists(sourceXML):
        arcpy.MetadataImporter_conversion(sourceXML, fc)
        UpdatedFiles.append(sourceXML)
        print "File updated: " + str(sourceXML)
    else:
        #pass
        print "WARNING - XML file missing: " + str(sourceXML)
        XMLMissingFiles.append(sourceXML)

#print "File processed: "+str(fc_name)

import csv

res = XMLMissingFiles
csvfile = r"J:\David Jarvis\JAMNSP Jamaica NSP\ArcGIS\Metadata\metadata_import_process\5 Missing_xml_file_report\Feature_Classes_Not_Updated.csv"

#Assuming res is a flat list
Example #21
        print(metadata_xml_nostorage_name)
        metadata_xml_final_name = temp_metadata_xml_name.replace(
            '.xml', '_final.xml')
        print(metadata_xml_final_name)
        print("Exporting xml metadata to temporary location for cleaning")
        arcpy.ExportMetadata_conversion(waterfront_fc_name, translator,
                                        temp_metadata_xml_name)
        arcpy.XSLTransform_conversion(temp_metadata_xml_name,
                                      xslt_remove_storage,
                                      metadata_xml_nostorage_name)
        arcpy.XSLTransform_conversion(metadata_xml_nostorage_name,
                                      xslt_remove_geoproc_hist,
                                      metadata_xml_final_name)
        print("Metadata xml final name - {}".format(metadata_xml_final_name))
        print('Waterfront FC name - {}'.format(waterfront_fc_name))
        arcpy.MetadataImporter_conversion(metadata_xml_final_name,
                                          waterfront_fc_name)
        print("Exporting xml metadata to desired location on BytesProd")
        arcpy.ExportMetadata_conversion(metadata_xml_final_name, translator,
                                        desired_metadata_xml_name)
        print(desired_metadata_xml_name.replace('.xml', '.html'))
        arcpy.XSLTransform_conversion(
            desired_metadata_xml_name, xslt_html,
            desired_metadata_xml_name.replace('.xml', '.html'))
        print("Exporting shapefiles to desired location on BytesProd")
        arcpy.FeatureClassToShapefile_conversion(waterfront_fc_name,
                                                 desired_shp_path)

        # Export layers and associated metadata to desired directories

        print("Layer path - {}".format(
            os.path.join(
Example #22
def ProcessRoutine(ArgVariables):
    """Main Function that operates the logic of the script."""
    try:

        arcpy.AddMessage("\nInputData: " + InputData)
        arcpy.AddMessage("WorkingDir: " + WorkingDir)
        arcpy.AddMessage("CreateStandAloneXML: " + CreateStandAloneXML)
        arcpy.AddMessage("UseStartTemplate: " + UseStartTemplate)
        arcpy.AddMessage("StarterTemplate: " + CustomStarterTemplate)

        myDataType, myFeatType = Get_Data_Type(
        )  #Determine data type, and feature type if applicable
        arcpy.AddMessage("Data type being evaluated: " + myDataType)
        arcpy.AddMessage("Feature type being evaluated: " + myFeatType + "\n")

        SourceFile = os.path.split(os.path.splitext(InputData)[0])[
            1]  #The name of the input file. No extension. No full path.
        OriginalMDRecord = os.path.join(
            WorkingDir, SourceFile +
            "_Original.xml")  #File pointer to unmodified original.
        FGDCXML = os.path.join(
            WorkingDir, SourceFile +
            "_FGDC.xml")  #File pointer to the copy we will modify/update.

        #Create and keep 'Original' metadata copy in working directory.
        try:
            MDTools.CreateCopyMDRecord(InputData, OriginalMDRecord)
        except:
            pass

        #After we made a copy of the input's original MD, start process from custom template if it is toggled.
        if str(UseStartTemplate) == "true":
            try:
                arcpy.MetadataImporter_conversion(
                    CustomStarterTemplate, InputData
                )  # This imports only: does not convert and does not sync
                arcpy.AddMessage(
                    "The user's custom starter record is now being imported into the input data set...\n"
                )
            except:
                arcpy.AddWarning("!!!!!!!")
                arcpy.AddWarning(
                    "There was a problem importing from the Custom Starter Template. Please ensure that the file is here: ("
                    + CustomStarterTemplate + ")")
                arcpy.AddWarning("!!!!!!!\n")
                sys.exit(1)

        try:  #Extract any existing metadata, and translate to FGDC format if necessary.
            ExportFGDC_MD_Utility.GetMDContent(
                InputData, FGDCXML, WorkingDir
            )  #Export (translate if necessary) input metadata to FGDC format. Remove ESRI 'sync' & 'reminder' elements.
        except:
            arcpy.AddMessage(
                "No metadata could be found for this record. A new file will be created.\n"
            )
            MDTools.CreateCopyMDRecord(GenericTemplate, FGDCXML)

        MDTools.RemoveNameSpace(
            FGDCXML
        )  #Eliminate namespace tags from root element in xml if present (appear when tool is run on spatial data sets).
        MDTools.CheckMasterNodes(
            FGDCXML
        )  #Ensure all the key FGDC-CSDGM nodes are present in the record.

        if not InputIsXML and not InputIsCSV and not InputIsExcel and desc.DatasetType != "Table":  #Only attempt to extract/update spatial properties from spatial data sets.

            try:
                GCS_ExtentList = Get_LatLon_BndBox()[1]
            except:
                arcpy.AddWarning("!!!!!!!")
                arcpy.AddWarning(
                    "A problem was encountered when attempting to retrieve the spatial extent of the input data set. Please review the tool documentation and ensure the data set is a valid input and ENSURE THAT A COORDINATE SYSTEM HAS BEEN DEFINED."
                )
                arcpy.AddWarning("!!!!!!!\n")
                sys.exit()

            #Get/Update Bounding Coordinates
            GCS_ExtentList = Get_LatLon_BndBox()[1]
            Local_ExtentList = Get_LatLon_BndBox()[0]
            if "nan" in str(Local_ExtentList):
                arcpy.AddWarning(
                    "No spatial extent could be found for the input spatial data set. Please review the 'Bounding Extent' in the final metadata record. (Values will be set to maximum global extent).\n"
                )
            arcpy.AddMessage("Bounding Coordinates (Local): " +
                             str(Local_ExtentList))
            arcpy.AddMessage("Bounding Coordinates (Geographic): " +
                             str(GCS_ExtentList) + "\n")

            WestBC, SouthBC, EastBC, NorthBC = GCS_ExtentList  #Geographic extent list order: [West, South, East, North]
            MDTools.WriteBoundingInfo(FGDCXML, WestBC, EastBC, NorthBC, SouthBC)

            #Get/Update Spatial Data Organization
            SpatialDataOrgInfo = Get_Spatial_Data_OrgInfo(
                InputData, myDataType, myFeatType)
            MDTools.WriteSpatialDataOrgInfo(FGDCXML, SpatialDataOrgInfo)

            #Get/Update Spatial Reference Information
            SpatialReferenceInfo = SpatialRefTools.SpatialRefInfo(
                GCS_PrjFile, InputData, WorkingDir, GCS_ExtentList)
            MDTools.WriteSpatialRefInfo(FGDCXML, SpatialReferenceInfo)
            #Handle vertical coordinate system?

        #Get/Update Geospatial Presentation Form. Also updates Format Name (within Distribution Info).
        #(Skip this step and leave existing content if tool input is XML).
        if not InputIsXML:
            MDTools.WriteGeospatialForm(FGDCXML, myDataType, myFeatType)

        #Get/Update Native Environment Details
        #This will be used as a switch to determine which .exe for the EA builder needs to be run (for either 10.0, 10.1, or 10.2).
        #The version info is also written out to the XML record in the 'Native Environment' section.
        ESRIVersion = GetESRIVersion_WriteNativeEnv(FGDCXML)

        #Get/Update Metadata Date of Editing
        Now = datetime.datetime.now()
        MDDate = Now.strftime("%Y%m%d")
        MDTools.WriteMDDate(FGDCXML, MDDate)

        #Update Entity/Attribute Section
        if InputIsCSV or InputIsExcel:
            contents_fname = InputData
        elif not InputIsXML:
            data_contents = introspector.introspect_dataset(InputData)
            input_fname = os.path.split(InputData)[1]
            contents_fname = os.path.join(WorkingDir, input_fname + ".p")
            pickle.dump(data_contents, open(contents_fname, "wb"))
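            #The pickled introspection results are appended to the editor's command line (Arg, below), presumably so the Entity/Attribute section can be built from the data set's contents.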
        else:
            contents_fname = ''

        #Rerun FGDC Translator tool to handle newly-added elements that are out of order in XML tree.
        MDTools.ReRunFGDCTranslator(FGDCXML)

        #Re-import new metadata to the data set to capture E/A tool changes. If input file is a stand alone .xml this step is skipped
        if not InputIsXML:
            try:
                arcpy.MetadataImporter_conversion(FGDCXML, InputData)  # This imports only: does not convert and does not sync
            except:
                arcpy.AddWarning("There was a problem during the metadata importation process.")

        #Open up Metadata Editor and allow user to review/update
        outXML = os.path.splitext(FGDCXML)[0] + "temp.xml"
        #Arg = '"' + MetadataEditor + '"' + " " + '"' + FGDCXML + '"' + " " + '"' + outXML + '"' + " " + '"' + Browser + '"' #Start and end quotes are necessary to handle spaces in file names and IE Path when passing to Command Prompt.
        #Arg = '"' + MetadataEditor + '"' + " " + '"' + FGDCXML + '"' + " " + '"' + outXML + '"' + " "
        Arg = '"%s" "%s" "%s"' % (python_exe, mdwiz_py_fname, FGDCXML)
        if contents_fname:
            Arg += ' "{}"'.format(contents_fname)
        arcpy.AddWarning(Arg)
        arcpy.AddMessage("*************************")
        arcpy.AddMessage(
            "\nPLEASE UPDATE/REVIEW THE METADATA INFO IN THE POP-UP WINDOW.")
        arcpy.AddMessage("(Allow a moment for the window to open).\n")
        arcpy.AddMessage("*************************")
        try:
            winsound.PlaySound(
                r"C:\Windows\Media\Cityscape\Windows Exclamation.wav",
                winsound.SND_FILENAME)
        except:
            pass
        #os.popen(Arg)
        p = subprocess.Popen(Arg)
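        #Block until the user closes the Metadata Editor window before continuing.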
        p.wait()

        try:
            MDTools.RemoveStyleSheet(FGDCXML)  #MP actually removes the stylesheet in VB.NET app... this is a redundancy here.
            # MDTools.ReplaceXML(FGDCXML, outXML)
        except:
            arcpy.AddWarning("No content was saved in the Metadata Editor window. The metadata record was not updated.\n")

        #Re-import new metadata to the data set to capture user edits from the Metadata Editor window.
        try:
            arcpy.MetadataImporter_conversion(FGDCXML, InputData)  # This imports only: does not convert and does not sync
            arcpy.AddMessage("The updated metadata record is now being re-imported into the input data set...\n")
        except:
            arcpy.AddMessage("There was a problem during the metadata importation process!")

        #Remove the Error Report file generated by MP from the Main Metadata Editor.
        MP_ErrorReport = os.path.splitext(FGDCXML)[0] + "temp_MP_ErrorReport.xml"
        try:
            os.remove(MP_ErrorReport)
        except:
            pass

        #Remove FGDC XML file if the toggle to preserve 'stand-alone' file is configured to FALSE. This appears to be passed as a string rather than boolean.
        if str(CreateStandAloneXML) == "false":
            try:
                arcpy.Delete_management(FGDCXML)
                arcpy.AddMessage(
                    "The Wizard will now remove the stand-alone FGDC XML, as requested in the tool interface...\n"
                )
            except:
                arcpy.AddMessage(
                    "There was a problem removing the stand-alone XML file. Try removing the file (%s) manually from the working directory.\n"
                    % FGDCXML)

        #Remove the 'ArcpyTranslate.xml' temp file that gets created when exporting from ESRI metadata to FGDC.
        try:
            os.remove(os.path.join(WorkingDir, 'ArcpyTranslate.xml'))
        except:
            pass

    except arcpy.ExecuteError:
        arcpyError()
    except:
        pythonError()

Example #…
0
    elif contacttype == "Metadata Contact":
        # set metadata contact elem
        mdContElem = ET.Element('mdContact')
        constrContact(mdContElem)
        root.insert(len(root), mdContElem)

    elif contacttype == "Distributor Contact":
        # set distributor contact elem
        disContElem = tree.find('distInfo/distributor')
        if disContElem is None:
            findaddmissingxmltag('distInfo/distributor')
            disContElem = tree.find('distInfo/distributor')  #Re-find after the missing tag is added, so the insert below has a valid parent.
        # create new contact info element
        contactTopElem = ET.Element('distorCont')
        constrContact(contactTopElem)
        disContElem.insert(len(disContElem), contactTopElem)

    tree.write(inputxml)

    # set input data arcpy Describe object
    tardesc = arcpy.Describe(inputmeta)
    # check if input data type is in the list
    if tardesc.dataType in [
            "FeatureClass", "Table", "Workspace", "ShapeFile", "FeatureDataset"
    ]:
        # import metadata back to feature class
        arcpy.MetadataImporter_conversion(inputxml, inputmeta)
        # synchronize metadata
        arcpy.SynchronizeMetadata_conversion(inputmeta, "SELECTIVE")

arcpy.AddMessage(" ")
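
Taken together, the examples follow one round trip: copy the item's metadata out to a scratch XML file, edit that XML with ElementTree, then push the edited record back with arcpy.MetadataImporter_conversion (optionally followed by a synchronize step). Below is a minimal sketch of that round trip, assuming an ArcGIS-format record whose title lives in a 'resTitle' element; the function name and scratch path are illustrative placeholders, not taken from any example above.

import os
import tempfile
import xml.etree.ElementTree as ET

import arcpy

def retitle_metadata(dataset, new_title):
    #Seed a scratch file so the importer has a valid metadata target to copy into.
    scratch = os.path.join(tempfile.gettempdir(), 'md_roundtrip.xml')
    with open(scratch, 'w') as f:
        f.write('<metadata />')
    #Importing a dataset into an XML file copies the dataset's metadata out.
    arcpy.MetadataImporter_conversion(dataset, scratch)
    #Edit the exported record in place.
    tree = ET.parse(scratch)
    for elem in tree.getroot().iter('resTitle'):  #Title element in ArcGIS-format metadata (assumption).
        elem.text = new_title
    tree.write(scratch)
    #Importing the XML back onto the dataset applies the edited record (no sync is performed).
    arcpy.MetadataImporter_conversion(scratch, dataset)
    os.remove(scratch)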