import arcpy


def quick_import(input_dataset_path, dataset_name, root_dir_output):
    # get_sheet_name() is defined elsewhere in the source script
    sheet_name = get_sheet_name(input_dataset_path)

    # QuickImport takes its input as "<FORMAT>,<dataset>"
    indata = "ARCINFO," + input_dataset_path
    outdata = root_dir_output + '/' + sheet_name + '_' + dataset_name
    print('********* arcpy.QuickImport with **********************')
    print(indata)
    print(outdata)
    try:
        arcpy.QuickImport_interop(indata, outdata)
    except Exception as e:
        print(e)
    print('************************************************')
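
# A minimal usage sketch; the dataset name "roads" is a hypothetical
# placeholder, and the paths follow the example invocation shown further
# down this page:
if __name__ == '__main__':
    quick_import("E:/Personal/arcpy/data/2885/09", "roads",
                 "E:/Personal/arcpy/output/quickimport")
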
Example 2
import glob
import multiprocessing
import os
import shutil
import traceback

import arcpy


def run_MM_conv(riv_line_fold, scratch, river_folders, ranges):

    # sanitise the multiprocessing process name so it can be used in file
    # and geodatabase names
    pn = str(multiprocessing.current_process().name)
    pn = pn.replace("-", "_").replace(":", "_")
    print(pn)

    print(ranges)
    # print(river_folders)

    # iterate over top folder containing OS regions; "ranges" is a 1-based
    # (start, end) index pair into river_folders, with a start of 0 treated
    # the same as 1
    if ranges[0] == 0:
        sliceObj = slice(ranges[0], ranges[1])
    else:
        sliceObj = slice(ranges[0] - 1, ranges[1])

    grid_list = river_folders[sliceObj]  # replaced by the hard-coded list below when testing
    print(grid_list)

    # grid_list = ['sn', 'sm', 'sh', 'ns', 'nr', 'nm', 'nl', 'nn', 'no', 'nj', 'nk', 'nh', 'ng', 'nf', 'na',
    #              'nb', 'nc', 'nd', 'hw', 'hx', 'hy', 'hz', 'ht', 'hu', 'hp'] # for testing
    print(river_folders)

    print(pn + " starting to loop folders")
    for fold in grid_list:
        # if fold in grid_list:
        # if fold == 'hy': # for testing
        root = os.path.join(riv_line_fold, fold)
        shp_test = os.listdir(root)
        if any(x.endswith('.shp') for x in shp_test):

            print("Shapefile already exists for OS GRID: {0}".format(fold))

        else:
            os.chdir(root)
            gml_list = glob.glob("*.gz")

            gml_list_abs = [
                os.path.join(riv_line_fold, fold, s) for s in gml_list
            ]
            # print(gml_list)
            arcpy.CreateFileGDB_management(scratch, pn + "_Main.gdb")
            fold_gdb_path = os.path.join(scratch, pn + "_Main.gdb")

            river_lines = []

            for i in gml_list_abs:
                # print(i)
                marker = i[-13:-7]  # six-character OS tile code taken from the file name
                # print(marker)
                # create (or reuse) a per-process transfer gdb for the raw
                # import; CreateFileGDB raises if it already exists
                gml_gdb_path = os.path.join(scratch, pn + "_tr.gdb")
                if not arcpy.Exists(gml_gdb_path):
                    arcpy.CreateFileGDB_management(scratch, pn + "_tr.gdb")

                try:
                    print("try import OS GRID {0}".format(marker))
                    arcpy.QuickImport_interop(i, gml_gdb_path)
                    print("OS GRID {0} imported".format(marker))
                    rivers_name = os.path.join(fold_gdb_path, marker)
                    #
                    mm_riv_ap = os.path.join(gml_gdb_path, "WatercourseLink")

                    #
                    if arcpy.Exists(mm_riv_ap):
                        arcpy.CopyFeatures_management(mm_riv_ap, rivers_name)
                        desc = arcpy.Describe(rivers_name)
                        what_shp = desc.shapeType
                        if what_shp == 'Polyline':
                            river_lines.append(rivers_name)

                        print(marker + " features copied to gdb")
                    else:
                        print("no features for " + marker)

                except Exception as ex:  # this is naughty but it saves hiccups if there is an unforeseen error...
                    print("OS GRID {0} import failed".format(marker))
                    print(ex)

                # arcpy.Delete_management(gml_gdb_path)

            export_path = os.path.join(root, fold + "_mm_rivers.shp")

            try:
                if river_lines:
                    print("merging data for OS GRID {0}".format(fold))
                    arcpy.Merge_management(river_lines, export_path)
                    print("data merged for {0}".format(fold))
                else:
                    print("no polyline data to merge for OS GRID {0}".format(fold))
                # for i in river_lines:
                #     arcpy.Delete_management(i)
                #     print(i + " deleted from memory")

            except Exception:
                traceback.print_exc()
    if os.path.isdir(os.path.join(scratch, pn + "_Main.gdb")):
        shutil.rmtree(os.path.join(scratch, pn + "_Main.gdb"))
    if os.path.isdir(os.path.join(scratch, pn + "_tr.gdb")):
        shutil.rmtree(os.path.join(scratch, pn + "_tr.gdb"))
import os
import sys

import arcpy
"""
arcpy.QuickImport_interop(
    "ARCINFO,E:/Personal/arcpy/data/2885/09",
    "E:/Personal/arcpy/output/quickimport/test13"
)
"""


def dir_list_folder(head_dir, dir_name):
    """Return a list of the full paths of the subdirectories
    under directory 'head_dir' named 'dir_name'"""
    dirList = []
    for fn in os.listdir(head_dir):
        #dirfile = os.path.join(head_dir, fn)
        dirfile = head_dir + '/' + fn
        if os.path.isdir(dirfile):
            if fn.upper() == dir_name.upper():
                dirList.append(dirfile)
            else:
                # print "Accessing directory %s" % dirfile
                dirList += dir_list_folder(dirfile, dir_name)
    return dirList
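

# Usage sketch; the data root below is a hypothetical placeholder:
#
#     info_dirs = dir_list_folder("E:/Personal/arcpy/data", "info")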


def get_sheet_number_arr(coverage_dataset_path):
    parts = coverage_dataset_path.split('/')
    sheet_no_parts = parts[-3:-1]  # ['2885', 'D14']
    grid_no_str = sheet_no_parts[0]  # '2885'
    sub_grid_str_all = sheet_no_parts[1]  # 'D14'
    # the source example is truncated here; returning both parts is an
    # assumed completion
    return grid_no_str, sub_grid_str_all
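
# Usage sketch, following the path layout shown in the comments above:
#
#     grid_no, sub_grid = get_sheet_number_arr("E:/Personal/arcpy/data/2885/D14/09")
#     # grid_no == '2885', sub_grid == 'D14'
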
Example 4
import os
import shutil

import arcpy

# cwd, puntky_xml_dir and outputDir are defined earlier in the source script
schemaXSD = os.path.join(cwd, "TEMP", "punkty_xml", "PRG_Adresy_schemat.xsd")
inputList = []

# Create list of xml files
inputList = [f for f in os.listdir(puntky_xml_dir) if f.endswith('.xml')]

# Process: Quick Import
print('\nimporting', end='')
for inputFile in inputList:
    print('.', end='')
    arcpy.QuickImport_interop(
        "GML," + puntky_xml_dir + "/" + inputFile +
        ",\"RUNTIME_MACROS,\"\"IGNORE_APPLICATON_SCHEMA,no,XSD_DOC,\"\"\"\"" +
        schemaXSD +
        "\"\"\"\",VALIDATE_DATASET,no,FEATURE_TYPES_SCHEMA_MODE,XML_SCHEMA,SKIP_SCHEMA_LOCATION_IN_DATASET,No,MAP_FEATURE_COLLECTION,,GML_FEATURE_ELEMENTS,<Unused>,SRS_GEOMETRY_PARAMETERS,,SRS_AXIS_ORDER,,SRS_ANGLE_DIRECTION,,ENFORCE_PATH_CONTINUITY_BY,SNAPPING_END_POINTS,FEATURE_PROPERTIES_READER,,MAP_EMBEDDED_OBJECTS_AS,ATTRIBUTES,MAP_PREDEFINED_GML_PROPERTIES,NO,MAP_GEOMETRY_COLUMNS,YES,MAP_ALL_SUBSTITUTABLE_PROPERTIES,YES,ADD_NAMESPACE_PREFIX_TO_NAMES,,QNAMES_FOR_PROPERTIES_TO_IGNORE,,ATTRIBUTE_HANDLING,,MAP_COMPLEX_PROPERTIES_AS,\"\"\"\"Nested Attributes\"\"\"\",MAX_MULTI_LIST_LEVEL,,ADD_GEOMETRIES_AS_XML_FRAGMENTS,NO,XML_FRAGMENTS_AS_DOCUMENTS,YES,FLATTEN_XML_FRAGMENTS,NO,FLATTEN_XML_FRAGMENTS_OPEN_LIST_BRACE,,FLATTEN_XML_FRAGMENTS_CLOSE_LIST_BRACE,,FLATTEN_XML_FRAGMENTS_SEPARATOR,,GML_READER_GROUP,,USE_OLD_READER,NO,DISABLE_AUTOMATIC_READER_TYPE_SELECTION,NO,DISABLE_XML_NAMESPACE_PROCESSING,NO,EXPOSE_ATTRS_GROUP,,GML_EXPOSE_FORMAT_ATTRS,,USE_SEARCH_ENVELOPE,NO,SEARCH_ENVELOPE_MINX,0,SEARCH_ENVELOPE_MINY,0,SEARCH_ENVELOPE_MAXX,0,SEARCH_ENVELOPE_MAXY,0,CLIP_TO_ENVELOPE,NO,GML_RDR_ADV,,CONTINUE_ON_GEOM_ERROR,yes,SYSTEM_ENCODING,,CACHE_XSD,yes,CACHE_XSD_EXPIRY_TIME,,MULTI_VALUE_SIMPLE_PROPERTIES_AS_CSV,no,NETWORK_AUTHENTICATION,\"\"\"\"AUTH_INFO_GROUP,NO,AUTH_METHOD,<lt>Unused<gt>,NAMED_CONNECTION,<lt>Unused<gt>,AUTH_USERNAME,<lt>Unused<gt>,AUTH_PASSWORD,<Unused>\"\"\"\",_MERGE_SCHEMAS,YES\"\",META_MACROS,\"\"SourceIGNORE_APPLICATON_SCHEMA,no,SourceXSD_DOC,\"\"\"\""
        + schemaXSD +
        "\"\"\"\",SourceVALIDATE_DATASET,no,SourceFEATURE_TYPES_SCHEMA_MODE,XML_SCHEMA,SourceSKIP_SCHEMA_LOCATION_IN_DATASET,No,SourceMAP_FEATURE_COLLECTION,,SourceGML_FEATURE_ELEMENTS,<Unused>,SourceSRS_GEOMETRY_PARAMETERS,,SourceSRS_AXIS_ORDER,,SourceSRS_ANGLE_DIRECTION,,SourceENFORCE_PATH_CONTINUITY_BY,SNAPPING_END_POINTS,SourceFEATURE_PROPERTIES_READER,,SourceMAP_EMBEDDED_OBJECTS_AS,ATTRIBUTES,SourceMAP_PREDEFINED_GML_PROPERTIES,NO,SourceMAP_GEOMETRY_COLUMNS,YES,SourceMAP_ALL_SUBSTITUTABLE_PROPERTIES,YES,SourceADD_NAMESPACE_PREFIX_TO_NAMES,,SourceQNAMES_FOR_PROPERTIES_TO_IGNORE,,SourceATTRIBUTE_HANDLING,,SourceMAP_COMPLEX_PROPERTIES_AS,\"\"\"\"Nested Attributes\"\"\"\",SourceMAX_MULTI_LIST_LEVEL,,SourceADD_GEOMETRIES_AS_XML_FRAGMENTS,NO,SourceXML_FRAGMENTS_AS_DOCUMENTS,YES,SourceFLATTEN_XML_FRAGMENTS,NO,SourceFLATTEN_XML_FRAGMENTS_OPEN_LIST_BRACE,,SourceFLATTEN_XML_FRAGMENTS_CLOSE_LIST_BRACE,,SourceFLATTEN_XML_FRAGMENTS_SEPARATOR,,SourceGML_READER_GROUP,,SourceUSE_OLD_READER,NO,SourceDISABLE_AUTOMATIC_READER_TYPE_SELECTION,NO,SourceDISABLE_XML_NAMESPACE_PROCESSING,NO,SourceEXPOSE_ATTRS_GROUP,,SourceGML_EXPOSE_FORMAT_ATTRS,,SourceUSE_SEARCH_ENVELOPE,NO,SourceSEARCH_ENVELOPE_MINX,0,SourceSEARCH_ENVELOPE_MINY,0,SourceSEARCH_ENVELOPE_MAXX,0,SourceSEARCH_ENVELOPE_MAXY,0,SourceCLIP_TO_ENVELOPE,NO,SourceGML_RDR_ADV,,SourceCONTINUE_ON_GEOM_ERROR,yes,SourceSYSTEM_ENCODING,,SourceCACHE_XSD,yes,SourceCACHE_XSD_EXPIRY_TIME,,SourceMULTI_VALUE_SIMPLE_PROPERTIES_AS_CSV,no,SourceNETWORK_AUTHENTICATION,\"\"\"\"AUTH_INFO_GROUP,NO,AUTH_METHOD,<lt>Unused<gt>,NAMED_CONNECTION,<lt>Unused<gt>,AUTH_USERNAME,<lt>Unused<gt>,AUTH_PASSWORD,<Unused>\"\"\"\"\"\",METAFILE,GML,COORDSYS,\"\"\"\"\"\"ESRIWKT|ETRS_1989_Poland_CS92|PROJCS[\"\"\"\"ETRS_1989_Poland_CS92\"\"\"\",GEOGCS[\"\"\"\"GCS_ETRS_1989\"\"\"\",DATUM[\"\"\"\"D_ETRS_1989\"\"\"\",SPHEROID[\"\"\"\"GRS_1980\"\"\"\",6378137.0,298.257222101]],PRIMEM[\"\"\"\"Greenwich\"\"\"\",0.0],UNIT[\"\"\"\"Degree\"\"\"\",0.0174532925199433]],PROJECTION[\"\"\"\"Transverse_Mercator\"\"\"\"],PARAMETER[\"\"\"\"False_Easting\"\"\"\",500000.0],PARAMETER[\"\"\"\"False_Northing\"\"\"\",-5300000.0],PARAMETER[\"\"\"\"Central_Meridian\"\"\"\",19.0],PARAMETER[\"\"\"\"Scale_Factor\"\"\"\",0.9993],PARAMETER[\"\"\"\"Latitude_Of_Origin\"\"\"\",0.0],UNIT[\"\"\"\"Meter\"\"\"\",1.0]]\"\"\"\"\"\",IDLIST,,__FME_DATASET_IS_SOURCE__,true\"",
        outputDir + "\\" + inputFile + ".gdb")
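
# The long source string above is FME's serialised reader configuration, of
# the form "<FORMAT>,<dataset>,RUNTIME_MACROS,...". A short two-argument form,
# as used elsewhere on this page, is possible when the reader defaults
# (no XSD validation, default coordinate system) are acceptable:
#
#     arcpy.QuickImport_interop("GML," + puntky_xml_dir + "/" + inputFile,
#                               outputDir + "\\" + inputFile + ".gdb")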

inputList = [name + ".gdb" for name in inputList]

dictionary = get_databases_dictionary(inputList, outputDir)  # defined elsewhere in the source script
print("Dictionary is ok")
arcpy.env.workspace = outputDir

# Delete old GeoDataBase if exists
if os.path.exists(outputDir + "\\allData.gdb"):
    shutil.rmtree(outputDir + "\\allData.gdb")
def mainFunction(
    mapInfoFolder, geodatabase, notIncludeConfigFile, renameConfigFile
):  # Get parameters from ArcGIS Desktop tool by separating with commas e.g. (var1 is 1st parameter, var2 is 2nd parameter, var3 is 3rd parameter)
    try:
        # --------------------------------------- Start of code --------------------------------------- #
        # enableLogging, logger, logMessage, sendErrorEmail and sendEmail are
        # defined in the header of the full template script
        output = None  # set this if the tool produces an output parameter

        root_len = len(os.path.abspath(str(mapInfoFolder)))
        # For each of the directories in the folder
        for root, dirs, files in os.walk(str(mapInfoFolder)):
            archive_root = os.path.abspath(root)[root_len:]
            # For each file
            for f in files:
                # Get the file path
                fullpath = os.path.join(root, f)
                fileName = os.path.join(archive_root, f)
                # If it is a tab file
                if fullpath.endswith(".TAB"):
                    arcpy.AddMessage("MapInfo TAB file found - " + fullpath +
                                     "...")
                    arcpy.AddMessage("Importing dataset...")

                    noGeometryTest = fullpath + "\\NoGeometry"
                    runImport = True
                    # Check whether the geometry is invalid
                    if arcpy.Exists(noGeometryTest):
                        arcpy.AddWarning("Geometry is invalid...")
                        runImport = False
                    # If opening bracket in file name
                    if "(" in fileName:
                        arcpy.AddWarning(
                            "Filename is invalid because of \"(\"...")
                        runImport = False
                    # If closing bracket in file name
                    if ")" in fileName:
                        arcpy.AddWarning(
                            "Filename is invalid because of \")\"...")
                        runImport = False

                    # If can continue with import
                    if (runImport == True):
                        # Import MapInfo TAB file
                        arcpy.QuickImport_interop(
                            "MITAB,\"" + fullpath +
                            "\",\"RUNTIME_MACROS,\"\"FME_TABLE_PASSWORD,,_MITAB_FULL_ENHANCED_GEOMETRY,YES,ENCODING,,EXPOSE_ATTRS_GROUP,,MITAB_EXPOSE_FORMAT_ATTRS,,USE_SEARCH_ENVELOPE,NO,SEARCH_ENVELOPE_MINX,0,SEARCH_ENVELOPE_MINY,0,SEARCH_ENVELOPE_MAXX,0,SEARCH_ENVELOPE_MAXY,0,CLIP_TO_ENVELOPE,NO,_MERGE_SCHEMAS,YES\"\",META_MACROS,\"\"SourceFME_TABLE_PASSWORD,,Source_MITAB_FULL_ENHANCED_GEOMETRY,YES,SourceENCODING,,SourceEXPOSE_ATTRS_GROUP,,SourceMITAB_EXPOSE_FORMAT_ATTRS,,SourceUSE_SEARCH_ENVELOPE,NO,SourceSEARCH_ENVELOPE_MINX,0,SourceSEARCH_ENVELOPE_MINY,0,SourceSEARCH_ENVELOPE_MAXX,0,SourceSEARCH_ENVELOPE_MAXY,0,SourceCLIP_TO_ENVELOPE,NO\"\",METAFILE,MITAB,COORDSYS,,IDLIST,,__FME_DATASET_IS_SOURCE__,true\"",
                            os.path.join(arcpy.env.scratchFolder, "Data.gdb"))

                        arcpy.env.workspace = os.path.join(
                            arcpy.env.scratchFolder, "Data.gdb")
                        featureClassList = arcpy.ListFeatureClasses()
                        # If data imported
                        if (featureClassList):
                            # Loop through the list of feature classes
                            count = 0
                            for featureClass in featureClassList:
                                desc = arcpy.Describe(featureClass)
                                datasetRemoved = False

                                # Get a record count for the dataset
                                rowCount = int(arcpy.GetCount_management(
                                    featureClass).getOutput(0))
                                if (rowCount == 0) and (datasetRemoved
                                                        == False):
                                    arcpy.AddWarning(
                                        "Not importing dataset with no records - "
                                        + desc.name + "...")
                                    arcpy.Delete_management(
                                        featureClass, "FeatureClass")
                                    datasetRemoved = True

                                # Don't include datasets with "text" in name
                                if ("text" in str(
                                        desc.name)) and (datasetRemoved
                                                         == False):
                                    arcpy.AddWarning(
                                        "Not importing text dataset - " +
                                        desc.name + "...")
                                    arcpy.Delete_management(
                                        featureClass, "FeatureClass")
                                    datasetRemoved = True

                                # Remove polygon from feature class name
                                if (("_polygon" in str(desc.name))
                                        and (datasetRemoved == False)):
                                    datasetName = desc.catalogPath
                                    newDatasetName = datasetName.replace(
                                        "_polygon", "")

                                    renameDataset = True
                                    # If already been renamed
                                    if arcpy.Exists(newDatasetName):
                                        renameDataset = False

                                    if (renameDataset == True):
                                        # Rename feature class
                                        arcpy.AddMessage(
                                            "Removing polygon from feature class name..."
                                        )
                                        arcpy.Rename_management(
                                            datasetName, newDatasetName,
                                            "FeatureClass")

                                # Remove point from feature class name
                                if (("_point" in str(desc.name))
                                        and (datasetRemoved == False)):
                                    datasetName = desc.catalogPath
                                    newDatasetName = datasetName.replace(
                                        "_point", "")

                                    renameDataset = True
                                    # If already been renamed
                                    if arcpy.Exists(newDatasetName):
                                        renameDataset = False

                                    if (renameDataset == True):
                                        # Rename feature class
                                        arcpy.AddMessage(
                                            "Removing point from feature class name..."
                                        )
                                        arcpy.Rename_management(
                                            datasetName, newDatasetName,
                                            "FeatureClass")

                                # Remove line from feature class name
                                if (("_line" in str(desc.name))
                                        and (datasetRemoved == False)):
                                    datasetName = desc.catalogPath
                                    newDatasetName = datasetName.replace(
                                        "_line", "")

                                    renameDataset = True
                                    # If already been renamed
                                    if arcpy.Exists(newDatasetName):
                                        renameDataset = False

                                    if (renameDataset == True):
                                        # Rename feature class
                                        arcpy.AddMessage(
                                            "Removing line from feature class name..."
                                        )
                                        arcpy.Rename_management(
                                            datasetName, newDatasetName,
                                            "FeatureClass")

                                count = count + 1

                            # Loop through the list of feature classes
                            featureClassList = arcpy.ListFeatureClasses()
                            for featureClass in featureClassList:
                                copyDataset = True
                                desc = arcpy.Describe(featureClass)

                                # If configuration provided
                                if (notIncludeConfigFile):
                                    # Set CSV delimiter
                                    csvDelimiter = ","

                                    # Look through configuration file to see if any datasets in there to not include
                                    # Open the CSV file
                                    with open(notIncludeConfigFile,
                                              'rb') as csvFile:
                                        # Read the CSV file
                                        rows = csv.reader(
                                            csvFile, delimiter=csvDelimiter)

                                        # For each row in the CSV
                                        count = 0
                                        for row in rows:
                                            # Ignore the first line containing headers
                                            if (count > 0):
                                                # Get the name of the dataset to not include
                                                datasetNotInclude = row[0]

                                                # If the current feature class is in the list
                                                if (desc.name.lower() ==
                                                        datasetNotInclude.lower()):
                                                    arcpy.AddWarning(
                                                        "Not including dataset - "
                                                        + desc.name + "...")
                                                    copyDataset = False
                                            count = count + 1

                                # If rename configuration provided
                                if (renameConfigFile):
                                    # Set CSV delimiter
                                    csvDelimiter = ","

                                    # Look through configuration file to see if any datasets in there to rename
                                    # Open the CSV file
                                    with open(renameConfigFile,
                                              'rb') as csvFile:
                                        # Read the CSV file
                                        rows = csv.reader(
                                            csvFile, delimiter=csvDelimiter)

                                        # For each row in the CSV
                                        count = 0
                                        for row in rows:
                                            # Ignore the first line containing headers
                                            if (count > 0):
                                                # Get the name of the dataset to rename
                                                datasetRename = row[0]
                                                # Name to change dataset to
                                                datasetRenameTo = row[1]

                                                # If the current feature class is in the list
                                                if ((desc.name).lower() ==
                                                        datasetRename.lower()):
                                                    datasetName = desc.catalogPath
                                                    newDatasetName = datasetName.replace(
                                                        datasetRename,
                                                        datasetRenameTo)
                                                    featureClass = newDatasetName
                                                    arcpy.AddWarning(
                                                        "Renaming " +
                                                        desc.name + " to " +
                                                        datasetRenameTo +
                                                        "...")
                                                    arcpy.Rename_management(
                                                        datasetName,
                                                        newDatasetName,
                                                        "FeatureClass")
                                                    desc = arcpy.Describe(
                                                        featureClass)
                                            count = count + 1

                                # If can continue with copy
                                if (copyDataset == True):
                                    # Copy in the dataset to the geodatabase
                                    arcpy.CopyFeatures_management(
                                        featureClass,
                                        os.path.join(geodatabase, desc.name))
                        else:
                            arcpy.AddWarning("No datasets to import...")

                        # Delete database
                        arcpy.Delete_management(arcpy.env.workspace,
                                                "Workspace")

        # --------------------------------------- End of code --------------------------------------- #

        # If called from gp tool return the arcpy parameter
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                arcpy.SetParameterAsText(1, output)
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        pass
    # If arcpy error
    except arcpy.ExecuteError:
        # Build and show the error message
        errorMessage = arcpy.GetMessages(2)
        arcpy.AddError(errorMessage)
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
    # If python error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message
        for i in range(len(e.args)):
            if (i == 0):
                errorMessage = unicode(e.args[i]).encode('utf-8')
            else:
                errorMessage = errorMessage + " " + unicode(
                    e.args[i]).encode('utf-8')
        arcpy.AddError(errorMessage)
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
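
# A sketch of calling mainFunction directly, outside the ArcGIS script tool
# wrapper; all four arguments are hypothetical placeholders:
#
#     mainFunction(r"C:\data\mapinfo", r"C:\data\output.gdb",
#                  r"C:\config\not_include.csv", r"C:\config\rename.csv")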
Example 6
import os

import arcpy

# out_folder, base and in_file are set from the tool parameters earlier in
# the source script
out_gdb = out_folder+"/"+base+".gdb"
out_fc = out_gdb+"/%s_sDNA"%base

itn_startelev_field = "DirectedNode_NegativeOrientation_GradeSeparation"
itn_endelev_field = "DirectedNode_PositiveOrientation_GradeSeparation"

#check output gdb doesn't exist already
if os.path.exists(out_gdb):
    raise Exception("Output gdb already exists")

#then the actual work
arcpy.AddMessage("Importing now - this may take a long time, especially if road routing information is included")

arcpy.AddMessage("Calling Data Interoperability import...")
arcpy.AddMessage("(If this step fails, check you have ArcGIS Data Interoperability extension installed.  A license for the extension is NOT needed.")
arcpy.QuickImport_interop("GMLSF,"+in_file, out_gdb)
arcpy.AddMessage("Data Interop import complete, converting RoadLink layer to sDNA format...")

arcpy.CopyFeatures_management(out_gdb+"/RoadLink",out_fc)

arcpy.AddField_management(out_fc,"startelev","SHORT")
arcpy.AddField_management(out_fc,"endelev","SHORT")
arcpy.AddField_management(out_fc,"island","SHORT")

arcpy.CalculateField_management(out_fc,"startelev","!%s!"%itn_startelev_field,"PYTHON")
arcpy.CalculateField_management(out_fc,"endelev","!%s!"%itn_endelev_field,"PYTHON")
arcpy.CalculateField_management(out_fc,"island",'int(!NatureOfRoad!=="Traffic Island Link" or !NatureOfRoad!=="Traffic Island Link At Junction")',"PYTHON")

arcpy.DeleteField_management(out_fc,itn_startelev_field)
arcpy.DeleteField_management(out_fc,itn_endelev_field)
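
# Note: after field substitution the "island" expression above evaluates,
# per row, to
#     int(NatureOfRoad == "Traffic Island Link" or
#         NatureOfRoad == "Traffic Island Link At Junction")
# i.e. 1 for traffic-island links and 0 otherwise.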
Example 7
    kmlHand.close()
    geojsonHand.close()

except Exception:
    print('Error retrieving earthquake data')

# Check if the user has the Data Interoperability extension for ArcGIS

if arcpy.CheckExtension('DataInteroperability') == 'Available':

    # Try using the QuickImport tool to create the output geodatabase.

    try:
        gdbPath = targetFolder + '/4.5_month' + dateString + '.gdb'
        arcpy.CheckOutExtension('DataInteroperability')
        arcpy.QuickImport_interop(geojsonPath, gdbPath)
        arcpy.CheckInExtension('DataInteroperability')

    except Exception:
        print(arcpy.GetMessages())

    # Also try using the KMLToLayer tool to create output datasets.

    try:
        arcpy.KMLToLayer_conversion(kmlPath, targetFolder)

    except Exception:
        print(arcpy.GetMessages())

# If the user does not have the required extension, then attempt to use the online GeoJSON
# converter.