Example 1
def createfolders():
    # assumes a module-level `workspace` and `from os.path import join`

    # creates folder Temp in the workspace
    tempfolder = "Temp"
    arcpy.CreateFolder_management(workspace, tempfolder)
    path_to_temp0 = join(workspace, tempfolder)
    if arcpy.Exists(path_to_temp0):
        arcpy.AddMessage("Folder 'Temp' is created.")

    # creates folder GeoSpace in the workspace
    geofolder = "GeoSpace"
    arcpy.CreateFolder_management(workspace, geofolder)
    path_to_geofolder0 = join(workspace, geofolder)
    if arcpy.Exists(path_to_geofolder0):
        arcpy.AddMessage("Folder 'GeoSpace' is created.")

    # creates folder SomSpace in the workspace
    somfolder = "SomSpace"
    arcpy.CreateFolder_management(workspace, somfolder)
    path_to_somfolder0 = join(workspace, somfolder)
    if arcpy.Exists(path_to_somfolder0):
        arcpy.AddMessage("Folder 'SomSpace' is created.")

    # creates output folder in the workspace
    output_folder = "output_folder"
    arcpy.CreateFolder_management(workspace, output_folder)
    path_to_outputfolder0 = join(workspace, output_folder)
    if arcpy.Exists(path_to_outputfolder0):
        arcpy.AddMessage("Folder 'output_folder' is created.")

    return path_to_temp0, path_to_geofolder0, path_to_somfolder0, path_to_outputfolder0
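
The four blocks above repeat one create-check-report pattern. A minimal sketch of a helper that factors it out, assuming the same module-level `workspace` as the example:

from os.path import join

import arcpy

def create_named_folder(workspace, name):
    # Create `name` under `workspace`, report success, and return the full path.
    arcpy.CreateFolder_management(workspace, name)
    path = join(workspace, name)
    if arcpy.Exists(path):
        arcpy.AddMessage("Folder '{}' created.".format(name))
    return path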
Example 2
        def mhello():
            foldername = path_directory.rsplit('\\', 1)[0]
            arcpy.CreateFolder_management(foldername, 'input')
            arcpy.CreateFolder_management(foldername, 'output')
            arcpy.CreateFolder_management(foldername, 'tmp')
            inputpath = foldername + '/input/'
            # raw string: a plain "C:\Users\..." literal breaks on Python 3 (\U escape)
            start_county_layer = r"C:\Users\zwhitman\Documents\census\psu_app\input\us_counties.shp"
            global input_county
            input_county = inputpath + 'us_counties_joined_3857.shp'
            if os.path.isfile(input_county):
                controller.show_frame(PageState)
            else:
                arcpy.Copy_management(start_county_layer, input_county)
                arcpy.TableToDBASE_conversion(variable_file, inputpath)
                dbf_varfile = variable_file.rsplit('/', 1)[1]
                dbf_varfile = dbf_varfile[:-3] + "dbf"
                dbf_varfile = inputpath + dbf_varfile
                arcpy.AddField_management(dbf_varfile, "GEOID_2", "TEXT", "#",
                                          "#", "#", "#", "NULLABLE",
                                          "NON_REQUIRED", "#")
                arcpy.CalculateField_management(
                    dbf_varfile, "GEOID_2", "calc(!GEOID!)", "PYTHON_9.3",
                    "def calc(a):\n    return a[1:-1]")
                arcpy.JoinField_management(input_county, "GEOID", dbf_varfile,
                                           "GEOID_2", "#")
                controller.show_frame(PageState)

            return
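
The code block passed to CalculateField_management above is easier to read as a triple-quoted string with real newlines; a small sketch against the same table and field:

code_block = """def calc(a):
    return a[1:-1]"""
arcpy.CalculateField_management(dbf_varfile, "GEOID_2", "calc(!GEOID!)",
                                "PYTHON_9.3", code_block)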
Example 3
def process():
    arcpy.CreateFolder_management(out_folder_path="C:/", out_name="EDIFICIOS")
    arcpy.CreateFileGDB_management(out_folder_path="C:/EDIFICIOS",
                                   out_name="GDB",
                                   out_version="CURRENT")
    arcpy.CreateFolder_management(out_folder_path="C:/EDIFICIOS",
                                  out_name="CARPETA")
    arcpy.CreateFileGDB_management(out_folder_path="C:/EDIFICIOS/CARPETA",
                                   out_name="GDB_E",
                                   out_version="CURRENT")

    # COPY THE TEMPLATE #
    arcpy.Copy_management(ly_taps_,
                          out_data="C:/EDIFICIOS/CARPETA/GDB_E.gdb/ly_taps",
                          data_type="FeatureClass")
    arcpy.Copy_management(ly_areain_,
                          out_data="C:/EDIFICIOS/CARPETA/GDB_E.gdb/ly_areain",
                          data_type="FeatureClass")
    arcpy.Copy_management(ly_troba_,
                          out_data="C:/EDIFICIOS/CARPETA/GDB_E.gdb/ly_troba",
                          data_type="FeatureClass")
    arcpy.Copy_management(ly_nodo_,
                          out_data="C:/EDIFICIOS/CARPETA/GDB_E.gdb/ly_nodo",
                          data_type="FeatureClass")

    # KMZ TO FEATURE #
    EDIFICIO_KMZ = arcpy.KMLToLayer_conversion(
        EDI_KMZ,
        output_folder="C:/EDIFICIOS",
        output_data="EDIFICIO_KMZ",
        include_groundoverlay="NO_GROUNDOVERLAY")
    arcpy.AddField_management(in_table="C:/EDIFICIOS/EDIFICIO_KMZ.gdb/Points",
                              field_name="COD_TAP",
                              field_type="TEXT")
    arcpy.CalculateField_management(
        in_table="C:/EDIFICIOS/EDIFICIO_KMZ.gdb/Points",
        field="COD_TAP",
        expression="[Name]",
        expression_type="VB")

    # FILL IN THE TEMPLATES #
    arcpy.Append_management(inputs="C:/EDIFICIOS/EDIFICIO_KMZ.gdb/Points",
                            target="C:/EDIFICIOS/CARPETA/GDB_E.gdb/ly_taps",
                            schema_type="NO_TEST")
    arcpy.Append_management(inputs="C:/EDIFICIOS/EDIFICIO_KMZ.gdb/Polygons",
                            target="C:/EDIFICIOS/CARPETA/GDB_E.gdb/ly_troba",
                            schema_type="NO_TEST")
    arcpy.Append_management(inputs="C:/EDIFICIOS/EDIFICIO_KMZ.gdb/Polygons",
                            target="C:/EDIFICIOS/CARPETA/GDB_E.gdb/ly_areain",
                            schema_type="NO_TEST")
    arcpy.Append_management(inputs="C:/EDIFICIOS/EDIFICIO_KMZ.gdb/Polygons",
                            target="C:/EDIFICIOS/CARPETA/GDB_E.gdb/ly_nodo",
                            schema_type="NO_TEST")

    # FILL IN THE TROBA CODE FIELDS #
    arcpy.CalculateField_management(ly_taps, "MTCODNOD", CodExpression,
                                    "PYTHON_9.3")
Example 4
def exports_main(exp_f):
    arcpy.env.compression = "LZW"
    arcpy.env.overwriteOutput = True
    print(startTime)  # assumes a module-level startTime
    export_fold = os.path.join(exp_f, "BHI_BVI_merged")

    gdb_option = False
    fileExt = ".gdb" if gdb_option else ""

    if os.path.exists(export_fold):
        print("export folder already exists")
    else:
        print("create export folder")
        os.makedirs(export_fold)

    BHI_gdb = os.path.join(export_fold, "BHI_5m" + fileExt)
    if BHI_gdb[-4:] == ".gdb":
        bhi_file_ext = ""
        if arcpy.Exists(BHI_gdb):
            arcpy.Delete_management(BHI_gdb)
        arcpy.CreateFileGDB_management(export_fold, "BHI_5m")
    else:
        bhi_file_ext = ".tif"
        if arcpy.Exists(BHI_gdb):
            arcpy.Delete_management(BHI_gdb)
        arcpy.CreateFolder_management(export_fold, "BHI_5m")

    BVI_gdb = os.path.join(export_fold, "BVI_5m" + fileExt)
    if BVI_gdb[-4:] == ".gdb":
        bvi_file_ext = ""
        if arcpy.Exists(BVI_gdb):
            arcpy.Delete_management(BVI_gdb)
        arcpy.CreateFileGDB_management(export_fold, "BVI_5m")
    else:
        bvi_file_ext = ".tif"
        if arcpy.Exists(BVI_gdb):
            arcpy.Delete_management(BVI_gdb)
        arcpy.CreateFolder_management(export_fold, "BVI_5m")
    BVI_exten = "**/*_GB_BVI.tif"
    BHI_exten = "**/*_GB_BHI.tif"

    collect_HR_data(exp_f, BHI_exten, BHI_gdb, bhi_file_ext)
    collect_HR_data(exp_f, BVI_exten, BVI_gdb, bvi_file_ext)
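
The Delete/Create branches above repeat the same shape for the geodatabase and folder cases. A hypothetical helper (not part of the original script) that factors it:

import os
import arcpy

def recreate_output(parent, name, use_gdb):
    # Remove any existing output container, then create a fresh one.
    target = os.path.join(parent, name + (".gdb" if use_gdb else ""))
    if arcpy.Exists(target):
        arcpy.Delete_management(target)
    if use_gdb:
        arcpy.CreateFileGDB_management(parent, name)
    else:
        arcpy.CreateFolder_management(parent, name)
    return target

With it, the BHI block reduces to BHI_gdb = recreate_output(export_fold, "BHI_5m", gdb_option).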
Example 5
def workspaceFunc():
    # assumes module-level `root` and `tempOutput` paths
    if not os.path.exists(root): os.makedirs(root)
    try:
        if os.path.isfile(tempOutput):
            os.remove(tempOutput)
        arcpy.CreateFolder_management(root, "CommTempdata")
        arcpy.CreateFileGDB_management(tempOutput, "CommissionersTemp", "")
    except arcpy.ExecuteError:
        pass  # swallows geoprocessing errors (e.g. folder already exists)
Example 6
def inv_norm():
    """Invert and normalize each corridor."""
    lm_util.gprint("Inverting and normalizing each corridor")
    prev_ws = arcpy.env.workspace
    # could be multiple nlc folders
    nlc_idx = 0
    while True:
        nlc_str = ""
        if nlc_idx > 0:
            nlc_str = str(nlc_idx)
        if not os.path.exists(
                os.path.join(lm_env.DATAPASSDIR, "nlcc", "nlc" + nlc_str)):
            break
        arcpy.env.workspace = os.path.join(lm_env.DATAPASSDIR, "nlcc",
                                           "nlc" + nlc_str)
        # process each corridor raster in folder
        for input_raster in arcpy.ListRasters():
            # max score normalization with inversion
            inv_norm_raster = normalize_raster(Raster(input_raster),
                                               lp_env.NORMCORRNORMETH, True)
            if not os.path.exists(
                    os.path.join(lm_env.DATAPASSDIR, "nlcc", "nlc" + nlc_str,
                                 "inv_norm")):
                arcpy.CreateFolder_management(
                    os.path.join(lm_env.DATAPASSDIR, "nlcc", "nlc" + nlc_str),
                    "inv_norm")
            inv_norm_raster.save(
                os.path.join(lm_env.DATAPASSDIR, "nlcc", "nlc" + nlc_str,
                             "inv_norm", input_raster))
        nlc_idx += 1
    arcpy.env.workspace = prev_ws
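
The while-loop probes nlc, nlc1, nlc2, ... by index. The same folders could be found with glob, assuming only corridor folders match the nlc* pattern; a sketch:

import glob
import os

for nlc_dir in sorted(glob.glob(os.path.join(lm_env.DATAPASSDIR, "nlcc", "nlc*"))):
    arcpy.env.workspace = nlc_dir
    # ...process arcpy.ListRasters() as above...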
Example 7
def create_gdb(folder):
    fecha = time.strftime('%d%b%y')
    hora = time.strftime('%H%M%S')
    nameFile = "BACKUP-{}-{}".format(fecha, hora)
    folder_gdb = arcpy.CreateFolder_management(folder, nameFile).getOutput(0)
    # name_gdb is assumed to be a module-level name without the ".gdb" extension
    arcpy.CreateFileGDB_management(folder_gdb, name_gdb, "10.0")
    return os.path.join(folder, nameFile, name_gdb + ".gdb")
Example 8
def create_folder_in_scratch(folderName):
	# create the folders necessary for the job
	scratch = arcpy.env.scratchWorkspace
	#scratch = sys.path[0]
	folderPath = arcpy.CreateUniqueName(folderName, scratch)
	arcpy.CreateFolder_management(scratch, os.path.basename(folderPath))
	return folderPath
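
CreateUniqueName only reserves a non-clashing path; the CreateFolder call is what materializes it. A hypothetical usage, assuming the scratch workspace already points at an existing folder:

arcpy.env.scratchWorkspace = r"C:\scratch"  # placeholder path
job_folder = create_folder_in_scratch("job")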
Example 9
def createGdb(carpeta):
    fecha = time.strftime('%d%b%y')
    hora = time.strftime('%H%M%S')
    nameFile = "Proceso-{}-{}".format(fecha, hora)
    FOLDER = arcpy.CreateFolder_management(carpeta, nameFile)
    arcpy.CreateFileGDB_management(FOLDER, nameGdb, "10.0")
    return os.path.join(carpeta, nameFile, nameGdb + ".gdb")
Example 10
    def create_gdb(self):
        fecha = time.strftime('%d%b%y')
        hora = time.strftime('%H%M%S')
        name_file = "Edificios_SIROPE-{}-{}".format(fecha, hora)
        folder = arcpy.CreateFolder_management(self.workspace, name_file)
        self.pathfolder = os.path.join(self.workspace, name_file)
        arcpy.CreateFileGDB_management(folder, self.nameGDB, "10.0")
        self.pathgdb = os.path.join(self.workspace, name_file, self.nameGDB + ".gdb")
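
Examples 7, 9, and 10 all assemble a timestamped name from two separate time.strftime calls; a single combined format string does the same. A sketch (the name is illustrative):

import time

name_file = "Edificios_SIROPE-{}".format(time.strftime('%d%b%y-%H%M%S'))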
Example 11
def setUpGlobalParameters(x, newfolder):
    # assumes a module-level `tdate` date string
    arcpy.CreateFolder_management(r"D:\data\covid", "_" + tdate)  #Create Folder
    arcpy.CreateFileGDB_management(newfolder, "_" + tdate + ".gdb")  #Create FileGDB
    arcpy.AddMessage("Create Folder " + "_" + tdate)
    arcpy.AddMessage("Create FileGDB " + "_" + tdate)
    print("Create Folder " + "_" + tdate)
    print("Create FileGDB " + "_" + tdate)
Example 12
def splitDataByType(data_, shipTypes_):
    data_desc = arcpy.Describe(data_)
    print "--- Extracting ship types out of " + data_desc.file + "..."
    for shipType in shipTypes_:
        shipTypeFolder = out_folder + "\\" + shipType
        if not arcpy.Exists(shipTypeFolder):
            arcpy.CreateFolder_management(out_folder, shipType)
        shipTypeData = shipTypeFolder + "\\" + shipType + "_" + data_desc.file
        if not arcpy.Exists(shipTypeData):
            query = "HELCOM_Gro = '" + shipType.title() + "'"
            arcpy.FeatureClassToFeatureClass_conversion(
                data_, shipTypeFolder, shipType + "_" + data_desc.file, query)
        else:
            print "--- WARNING: " + shipType + " " + getMonthFromFileName(
                data_desc.file) + " " + year + " already exists..."
Example 13
def createFolder():
    # The cursor must exist before any row is read. Assumes module-level
    # LookupTable, OriginFolder, DestinationFolder, ProviderNameField,
    # copyText(), and `import shutil`.
    cursor = arcpy.SearchCursor(LookupTable)
    row = cursor.next()
    while row:
        arcpy.CreateFolder_management(DestinationFolder,
                                      row.getValue(ProviderNameField))
        finalDestination = os.path.join(DestinationFolder,
                                        row.getValue(ProviderNameField))
        OldReadmeLocation = os.path.join(OriginFolder,
                                         row.getValue(ProviderNameField),
                                         "README.txt")
        try:
            shutil.copyfile(OldReadmeLocation,
                            os.path.join(finalDestination, "README.txt"))
        except:
            print "Provider " + row.getValue(
                ProviderNameField
            ) + " README.txt does not exist.  Check log to explore error."
        copyText()
        row = cursor.next()
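
The legacy SearchCursor/next() idiom above predates arcpy.da; with arcpy 10.1+ the same loop is usually written with a context-managed da cursor. A sketch assuming the same module-level names:

with arcpy.da.SearchCursor(LookupTable, [ProviderNameField]) as cursor:
    for (provider,) in cursor:
        arcpy.CreateFolder_management(DestinationFolder, provider)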
Example 14
def getRaster1(url,fileName,imageguids,selection_feature,ds_name,download_path,sde_file):
    print("arcpy is ok")
    #arcpy.env.workspace = download_path
    #arcpy.CreateFolder_management(arcpy.env.workspace, fileName)
    arcpy.CreateFolder_management(download_path, fileName)
    count = 0
    where = ""
    for imageryguid in imageguids:
        if (count == 0):
            where += "imageryguid='" + imageryguid + "'"
        else:
            where += " or imageryguid='" + imageryguid + "'"
        count += 1
        downloadRasterWhere="imageryguid='" + imageryguid + "'"
        ## non-standard code
        print(downloadRasterWhere)
        print()
        url1=url.replace("https://xingyun.national-space.com/gxyh/rest/","http://xingyunserver.national-space.com:6080/arcgis/rest/")
        print(url1)
        print(ds_name)
        if ds_name=='DS_PSI':
            print("DS_type1")
            arcpy.DownloadRasters_management(url1,download_path+"/"+fileName, downloadRasterWhere,"", "", "", "", "","", "MAINTAIN_FOLDER")
        else:
            print("DS_type2")
            arcpy.ExportMosaicDatasetItems_management(sde_file+"/"+ds_name,download_path+"/"+fileName,"", downloadRasterWhere,"TIFF", "", "NONE","", "")

    arcpy.CreateFileGDB_management(download_path+"/"+fileName, "data", "CURRENT")

    arcpy.ExportMosaicDatasetGeometry_management(sde_file+"/"+ds_name,
                                                 download_path+"/"+fileName+"/data.gdb/footprints",
                                                 where, "FOOTPRINT")
    copyQuickview(url,imageguids,fileName,download_path)
    #exportRaster(ds_name,where,fileName,download_path,sde_file)
    ZipRaster(download_path+"/"+fileName,download_path,fileName)
    print(fileName)
Example 15
# The enclosing def was cut off in the source; the header and the initial
# existence check below are reconstructed assumptions.
def indexExists(tablename, indexname):
    if not arcpy.Exists(tablename):
        return False
    tabledescription = arcpy.Describe(tablename)
    for iIndex in tabledescription.indexes:
        if iIndex.Name == indexname:
            return True
    return False

# ===========
# Folder prep
# ===========
# Check if folder exists and create it if not

# Output tables
gisTables_directory = zonalStatsDirectory + "/gisTables"
if not arcpy.Exists(gisTables_directory):
    arcpy.CreateFolder_management(zonalStatsDirectory, "gisTables")


# ==========
# Add layers
# ==========
# Define map
mxd = arcpy.mapping.MapDocument("CURRENT")
# Define dataframe
df = arcpy.mapping.ListDataFrames(mxd)[0]

# Add the catchments layer to the map
addLayer = arcpy.mapping.Layer(catchmentsFilePath)
arcpy.mapping.AddLayer(df, addLayer, "AUTO_ARRANGE")

Example 16
import sys, string, os, os.path, arcgisscripting, arcpy, datetime
from arcpy.sa import *
gp = arcgisscripting.create()
gp.CheckOutExtension("spatial")
arcpy.env.overwriteOutput = True
arcpy.env.resamplingMethod = "BILINEAR"

gp.cellSize = "250"
scaleFactor = "0.0001"

startTime = datetime.datetime.now()
print(u"\u2022" + " Start time: " + str(startTime))

extent = arcpy.env.snapRaster = "c:\\path_to_mask_raster\\mask_raster.tif"
inFolder = arcpy.env.workspace = "V:\\path_to_folder_containing_rasters\\"
outFolder = str(arcpy.CreateFolder_management(inFolder, "processing_results"))

rasterList = arcpy.ListRasters()
for raster in rasterList:
    # splitext removes the extension; rstrip would strip a character set,
    # not a suffix, and can eat trailing letters of the name
    outName = os.path.splitext(os.path.basename(raster))[0]
    print(u"\u2022" + " Processing raster: " + str(outName) + "...")
    arcpy.gp.Times_sa(raster, scaleFactor, "in_memory/ndvi")
    arcpy.gp.ExtractByMask_sa("in_memory/ndvi", extent, outFolder + "\\" + outName + "_p.tif")

print(u"\u2022" + " Cleaning workspace...")
arcpy.env.workspace = outFolder
itemList = arcpy.ListFiles()
for item in itemList:
    if os.path.splitext(item)[1].lstrip(".") != "tif":
        arcpy.gp.Delete_management(item)
Example 17
    "MA", "CT", "RI", "ME", "NH", "VT", "NY", "DE", "MD", "NJ", "PA", "VA",
    "WV", "DC", "NC", "TN", "KY", "OH"
]
sourceFolder = "//IGSAGBEBWS-MJO7/projects/dataIn/environmental/land/nrcsSSURGO/spatial"
outputName = "NHDHRDV2"

# ===========
# Folder prep
# ===========

# Create general folders if they don't exist
# ------------------------------------------
# Set the main GIS directory. Create one if it doesn't exist.
main_directory = baseDirectory + "/gisFiles"
if not arcpy.Exists(main_directory):
    arcpy.CreateFolder_management(baseDirectory, "gisFiles")

# Create run specific folders if they don't exist
# -----------------------------------------------
# Set the run-specific sub-folder. Create one if it doesn't exist.
working_directory = main_directory + "/" + outputName
if not arcpy.Exists(working_directory):
    arcpy.CreateFolder_management(main_directory, outputName)

# Set the run-specific table database. Create one if it doesn't exist.
tableDB = working_directory + "/tables.gdb"
if not arcpy.Exists(tableDB):
    arcpy.CreateFileGDB_management(working_directory, "tables", "CURRENT")

# Set the run-specific vector database. Create one if it doesn't exist.
vectorDB = working_directory + "/vectors.gdb"
Example 18
        fileName = baseFilename + "-VER-" + str(version)
        fileName = fileName.replace('.', '_')
        fileName = fileName.replace(':', '_')
        fileName = fileName.replace(' ', '-')
        # Create the corresponding ArcSDE connection info for each child version
        # arcpy.CreateDatabaseConnection_management("Database Connections", "Connection to 10.246.146.120.sde", "ORACLE", 'ipgis', 'DATABASE_AUTH',
        #                                           'JASFRAMEWORK',
        #                                           '123', 'SAVE_USERNAME')
        connFiles.append(fileName)

    for conn in connFiles:
        fileGeoDbLocation = basedir + os.sep + "databases"
        fileGeoDb = conn + ".gdb"
        print conn
        # Create a file geodatabase to hold this version's data
        if not arcpy.Exists(fileGeoDbLocation):
            arcpy.CreateFolder_management(basedir, "databases")
        arcpy.CreateFileGDB_management(fileGeoDbLocation, fileGeoDb)

        # env.workspace = folderName + "\\" + conn + ".sde"
        # print env.workspace
        totaldest = fileGeoDbLocation + "\\" + fileGeoDb
        print totaldest
        if tbList:
            for lmitc in tbList:
                if lmitc[:lmitc.find('.')] == "SDE":
                    desttc = lmitc[lmitc.find('.') + 1:]
                    totaltcdest = totaldest + os.sep + desttc
                    print lmitc + " ---->  " + totaltcdest
                    try:
                        arcpy.Copy_management(lmitc, totaltcdest)
                    except arcpy.ExecuteError as ee:
Example 19
def databaseSetup(output_workspace,
                  output_gdb_name,
                  hu_dataset,
                  hu8_field,
                  hu12_field,
                  hucbuffer,
                  nhd_path,
                  elevation_projection_template,
                  alt_buff,
                  version=None):
    """Set up the local folders and copy hydrography data into input geodatabases.

	This tool creates folder corresponding to each local hydrologic unit, usually a HUC8, and fills those folders with the flowlines, inwalls, and outwalls that will be used later to hydro-enforce the digital elevation model for each hydrologic unit. This tool also creates a global geodatabase with a feature class for the whole domain.
	
	Parameters
	----------
	output_workspace : str
		Output directory where processing will occur.
	output_gdb_name : str
		Global file geodatabase to be created.
	hu_dataset : str
		Feature class that defines local folder geographic boundaries.
	hu8_field : str
		Field name in "hu_dataset" to dissolve boundaries to local folder extents.
	hu12_field : str
		Field name in "hu_dataset" from which inwalls are generated.
	hucbuffer : str
		Distance to buffer local folder bounds in map units.
	nhd_path : str
		Path to workspace containing NHD geodatabases.
	elevation_projection_template : str
		Path to DEM file to use as a projection template.
	alt_buff : str
		Alternative buffer to use on local folder boundaries.
	version : str
		Package version number.
	
	Returns
	-------
	None

	Notes
	-----
	As this tool moves through each local hydrologic unit it searches the *nhd_path* for a geodatabase with hydrography data with the same HUC-4 as the local hydrologic unit. If this cannot be found the tool will skip that local hydrologic unit. Non-NHD hydrography data can be used with this tool, but it must be named and organized exactly as the NHD hydrography.
	"""

    if version:
        arcpy.AddMessage('StreamStats Data Preparation Tools version: %s' %
                         (version))

    # set up geoprocessor, with spatial analyst license
    if arcpy.CheckExtension("Spatial") == "Available":
        arcpy.CheckOutExtension("Spatial")
    else:
        arcpy.AddMessage('License Error')

    # Set script to overwrite if files exist
    arcpy.env.overwriteOutput = True

    localName = "local"
    subName = "subWatershed"
    GDB_name = "input_data.gdb"

    #set scratch and arcpy workspaces
    arcpy.env.workspace = output_workspace
    arcpy.env.scratchWorkspace = output_workspace

    #disable Z & M values
    arcpy.env.outputZFlag = "Disabled"
    arcpy.AddMessage('Z: ' + arcpy.env.outputZFlag)
    arcpy.env.outputMFlag = "Disabled"
    arcpy.AddMessage('M: ' + arcpy.env.outputMFlag)

    try:
        #name output fileGDB
        output_gdb = os.path.join(output_workspace, output_gdb_name + ".gdb")
        #output_gdb = output_workspace + "\\" + output_gdb_name + ".gdb"

        #create container geodatabase
        if arcpy.Exists(output_gdb):
            arcpy.Delete_management(output_gdb)

        arcpy.CreateFileGDB_management(output_workspace,
                                       output_gdb_name + ".gdb")

        #dissolve at 8 dig level and put in output workspace
        hu8_dissolve = arcpy.Dissolve_management(
            hu_dataset, os.path.join(output_gdb, "huc8index"), hu8_field)

        elev_spatial_ref = arcpy.Describe(
            elevation_projection_template
        ).spatialReference  # read the elevation spatial ref.
        orig_spatial_ref = arcpy.Describe(
            hu_dataset
        ).spatialReference  # read the local division spatial ref.

        # Setup loop to iterate thru each HUC in WBD dataset
        #fields = hu8_field
        with arcpy.da.SearchCursor(hu8_dissolve, hu8_field) as cursor:
            for row in cursor:
                #Get current huc 8
                current_hu8 = str(row[0])
                current_db = os.path.join(output_workspace, current_hu8,
                                          GDB_name)
                #current_db = output_workspace + "\\" + row[0] + "\\input_data.gdb"
                arcpy.AddMessage("")
                #arcpy.AddMessage("%s = \"%s\"" % (hu8_field, current_hu8))

                #check to make sure NHD exists and set variable names, if no NHD for HUC, skip it
                arcpy.AddMessage("Starting processing local folder %s...." %
                                 (current_hu8))
                arcpy.AddMessage("	Checking to see if NHD exists for %s" %
                                 (current_hu8[:4]))
                NHDExists = False
                if arcpy.Exists(
                        os.path.join(
                            nhd_path,
                            "NHD_H_" + current_hu8[:4] + "_HU4_GDB" + ".gdb")):
                    orig_4dig_NHD = os.path.join(
                        nhd_path,
                        "NHD_H_" + current_hu8[:4] + "_HU4_GDB" + ".gdb")
                    NHDExists = True
                else:
                    arcpy.AddMessage(
                        "     4 DIGIT NHD DOES NOT EXIST FOR THE CURRENT HUC")
                    arcpy.AddMessage(
                        "     Please download NHD for this HUC and/or ensure NHD geodatabase is named correctly"
                    )
                    NHDExists = False

                #If NHD exists for current HUC 8, then do the work
                if NHDExists:
                    #Create folder for HU inside output folder
                    hydrog_projection_template = os.path.join(
                        orig_4dig_NHD, "Hydrography", "NHDFlowline"
                    )  # get a file to generate hydrography clip.
                    hydrog_spatial_ref = arcpy.Describe(
                        hydrog_projection_template
                    ).spatialReference  # make spatial reference object for reproject later
                    arcpy.CreateFolder_management(output_workspace,
                                                  current_hu8)
                    arcpy.CreateFolder_management(
                        os.path.join(output_workspace, current_hu8), "Layers")
                    arcpy.CreateFolder_management(
                        os.path.join(output_workspace, current_hu8),
                        "tmp")  # make scratch workspace later for hydroDEM.

                    #Create file geodatabase to house data
                    arcpy.CreateFileGDB_management(
                        os.path.join(output_workspace, current_hu8), GDB_name)

                    #start output file creation
                    #----------------------------------
                    #WBD Processing
                    #----------------------------------
                    arcpy.AddMessage("  Doing WBD processing")

                    #create variables for huc buffers
                    hucbuffer_custom = os.path.join(
                        current_db, "local_buffer" + str(hucbuffer))
                    hucbuffer_custom_elev_dd83 = os.path.join(
                        current_db,
                        "local_buffer_elev" + str(hucbuffer) + "_dd83")
                    hucbuffer_custom_hydrog_dd83 = os.path.join(
                        current_db,
                        "local_buffer_hydrog" + str(hucbuffer) + "_dd83")
                    hucbuffer_alt = os.path.join(current_db,
                                                 "local_buffer%s" % (alt_buff))

                    #start process
                    arcpy.AddMessage(
                        "    Selecting current local hydrologic unit.")
                    arcpy.Select_analysis(
                        hu_dataset, os.path.join(current_db, subName),
                        "\"%s\" = \'%s\'" % (hu8_field, current_hu8))

                    arcpy.AddMessage("    Dissolving sub-watershed polygons")
                    arcpy.Dissolve_management(
                        os.path.join(current_db, subName),
                        os.path.join(current_db, localName), hu8_field)

                    arcpy.AddMessage(
                        "    Creating inner and outer wall polyline feature classes"
                    )
                    arcpy.PolygonToLine_management(
                        os.path.join(current_db, subName),
                        os.path.join(current_db, "huc12_line"))
                    arcpy.PolygonToLine_management(
                        os.path.join(current_db, localName),
                        os.path.join(current_db, "outer_wall"))
                    arcpy.Erase_analysis(
                        os.path.join(current_db, "huc12_line"),
                        os.path.join(current_db, "outer_wall"),
                        os.path.join(current_db, "inwall_edit"))

                    arcpy.AddMessage(
                        "    Creating user-defined buffered outwall dataset")
                    arcpy.Buffer_analysis(os.path.join(current_db, localName),
                                          hucbuffer_custom, hucbuffer, "FULL",
                                          "ROUND")
                    arcpy.AddMessage(
                        "    Creating %s meter buffered outwall dataset" %
                        (alt_buff))
                    arcpy.Buffer_analysis(os.path.join(current_db, localName),
                                          hucbuffer_alt,
                                          "%s METERS" % (alt_buff), "FULL",
                                          "ROUND")

                    arcpy.AddMessage(
                        "    Creating unprojected buffered outwall dataset for elevation and hydrography clips"
                    )
                    arcpy.Project_management(hucbuffer_custom,
                                             hucbuffer_custom_elev_dd83,
                                             elev_spatial_ref,
                                             in_coor_system=orig_spatial_ref)
                    arcpy.Project_management(hucbuffer_custom,
                                             hucbuffer_custom_hydrog_dd83,
                                             hydrog_spatial_ref,
                                             in_coor_system=orig_spatial_ref)

                    arcpy.AddMessage("    Creating sink point feature class")
                    arcpy.CreateFeatureclass_management(
                        os.path.join(output_workspace, current_hu8,
                                     "input_data.gdb"),
                        "sinkpoint_edit", "POINT", "", "", "",
                        os.path.join(current_db, localName))

                    #erase huc 12 line dataset after inwall is created
                    if arcpy.Exists(os.path.join(current_db, "huc12_line")):
                        arcpy.Delete_management(
                            os.path.join(current_db, "huc12_line"))

                    #----------------------------------
                    #NHD Processing
                    #----------------------------------
                    arcpy.AddMessage("  Doing NHD processing")

                    #Create NHD feature dataset within current HU database
                    arcpy.AddMessage(
                        "    Creating NHD feature dataset in local hydrologic unit workspace"
                    )
                    arcpy.CreateFeatureDataset_management(
                        current_db, "Hydrography", orig_spatial_ref)
                    arcpy.CreateFeatureDataset_management(
                        current_db, "Reference", orig_spatial_ref)

                    #process each feature type in NHD
                    featuretypelist = [
                        "NHDArea", "NHDFlowline", "NHDWaterbody"
                    ]
                    for featuretype in featuretypelist:

                        #clip unprojected feature
                        arcpy.AddMessage("      Clipping   " + featuretype)
                        arcpy.Clip_analysis(
                            os.path.join(orig_4dig_NHD, "Hydrography",
                                         featuretype),
                            hucbuffer_custom_hydrog_dd83,
                            os.path.join(current_db, featuretype + "_dd83"))

                        #project clipped feature
                        arcpy.AddMessage("      Projecting " + featuretype)
                        arcpy.Project_management(
                            os.path.join(current_db, featuretype + "_dd83"),
                            os.path.join(current_db, featuretype + "_project"),
                            orig_spatial_ref)
                        arcpy.CopyFeatures_management(
                            os.path.join(current_db, featuretype + "_project"),
                            os.path.join(current_db, "Hydrography",
                                         featuretype))

                        #delete unprojected and temporary projected NHD feature classes
                        arcpy.Delete_management(
                            os.path.join(current_db, featuretype + "_dd83"))
                        arcpy.Delete_management(
                            os.path.join(current_db, featuretype + "_project"))

                    #create editable dendrite feature class from NHDFlowline
                    arcpy.AddMessage(
                        "    Creating copy of NHDFlowline to preserve as original"
                    )
                    arcpy.CopyFeatures_management(
                        os.path.join(current_db, "Hydrography", "NHDFlowline"),
                        os.path.join(current_db, "Hydrography",
                                     "NHDFlowline_orig"))

                    arcpy.AddMessage("    Adding fields to NHDFlowline")
                    arcpy.AddField_management(
                        os.path.join(current_db, "Hydrography", "NHDFlowline"),
                        "comments", "text", "250")
                    arcpy.AddField_management(
                        os.path.join(current_db, "Hydrography", "NHDFlowline"),
                        "to_steward", "text", "50")
                    arcpy.AddMessage("    Finished local %s" % current_hu8)

                #if no NHD, skip the HUC
                else:
                    arcpy.AddMessage(
                        "     Processing skipped for this HUC--NO NHD")

            #del cursor, row

    # handle errors and report using arcpy.AddError
    except Exception as errMsg:
        #If we have messages of severity error (2), we assume a GP tool raised it,
        #  so we'll output that.  Otherwise, we assume we raised the error and the
        #  information is in errMsg.
        if arcpy.GetMessages(2):
            arcpy.AddError(arcpy.GetMessages(2))
        else:
            arcpy.AddError(str(errMsg))
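
A hypothetical invocation of the tool above; every path is a placeholder, not from the source:

databaseSetup(
    output_workspace=r"C:\ss_work",
    output_gdb_name="global",
    hu_dataset=r"C:\data\WBD.gdb\WBDHU12",
    hu8_field="HUC8",
    hu12_field="HUC12",
    hucbuffer="2000",
    nhd_path=r"C:\data\NHD",
    elevation_projection_template=r"C:\data\dem.tif",
    alt_buff="50",
)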
Example 20
if check_s == 'false':
    check_s = False
else:
    check_s = True

########################################################################################################################
#  set main settings and creating the working and scratch directory
########################################################################################################################

arcpy.env.overwriteOutput = True  # allow existing results to be overwritten
arcpy.env.extent = s_init  # set the working extent
if arcpy.Exists(r'{}\{}'.format(
        folder,
        name)):  # if the working directory exists, it will be overwritten
    arcpy.Delete_management(r'{}\{}'.format(folder, name))
arcpy.CreateFolder_management(folder, name)
workspace = arcpy.env.workspace = r'{}\{}'.format(
    folder, name)  # set the working directory
arcpy.CreateFileGDB_management(
    workspace,
    "Ergebnistabellen.gdb")  # creates a geodatabase for the result tables
arcpy.CreateFolder_management(
    folder, "Scratch")  # creates the scratch workspace for temporary datasets
arcpy.env.scratchWorkspace = r'{}\Scratch'.format(folder)

arcpy.AddMessage(
    time.strftime("%H:%M:%S: ") +
    "The results database was created in the directory {}.".format(folder))

########################################################################################################################
#  link and extract the base datasets
Example 21
def calc_lccs(normalize):
    try:
        if normalize:
            mosaicBaseName = "_corridors"
            writeTruncRaster = cfg.WRITETRUNCRASTER
            outputGDB = cfg.OUTPUTGDB
            SAVENORMLCCS = cfg.SAVENORMLCCS
        else:
            mosaicBaseName = "_NON_NORMALIZED_corridors"
            SAVENORMLCCS = False
            outputGDB = cfg.EXTRAGDB
            writeTruncRaster = False

        lu.dashline(1)
        gprint('Running script ' + _SCRIPT_NAME)
        linkTableFile = lu.get_prev_step_link_table(step=5)
        arcpy.env.workspace = cfg.SCRATCHDIR
        arcpy.env.scratchWorkspace = cfg.ARCSCRATCHDIR
        arcpy.env.compression = "NONE"

        if cfg.MAXEUCDIST is not None:
            gprint('Max Euclidean distance between cores')
            gprint('for linkage mapping set to ' +
                              str(cfg.MAXEUCDIST))

        if cfg.MAXCOSTDIST is not None:
            gprint('Max cost-weighted distance between cores')
            gprint('for linkage mapping set to ' +
                              str(cfg.MAXCOSTDIST))


        # set the analysis extent and cell size to that of the resistance
        # surface
        arcpy.env.extent = cfg.RESRAST
        arcpy.env.cellSize = arcpy.Describe(cfg.RESRAST).MeanCellHeight
        arcpy.env.snapRaster = cfg.RESRAST
        arcpy.env.mask = cfg.RESRAST

        linkTable = lu.load_link_table(linkTableFile)
        numLinks = linkTable.shape[0]
        numCorridorLinks = lu.report_links(linkTable)
        if numCorridorLinks == 0:
            lu.dashline(1)
            msg =('\nThere are no corridors to map. Bailing.')
            lu.raise_error(msg)


        if not cfg.STEP3 and not cfg.STEP4:
            # re-check for links that are too long or in case script run out of
            # sequence with more stringent settings
            gprint('Double-checking for corridors that are too long to map.')
            DISABLE_LEAST_COST_NO_VAL = True
            linkTable,numDroppedLinks = lu.drop_links(
                linkTable, cfg.MAXEUCDIST, cfg.MINEUCDIST, cfg.MAXCOSTDIST,
                cfg.MINCOSTDIST, DISABLE_LEAST_COST_NO_VAL)

        # Added to try to speed up:
        arcpy.env.pyramid = "NONE"
        arcpy.env.rasterStatistics = "NONE"

        # set up directories for normalized lcc and mosaic grids
        dirCount = 0
        gprint("Creating output folder: " + cfg.LCCBASEDIR)
        lu.delete_dir(cfg.LCCBASEDIR)
        arcpy.CreateFolder_management(path.dirname(cfg.LCCBASEDIR),
                                       path.basename(cfg.LCCBASEDIR))
        arcpy.CreateFolder_management(cfg.LCCBASEDIR, cfg.LCCNLCDIR_NM)
        clccdir = path.join(cfg.LCCBASEDIR, cfg.LCCNLCDIR_NM)
        gprint("")
        if normalize:
            gprint('Normalized least-cost corridors will be written '
                          'to ' + clccdir + '\n')
        PREFIX = cfg.PREFIX

        # Add CWD layers for core area pairs to produce NORMALIZED LCC layers
        numGridsWritten = 0
        coreList = linkTable[:,cfg.LTB_CORE1:cfg.LTB_CORE2+1]
        coreList = npy.sort(coreList)

        x = 0
        linkCount = 0
        endIndex = numLinks
        while x < endIndex:
            if (linkTable[x, cfg.LTB_LINKTYPE] < 1): # If not a valid link
                x = x + 1
                continue

            linkCount = linkCount + 1
            start_time = time.clock()

            linkId = str(int(linkTable[x, cfg.LTB_LINKID]))

            # source and target cores
            corex=int(coreList[x,0])
            corey=int(coreList[x,1])

            # Get cwd rasters for source and target cores
            cwdRaster1 = lu.get_cwd_path(corex)
            cwdRaster2 = lu.get_cwd_path(corey)

            if not arcpy.Exists(cwdRaster1):
                msg = ('\nError: cannot find cwd raster:\n' + cwdRaster1)
                lu.raise_error(msg)
            if not arcpy.Exists(cwdRaster2):
                msg = ('\nError: cannot find cwd raster:\n' + cwdRaster2)
                lu.raise_error(msg)


            lccNormRaster = path.join(clccdir, str(corex) + "_" + str(corey))
            arcpy.env.extent = "MINOF"

            link = lu.get_links_from_core_pairs(linkTable, corex, corey)

            offset = 10000

            # Normalized lcc rasters are created by adding cwd rasters and
            # subtracting the least cost distance between them.
            lcDist = (float(linkTable[link,cfg.LTB_CWDIST]) - offset)

            if normalize:
                statement = ('outras = arcpy.sa.Raster(cwdRaster1) '
                             '+ arcpy.sa.Raster(cwdRaster2) - lcDist; '
                             'outras.save(lccNormRaster)')
            else:
                statement = ('outras = arcpy.sa.Raster(cwdRaster1) '
                             '+ arcpy.sa.Raster(cwdRaster2); '
                             'outras.save(lccNormRaster)')

            count = 0
            while True:
                try:
                    exec(statement)
                except Exception:
                    count,tryAgain = lu.retry_arc_error(count,statement)
                    if not tryAgain:
                        exec(statement)
                else: break

            if normalize:
                try:
                    minObject = arcpy.GetRasterProperties_management(lccNormRaster, "MINIMUM")
                    rasterMin = float(str(minObject.getOutput(0)))
                except Exception:
                    lu.warn('\n------------------------------------------------')
                    lu.warn('WARNING: Raster minimum check failed in step 5. \n'
                        'This may mean the output rasters are corrupted. Please \n'
                        'be sure to check for valid rasters in '+ outputGDB)
                    rasterMin = 0
                tolerance = (float(arcpy.env.cellSize) * -10)
                if rasterMin < tolerance:
                    lu.dashline(1)
                    msg = ('WARNING: Minimum value of a corridor #' + str(x+1)
                           + ' is much less than zero ('+str(rasterMin)+').'
                           '\nThis could mean that BOUNDING CIRCLE BUFFER DISTANCES '
                           'were too small and a corridor passed outside of a '
                           'bounding circle, or that a corridor passed outside of the '
                           'resistance map. \n')
                    lu.warn(msg)

            arcpy.env.extent = cfg.RESRAST

            mosaicDir = path.join(cfg.LCCBASEDIR,'mos'+str(x+1))
            lu.create_dir(mosaicDir)
            mosFN = 'mos'  # no file extension: written as an Esri grid
            mosaicRaster = path.join(mosaicDir,mosFN)

            if numGridsWritten == 0 and dirCount == 0:
                #If this is the first grid then copy rather than mosaic
                arcpy.CopyRaster_management(lccNormRaster, mosaicRaster)
            else:
                statement = (
                    'arcpy.MosaicToNewRaster_management('
                    'input_rasters=";".join([lccNormRaster, '
                    'lastMosaicRaster]), output_location=mosaicDir, '
                    'raster_dataset_name_with_extension=mosFN, '
                    'pixel_type="32_BIT_FLOAT", cellsize=arcpy.env.cellSize, '
                    'number_of_bands="1", mosaic_method="MINIMUM")')

                count = 0
                while True:
                    try:
                        lu.write_log('Executing mosaic for link #'+str(linkId))
                        exec(statement)
                        lu.write_log('Done with mosaic.')
                    except Exception:
                        count,tryAgain = lu.retry_arc_error(count,statement)
                        lu.delete_data(mosaicRaster)
                        lu.delete_dir(mosaicDir)
                        # Try a new directory
                        mosaicDir = path.join(cfg.LCCBASEDIR,'mos'+str(x+1)+ '_' + str(count))
                        lu.create_dir(mosaicDir)
                        mosaicRaster = path.join(mosaicDir,mosFN)
                        if not tryAgain:
                            exec(statement)
                    else: break
            endTime = time.clock()
            processTime = round((endTime - start_time), 2)

            if normalize:
                printText = "Normalized and mosaicked "
            else:
                printText = "Mosaicked NON-normalized "
            gprint(printText + "corridor for link ID #" + str(linkId) +
                    " connecting core areas " + str(corex) +
                    " and " + str(corey)+ " in " +
                    str(processTime) + " seconds. " + str(int(linkCount)) +
                    " out of " + str(int(numCorridorLinks)) + " links have been "
                    "processed.")

            # temporarily disable links in linktable - don't want to mosaic
            # them twice
            for y in range (x+1,numLinks):
                corex1 = int(coreList[y,0])
                corey1 = int(coreList[y,1])
                if corex1 == corex and corey1 == corey:
                    linkTable[y,cfg.LTB_LINKTYPE] = (
                        linkTable[y,cfg.LTB_LINKTYPE] + 1000)
                elif corex1==corey and corey1==corex:
                    linkTable[y,cfg.LTB_LINKTYPE] = (
                            linkTable[y,cfg.LTB_LINKTYPE] + 1000)

            numGridsWritten = numGridsWritten + 1
            if not SAVENORMLCCS:
                lu.delete_data(lccNormRaster)
                lu.delete_dir(clccdir)
                lu.create_dir(clccdir)
            else:
                if numGridsWritten == 100:
                    # We only write up to 100 grids to any one folder
                    # because otherwise Arc slows to a crawl
                    dirCount = dirCount + 1
                    numGridsWritten = 0
                    clccdir = path.join(cfg.LCCBASEDIR,
                                        cfg.LCCNLCDIR_NM + str(dirCount))
                    gprint("Creating output folder: " + clccdir)
                    arcpy.CreateFolder_management(cfg.LCCBASEDIR,
                                               path.basename(clccdir))

            if numGridsWritten > 1 or dirCount > 0:
                lu.delete_data(lastMosaicRaster)
                lu.delete_dir(path.dirname(lastMosaicRaster))

            lastMosaicRaster = mosaicRaster
            x = x + 1

        #rows that were temporarily disabled
        rows = npy.where(linkTable[:,cfg.LTB_LINKTYPE]>1000)
        linkTable[rows,cfg.LTB_LINKTYPE] = (
            linkTable[rows,cfg.LTB_LINKTYPE] - 1000)
        # ---------------------------------------------------------------------

        # Create output geodatabase
        if not arcpy.Exists(outputGDB):
            arcpy.CreateFileGDB_management(cfg.OUTPUTDIR, path.basename(outputGDB))

        arcpy.env.workspace = outputGDB

        arcpy.env.pyramid = "NONE"
        arcpy.env.rasterStatistics = "NONE"

        # ---------------------------------------------------------------------
        # convert mosaic raster to integer
        intRaster = path.join(outputGDB,PREFIX + mosaicBaseName)
        statement = ('outras = arcpy.sa.Int(arcpy.sa.Raster(mosaicRaster) '
                     '- offset + 0.5); '
                     'outras.save(intRaster)')
        count = 0
        while True:
            try:
                exec(statement)
            except Exception:
                count,tryAgain = lu.retry_arc_error(count,statement)
                if not tryAgain: exec(statement)
            else: break
        # ---------------------------------------------------------------------


        if writeTruncRaster:
            # -----------------------------------------------------------------
            # Set anything beyond cfg.CWDTHRESH to NODATA.
            truncRaster = (outputGDB + '\\' + PREFIX + mosaicBaseName +
                           '_truncated_at_' + lu.cwd_cutoff_str(cfg.CWDTHRESH))

            statement = ('outRas = arcpy.sa.Raster(intRaster)'
                         '* (arcpy.sa.Con(arcpy.sa.Raster(intRaster) '
                         '<= cfg.CWDTHRESH, 1)); '
                         'outRas.save(truncRaster)')

            count = 0
            while True:
                try:
                    exec(statement)
                except Exception:
                    count,tryAgain = lu.retry_arc_error(count,statement)
                    if not tryAgain: exec(statement)
                else: break
        # ---------------------------------------------------------------------
        # Check for unreasonably low minimum NLCC values
        try:
            mosaicGrid = path.join(cfg.LCCBASEDIR,'mos')
            # Copy to grid to test
            arcpy.CopyRaster_management(mosaicRaster, mosaicGrid)
            minObject = arcpy.GetRasterProperties_management(mosaicGrid, "MINIMUM")
            rasterMin = float(str(minObject.getOutput(0)))
        except Exception:
            lu.warn('\n------------------------------------------------')
            lu.warn('WARNING: Raster minimum check failed in step 5. \n'
                'This may mean the output rasters are corrupted. Please \n'
                'be sure to check for valid rasters in '+ outputGDB)
            rasterMin = 0
        tolerance = (float(arcpy.env.cellSize) * -10)
        if rasterMin < tolerance:
            lu.dashline(1)
            msg = ('WARNING: Minimum value of mosaicked corridor map is '
                   'much less than zero ('+str(rasterMin)+').'
                   '\nThis could mean that BOUNDING CIRCLE BUFFER DISTANCES '
                   'were too small and a corridor passed outside of a '
                   'bounding circle, or that a corridor passed outside of the '
                   'resistance map. \n')
            lu.warn(msg)


        gprint('\nWriting final LCP maps...')
        if cfg.STEP4:
            finalLinkTable = lu.update_lcp_shapefile(linkTable, lastStep=4,
                                                     thisStep=5)
        elif cfg.STEP3:
            finalLinkTable = lu.update_lcp_shapefile(linkTable, lastStep=3,
                                                     thisStep=5)
        else:
            # Don't know if step 4 was run, since this is started at step 5.
            # Use presence of previous linktable files to figure this out.
            # Linktable name includes step number.
            prevLinkTableFile = lu.get_prev_step_link_table(step=5)
            prevStepInd = len(prevLinkTableFile) - 5
            lastStep = prevLinkTableFile[prevStepInd]

            finalLinkTable = lu.update_lcp_shapefile(linkTable, lastStep,
                                                     thisStep=5)

        outlinkTableFile = lu.get_this_step_link_table(step=5)
        gprint('Updating ' + outlinkTableFile)
        lu.write_link_table(linkTable, outlinkTableFile)

        linkTableLogFile = path.join(cfg.LOGDIR, "linkTable_s5.csv")
        lu.write_link_table(linkTable, linkTableLogFile)

        linkTableFinalFile = path.join(cfg.OUTPUTDIR, PREFIX +
                                       "_linkTable_s5.csv")
        lu.write_link_table(finalLinkTable, linkTableFinalFile)
        gprint('Copy of final linkTable written to '+
                          linkTableFinalFile)

        gprint('Creating shapefiles with linework for links.')
        try:
            lu.write_link_maps(outlinkTableFile, step=5)
        except Exception:
            lu.write_link_maps(outlinkTableFile, step=5)

        # Create final linkmap files in output directory, and remove files from
        # scratch.
        lu.copy_final_link_maps(step=5)

        if not SAVENORMLCCS:
            lu.delete_dir(cfg.LCCBASEDIR)

        # Build statistics for corridor rasters
        arcpy.AddMessage('\nBuilding output statistics and pyramids '
                          'for corridor raster')
        lu.build_stats(intRaster)

        if writeTruncRaster:
            arcpy.AddMessage('Building output statistics '
                              'for truncated corridor raster')
            lu.build_stats(truncRaster)

        save_parameters()
        if cfg.OUTPUTFORMODELBUILDER:
            arcpy.CopyFeatures_management(cfg.COREFC, cfg.OUTPUTFORMODELBUILDER)

    # Return GEOPROCESSING specific errors
    except arcpy.ExecuteError:
        lu.dashline(1)
        gprint('****Failed in step 5. Details follow.****')
        lu.exit_with_geoproc_error(_SCRIPT_NAME)

    # Return any PYTHON or system specific errors
    except Exception:
        lu.dashline(1)
        gprint('****Failed in step 5. Details follow.****')
        lu.exit_with_python_error(_SCRIPT_NAME)

    return
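
The exec-based retry loops in this example share one shape; the same control flow can be written without exec by passing a callable. A hypothetical sketch, assuming lu.retry_arc_error keeps its (count, statement) signature:

def run_with_retry(operation, description):
    # operation: zero-argument callable wrapping the arcpy call(s) to retry.
    count = 0
    while True:
        try:
            operation()
        except Exception:
            count, try_again = lu.retry_arc_error(count, description)
            if not try_again:
                operation()  # final attempt; let the error propagate
        else:
            break

# e.g. run_with_retry(lambda: arcpy.CopyRaster_management(src, dst), 'copy raster')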
Example 22
def mainFunction(updateFolder,fileName,updateMode,geodatabase): # Get parameters from the ArcGIS Desktop tool, separated by commas (var1 is 1st parameter, var2 is 2nd parameter, var3 is 3rd parameter)
    try:
        # --------------------------------------- Start of code --------------------------------------- #
        
        # Get the arcgis version
        arcgisVersion = arcpy.GetInstallInfo()['Version']   

        # If a specific file is provided
        if (fileName):
            latestFile = os.path.join(updateFolder, fileName)
        # Otherwise get the latest file in a folder
        else:
            # Get the newest zip file from the update folder
            latestFile = max(glob.iglob(updateFolder + r"\*.zip"), key=os.path.getmtime)
      
        # Setup geodatabase to load data into in temporary workspace
        tempFolder = arcpy.CreateFolder_management(arcpy.env.scratchFolder, "WebData-" + str(uuid.uuid1()))
        arcpy.AddMessage("Copying datasets...")    
          
        # Extract the zip file to a temporary location
        zipFile = zipfile.ZipFile(latestFile, mode="r")  # avoid shadowing the built-in zip
        zipFile.extractall(str(tempFolder))

        # Loop through the files in the extracted folder
        for file in os.listdir(str(tempFolder)):               
            # If it's a shapefile
            if file.endswith(".shp"):
               # Get count of the source dataset
               datasetCount = arcpy.GetCount_management(os.path.join(str(tempFolder), file))
               eachFeatureclass = file.replace(".shp","")
          
               # Check Dataset record count is more than 0
               if (long(str(datasetCount)) > 0):
                   # If update mode is then copy, otherwise delete and appending records                
                   if (updateMode == "New"):                                           
                       # Logging
                       arcpy.AddMessage("Copying over feature class - " + os.path.join(geodatabase, eachFeatureclass) + "...")
                       if (enableLogging == "true"):
                          logger.info("Copying over feature class - " + os.path.join(geodatabase, eachFeatureclass) + "...")
                                
                       # Copy feature class into geodatabase using the same dataset name
                       arcpy.CopyFeatures_management(os.path.join(str(tempFolder), file), os.path.join(geodatabase, eachFeatureclass), "", "0", "0", "0")

                       # Get dataset count
                       datasetCount = arcpy.GetCount_management(os.path.join(geodatabase, eachFeatureclass)) 
                       arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                       if (enableLogging == "true"):
                           logger.info("Dataset record count - " + str(datasetCount))   
                   else:
                        # If dataset exists in geodatabase, delete features and load in new data
                        if arcpy.Exists(os.path.join(geodatabase, eachFeatureclass)):
                            # Logging
                            arcpy.AddMessage("Updating feature class - " + os.path.join(geodatabase, eachFeatureclass) + "...")
                            if (enableLogging == "true"):
                               logger.info("Updating feature class - " + os.path.join(geodatabase, eachFeatureclass) + "...")
             
                            arcpy.DeleteFeatures_management(os.path.join(geodatabase, eachFeatureclass))
                            arcpy.Append_management(os.path.join(str(tempFolder), file), os.path.join(geodatabase, eachFeatureclass), "NO_TEST", "", "")

                            # Get dataset count
                            datasetCount = arcpy.GetCount_management(os.path.join(geodatabase, eachFeatureclass)) 
                            arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                            if (enableLogging == "true"):
                               logger.info("Dataset record count - " + str(datasetCount))   
                        else:
                            # Log warning
                            arcpy.AddWarning("Warning: " + os.path.join(geodatabase, eachFeatureclass) + " does not exist. Copying over...")
                            # Logging
                            if (enableLogging == "true"):
                                logger.warning(os.path.join(geodatabase, eachFeatureclass) + " does not exist. Copying over...")
                                
                            # Copy feature class into geodatabase using the same dataset name
                            arcpy.CopyFeatures_management(os.path.join(str(tempFolder), file), os.path.join(geodatabase, eachFeatureclass), "", "0", "0", "0")           
               else:
                   arcpy.AddWarning("Dataset " + eachFeatureclass + " is empty and won't be copied...")                        
                   # Logging
                   if (enableLogging == "true"):
                       logger.warning("Dataset " + eachFeatureclass + " is empty and won't be copied...")
                               
            # If it's a FGDB
            if file.endswith(".gdb"):
                # Assign the geodatabase workspace and load in the datasets to the lists
                arcpy.env.workspace = os.path.join(str(tempFolder), file)
                featureclassList = arcpy.ListFeatureClasses()   
                tableList = arcpy.ListTables()       
      
                # Load the feature classes into the geodatabase if at least one is in the geodatabase provided
                if (len(featureclassList) > 0):        
                    # Loop through the feature classes
                    for eachFeatureclass in featureclassList:
                       # Get count of the source dataset
                       datasetCount = arcpy.GetCount_management(eachFeatureclass)                   
                       # Check Dataset record count is more than 0
                       if (int(str(datasetCount)) > 0):
                           # Create a Describe object from the dataset
                           describeDataset = arcpy.Describe(eachFeatureclass)
                           # If update mode is New then copy, otherwise delete and append records
                           if (updateMode == "New"):                                           
                               # Logging
                               arcpy.AddMessage("Copying over feature class - " + os.path.join(geodatabase, eachFeatureclass) + "...")
                               if (enableLogging == "true"):
                                  logger.info("Copying over feature class - " + os.path.join(geodatabase, eachFeatureclass) + "...")
                                        
                               # Copy feature class into geodatabase using the same dataset name
                               arcpy.CopyFeatures_management(eachFeatureclass, os.path.join(geodatabase, describeDataset.name), "", "0", "0", "0")

                               # Get dataset count
                               datasetCount = arcpy.GetCount_management(os.path.join(geodatabase, describeDataset.name)) 
                               arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                               if (enableLogging == "true"):
                                   logger.info("Dataset record count - " + str(datasetCount))   
                           else:
                                # If dataset exists in geodatabase, delete features and load in new data
                                if arcpy.Exists(os.path.join(geodatabase, eachFeatureclass)):
                                    # Logging
                                    arcpy.AddMessage("Updating feature class - " + os.path.join(geodatabase, eachFeatureclass) + "...")
                                    if (enableLogging == "true"):
                                       logger.info("Updating feature class - " + os.path.join(geodatabase, eachFeatureclass) + "...")
                     
                                    arcpy.DeleteFeatures_management(os.path.join(geodatabase, eachFeatureclass))
                                    arcpy.Append_management(os.path.join(arcpy.env.workspace, eachFeatureclass), os.path.join(geodatabase, eachFeatureclass), "NO_TEST", "", "")

                                    # Get dataset count
                                    datasetCount = arcpy.GetCount_management(os.path.join(geodatabase, eachFeatureclass)) 
                                    arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                                    if (enableLogging == "true"):
                                       logger.info("Dataset record count - " + str(datasetCount))   
                                else:
                                    # Log warning
                                    arcpy.AddWarning("Warning: " + os.path.join(geodatabase, eachFeatureclass) + " does not exist. Copying over...")
                                    # Logging
                                    if (enableLogging == "true"):
                                        logger.warning(os.path.join(geodatabase, eachFeatureclass) + " does not exist. Copying over...")
                                        
                                    # Copy feature class into geodatabase using the same dataset name
                                    arcpy.CopyFeatures_management(eachFeatureclass, os.path.join(geodatabase, describeDataset.name), "", "0", "0", "0")           
                       else:
                           arcpy.AddWarning("Dataset " + eachFeatureclass + " is empty and won't be copied...")                        
                           # Logging
                           if (enableLogging == "true"):
                               logger.warning("Dataset " + eachFeatureclass + " is empty and won't be copied...")

                                                         
                if (len(tableList) > 0):    
                    # Loop through of the tables
                    for eachTable in tableList:
                       # Get count of the source dataset
                       datasetCount = arcpy.GetCount_management(eachTable)                   
                       # Check Dataset record count is more than 0
                       if (int(str(datasetCount)) > 0):
                           # Create a Describe object from the dataset
                           describeDataset = arcpy.Describe(eachTable)
                           # If update mode is New then copy, otherwise delete and append records
                           if (updateMode == "New"):
                               # Logging
                               arcpy.AddMessage("Copying over table - " + os.path.join(geodatabase, eachTable) + "...")
                               if (enableLogging == "true"):
                                  logger.info("Copying over table - " + os.path.join(geodatabase, eachTable) + "...")
                                  
                               # Copy table into geodatabase using the same dataset name
                               arcpy.TableSelect_analysis(eachTable, os.path.join(geodatabase, describeDataset.name), "")
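                               # An empty where clause selects every row, so TableSelect behaves like a straight table copy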

                               # Get dataset count
                               datasetCount = arcpy.GetCount_management(os.path.join(geodatabase, describeDataset.name)) 
                               arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                               if (enableLogging == "true"):
                                   logger.info("Dataset record count - " + str(datasetCount))   
                           else:
                                # If dataset exists in geodatabase, delete features and load in new data
                                if arcpy.Exists(os.path.join(geodatabase, eachTable)):
                                    # Logging
                                    arcpy.AddMessage("Updating table - " + os.path.join(geodatabase, eachTable) + "...")
                                    if (enableLogging == "true"):
                                       logger.info("Updating table - " + os.path.join(geodatabase, eachTable) + "...")

                                     arcpy.DeleteRows_management(os.path.join(geodatabase, eachTable))
                                    arcpy.Append_management(os.path.join(arcpy.env.workspace, eachTable), os.path.join(geodatabase, eachTable), "NO_TEST", "", "")

                                    # Get dataset count
                                    datasetCount = arcpy.GetCount_management(os.path.join(geodatabase, eachTable)) 
                                    arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                                    if (enableLogging == "true"):
                                       logger.info("Dataset record count - " + str(datasetCount))   
                                else:
                                    # Log warning
                                    arcpy.AddWarning("Warning: " + os.path.join(geodatabase, eachTable) + " does not exist. Copying over...")
                                    # Logging
                                    if (enableLogging == "true"):
                                        logger.warning(os.path.join(geodatabase, eachTable) + " does not exist. Copying over...")

                                    # Copy table into geodatabase using the same dataset name
                                    arcpy.TableSelect_analysis(eachTable, os.path.join(geodatabase, describeDataset.name), "")
                                    
        #################### Custom code for WCRC and BDC ####################
                           # For WCRC data updates
                           if "wcrc" in updateFolder.lower():
                               # For the property details view from WCRC
                               if "vw_propertydetails" in eachTable.lower():
                                   # Copy property details view into enterprise geodatabase
                                   arcpy.TableSelect_analysis(eachTable, os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vw_WCRCPropertyDetails"), "")
                                   # Copy property spatial view into file geodatabase and dissolve on valuation ID
                                   arcpy.AddMessage("Copying over feature class - " + os.path.join("D:\Data\WCRC.gdb", "Property") + "...")
                                   if (enableLogging == "true"):
                                      logger.info("Copying over feature class - " + os.path.join("D:\Data\WCRC.gdb", "Property") + "...") 
                                   arcpy.CopyFeatures_management(os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vwWCRCProperty"), os.path.join("D:\Data\WCRC.gdb", "PropertyParcel"), "", "0", "0", "0")
                                   arcpy.Dissolve_management(os.path.join("D:\Data\WCRC.gdb", "PropertyParcel"), os.path.join("D:\Data\WCRC.gdb", "Property"), "ValuationID", "", "MULTI_PART", "DISSOLVE_LINES")
                                   arcpy.JoinField_management(os.path.join("D:\Data\WCRC.gdb", "Property"), "ValuationID", os.path.join("D:\Data\WCRC.gdb", "PropertyParcel"), "ValuationID", "")

                                   # Get dataset count
                                   datasetCount = arcpy.GetCount_management(os.path.join("D:\Data\WCRC.gdb", "Property")) 
                                   arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                                   if (enableLogging == "true"):
                                       logger.info("Dataset record count - " + str(datasetCount))   
                           # For BDC data updates
                           if "bdc" in updateFolder.lower():                             
                               # For the property match table from BDC and WCRC
                               if "matchtable" in eachTable.lower():
                                   # Update the West Coast match table
                                   # WCRC match table - Copy table and tidy up the fields
                                   arcpy.TableSelect_analysis("D:\Data\FTP\WCRC\WCRCPropertyToParcel.csv", os.path.join("D:\Data\WCRC.gdb", "CoreLogic_PropertyToParcel"), "")
                                   arcpy.AddField_management(os.path.join("D:\Data\WCRC.gdb", "CoreLogic_PropertyToParcel"), "ValuationID", "TEXT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
                                   arcpy.AddField_management(os.path.join("D:\Data\WCRC.gdb", "CoreLogic_PropertyToParcel"), "ParcelID", "TEXT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
                                   arcpy.CalculateField_management(os.path.join("D:\Data\WCRC.gdb", "CoreLogic_PropertyToParcel"), "ValuationID", "!ValRef_Formatted!", "PYTHON_9.3", "")
                                   arcpy.CalculateField_management(os.path.join("D:\Data\WCRC.gdb", "CoreLogic_PropertyToParcel"), "ParcelID", "!Parcel_ID!", "PYTHON_9.3", "")
                                   arcpy.DeleteField_management(os.path.join("D:\Data\WCRC.gdb", "CoreLogic_PropertyToParcel"), "QPID;Roll;Assessment;Suffix;ValRef_Formatted;Apportionment;Category;Building_Floor_Area;Building_Site_Cover;Parcel_ID;Physical_Address;Physical_Suburb;Physical_City;Legal_Description")
                                       
                                   # BDC match table - Tidy up the fields
                                   arcpy.AddField_management(eachTable, "ValuationID", "TEXT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
                                   arcpy.AddField_management(eachTable, "ParcelID", "TEXT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
                                   arcpy.CalculateField_management(eachTable, "ValuationID", "!val_id!", "PYTHON_9.3", "")
                                   arcpy.CalculateField_management(eachTable, "ParcelID", "!PAR_ID!", "PYTHON_9.3", "")
                                   arcpy.DeleteField_management(eachTable, "PERIMETER;LEGAL_ID;PAR_ID;LEGAL;HOW;ASSESS;FLAG;COMMENT;POLYGONID;Edited_By;Edit_Date;Descriptio;OBJECTID_12;LEGAL_1;OBJECTID_12_13;val_id;val1;root_val_id;ra_unique_id;POINT_X;POINT_Y")
                                   # Copy out the WCRC match table
                                   arcpy.TableSelect_analysis(os.path.join("D:\Data\WCRC.gdb", "CoreLogic_PropertyToParcel"), "in_memory\\PropertyToParcel", "")
                                   # Join the Buller match table
                                   arcpy.JoinField_management("in_memory\\PropertyToParcel", "ValuationID", eachTable, "ValuationID", "ValuationID")
                                   # Select out the non-Buller records
                                   arcpy.TableSelect_analysis("in_memory\\PropertyToParcel", "in_memory\\PropertyToParcel_NoBDC", "ValuationID_1 IS NULL")
                                   # Merge Buller match table with the WCRC match table 
                                   arcpy.Merge_management("in_memory\\PropertyToParcel_NoBDC;" + eachTable, os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "PropertyToParcel"), "")
                                   arcpy.DeleteField_management(os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "PropertyToParcel"), "ValuationID_1")

                               # For the property view from BDC
                               if "vwproperty" in eachTable.lower():
                                   # Copy property view into enterprise geodatabase
                                   arcpy.TableSelect_analysis(eachTable, os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vw_BDCProperty"), "")
                                   # Copy property spatial view into file geodatabase and dissolve on valuation ID
                                   arcpy.AddMessage("Copying over feature class - " + os.path.join("D:\Data\BDC.gdb", "Property") + "...")
                                   if (enableLogging == "true"):
                                       logger.info("Copying over feature class - " + os.path.join("D:\Data\BDC.gdb", "Property") + "...")
                                   arcpy.CopyFeatures_management(os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vwBDCProperty"), os.path.join("D:\Data\BDC.gdb", "PropertyParcel"), "", "0", "0", "0")
                                   arcpy.Dissolve_management(os.path.join("D:\Data\BDC.gdb", "PropertyParcel"), os.path.join("D:\Data\BDC.gdb", "Property"), "ValuationID", "", "MULTI_PART", "DISSOLVE_LINES")
                                   arcpy.JoinField_management(os.path.join("D:\Data\BDC.gdb", "Property"), "ValuationID", os.path.join("D:\Data\BDC.gdb", "PropertyParcel"), "ValuationID", "")

                                   # Get dataset count
                                   datasetCount = arcpy.GetCount_management(os.path.join("D:\Data\BDC.gdb", "Property")) 
                                   arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                                   if (enableLogging == "true"):
                                       logger.info("Dataset record count - " + str(datasetCount))
                                       
                               # For the resource consent view from BDC
                               if "vwresourceconsent" in eachTable.lower():
                                   # Copy resource consent view into enterprise geodatabase
                                   arcpy.TableSelect_analysis(eachTable, os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vw_BDCResourceConsent"), "")
                                   # Copy resource consent spatial view into file geodatabase
                                   arcpy.AddMessage("Copying over feature class - " + os.path.join("D:\Data\BDC.gdb", "ResourceConsent") + "...")
                                   if (enableLogging == "true"):
                                       logger.info("Copying over feature class - " + os.path.join("D:\Data\BDC.gdb", "ResourceConsent") + "...")
                                   arcpy.CopyFeatures_management(os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vwBDCResourceConsent"), "in_memory\ResourceConsent", "", "0", "0", "0")
                                   arcpy.Dissolve_management("in_memory\ResourceConsent", os.path.join("D:\Data\BDC.gdb", "ResourceConsent"), "ConsentID", "", "MULTI_PART", "DISSOLVE_LINES")
                                   arcpy.JoinField_management(os.path.join("D:\Data\BDC.gdb", "ResourceConsent"), "ConsentID", "in_memory\ResourceConsent", "ConsentID", "")

                                    # Get dataset count
                                   datasetCount = arcpy.GetCount_management(os.path.join("D:\Data\BDC.gdb", "ResourceConsent")) 
                                   arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                                   if (enableLogging == "true"):
                                       logger.info("Dataset record count - " + str(datasetCount))
                                       
                               # For the building consent view from BDC
                               if "vwbuildingconsent" in eachTable.lower():
                                   # Copy building consent view into enterprise geodatabase
                                   arcpy.TableSelect_analysis(eachTable, os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vw_BDCBuildingConsent"), "")
                                   # Copy building consent spatial view into file geodatabase
                                   arcpy.AddMessage("Copying over feature class - " + os.path.join("D:\Data\BDC.gdb", "BuildingConsent") + "...")
                                   if (enableLogging == "true"):
                                       logger.info("Copying over feature class - " + os.path.join("D:\Data\BDC.gdb", "BuildingConsent") + "...")
                                   arcpy.CopyFeatures_management(os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vwBDCBuildingConsent"), "in_memory\BuildingConsent", "", "0", "0", "0")
                                   arcpy.Dissolve_management("in_memory\BuildingConsent", os.path.join("D:\Data\BDC.gdb", "BuildingConsent"), "ConsentID", "", "MULTI_PART", "DISSOLVE_LINES")
                                   arcpy.JoinField_management(os.path.join("D:\Data\BDC.gdb", "BuildingConsent"), "ConsentID", "in_memory\BuildingConsent", "ConsentID", "")

                                    # Get dataset count
                                   datasetCount = arcpy.GetCount_management(os.path.join("D:\Data\BDC.gdb", "BuildingConsent")) 
                                   arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                                   if (enableLogging == "true"):
                                       logger.info("Dataset record count - " + str(datasetCount))
                                       
                               # For the licence view from BDC
                               if "vwlicence" in eachTable.lower():
                                   # Copy licence view into enterprise geodatabase
                                   arcpy.TableSelect_analysis(eachTable, os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vw_BDCLicence"), "Valuation_No <> ''")
                                   # Copy licence spatial view into file geodatabase
                                   arcpy.AddMessage("Copying over feature class - " + os.path.join("D:\Data\BDC.gdb", "Licence") + "...")
                                   if (enableLogging == "true"):
                                       logger.info("Copying over feature class - " + os.path.join("D:\Data\BDC.gdb", "Licence") + "...")
                                   arcpy.CopyFeatures_management(os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vwBDCLicence"), "in_memory\Licence", "", "0", "0", "0")
                                   arcpy.Dissolve_management("in_memory\Licence", os.path.join("D:\Data\BDC.gdb", "Licence"), "LicenceNo", "", "MULTI_PART", "DISSOLVE_LINES")
                                   arcpy.JoinField_management(os.path.join("D:\Data\BDC.gdb", "Licence"), "LicenceNo", "in_memory\Licence", "LicenceNo", "")

                                    # Get dataset count
                                   datasetCount = arcpy.GetCount_management(os.path.join("D:\Data\BDC.gdb", "Licence")) 
                                   arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                                   if (enableLogging == "true"):
                                       logger.info("Dataset record count - " + str(datasetCount))
                                       
                                # For the LIM view from BDC
                               if "vwlim" in eachTable.lower():
                                   # Copy lim view into enterprise geodatabase
                                   arcpy.TableSelect_analysis(eachTable, os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vw_BDCLIM"), "")            
                                   # Copy lim spatial view into file geodatabase
                                   arcpy.AddMessage("Copying over feature class - " + os.path.join("D:\Data\BDC.gdb", "LIM") + "...")
                                   if (enableLogging == "true"):
                                       logger.info("Copying over feature class - " + os.path.join("D:\Data\BDC.gdb", "LIM") + "...")
                                   arcpy.CopyFeatures_management(os.path.join("D:\Data\Database Connections\GISData@WCCHCGIS1 (gisadmin).sde", "vwBDCLIM"), "in_memory\LIM", "", "0", "0", "0")
                                   arcpy.Dissolve_management("in_memory\LIM", os.path.join("D:\Data\BDC.gdb", "LIM"), "RecordID", "", "MULTI_PART", "DISSOLVE_LINES")
                                   arcpy.JoinField_management(os.path.join("D:\Data\BDC.gdb", "LIM"), "RecordID", "in_memory\LIM", "RecordID", "")

                                   # Get dataset count
                                   datasetCount = arcpy.GetCount_management(os.path.join("D:\Data\BDC.gdb", "LIM")) 
                                   arcpy.AddMessage("Dataset record count - " + str(datasetCount))
                                   if (enableLogging == "true"):
                                       logger.info("Dataset record count - " + str(datasetCount))  
                       else:
                           arcpy.AddWarning("Dataset " + eachTable + " is empty and won't be copied...")                        
                           # Logging
                           if (enableLogging == "true"):
                               logger.warning("Dataset " + eachTable + " is empty and won't be copied...")             
        
        # --------------------------------------- End of code --------------------------------------- #
        # If called from gp tool return the arcpy parameter   
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                # If ArcGIS desktop installed
                if (arcgisDesktop == "true"):
                    arcpy.SetParameter(1, output)
                # ArcGIS desktop not installed
                else:
                    return output 
        # Otherwise return the result          
        else:
            # Return the output if there is any
            if output:
                return output      
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file        
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
    # If arcpy error
    except arcpy.ExecuteError:           
        # Build and show the error message
        errorMessage = arcpy.GetMessages(2)   
        printMessage(errorMessage,"error")           
        # Logging
        if (enableLogging == "true"):
            # Log error          
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")            
            # Remove file handler and close log file        
            logMessage.flush()
            logMessage.close()
            logger.handlers = []   
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
    # If python error
    except Exception as e:
        errorMessage = ""         
        # Build and show the error message
        # If many arguments
        if (e.args):
            for i in range(len(e.args)):        
                if (i == 0):
                    # Python version check
                    if sys.version_info[0] >= 3:
                        # Python 3.x
                        errorMessage = str(e.args[i]).encode('utf-8').decode('utf-8')
                    else:
                        # Python 2.x
                        errorMessage = unicode(e.args[i]).encode('utf-8')
                else:
                    # Python version check
                    if sys.version_info[0] >= 3:
                        # Python 3.x
                        errorMessage = errorMessage + " " + str(e.args[i]).encode('utf-8').decode('utf-8')
                    else:
                        # Python 2.x
                        errorMessage = errorMessage + " " + unicode(e.args[i]).encode('utf-8')
        # Else just one argument
        else:
            errorMessage = str(e)
        printMessage(errorMessage,"error")
        # Logging
        if (enableLogging == "true"):
            # Log error            
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")            
            # Remove file handler and close log file        
            logMessage.flush()
            logMessage.close()
            logger.handlers = []   
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)            
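
The update branch above repeats a single truncate-and-append pattern for every feature class and table. A minimal sketch of that pattern as a standalone helper (the helper name and the getOutput(0) count idiom are illustrative additions, not part of the original script; CopyFeatures_management applies to feature classes, while tables would use TableSelect_analysis as above):

import os
import arcpy

def truncate_and_append(source, geodatabase, name):
    # Sketch: replace the contents of a target feature class with the source records
    target = os.path.join(geodatabase, name)
    if arcpy.Exists(target):
        # Remove the existing rows, then load the new ones; NO_TEST skips schema field matching
        arcpy.DeleteRows_management(target)
        arcpy.Append_management(source, target, "NO_TEST", "", "")
    else:
        # Target missing, so copy the source across under the same name
        arcpy.CopyFeatures_management(source, target)
    # Return the record count of the refreshed dataset as an integer
    return int(arcpy.GetCount_management(target).getOutput(0))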
Esempio n. 23
0
import time
from finalscript import shapeprocess
import arcpy
import os

# infeature = r'E:\Term3\GEOG-503\Final\testData\shapefile\NY_Counties.shp'
# clipArea = r'E:\Term3\GEOG-503\Final\testData\shapefile\NY_Counties.shp'
# outputWS = r'E:\Term3\GEOG-503\Final\testData\result\shapefile'
infeature = arcpy.GetParameterAsText(0)
clipArea = arcpy.GetParameterAsText(1)
outputWS = arcpy.GetParameterAsText(2)
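# Split outputWS into parent folder and folder name so CreateFolder can build the output workspace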
arcpy.CreateFolder_management(os.path.split(outputWS)[0], os.path.split(outputWS)[1])
Parameter1 = [infeature, clipArea, outputWS]
StartTime = time.time()
shapeprocess.MyBatchClip(Parameter1)
EndTime = time.time()
# print 'Elapsed:  ' + str(EndTime - StartTime) + '  Seconds...'
arcpy.AddMessage('Elapsed:  ' + str(EndTime - StartTime) + '  Seconds...')


Esempio n. 24
0
        stream_threshold_numcells = arcpy.GetParameterAsText(2)
        parameters.append("Eþik Deðer: " + stream_threshold_numcells)
        arcpy.AddMessage("\nDeðerler Alýndý...")

    except:
        arcpy.AddMessage("\nError in input arguments: " + arcpy.GetMessages(2))
        raise Exception

    # Check and create output folders

    try:
        arcpy.AddMessage("\nCreating output folders...")
        thefolders = ["Intermediate", "Output"]
        for folder in thefolders:
            if not arcpy.Exists(workspace + os.sep + folder):
                arcpy.CreateFolder_management(workspace, folder)
    except:
        arcpy.AddError("\nError creating output folders: " +
                       arcpy.GetMessages(2))
        raise Exception

    # Output files

    try:
        arcpy.AddMessage("\nSetting script variables...")
        # Intermediate and output directories
        outputws = workspace + os.sep + "Output" + os.sep
        interws = workspace + os.sep + "Intermediate" + os.sep
        inter = workspace + os.sep + "Intermediate"
        if not arcpy.Exists(inter + "\\data.gdb"):
            arcpy.CreateFileGDB_management(inter, "data", "CURRENT")
Esempio n. 25
0
    '2037B0167Z', '2037B0173Z', '2037B0183Z', '2037B0204Z', '2037B0225Z',
    '2037B0232Z', '2037B0241Z', '2037B0258Z', '2037B0261A', '2037B0263Z',
    '2037B0269Z', '2037B0271Z', '2037B0286Z', '2037B0301Z', '2037B0315Z',
    '2037B0322Z', '2037B0336Z', '2037B0343Z', '2037B0404Z', '2037B0425Z'
]

for trafCol_Num in trafCol_Array:
    mxd = arcpy.mapping.MapDocument(mxdpath)
    ddp = mxd.dataDrivenPages
    indexLayer = ddp.indexLayer
    print rootpath + topfolder + secondfolder + "\\" + trafCol_Num

    if arcpy.Exists(rootpath + topfolder):
        print "The local path " + rootpath + topfolder + " already exists"
    else:
        arcpy.CreateFolder_management(rootpath, topfolder)
        print "Created local path " + rootpath + topfolder

    if arcpy.Exists(rootpath + topfolder + secondfolder):
        print "The local path " + rootpath + topfolder + secondfolder + " already exists"
    else:
        arcpy.CreateFolder_management(rootpath, topfolder + secondfolder)
        print "Created local path " + rootpath + topfolder + secondfolder

    if arcpy.Exists(rootpath + topfolder + secondfolder + "\\" + trafCol_Num):
        print "The local path " + rootpath + topfolder + secondfolder + "\\" + trafCol_Num + " already exists"
    else:
        arcpy.CreateFolder_management(
            rootpath, topfolder + secondfolder + "\\" + trafCol_Num)
        print "Created local path " + rootpath + topfolder + secondfolder + "\\" + trafCol_Num
            arcpy.CheckOutExtension("Spatial")
        else:
            arcpy.AddError("Spatial Analyst Licensing Issue")
            raise LicenseError

        # Script arguments
        in16Raster = arcpy.GetParameterAsText(0)  # single-channel 16-bit SAR image (HH or HV) exported from PCI as GeoTIFF

        # Local variable
        inRastC=os.path.join(arcpy.env.scratchWorkspace,"inRastC")
        scImage1=os.path.join(arcpy.env.scratchWorkspace,"scImage1")
        fsMedRaster=os.path.join(arcpy.env.scratchWorkspace,"fsMedRaster")
        
        #Checks if output folders exist, if it does not creates them
        if not arcpy.Exists(os.path.join(arcpy.env.workspace, 'Final_results')):
            arcpy.CreateFolder_management(arcpy.env.workspace, 'Final_results')
        if not arcpy.Exists(os.path.join(arcpy.env.workspace, 'Results')):
            arcpy.CreateFolder_management(arcpy.env.workspace, 'Results')

        # Variable setup for automatic output file naming    
        outNFRaster = os.path.join(arcpy.env.workspace, 'Final_results', (os.path.splitext(os.path.basename(in16Raster))[0] + "_8bit.tif"))
        outFRaster = os.path.join(arcpy.env.workspace, 'Results', (os.path.splitext(os.path.basename(in16Raster))[0] + "_8bit_MED3x3.tif"))

        # Lists the input/output file name and location
        arcpy.AddMessage("Name and location of your input file to scale: \n" + in16Raster)
        arcpy.AddMessage("Name and locationof your filtered scaled 8bit image is: \n" + outFRaster)
        arcpy.AddMessage("Name and location of your non-filtered scaled 8bit image is: \n" + outNFRaster)

        # Process: Copy Raster, this is done because grids have internal stats.
        arcpy.AddMessage ("Converting to GRID to extract statistics...")
        arcpy.CopyRaster_management(in16Raster, inRastC, "", "", "0", "NONE", "NONE", "")
# -*- coding: utf-8 -*-
import arcpy, random, exceptions
from arcpy import env
import os.path
import time
env.overwriteOutput = True
import datetime
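# Create a randomly named scratch folder under the user's local Temp directory and use it as the scratch workspace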

ruta = arcpy.CreateFolder_management(
    os.path.expanduser("~") + "\\" + r"AppData\Local\Temp",
    str(random.randrange(0, 500000)))
print ruta
env.scratchWorkspace = r"%s" % (ruta)
try:
    ## Parameters

    fcEntrada = arcpy.GetParameterAsText(0)  # Layer to be processed.
    grilla = arcpy.GetParameterAsText(
        1
    )  # Supporting grid. The process runs individually over each grid square.
    capa_exclusion = arcpy.GetParameterAsText(2)
    if capa_exclusion == "----":
        capa_exclusion = ""
    rango = arcpy.GetParameterAsText(
        3)  # captures the grid squares to be processed
    ruta_raiz = arcpy.GetParameterAsText(
        4
    )  # captures the path where the output and final directories will be created
    expresion = arcpy.GetParameterAsText(
        5)  # captures the expression used by Eliminate
    dic = {
    for i in PP_list:
        s = str(i) + "_fishnet"
        PP_list_fishnet.append(s)
    PP_list.sort()
    PP_list_fishnet.sort()
############################################################
#### Name of admin layer to keep displaying
adminLyr = 'Atlantic Forests'  # change as needed
out_folder_path = "C:\\Data\\WWF\\Final_maps"
for PP in PP_list:
    if PP == adminLyr.replace(" ", "_"):
        # Create folder to store output maps
        # Set local variables
        out_name = PP
        # Execute CreateFolder
        arcpy.CreateFolder_management(out_folder_path, out_name)

outPath = out_folder_path + "\\" + out_name

#### List workspaces for inputs and outputs
# Define outpath and workspace
arcpy.env.workspace = out_folder_path
mxd = arcpy.mapping.MapDocument("CURRENT")
mxd.activeView = 'PAGE_LAYOUT'

#mxd = arcpy.mapping.MapDocument("CURRENT")


#Create a fresh layer list
for df in arcpy.mapping.ListDataFrames(mxd):
        arcpy.env.parallelProcessingFactor = "75%"
        arcpy.env.geographicTransformations = "WGS_1984_(ITRF00)_To_NAD_1983"
        arcpy.env.resamplingMethod = "BILINEAR"
        arcpy.env.pyramid = "PYRAMIDS -1 BILINEAR DEFAULT 75 NO_SKIP"

        # --------------------------------------------------------------------- Variables
        watershed_path = arcpy.Describe(inWatershed).CatalogPath
        watershedGDB_path = watershed_path[:watershed_path.find(".gdb") + 4]
        watershedFD_path = watershedGDB_path + os.sep + "Layers"
        userWorkspace = os.path.dirname(watershedGDB_path)
        outputFolder = userWorkspace + os.sep + "gis_output"
        tables = outputFolder + os.sep + "tables"
        stakeoutPoints = watershedFD_path + os.sep + "StakeoutPoints"

        if not arcpy.Exists(outputFolder):
            arcpy.CreateFolder_management(userWorkspace, "gis_output")
        if not arcpy.Exists(tables):
            arcpy.CreateFolder_management(outputFolder, "tables")

        ProjectDEM = watershedGDB_path + os.sep + os.path.basename(
            userWorkspace).replace(" ", "_") + "_Project_DEM"
        zUnits = "Feet"

        # Set path to log file and start logging
        textFilePath = userWorkspace + os.sep + os.path.basename(
            userWorkspace).replace(" ", "_") + "_EngTools.txt"
        logBasicSettings()

        # --------------------------------------------------------------------- Permanent Datasets
        outLine = watershedFD_path + os.sep + "tileLines"
        outPoints = watershedFD_path + os.sep + "StationPoints"
Esempio n. 30
0
import arcpy
from arcpy import env
arcpy.CheckOutExtension("spatial")
arcpy.env.overwriteOutput = True
arcpy.env.workspace = "F:/SA/TRMM/Y/"
rasters = arcpy.ListRasters("*", "tif")
mask = "F:\\SA\\TEMP\\RainGauges.shp"
arcpy.CreateFolder_management("F:/SA/OUT/", "Y")
for raster in rasters:
    out1 = "F:/SA/OUT/Y/" + raster[5:9] + "_RG.shp"
    arcpy.gp.ExtractValuesToPoints_sa(mask, raster, out1)
print("All done")

for i in range(14, 18):
    arcpy.env.workspace = "F:/SA/TRMM/M/" + str(2000 + i) + "/"
    rasters = arcpy.ListRasters("*", "tif")
    mask = "F:\\SA\\TEMP\\RainGauges.shp"
    arcpy.CreateFolder_management("F:/SA/OUT/", str(2000 + i))
    for raster in rasters:
        out1 = "F:/SA/OUT/" + str(2000 + i) + '/' + raster[5:11] + "_RG.shp"
        arcpy.gp.ExtractValuesToPoints_sa(mask, raster, out1)
print("All done")

for i in range(1, 13):
    arcpy.env.workspace = "F:/SA/IMERG/D/" + str(i) + "/"
    rasters = arcpy.ListRasters("*", "tif")
    mask = "F:\\Research\\2006_2010\\hb_area\\hb_RG75.shp"
    arcpy.CreateFolder_management("F:/SA/OUT/", str(i))
    for raster in rasters:
        out1 = "F:/SA/OUT/" + str(i) + "/" + raster[7:15] + "_RG.shp"
        arcpy.gp.ExtractValuesToPoints_sa(mask, raster, out1)
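
The arcpy.gp.ExtractValuesToPoints_sa calls above go through the legacy geoprocessor interface; a sketch of the first loop using the arcpy.sa module instead, with the same paths assumed:

import arcpy
from arcpy import env
from arcpy.sa import ExtractValuesToPoints

arcpy.CheckOutExtension("Spatial")
env.workspace = "F:/SA/TRMM/Y/"

# Sketch: the same point extraction through arcpy.sa rather than the gp object
for raster in arcpy.ListRasters("*", "tif"):
    out1 = "F:/SA/OUT/Y/" + raster[5:9] + "_RG.shp"
    ExtractValuesToPoints("F:\\SA\\TEMP\\RainGauges.shp", raster, out1)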