Example #1
class ReportWriter(object):
    """Writes the report files."""

    def __init__(self, reportFormat, reportName=''):
        supportedFormats = ["CSV", "SHP", "KMZ"]
        if reportFormat not in supportedFormats:
            raise Exception("Report format not supported: " + reportFormat)
        self.reportFormat = reportFormat

        if reportName != '':
            self.overWriteOutput = True
            self.reportName = reportName
        else:
            self.overWriteOutput = False
            self.reportName = 'report'

    def write(self, fObject, doZip=False):

        folder = arcpy.env.scratchFolder
        name = self.reportName

        if "CSV" == self.reportFormat:
            fname = self._writeCSV(fObject, name, folder, doZip)
        elif "SHP" == self.reportFormat:
            fname = self._writeSHP(fObject, name, folder)
        elif "KMZ" == self.reportFormat:
            fname = self._writeKMZ(fObject, name, folder)
        else:
            raise Exception("Report format not implemented")

        return fname

    def _writeCSV(self, fObject, name, folder, doZip=False):

        if self.overWriteOutput:
            filePath = os.path.join(folder, name + '.csv')
            if arcpy.Exists(filePath): arcpy.Delete_management(filePath)
        else:
            filePath = arcpy.CreateScratchName(name, ".csv", "", folder)
        print(filePath)
        try:
            if isinstance(fObject, FieldObject):
                with open(filePath, 'wb') as csvfile:
                    fieldwriter = csv.writer(csvfile, delimiter=',',
                                             quoting=csv.QUOTE_MINIMAL)

                    fieldwriter.writerow(fObject.getLabelsList())
                    for i in range(fObject.getNumberOfFeatures()):
                        featureList = fObject.getFeatureList(i, doFormat=True)
                        fieldwriter.writerow(featureList)
            else:
                fs = fObject.getFeatureSet()
                rows = arcpy.SearchCursor(fs)
                fieldnames = [f.name for f in arcpy.ListFields(fs)]

                allRows = []
                for row in rows:
                    rowlist = []
                    for field in fieldnames:
                        rowlist.append(row.getValue(field))
                    allRows.append(rowlist)

                with open(filePath, 'wb') as csvfile:
                    fieldwriter = csv.writer(csvfile, delimiter=',',
                                             quoting=csv.QUOTE_MINIMAL)
                    fieldwriter.writerow(fieldnames)
                    for row in allRows:
                        fieldwriter.writerow(row)
        except Exception as e:
            raise e
            #raise Exception("Could not write to CSV file")

        if doZip:
            try:
                if self.overWriteOutput:
                    zipPath = os.path.join(folder, name + '.zip')
                    if arcpy.Exists(zipPath): arcpy.Delete_management(zipPath)
                else:
                    zipPath = arcpy.CreateScratchName(name, ".zip", "", folder)

                z = Zipper()
                z.zipFiles([filePath], zipPath)
                filePath = zipPath
            except:
                raise Exception("Could not zip CSV file")

        return filePath
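
A minimal usage sketch for the class above; FieldObject and the Zipper helper come from the surrounding module (not shown), and the input object here is hypothetical:

writer = ReportWriter("CSV", reportName="sites")  # named report, overwrite enabled
csvPath = writer.write(someFieldObject, doZip=True)
arcpy.AddMessage("Report written to " + csvPath)
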
Example #2
        meanRaster = CreateConstantRaster(mean, "FLOAT", descR.MeanCellHeight,
                                          descR.extent)
        outRaster = (r - meanRaster) / stdvRaster

    elif analysisType == "Stretch":
        arcpy.AddMessage("Running Stretch Transformation ......")
        maxVal = arcpy.GetRasterProperties_management(r, "MAXIMUM")
        maxRaster = CreateConstantRaster(maxVal, "FLOAT", descR.MeanCellHeight,
                                         descR.extent)

        minVal = arcpy.GetRasterProperties_management(r, "MINIMUM")
        minRaster = CreateConstantRaster(minVal, "FLOAT", descR.MeanCellHeight,
                                         descR.extent)

        inputMax = arcpy.GetParameterAsText(2)
        if not (arcpy.Exists(inputMax)):
            inputMax = 1
        inputMaxRaster = CreateConstantRaster(inputMax, "FLOAT",
                                              descR.MeanCellHeight,
                                              descR.extent)
        inputMin = arcpy.GetParameterAsText(3)
        if not (arcpy.Exists(inputMin)):
            inputMin = 0
        inputMinRaster = CreateConstantRaster(inputMin, "FLOAT",
                                              descR.MeanCellHeight,
                                              descR.extent)

        outRaster = (r - minRaster) * (inputMaxRaster - inputMinRaster) / (
            maxRaster - minRaster) + inputMinRaster

    elif analysisType == "Normalize":
Example #3
                nodeID = row[1]
                row[f + 2] = nodeDict[streamID][nodeID][field]
                cursor.updateRow(row)


#enable garbage collection
gc.enable()

try:
    print("Step 2: Measure Channel Width")

    #keeping track of time
    startTime = time.time()

    # Check if the output exists
    if not arcpy.Exists(nodes_fc):
        arcpy.AddError("\nThis output does not exist: \n" +
                       "{0}\n".format(nodes_fc))
        sys.exit("This output does not exist: \n" + "{0}\n".format(nodes_fc))

    if overwrite_data is True:
        env.overwriteOutput = True
    else:
        env.overwriteOutput = False

    # Determine input spatial units
    proj_nodes = arcpy.Describe(nodes_fc).spatialReference
    proj_rb = arcpy.Describe(rb_fc).spatialReference
    proj_lb = arcpy.Describe(lb_fc).spatialReference

    # Check to make sure the rb_fc/lb_fc and input points are
Example #4
def mainFunction(
    downloadLink, updateMode, geodatabase, featureDataset
):  # Get parameters from the ArcGIS Desktop tool, separated by commas (var1 is the 1st parameter, var2 the 2nd, var3 the 3rd)
    try:
        # --------------------------------------- Start of code --------------------------------------- #

        # Download the file from the link
        file = urllib2.urlopen(downloadLink)
        # Download in chunks
        fileChunk = 16 * 1024
        with open(os.path.join(arcpy.env.scratchFolder, "Data.zip"),
                  'wb') as output:
            while True:
                chunk = file.read(fileChunk)
                if not chunk:
                    break
                # Write chunk to output file
                output.write(chunk)

        # Unzip the file to the scratch folder
        arcpy.AddMessage("Extracting zip file...")
        with zipfile.ZipFile(os.path.join(arcpy.env.scratchFolder,
                                          "Data.zip"),
                             mode="r") as zipFile:
            zipFile.extractall(arcpy.env.scratchFolder)

        # Get the newest unzipped database from the scratch folder
        database = max(glob.iglob(arcpy.env.scratchFolder + r"\*.gdb"),
                       key=os.path.getmtime)

        # Assign the geodatabase workspace and load in the datasets to the lists
        arcpy.env.workspace = database
        featureclassList = arcpy.ListFeatureClasses()
        tableList = arcpy.ListTables()

        arcpy.AddMessage("Copying datasets...")
        # Load the feature classes into the geodatabase if at least one is in the geodatabase provided
        if (len(featureclassList) > 0):
            # Loop through the feature classes
            for eachFeatureclass in featureclassList:
                # Create a Describe object from the dataset
                describeDataset = arcpy.Describe(eachFeatureclass)
                # If feature dataset provided, add that to path
                if featureDataset:
                    outputDataset = os.path.join(
                        geodatabase + "\\" + featureDataset,
                        describeDataset.name)
                else:
                    outputDataset = os.path.join(geodatabase,
                                                 describeDataset.name)
                exportData = "true"
                # If update mode is New then copy, otherwise delete and append records
                if (updateMode == "New"):
                    # Copy feature class into geodatabase using the same dataset name
                    arcpy.CopyFeatures_management(eachFeatureclass,
                                                  outputDataset, "", "0", "0",
                                                  "0")
                else:
                    # If dataset exists in geodatabase, delete features and load in new data
                    if arcpy.Exists(outputDataset):
                        arcpy.DeleteFeatures_management(outputDataset)
                        arcpy.Append_management(
                            os.path.join(arcpy.env.workspace,
                                         eachFeatureclass), outputDataset,
                            "NO_TEST", "", "")
                    else:
                        exportData = "false"
                        # Log warning
                        arcpy.AddWarning(
                            "Warning: " + outputDataset +
                            " does not exist and won't be updated")
                        # Logging
                        if (enableLogging == "true"):
                            logger.warning(
                                outputDataset +
                                " does not exist and won't be updated")
                if (exportData.lower() == "true"):
                    datasetRecordCount = arcpy.GetCount_management(
                        outputDataset)
                    arcpy.AddMessage(
                        str(outputDataset) + " record count - " +
                        str(datasetRecordCount) + "...")
                    # Logging
                    if (enableLogging == "true"):
                        logger.info(
                            str(outputDataset) + " record count - " +
                            str(datasetRecordCount) + "...")
        if (len(tableList) > 0):
            # Loop through of the tables
            for eachTable in tableList:
                # Create a Describe object from the dataset
                describeDataset = arcpy.Describe(eachTable)
                outputDataset = os.path.join(geodatabase, describeDataset.name)
                exportData = "true"

                # If update mode is New then copy, otherwise delete and append records
                if (updateMode == "New"):
                    # Copy table into geodatabase using the same dataset name
                    arcpy.TableSelect_analysis(eachTable, outputDataset, "")
                else:
                    # If dataset exists in geodatabase, delete features and load in new data
                    if arcpy.Exists(os.path.join(geodatabase, eachTable)):
                        arcpy.DeleteRows_management(
                            os.path.join(geodatabase, eachTable))
                        arcpy.Append_management(
                            os.path.join(arcpy.env.workspace, eachTable),
                            outputDataset, "NO_TEST", "", "")
                    else:
                        exportData = "false"
                        # Log warning
                        arcpy.AddWarning(
                            "Warning: " + outputDataset +
                            " does not exist and won't be updated")
                        # Logging
                        if (enableLogging == "true"):
                            logger.warning(
                                outputDataset +
                                " does not exist and won't be updated")
                if (exportData.lower() == "true"):
                    datasetRecordCount = arcpy.GetCount_management(
                        outputDataset)
                    arcpy.AddMessage(
                        str(outputDataset) + " record count - " +
                        str(datasetRecordCount) + "...")
                    # Logging
                    if (enableLogging == "true"):
                        logger.info(
                            str(outputDataset) + " record count - " +
                            str(datasetRecordCount) + "...")

        # --------------------------------------- End of code --------------------------------------- #
        # If called from gp tool return the arcpy parameter
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                # If ArcGIS desktop installed
                if (arcgisDesktop == "true"):
                    arcpy.SetParameterAsText(1, output)
                # ArcGIS desktop not installed
                else:
                    return output
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
    # If arcpy error
    except arcpy.ExecuteError:
        # Build and show the error message
        errorMessage = arcpy.GetMessages(2)
        printMessage(errorMessage, "error")
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
    # If python error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message
        # If the exception carries arguments
        if (e.args):
            for i in range(len(e.args)):
                if (i == 0):
                    # Python version check
                    if sys.version_info[0] >= 3:
                        # Python 3.x
                        errorMessage = str(
                            e.args[i]).encode('utf-8').decode('utf-8')
                    else:
                        # Python 2.x
                        errorMessage = unicode(e.args[i]).encode('utf-8')
                else:
                    # Python version check
                    if sys.version_info[0] >= 3:
                        # Python 3.x
                        errorMessage = errorMessage + " " + str(
                            e.args[i]).encode('utf-8').decode('utf-8')
                    else:
                        # Python 2.x
                        errorMessage = errorMessage + " " + unicode(
                            e.args[i]).encode('utf-8')
        # Otherwise the exception has no arguments
        else:
            errorMessage = str(e)
        printMessage(errorMessage, "error")
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
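
The download-and-extract idiom above can be distilled into a small standalone helper; this is a sketch under the same Python 2 assumptions (urllib2), and the fetch_zip name is hypothetical:

import os
import zipfile
import urllib2

def fetch_zip(url, out_dir, chunk_size=16 * 1024):
    # Stream the download in chunks so large files never sit in memory
    zip_path = os.path.join(out_dir, "Data.zip")
    response = urllib2.urlopen(url)
    with open(zip_path, "wb") as out:
        while True:
            chunk = response.read(chunk_size)
            if not chunk:
                break
            out.write(chunk)
    # Extract everything next to the archive
    with zipfile.ZipFile(zip_path, "r") as archive:
        archive.extractall(out_dir)
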
Example #5
    value = export_dict[k]

for use in fcs_in_workspace(VectorLocation):
    count = 0
    runID = str(flag) + '_' + str(use)
    # print runID
    usepath = VectorLocation + os.sep + str(use)
    # print dem
    usepath = usepath.replace('\\\\', '\\')
    #print export_dict
    out = export_dict[str(usepath)]

    outFC_use = out + os.sep + runID
    #print outFC_use

    if arcpy.Exists(outFC_use):
        print "Already complete analysis for {0}".format(use)
        continue

    print "Run to " + str(runID)
    arcpy.env.overwriteOutput = True
    start_loop = datetime.datetime.now()
    arcpy.AddField_management(fc, "CoOccur_Acres", "DOUBLE", "#", "#", "#",
                              "#", "NULLABLE", "NON_REQUIRED", "#")
    with arcpy.da.SearchCursor(fc, ("EntityID", "CoOccur_Acres")) as clipper:
        for rcrd in clipper:
            if rcrd[1] != None:
                continue
            else:
                ent = rcrd[0]
                lyr = "Spe_{0}_lyr".format(ent)
Example #6
        if row[1] is None:  # "if not filter" did not work on double data type
            print("{} ----> EMPTY VALUE FOUND!".format(row[0]))
        # This can be uncommented to print all successfully populated values:
        #else:
        #print "Defect_Id: " + str(row[0]) + " ----> " + str(row[1])
print("Testing for missing Chainage values completed")

# Testing: XSP created field values. Are any missing/empty?
print("Testing for missing XSP values:")
with arcpy.da.SearchCursor(de05v3, ("Defect_Id", "XSP")) as cursor:
    for row in cursor:
        if not filter(None, row[1]):
            print "{} ----> EMPTY VALUE FOUND!".format(row[0])
        # This can be uncommented to print all successfully populated values:
        #else:
        #print "Defect_Id: " + str(row[0]) + " ----> " + str(row[1])
print("Testing for missing XSP values completed")

# Delete superseded feature classes and tables:
# Create list of superseded fc/tables
deleteSS = [de05Lyr, de05fc, de05Filter, linearRef]
# Loop through list, deleting the files that are matched
for fcSS in deleteSS:
    print("Deleting superseded data: " + fcSS
          )  # Print each deleted featureclass/table
    if arcpy.Exists(fcSS):
        arcpy.Delete_management(fcSS)

# Print time taken to run the script
print("Workflow complete in ", time.time() - startTime, " seconds")
Example #7
def create_gdb(out_folder, out_name, out_path):
    if not arcpy.Exists(out_path):
        arcpy.CreateFileGDB_management(out_folder, out_name, "CURRENT")
        print('Created GDB {0}'.format(out_name))
Example #8
def project(dataframe, workspace_in, workspace_out, cellSize, template=False):
    arcpy.env.workspace = workspace_in

    for i in range(len(dataframe)):
        infile = dataframe['Input File Name'][i]
        outfile = dataframe['Output File Name'][i]
        print("*** infile: ", infile, ", outfile: ", outfile)
        outfc = os.path.join(workspace_out, outfile + "_Projected")

        # if not dataframe['Process?'][i] == "Yes":
        #     continue
        dsc = arcpy.Describe(infile)
        if dsc.spatialReference.Name == "Unknown":
            print('skipped this fc due to undefined coordinate system: ' +
                  infile)
            continue
        if arcpy.Exists(outfc):
            print(
                "An output file with this name already exists; skipping projecting this row"
            )
            if template:
                arcpy.env.snapRaster = outfc
                print("Now snapping to existing raster")
            continue
        # if (infile == "gm_lc_v3_2_2"):
        #     continue

        try:
            outProj = int(dataframe['Output Projection'][i])
        except (ValueError, TypeError):
            outProj = dataframe['Output Projection'][i]

        try:
            inProj = int(dataframe['Input Projection'][i])
        except (ValueError, TypeError):
            inProj = dataframe['Input Projection'][i]

        if (dataframe['File Type'][i] == 'Feature Class'):

            print("Feature Class")
            print(infile, outfc, inProj, outProj)
            arcpy.Project_management(
                infile,
                outfc,
                out_coor_system=arcpy.SpatialReference(outProj),
                in_coor_system=arcpy.SpatialReference(inProj))
            print(arcpy.GetMessages())

        elif (dataframe['File Type'][i] == 'Raster'):
            print("Raster")
            print(infile, outfc, inProj, outProj)
            arcpy.ProjectRaster_management(
                infile,
                outfc,
                out_coor_system=arcpy.SpatialReference(outProj),
                in_coor_system=arcpy.SpatialReference(inProj),
                cell_size=cellSize,
                resampling_type=dataframe['Resampling Type (for Raster)'][i])
            print(arcpy.GetMessages())
            if template:
                arcpy.env.snapRaster = outfc
                print("Now snapping to template raster")
Example #9
#------------------------------------------------------------------------------
arcpy.AddMessage(" ");
arcpy.AddMessage(str_script);
arcpy.AddMessage("Version: " + str(num_version));
arcpy.AddMessage("By " + str_author);
arcpy.AddMessage("Last Modified: " + str_last_mod);
arcpy.AddMessage(" ");

#------------------------------------------------------------------------------
#- Step 10
#- Verify that the connections exist and are good
#------------------------------------------------------------------------------
arcpy.AddMessage("Validating ArcCatalog Connections:");
         
ags_con = "GIS Servers\\" + ags_con_name + ".ags";
if arcpy.Exists(ags_con):
   arcpy.AddMessage("   Service will be deployed to " + ags_con);
   
else:
   arcpy.AddMessage(" ");
   arcpy.AddMessage("  Connection named GIS Servers\\" + ags_con_name + ".ags not found.");
   ags_con2 = os.environ['USERPROFILE'] + "\\AppData\\Roaming\\ESRI\\Desktop10.3\\ArcCatalog\\" + ags_con_name + ".ags"
   
   if arcpy.Exists(ags_con2):
      ags_con = ags_con2;
      arcpy.AddMessage("   Service will be deployed to " + ags_con);
      
   else:
      arcpy.AddMessage(" ");
      arcpy.AddMessage("  No luck checking " + ags_con2);
      ags_con3 = os.environ['USERPROFILE'] + "\\AppData\\Roaming\\ESRI\\Desktop10.2\\ArcCatalog\\" + ags_con_name + ".ags"
Example #10
def delete_field(featureClass, field):
    if arcpy.Exists(featureClass):
        try:
            arcpy.DeleteField_management(featureClass, field)
        except:
            config.run_error_message(featureClass, "Delete field failure")
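
A short usage sketch (the path is hypothetical); the arcpy.Exists() guard makes repeated calls safe:

parcels = r"C:\data\work.gdb\parcels"
for fld in ("GlobalID_1", "Shape_Leng"):
    delete_field(parcels, fld)
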
Example #11
def polygon_cursor_loop(featureClass, pointFeatureClass):
    # Loop over the polygons: flag duplicates, compute centroid/MBR statistics, and write centroid points
    if not arcpy.Exists(featureClass):
        raise ExistsError
    if not arcpy.Exists(pointFeatureClass):
        raise ExistsError
    try:
        polyDupeFlag = 1
        nextPolyDupeID, nextPolyTrueDupeID = 0, 0
        objNumber, MBRratio, MBRarea, Xlen, Ylen = 0, 0, 0, 0, 0
        polygonCursor, polygonRow, pointCursor, pointRow = None, None, None, None
        describeFeatureClass = arcpy.Describe(featureClass)
        shapeField = describeFeatureClass.shapeFieldName
        polyDupeDict = {}
        polyTrueDupeDict = {}
        pointCursor = arcpy.InsertCursor(pointFeatureClass)
        polygonCursor = arcpy.UpdateCursor(featureClass)
        polygonRow = next(polygonCursor)
        while polygonRow:
            geometry = polygonRow.getValue(shapeField)
            polyCentroid = str(geometry.centroid)
            polyTrueCentroid = str(geometry.trueCentroid)
            MBR = str(geometry.extent)
            if polyCentroid != polyTrueCentroid:
                polygonRow.setValue("FIP", "YES")
            pArea = geometry.area
            pointRow = pointCursor.newRow()
            Xcent = polyCentroid.split(" ")[0]
            Ycent = polyCentroid.split(" ")[1]
            Xmin = MBR.split(" ")[0]
            Ymin = MBR.split(" ")[1]
            Xmax = MBR.split(" ")[2]
            Ymax = MBR.split(" ")[3]
            Xlen = float(Xmax) - float(Xmin)
            Ylen = float(Ymax) - float(Ymin)
            MBRarea = Xlen * Ylen
            MBRratio = pArea / MBRarea
            polygonRow.setValue("P_AREA", pArea)
            polygonRow.setValue("RATIO", MBRratio)
            objNumber = objNumber + 1
            geomPoint = arcpy.CreateObject("Point")
            geomPoint.X = str(Xcent)
            geomPoint.Y = str(Ycent)
            geomPoint.ID = str(objNumber)
            pointRow.shape = geomPoint
            pointRow.setValue("LINKER", polygonRow.getValue("LINKER"))
            #pointRow.ID = geomPoint.ID
            pointCursor.insertRow(pointRow)
            try:
                polyDupeID = polyDupeDict[polyCentroid]
                polygonRow.setValue("DUPE", polyDupeFlag)
            except KeyError:
                nextPolyDupeID = nextPolyDupeID + 1
                polyDupeID = nextPolyDupeID
                polyDupeDict[polyCentroid] = polyDupeID
            int_ring = 0
            partnum = 0
            partcount = geometry.partCount
            while partnum < partcount:
                part = geometry.getPart(partnum)
                pnt = next(part)
                pntcount = 0
                while pnt:
                    pnt = next(part)
                    pntcount += 1
                    if not pnt:
                        pnt = next(part)
                        if pnt:
                            int_ring = int_ring + 1
                partnum += 1
            if int_ring > 0:
                polygonRow.setValue("VOIDS", int_ring)
            polygonCursor.updateRow(polygonRow)
            polygonRow = next(polygonCursor)
    except:
        config.run_error_message(featureClass, "Polygon cursor failure")
    finally:
        del polygonCursor, polygonRow, pointCursor, pointRow
    gc.collect()
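
A hypothetical call; the polygon feature class must already carry the fields the loop writes (FIP, P_AREA, RATIO, DUPE, VOIDS, LINKER) and the point feature class must have a LINKER field:

polyFC = r"C:\data\work.gdb\footprints"
pointFC = r"C:\data\work.gdb\footprint_centroids"
polygon_cursor_loop(polyFC, pointFC)
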
Example #12
def get_count(featureClass):
    if arcpy.Exists(featureClass):
        result = arcpy.GetCount_management(featureClass)
        return str(result)
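
get_count() returns the count as a string (and implicitly None when the feature class is missing), so a caller converts it back; the path here is hypothetical:

count = get_count(r"C:\data\work.gdb\parcels")
if count is not None:
    print("parcels: {0} rows".format(int(count)))
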
Example #13
PopWeight = "Tot_P_GNAF_Weight"

BufferSize = '2 Kilometers'
SearchRadius = 2000
CellSize = 200
MinHotSpotCount = 50

out_path = r"F:\Suicide_Vs_Population\LITS_20200729"
out_gdb = "Incidents_Away_08_17"
out_name = "NSW_Tot_P"
LogFile = r"F:\Suicide_Vs_Population\LITS_20200729\NSW_Tot_P_log"

HoldWorking = True

out_gdb = "{}\{}".format(out_path, out_gdb)
if not arcpy.Exists(out_gdb):
	gdb_path, gdb_name = os.path.split(out_gdb)
	arcpy.CreateFileGDB_management(gdb_path, gdb_name)

RasterPop = "{}\KDr_PopRes_{}".format(out_gdb, out_name)
PopAlias = "KDr_PopRes_{}".format(out_name)
RasterTopic = "{}\KDr_{}".format(out_gdb, out_name)
RasterAlias = "KDr_{}".format(out_name)
TopicKDv = "{}\KDv_{}".format(out_gdb, out_name)
PointSet = "{}\{}".format(out_gdb, out_name)
OutputKDr = "{}\KDr_PopDiff_{}".format(out_gdb, out_name)
OutputKDv = "{}\KDv_PopDiff_{}".format(out_gdb, out_name)
PopPointSet = "{}\PopPoints_{}".format(out_gdb, out_name)

# The raster-math conversion to vectors delivers a vectorized raster with gridcode values in the range 1 - 20;
# 20 represents the most DIFFERENT locations, where suicide event density exceeds population density.
Example #14
def delete_existing_gdb(self, file_to_delete):
    if arcpy.Exists(file_to_delete):
        arcpy.Delete_management(file_to_delete)
Example #15
def add_field(fc,field,Type = 'TEXT'):
    if arcpy.Exists(fc):
        existing = [f.name for f in arcpy.ListFields(fc) if f.name == field]
        if not existing:
            arcpy.AddField_management (fc, field, Type, "", "", 500)
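
A quick usage sketch (hypothetical path); the ListFields() check means re-running the script never tries to add a duplicate field:

fc = r"C:\data\work.gdb\parcels"
add_field(fc, "NOTES")              # defaults to TEXT, length 500
add_field(fc, "AREA_HA", "DOUBLE")
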
Example #16
def daymet_parameters(config_path, data_name='PPT'):
    """Calculate GSFLOW DAYMET Parameters

    Parameters
    ----------
    config_path : str
        Project configuration file (.ini) path.
    data_name : {'PPT', 'TMAX', 'TMIN', 'ALL'}
        DAYMET data type (the default is 'PPT').

    Returns
    -------
    None

    """
    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'daymet_normals_log.txt'
    log_console = logging.FileHandler(
        filename=os.path.join(hru.log_ws, log_file_name), mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW DAYMET Parameters')

    # DAYMET
    daymet_ws = inputs_cfg.get('INPUTS', 'daymet_folder')
    daymet_proj_method = inputs_cfg.get('INPUTS', 'prism_projection_method')
    daymet_cs = inputs_cfg.getint('INPUTS', 'prism_cellsize')
    calc_jh_coef_flag = inputs_cfg.getboolean('INPUTS',
                                              'calc_prism_jh_coef_flag')

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()
    # Check that DAYMET folder is valid
    if not os.path.isdir(daymet_ws):
        logging.error(
            '\nERROR: DAYMET folder ({}) does not exist'.format(daymet_ws))
        sys.exit()
    proj_method_list = ['BILINEAR', 'CUBIC', 'NEAREST']
    if daymet_proj_method.upper() not in proj_method_list:
        logging.error('\nERROR: DAYMET projection method must be: {}'.format(
            ', '.join(proj_method_list)))
        sys.exit()
    logging.debug('  Projection method:    {}'.format(
        daymet_proj_method.upper()))

    # Check other inputs
    if daymet_cs <= 0:
        logging.error('\nERROR: DAYMET cellsize must be greater than 0\n')
        sys.exit()

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS 0'
    env.workspace = hru.param_ws
    env.scratchWorkspace = hru.scratch_ws

    # DAYMET data names
    if data_name == 'ALL':
        data_name_list = ['PPT', 'TMAX', 'TMIN']
    else:
        data_name_list = [data_name]

    # Set month list
    month_list = ['{:02d}'.format(m) for m in range(1, 13)]
    # month_list.extend(['annual'])

    # Check fields
    logging.info('\nAdding DAYMET fields if necessary')
    for data_name in data_name_list:
        for month in month_list:
            support.add_field_func(hru.polygon_path,
                                   '{}_{}'.format(data_name, month), 'DOUBLE')

    # Process each DAYMET data type
    logging.info('\nProjecting/clipping DAYMET mean monthly rasters')
    for data_name in data_name_list:
        logging.info('\n{}'.format(data_name))
        daymet_normal_re = re.compile(
            r'daymet_(?P<type>%s)_30yr_normal_(?P<month>\d{2})\.img$' %
            data_name, re.IGNORECASE)

        # Search all files & subfolders in DAYMET folder
        #   for images that match data type
        input_raster_dict = dict()
        for root, dirs, files in os.walk(daymet_ws):
            for file_name in files:
                daymet_normal_match = daymet_normal_re.match(file_name)
                if daymet_normal_match:
                    month_str = daymet_normal_match.group('month')
                    input_raster_dict[month_str] = os.path.join(
                        root, file_name)
        if not input_raster_dict:
            logging.error(
                '\nERROR: No DAYMET rasters were found matching the following '
                'pattern:\n  {}\n\n'.format(daymet_normal_re.pattern))
            sys.exit()

        # DAYMET input data workspace
        # input_ws = os.path.join(daymet_ws, data_name.lower())
        # if not os.path.isdir(input_ws):
        #    logging.error('\nERROR: The DAYMET {} folder does not exist'.format(
        #        data_name.lower()))
        #    sys.exit()

        # DAYMET output data workspace
        output_ws = os.path.join(hru.param_ws, data_name.lower() + '_rasters')
        if not os.path.isdir(output_ws):
            os.mkdir(output_ws)

        # Remove all non year/month rasters in DAYMET temp folder
        logging.info('  Removing existing DAYMET files')
        for item in os.listdir(output_ws):
            if daymet_normal_re.match(item):
                os.remove(os.path.join(output_ws, item))

        # Extract, project/resample, clip
        # Process images by month
        zs_daymet_dict = dict()
        # env.extent = hru.extent
        for month in month_list:
            logging.info('  Month: {}'.format(month))

            # Projected/clipped DAYMET raster
            input_raster = input_raster_dict[month]
            # input_name = 'daymet_{}_30yr_normal_800mM2_{}_bil.bil'.format(
            #    data_name.lower(), input_month)
            # input_raster = os.path.join(input_ws, input_name)
            output_name = 'daymet_{}_normal_{}.img'.format(
                data_name.lower(), month)
            output_raster = os.path.join(output_ws, output_name)

            # Set preferred transforms
            input_sr = arcpy.sa.Raster(input_raster).spatialReference
            transform_str = support.transform_func(hru.sr, input_sr)
            if transform_str:
                logging.debug('  Transform: {}'.format(transform_str))

            # Project DAYMET rasters to HRU coordinate system
            # DEADBEEF - Arc10.2 ProjectRaster does not extent
            support.project_raster_func(input_raster, output_raster, hru.sr,
                                        daymet_proj_method.upper(), daymet_cs,
                                        transform_str,
                                        '{} {}'.format(hru.ref_x, hru.ref_y),
                                        input_sr, hru)
            # arcpy.ProjectRaster_management(
            #    input_raster, output_raster, hru.sr,
            #    daymet_proj_method.upper(), daymet_cs, transform_str,
            #    '{} {}'.format(hru.ref_x, hru.ref_y),
            #    input_sr)

            # Save parameters for calculating zonal stats
            zs_field = '{}_{}'.format(data_name, month)
            zs_daymet_dict[zs_field] = [output_raster, 'MEAN']

            # Cleanup
            del input_raster, output_raster, output_name
            del input_sr, transform_str, zs_field

        # Cleanup
        # arcpy.ClearEnvironment('extent')

        # Calculate zonal statistics
        logging.info('\nCalculating DAYMET zonal statistics')
        support.zonal_stats_func(zs_daymet_dict, hru.polygon_path,
                                 hru.point_path, hru)
        del zs_daymet_dict
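
A minimal driver sketch; 'gsflow_daymet.ini' is a hypothetical project configuration file in the format support.HRUParameters expects:

if __name__ == '__main__':
    daymet_parameters(config_path='gsflow_daymet.ini', data_name='ALL')
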
Example #17
def blockIntersect():
    arcpy.AddMessage("     Beginning overlay processing")
    if arcpy.Exists("wireless_block_" + theST):
        arcpy.Delete_management("wireless_block_" + theST)
    theCnt = int(
        arcpy.GetCount_management(theFD + "BB_Service_Wireless").getOutput(0))
    theBlock = theBlockGDB + "Block_" + theST
    myCnt = 1
    if theCnt > 0:  #if there are records in the wireless shape class
        #rows = arcpy.SearchCursor(theFD + "BB_Service_Wireless")
        rows = arcpy.SearchCursor(theFD + "BB_Service_Wireless")
        for row in rows:
            myID = row.getValue("OBJECTID")
            #arcpy.AddMessage("     Performing overlay " + str(myCnt) + " of " + str(theCnt) + " and O-ID: " + str(myID))
            myQry = "TRANSTECH <> 60 AND OBJECTID = " + str(myID)
            myLyr = theST + "NotSatellite" + str(myCnt)
            #arcpy.AddMessage("theFD=" + theFD)
            #arcpy.AddMessage("myLyr=" + myLyr)
            #arcpy.AddMessage("myQry=" + myQry)
            arcpy.MakeFeatureLayer_management(theFD + "BB_Service_Wireless", myLyr, myQry)
            #arcpy.AddMessage("cnt=" + arcpy.GetCount_management(myLyr).getOutput(0))
            if int(
                    arcpy.GetCount_management(myLyr).getOutput(0)
            ) > 0:  # records exist in myLyr, so it is not a satellite-only record
                theOFC = "wireless_block_" + theST + "_" + str(myCnt)
                theOFCP = "wireless_block_" + theST + "_" + str(myCnt) + "_prj"
                myFCs = [theOFC, theOFCP]
                for myFC in myFCs:
                    if arcpy.Exists(myFC):
                        arcpy.Delete_management(myFC)
                #arcpy.AddMessage("myLyr=" + myLyr)
                #arcpy.AddMessage("theBlock=" + theBlock)
                #arcpy.AddMessage("theOFC=" + theOFC)
                arcpy.Intersect_analysis([myLyr, theBlock], theOFC)
                #arcpy.AddMessage("after Intersect_analysis")
                arcpy.Project_management(
                    theOFC, theOFCP,
                    "PROJCS['North_America_Albers_Equal_Area_Conic',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Albers'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',-96.0],PARAMETER['Standard_Parallel_1',20.0],PARAMETER['Standard_Parallel_2',60.0],PARAMETER['Latitude_Of_Origin',40.0],UNIT['Meter',1.0]]",
                    "NAD_1983_To_WGS_1984_1",
                    "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]"
                )
                arcpy.AddField_management(theOFCP, "PCT", "DOUBLE", "5", "2",
                                          "")
                theExp = "([SHAPE_Area]) /( [ALAND10] + [AWATER10] )*100"
                arcpy.CalculateField_management(theOFCP, "PCT", theExp, "VB",
                                                "")
                arcpy.Delete_management(myLyr)
                if arcpy.Exists(theOFC):
                    arcpy.Delete_management(theOFC)
                myQry = "PCT > 100"
                myLyr = theST + "gtOne" + str(myCnt)
                arcpy.MakeFeatureLayer_management(theOFCP, myLyr, myQry)
                if int(arcpy.GetCount_management(myLyr).getOutput(0)) > 0:
                    arcpy.CalculateField_management(myLyr, "PCT", "100",
                                                    "PYTHON", "")
                arcpy.Delete_management(myLyr)
                arcpy.CopyRows_management(theOFCP, theOFC)
                if arcpy.Exists(theOFCP):
                    arcpy.Delete_management(theOFCP)
                del theExp, theOFCP, theOFC, myFC, myFCs
            myCnt = myCnt + 1
    del myLyr, myQry, theBlock, theCnt, myCnt, row, rows, myID
    return ()
Example #18
def appendSDMValues(gp, unitCell, TrainPts):
    try:
        arcpy.AddMessage("\n" + "=" * 10 + " arcsdm values  " + "=" * 10)
        with open(
                os.path.join(os.path.dirname(__file__), "arcsdm_version.txt"),
                "r") as myfile:
            data = myfile.readlines()
        #Print version information
        arcpy.AddMessage("%-20s %s" % ("", data[0]))
        installinfo = arcpy.GetInstallInfo()

        arcpy.AddMessage("%-20s %s (%s)" %
                         ("Arcgis environment: ", installinfo['ProductName'],
                          installinfo['Version']))

        if not gp.workspace:
            gp.adderror('Workspace not set')
            raise arcpy.ExecuteError("Workspace not set!")
        if not (arcpy.Exists(gp.workspace)):
            gp.adderror('Workspace %s not found' % (gp.workspace))
            raise arcpy.ExecuteError('Workspace %s not found' % (gp.workspace))
        desc = arcpy.Describe(gp.workspace)
        gp.addmessage("%-20s %s (%s)" %
                      ("Workspace: ", gp.workspace, desc.workspaceType))

        if not gp.scratchworkspace:
            gp.adderror('Scratch workspace mask not set')
        wdesc = arcpy.Describe(gp.scratchworkspace)
        gp.addmessage(
            "%-20s %s (%s)" %
            ("Scratch workspace:", gp.scratchworkspace, wdesc.workspaceType))
        # TODO: These should be moved to common CHECKENV class/function TR

        # Tools won't work if the workspace types differ (joins do not work filesystem -> geodatabase). TR
        if (wdesc.workspaceType != desc.workspaceType):
            gp.AddError(
                "Workspace and scratch workspace must be of the same type!")
            raise arcpy.ExecuteError("Workspace type mismatch")

        mapUnits = getMapUnits()
        mapUnits = mapUnits.lower().strip()
        if not mapUnits.startswith('meter'):
            gp.addError(
                'Incorrect output map units: Check units of study area.')
        conversion = getMapConversion(mapUnits)
        #gp.addMessage("Conversion from map units to km^2: " + str(conversion));
        gp.addmessage("%-20s %s" % ('Map Units:', mapUnits))

        if not gp.mask:
            gp.adderror('Study Area mask not set')
            raise arcpy.ExecuteError("Mask not set. Check Environments!")
            #AL
        else:
            if not arcpy.Exists(gp.mask):
                gp.addError("Mask " + gp.mask + " not found!")
                raise arcpy.ExecuteError("Mask not found")
            #gp.AddMessage("Mask set");
            desc = gp.describe(gp.mask)
            gp.addMessage(
                "%-20s %s" %
                ("Mask:", "\"" + desc.name + "\" and it is " + desc.dataType))
            if (desc.dataType == "FeatureLayer"
                    or desc.dataType == "FeatureClass"):
                arcpy.AddWarning(
                    'Warning: You should only use single value raster type masks!'
                )
            gp.addMessage("%-20s %s" %
                          ("Mask size:", str(getMaskSize(mapUnits))))
            #gp.AddMessage("Masksize: " + str(getMaskSize(mapUnits)));

        if not gp.cellsize:
            gp.adderror('Study Area cellsize not set')
        if (gp.cellsize == "MAXOF"):
            arcpy.AddWarning("Cellsize should have definitive value?")
            #raise arcpy.ExecuteError("SDMValues: Cellsize must have value");

        cellsize = arcpy.env.cellSize  #float(str(arcpy.env.cellSize).replace(",","."))
        gp.addmessage("%-20s %s" % ("Cell Size:", cellsize))
        #gp.addMessage("Debug: " + str(conversion));
        total_area = getMaskSize(mapUnits)  # getMaskSize returns the area directly in km^2
        unitCell = float(unitCell)
        num_unit_cells = total_area / unitCell
        num_tps = int(gp.GetCount_management(TrainPts).getOutput(0))
        gp.addmessage("%-20s %s" % ('# Training Sites:', num_tps))
        gp.addmessage("%-20s %s" %
                      ("Unit Cell Area:", "{}km^2, Cells in area: {} ".format(
                          unitCell, num_unit_cells)))

        if (num_unit_cells == 0):
            raise arcpy.ExecuteError("ERROR: 0 Cells in Area!")
            #AL
        priorprob = num_tps / num_unit_cells
        if not (0 < priorprob <= 1.0):
            arcpy.AddError(
                'Incorrect no. of training sites or unit cell area. TrainingPointsResult {}'
                .format(priorprob))
            raise arcpy.ExecuteError
            #raise SDMError('Incorrect no. of training sites or unit cell area. TrainingPointsResult {}'.format(priorprob))
        gp.addmessage("%-20s %0.6f" % ('Prior Probability:', priorprob))
        #gp.addmessage("Debug priorprob:" + str(getPriorProb(TrainPts, unitCell)))

        gp.addmessage("%-20s %s" %
                      ('Training Set:', gp.describe(TrainPts).catalogpath))
        gp.addmessage("%-20s %s" %
                      ('Study Area Raster:', gp.describe(gp.mask).catalogpath))
        gp.addmessage("%-20s %s" %
                      ('Study Area Area:', str(total_area) + "km^2"))
        #gp.addmessage('Map Units to Square Kilometers Conversion: %f'%conversion)
        arcpy.AddMessage("")
        # Empty line at end
    except arcpy.ExecuteError as e:
        if not all(e.args):
            arcpy.AddMessage("Calculate weights caught arcpy.ExecuteError: ")
            args = e.args[0]
            args.split('\n')
            arcpy.AddError(args)
        arcpy.AddMessage("-------------- END EXECUTION ---------------")
        raise
    except:
        # get the traceback object
        tb = sys.exc_info()[2]
        # tbinfo contains the line number that the code failed on and the code from that line
        tbinfo = traceback.format_tb(tb)[0]
        gp.addError(tbinfo)
        # concatenate information together concerning the error into a message string
        #pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n    " + \
        #    str(sys.exc_type)+ ": " + str(sys.exc_value) + "\n"
        # generate a message string for any geoprocessing tool errors
        if len(gp.GetMessages(2)) > 0:
            msgs = "SDM GP ERRORS:\n" + gp.GetMessages(2) + "\n"
            gp.AddError(msgs)
        #gp.AddError(pymsg)
        raise
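
A hypothetical call; gp is the geoprocessor object the module already passes around, unitCell is the unit-cell area in km^2, and TrainPts is a point feature class of training sites:

appendSDMValues(gp, unitCell=1.0,
                TrainPts=r"C:\sdm\data.gdb\training_points")
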
Example #19
        'name_prefix': 'NUACI2000_'
    },
    2005: {
        'data_dir': u'E:/Data/city_extraction/2005/',
        'name_prefix': 'NUACI2005_'
    },
    2010: {
        'data_dir': u'E:/Data/city_extraction/2010/',
        'name_prefix': 'NUACI2010_'
    },
    'output': {
        'data_dir': u'E:/Data/city_extraction/cell_statistics/',
        'name_prefix': 'NUACI_'
    },
    'begin_id': 90,
    'end_id': 134
}

if __name__ == '__main__':
    for id in range(config['begin_id'], config['end_id'] + 1):
        raster_list = []
        for year in range(1990, 2010 + 1, 5):
            file_path = config[year]['data_dir'] + config[year][
                'name_prefix'] + str(id) + '.tif'
            if arcpy.Exists(file_path):
                raster_list.append(file_path)
        output_raster = arcpy.sa.CellStatistics(raster_list, 'MAXIMUM')
        output_raster.save(config['output']['data_dir'] +
                           config['output']['name_prefix'] + str(id) + '.tif')
        print(id, raster_list, 'finish')
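
arcpy.sa.CellStatistics requires the Spatial Analyst extension, which the script above never checks out; a guard like this sketch, placed before the loop, would fail fast on unlicensed machines:

if arcpy.CheckExtension("Spatial") == "Available":
    arcpy.CheckOutExtension("Spatial")
else:
    raise RuntimeError("Spatial Analyst license is not available")
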
Example #20
        ##  Loop through raster data, clipping and creating arrays of data:  Date Q Ppt
        rasSq = 1013.02**2 / 1000  ##  ppt [mm -> m] and cellsize (x*y) [m*m]
        precip = []
        date = []
        q = []

        for day in rrule.rrule(rrule.DAILY, dtstart=start, until=end):
            folder = "C:\\Recharge_GIS\\Precip\\800m\\Daily\\"
            yr = day.year
            if (yr <= 1991):
                arcpy.env.overwriteOutput = True  ## Ensure overwrite capability
                arcpy.env.workspace = folder + str(day.year) + "a"
                ras = folder + str(day.year) + "a\\" + "PRISM_NM_" + str(
                    day.year) + day.strftime('%m') + day.strftime(
                        '%d') + ".tif"
                if arcpy.Exists(ras):
                    try:
                        arcpy.CheckOutExtension("Spatial")
                        mask = "C:\\Recharge_GIS\\nm_gauges.gdb\\nm_wtrshds"
                        rasPart = arcpy.sa.ExtractByMask(ras, geo)
                        if day == beginPrecip:
                            rasPart.save(folder + str(day.year) + "a\\" +
                                         str(gPoly) + "_rasterClipTest.tif")
                        arr = arcpy.RasterToNumPyArray(rasPart,
                                                       nodata_to_value=0)
                        arrVal = np.multiply(arr, rasSq)
                        arrSum = arrVal.sum()
                        print("Sum of precip on " + str(day) + ":  " +
                              str(arrSum))
                        precip.append(arrSum)
                        date.append(day)
Example #21
def projected_comps(in_lyr, fc, prj_final, region_name, temp_gdb, final_gdb,
                    prj_location):
    # Check if out location was already created
    if not arcpy.Exists(temp_gdb):
        path, gdb_file = os.path.split(temp_gdb)
        create_gdb(path, gdb_file, temp_gdb)
    if not arcpy.Exists(final_gdb):
        path, gdb_file = os.path.split(final_gdb)
        create_gdb(path, gdb_file, final_gdb)

    prj_name = prj_final.replace('.prj', '')
    prj_name = prj_name.replace(' ', '_')

    in_fc = in_lyr

    # Locations of WGS and desired projections
    wgs_coord_file = prj_location + os.sep + 'WGS 1984.prj'
    prj_file = prj_location + os.sep + prj_final
    # Extraction spatial info from these prj file
    dsc_wgs = arcpy.Describe(wgs_coord_file)
    wgs_coord_sys = dsc_wgs.spatialReference
    dsc_prj = arcpy.Describe(prj_file)
    prj_sr = dsc_prj.spatialReference
    prj_datum = prj_sr.GCS.datumName


    if prj_datum == "D_WGS_1984":

        fc_other_geo = region_name + "_" + str(fc) + "_WGS84"
        out_other_fc = temp_gdb + os.sep + fc_other_geo

        prj_fc_name = fc_other_geo + "_" + prj_name
        prj_fc = final_gdb + os.sep + prj_fc_name

        if not arcpy.Exists(out_other_fc):
            arcpy.Project_management(in_fc, out_other_fc, wgs_coord_sys)
            print(arcpy.GetMessages(0))

        if not arcpy.Exists(prj_fc):
            arcpy.Project_management(out_other_fc, prj_fc, prj_sr)
            print(arcpy.GetMessages(0))
        else:
            print(str(prj_fc) + " already exists")
    else:

        prj_fc_name = region_name + "_" + str(fc) + "_" + prj_name
        prj_fc = final_gdb + os.sep + prj_fc_name

        if not arcpy.Exists(prj_fc):
            arcpy.Project_management(in_fc, prj_fc, prj_sr)
            # print(arcpy.GetMessages(0))
            print('Completed Projections for {0}'.format(prj_fc))
        else:
            pass
    return prj_fc
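
A hypothetical call; prj_location is a folder of .prj files that must contain 'WGS 1984.prj' plus the target projection file named in prj_final:

out_fc = projected_comps(in_lyr="parcels_lyr", fc="parcels",
                         prj_final="NAD 1983 UTM Zone 12N.prj",
                         region_name="UT",
                         temp_gdb=r"C:\tmp\temp.gdb",
                         final_gdb=r"C:\tmp\final.gdb",
                         prj_location=r"C:\prj")
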
Example #22
start_script = datetime.datetime.now()
print "Script started at {0}".format(start_script)

species_neg = []
with open(folderDict, 'rU') as inputFile:
    header = next(inputFile)
    for line in inputFile:
        line = line.split(',')
        finalGDB = str(line[0])
        finalGDB = finalGDB.strip('\n')
        use = str(line[1])
        use = use.strip('\n')
        outtable = finalGDB + os.sep + "Topo_results_" + use

        if arcpy.Exists(outtable):
            intable = outtable
        else:
            intable = topolist

        print finalGDB

        for fc in functions.fcs_in_workspace(finalGDB):
            #print fc
            current_update = {}
            CHchecklist = fc.split("_")
            CHcheck = CHchecklist[0]
            if CHcheck != 'Topo':
                continue
            elif str(CHchecklist[1]) == 'T':
                continue
Example #23
def create_structures_fc(gdb):
    """
    Create Historic Structures feature class
    """
    arcpy.env.workspace = gdb
    fc = "HistoricStructures"
    spatref = arcpy.SpatialReference(
        "WGS 1984 Web Mercator (auxiliary sphere)")
    if arcpy.Exists(fc):
        arcpy.Delete_management(os.path.join(gdb, fc))
    has_m = "DISABLED"
    has_z = "DISABLED"
    # Execute CreateFeatureclass
    arcpy.CreateFeatureclass_management(gdb, fc, "POINT", "", has_m, has_z,
                                        spatref)
    arcpy.AddField_management(fc, "ResourceID", "LONG", "", "", "",
                              "Resource ID", "NULLABLE", "NON_REQUIRED", "")
    arcpy.AddField_management(fc, "PropName", "TEXT", "", "", 100,
                              "Resource Name", "NULLABLE", "NON_REQUIRED", "")
    arcpy.AddField_management(fc, "StrucType", "LONG", "", "", "",
                              "Structure Type", "NON_NULLABLE", "NON_REQUIRED",
                              "")
    arcpy.AddField_management(fc, "BldType", "TEXT", "", "", 3,
                              "Building Type", "NON_NULLABLE", "NON_REQUIRED",
                              "")
    arcpy.AddField_management(fc, "StyleType", "TEXT", "", "", 3,
                              "Architectural Style", "NON_NULLABLE",
                              "NON_REQUIRED", "")
    arcpy.AddField_management(fc, "Eligibility", "TEXT", "", "", 3,
                              "Eligibility Status", "NON_NULLABLE",
                              "NON_REQUIRED", "")
    arcpy.AddField_management(fc, "ConstYr", "LONG", "", "", "",
                              "Construction Year", "NULLABLE", "NON_REQUIRED",
                              "")
    arcpy.AddField_management(fc, "Address", "TEXT", "", "", 200, "Address",
                              "NULLABLE", "NON_REQUIRED", "")
    arcpy.AddField_management(fc, "Notes", "TEXT", "", "", 200, "Notes",
                              "NULLABLE", "NON_REQUIRED", "")
    arcpy.AddGlobalIDs_management(fc)
    arcpy.AddField_management(fc, "EPProject", "TEXT", "", "", 20,
                              "EPEI Project ID", "NULLABLE", "NON_REQUIRED",
                              "")
    arcpy.SetSubtypeField_management(fc, "StrucType")
    for item in subtypes.items():
        arcpy.AddSubtype_management(fc, item[0], item[1])
    arcpy.SetDefaultSubtype_management(fc, 5)
    for item in subtypes.items():
        arcpy.AssignDomainToField_management(fc, "BldType", item[1], item[0])
    arcpy.AssignDomainToField_management(
        fc, "StyleType", "Styles",
        ['0', '1', '2', '3', '4', '5', '6', '7', '8'])
    arcpy.AssignDomainToField_management(
        fc, "Eligibility", "Eligibility",
        ['0', '1', '2', '3', '4', '5', '6', '7', '8'])
    arcpy.AssignDefaultToField_management(
        fc, "Eligibility", "U", ['0', '1', '2', '3', '4', '5', '6', '7', '8'])
    arcpy.AssignDefaultToField_management(
        fc, "StyleType", "NS", ['0', '1', '2', '3', '4', '5', '6', '7', '8'])
    arcpy.AssignDefaultToField_management(
        fc, "BldType", "OT", ['0', '1', '2', '3', '4', '6', '7', '8'])
    arcpy.AssignDefaultToField_management(fc, "BldType", "NAT", '5')
    return os.path.join(gdb, fc)
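
A short driver sketch; the module-level subtypes dictionary (and the Styles/Eligibility domains in the geodatabase) are assumed to exist, as the function body requires:

gdb = r"C:\data\historic.gdb"
fc_path = create_structures_fc(gdb)
print("Created " + fc_path)
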
Example #24
create_gdb(OutFolderGDB, TopoGDB, outfileTopogdbpath)
DissolveList = [Dissolve, JoinFieldFC]
print DissolveList

for fc in fcs_in_workspace(InGDB):
    try:
        arcpy.Delete_management("fc_lyr")
        arcpy.Delete_management("outfc_lyr")

        name = str(fc) + "_STD"
        arcpy.MakeFeatureLayer_management(fc, "fc_lyr")
        outfc = outFilegdbpath + os.sep + str(name)
        topofc = outfileTopogdbpath + os.sep + str(fc)
        outFailedFC = outFilefailgdbpath + os.sep + str(fc)
        count = int(arcpy.GetCount_management(fc).getOutput(0))
        if arcpy.Exists(outfc):
            print str(outfc) + ": Already exists"
            continue
        elif arcpy.Exists(topofc):
            print str(topofc) + ": Topo already exists"
            continue
        else:
            try:
                print "Start dissolve: " + str(fc)
                arcpy.Dissolve_management("fc_lyr", outfc, DissolveList, "",
                                          "MULTI_PART", "DISSOLVE_LINES")
                print "Dissolved: " + str(fc)
            except:
                topofc = outfileTopogdbpath + os.sep + str(fc)
                if arcpy.Exists(topofc):
                    continue
Example #25
def CreateGDB(OutFolder, OutName, outpath):
    if not arcpy.Exists(outpath):
        arcpy.CreateFileGDB_management(OutFolder, OutName, "CURRENT")
Example #26
def create_gdb(out_folder, out_name, outpath):
    if not arcpy.Exists(outpath):
        arcpy.CreateFileGDB_management(out_folder, out_name, "CURRENT")
Example #27
    logfile.close()


### START HERE ###
if len(sys.argv) != 3 or not os.path.exists(sys.argv[1]) or not os.path.exists(
        sys.argv[2]):
    usage()
else:
    addMsgAndPrint('  ' + versionString)
    gdb = os.path.abspath(sys.argv[1])
    ows = os.path.abspath(sys.argv[2])
    arcpy.env.qualifiedFieldNames = False
    arcpy.env.overwriteOutput = True
    ## fix the new workspace name so it is guaranteed to be novel, no overwrite
    newgdb = ows + '/xx' + os.path.basename(gdb)
    if arcpy.Exists(newgdb):
        arcpy.Delete_management(newgdb)
    addMsgAndPrint('  Copying ' + os.path.basename(gdb) +
                   ' to temporary geodatabase...')
    try:
        arcpy.AddMessage("Using Arcpy Copy")
        arcpy.Copy_management(gdb, newgdb)
    except:
        if arcpy.Exists(newgdb):
            arcpy.Delete_management(newgdb)
        arcpy.AddMessage("Using Shutil CopyTree")
        shutil.copytree(gdb,
                        newgdb)  # Not elegant, but it got around a crashing issue
    main(newgdb, ows, gdb)
    addMsgAndPrint('\n  Deleting temporary geodatabase...')
    arcpy.env.workspace = ows
Example #28
##                                                                              "SQL_SERVER",
##                                                                              userDefinedServer,
##                                                                              "DATABASE_AUTH",
##                                                                              UserName,
##                                                                              Password,
##                                                                              "SAVE_USERNAME",
##                                                                              state,
##                                                                              "#",
##                                                                              "TRANSACTIONAL",
##                                                                              "sde.DEFAULT")
##        except Exception as e:
##            print " cursor execute create db connection: {}".format((e))
    
    print "Delete hifld table if it exists, if yes delete it..."
    try:
        if arcpy.Exists(FC):
            arcpy.DeleteFeatures_management(FC)
    except Exception as e:
        print " arcpy delete hifld table exception: {}".format((e))
        
    print "Copy shapefile to sql server..."
    # Name Alias IsNullable Required Length Type Scale Precision MergeRule joinDelimiter DataSource OutputFieldName -1 -1
    try:
        arcpy.FeatureClassToFeatureClass_conversion(in_features=tempShapefilePath, \
                                        out_path=outPath, \
                                        out_name=outName, \
                                        where_clause=whereClause, \
                                        field_mapping='SiteNumber "SiteNumber" true true false 80 Text 0 0 ,First,#,Runways,SiteNumber,-1,-1;\
                                                                StateAbbv "StateAbbv" true true false 80 Text 0 0 ,First,#,Runways,StateAbbv,-1,-1;\
                                                                ID "ID" true true false 80 Text 0 0 ,First,#,Runways,ID,-1,-1;\
                                                                Length "Length" true true false 10 Long 0 10 ,First,#,Runways,Length_ft,-1,-1;\
Example #29
                    oras.save(results)

        #    else:
        #        print "Raster Names Do Not Match[",fRas," <> ",ras,"]"

if options.oneras:
    (temp_path, initRaster) = os.path.split(options.oneras)
    initRasType, initSP, initLay = initRaster.split("_", 2)
    for bRas in arraylistB:
        (temp_path, ras) = os.path.split(bRas)
        rasType, stressPeriod, layer = ras.split("_", 2)
        if initRasType == rasType:
            if initLay == layer:
                outputName = rasType + "_diff_" + stressPeriod + "_" + layer
                ras2 = arcpy.Raster(bRas)
                ras1 = arcpy.Raster(options.oneras)
                oras = ras2 - ras1
                results = oWorkspace + "\\" + outputName
                if arcpy.TestSchemaLock(results):
                    print ras2, ' minus ', ras1, ' equals ', outputName
                    oras.save(results)
                elif not arcpy.Exists(results):
                    print ras2, ' minus ', ras1, ' equals ', outputName
                    oras.save(results)
                else:
                    print "Output SKIPPED [Schema lock present]. Can't save ", results
            else:
                print "Layers Do Not Match [", initLay, "<>", layer, "]"
        else:
            print "Raster Types Do Not Match[", initRasType, "<>", rasType, "]"
print "End of Execution"
Example #30
##############################
# args
#   inFc = featureClass
#   minSeparation (in mm on map)
#   maxPlotAtScale  = 500000

inFc = sys.argv[1]
minSeparation_mm = float(sys.argv[2])
maxPlotAtScale = float(sys.argv[3])

addMsgAndPrint(versionString)

#test for valid input:
# inFc exists and has item PlotAtScale
if not arcpy.Exists(inFc):
    forceExit()
fields = arcpy.ListFields(inFc)
fieldNames = []
for field in fields:
    fieldNames.append(field.name)
if 'PlotAtScale' not in fieldNames:
    addMsgAndPrint('No field PlotAtScale in ' + inFc + ', aborting.')
    forceExit()

gdb = os.path.dirname(inFc)
if arcpy.Describe(gdb).dataType == 'FeatureDataset':
    gdb = os.path.dirname(gdb)

if os.path.basename(inFc) == 'OrientationPoints':
    addMsgAndPrint('Populating OrientationPointsDicts')