Example 1
def initiate(list_min_area_rot, list_min_rate_increase, local_first_yr,
             local_last_yr, first_loop, ident):
    # Walk the input directory once and keep its immediate subdirectories.
    for subdir, dir_list, files in os.walk(inp_dir):
        break

    num_rot = local_last_yr - local_first_yr + 1
    print('number of elements in one rotation =', num_rot)
    # Read in the list of states to process
    #state_file = open(analysis_dir+os.sep+list_states, 'rb')
    #lines = state_file.readlines()
    lines = ['cba']  # the state list is hard-coded to a single entry here

    # merged raster containing all states data
    to_merge_files = []
    merged_ras = 'merged'

    # Loop over all the states.
    for line in lines:
        frst = False

        state = line.split()[0]
        # out_dir contains the output and intermediate analysis stuff
        if (local_first_yr > 2006):
            out_dir = str(output_dir) + os.sep + str(state)
        else:
            out_dir = str(output_dir)
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)

        state_ras_files = []
        range_of_yrs = []

        for i in range(num_rot):
            range_of_yrs.append(local_first_yr + i)

        logger.info('Evaluating ' + state)
        logger.info('CROP ROTATIONS FOR ' + state)
        # Open the folder for the year currently being processed.
        for j in range(len(range_of_yrs)):
            for position, item in enumerate(dir_list):
                #print (position, item)
                if (str(range_of_yrs[j]) in item):
                    # Find the raster corresponding to the current year.
                    list_files = glob.glob(inp_dir + os.sep +
                                           str(range_of_yrs[j]) + os.sep +
                                           str(range_of_yrs[j]) +
                                           '_reclasificado.tif')
                    print(
                        range_of_yrs[j],
                        inp_dir + os.sep + str(range_of_yrs[j]) + os.sep +
                        str(range_of_yrs[j]) + '_reclasificado.tif')
                    #print('files found:', list_files)
                    if list_files:
                        if not frst:
                            local_first_yr = range_of_yrs[j]
                            frst = True
                        state_ras_files.append(''.join(list_files))
                    else:
                        print(
                            'The input TIF files were not read correctly.')

        if len(state_ras_files) > num_rot:
            state_ras_files = state_ras_files[:num_rot]
            local_last_yr = local_first_yr + num_rot - 1

        logger.info('List of raster files ')
        for k in range(len(state_ras_files)):
            logger.info(state_ras_files[k])
        logger.info('First Year ' + str(local_first_yr))
        logger.info('Last Year ' + str(local_last_yr))

        # Everything works correctly up to this point.
        ras = computeCropRotations(state, out_dir, state_ras_files,
                                   local_first_yr, local_last_yr,
                                   list_min_area_rot, list_min_rate_increase,
                                   first_loop)
        to_merge_files.append(ras)

    # Delete all arcgis files except the merged raster
    for i in range(len(to_merge_files)):
        logger.info('Deleting raster ' + os.path.split(to_merge_files[i])[1])
        rasters_to_delete.append(to_merge_files[i])

    if bool_merge_ras_files and first_loop:
        logger.info('Merging all crop rotation rasters...')
        try:
            if (len(to_merge_files) > 1):
                arcpy.MosaicToNewRaster_management(to_merge_files, output_dir,
                                                   merged_ras, "",
                                                   "8_BIT_UNSIGNED", "", "1",
                                                   "LAST", "FIRST")
            else:
                arcpy.Rename_management(to_merge_files, merged_ras)
        except:
            logger.info(arcpy.GetMessages())
            #delete_interim_files()

    # cdl_map maps CDL IDs to crop names e.g. 1 is mapped to Corn
    cdl_map_file = csv.reader(
        open(analysis_dir + os.sep + CDL_VAL_NAME_MAP, 'r'))
    cdl_map = {}

    # Here the number corresponding to each year is assigned correctly.
    for row in cdl_map_file:
        cdl_map[int(row[0])] = row[1]

    # Write a user-friendly version of the EXISTING_ROTATIONS file
    prev_rot_file = csv.reader(
        open(output_dir + os.sep + EXISTING_ROTATIONS, 'r'))
    human_readable_rot = open(output_dir + os.sep + READABLE_ROTS, 'w')
    #human_readable_rot2 = open(base_dir+os.sep+READABLE_ROTS,'w')
    # Number of pixels per combination
    para_has = csv.reader(open(output_dir + os.sep + STATS + '.csv', 'r'))
    i = 0
    num_pix = []
    for str_row in para_has:
        i += 1
        if i > 1:
            num_pix.append(str_row[num_rot + 1])
    j = 0
    for str_row in prev_rot_file:
        j += 1
        line = []
        # row contains all the combinations
        row = [int(float(x)) for x in str_row]
        line.append(row[0])

        for i in range(1, len(row)):
            try:
                line.append(cdl_map[row[i]])
            except:
                print(row[i])
        #print(str_row)
        line.append(num_pix[j - 1])
        csv.writer(human_readable_rot).writerow(line)
        #csv.writer(human_readable_rot2).writerow(line)

    # Delete all intermediate excel files
    list_csv_files = glob.glob(out_dir + os.sep + '*.csv*')
    for k in range(len(list_csv_files)):
        csv_files_to_delete.append(list_csv_files[k])
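
A minimal, hypothetical driver for initiate(), assuming the module-level globals the function relies on (inp_dir, output_dir, analysis_dir, logger, rasters_to_delete, csv_files_to_delete, bool_merge_ras_files, and the CSV-name constants) are configured elsewhere in the script; the thresholds and years below are illustrative only.

if __name__ == '__main__':
    # Illustrative thresholds; real values depend on the study area.
    min_area_rot = [500]          # minimum area per rotation
    min_rate_increase = [0.05]    # minimum rate of increase per rotation
    initiate(min_area_rot, min_rate_increase,
             local_first_yr=2012, local_last_yr=2015,
             first_loop=True, ident=0)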
Example 2
def temporal_kernel_density(inFeatureClass, outWorkSpace, outTemporalName, start_time, end_time, time_interval,
                            kernel_pop_field,
                            kernel_cell_size, kernel_search_rad, kernel_area_unit, kernel_out_values="DENSITIES",
                            kernel_method="PLANAR", bin_start=None, compactBool=True):
    """ This tool will split a feature class into multiple kernel densities based on a datetime field and a
    a set time interval. The result will be a time enabled moasic with Footprint. """
    try:
        if arcpy.Exists(outWorkSpace):
            arcpy.env.workspace = outWorkSpace
            arcpy.env.overwriteOutput = True
            san.arc_print("The current work space is: {0}.".format(outWorkSpace), True)
            # Set up Work Space Environments
            out_workspace_path_split = os.path.split(outWorkSpace)
            workSpaceTail = out_workspace_path_split[1]
            # arcpy.env.scratchWorkspace = out_workspace_path_split[0]
            inFeatureClassTail = os.path.split(inFeatureClass)[1]
            san.arc_print("Gathering describe object information from workspace and input feature class.")
            fc_desc = arcpy.Describe(inFeatureClass)
            # spatial_ref = fc_desc.spatialReference
            ws_desc = arcpy.Describe(outWorkSpace)
            workspace_is_geodatabase = ws_desc.dataType == "Workspace" or ws_desc.dataType == "FeatureDataset"
            if not workspace_is_geodatabase:
                arcpy.AddWarning("You neeed a valid geodatabase as workspace to create a moasic dataset,"
                                 " this tool will put raw raster files in the output workspace selected.")
            fin_output_workspace = outWorkSpace
            temporal_table_path = os.path.join(outWorkSpace, outTemporalName)
            try:
                san.arc_print("Attempting to create Temporal Table in output workspace.")

                arcpy.CreateTable_management(fin_output_workspace, outTemporalName)
                san.add_new_field(temporal_table_path,
                                  arcpy.ValidateFieldName("KernelDensityName", fin_output_workspace), "TEXT")
                san.add_new_field(temporal_table_path, arcpy.ValidateFieldName("Bin_Number", fin_output_workspace),
                                  "LONG")
                san.add_new_field(temporal_table_path, arcpy.ValidateFieldName("DT_Start_Bin", fin_output_workspace),
                                  "DATE", field_alias="Start Bin Datetime")
                san.add_new_field(temporal_table_path, arcpy.ValidateFieldName("DT_End_Bin", fin_output_workspace),
                                  "DATE", field_alias="End Bin Datetime")
                san.add_new_field(temporal_table_path, arcpy.ValidateFieldName("TXT_Start_Bin", fin_output_workspace),
                                  "TEXT", field_alias="Start Bin String")
                san.add_new_field(temporal_table_path, arcpy.ValidateFieldName("TXT_End_Bin", fin_output_workspace),
                                  "TEXT", field_alias="End Bin String")
                san.add_new_field(temporal_table_path, arcpy.ValidateFieldName("Extract_Query", fin_output_workspace),
                                  "TEXT")
                san.add_new_field(temporal_table_path, arcpy.ValidateFieldName("Raster_Path", fin_output_workspace),
                                  "TEXT")
            except:
                arcpy.AddWarning("Could not create Moasic Dataset Time Table. Time enablement is not possible.")
                pass
            try:
                arcpy.RefreshCatalog(outWorkSpace)
            except:
                san.arc_print("Could not refresh catalog.")
                pass
            # Set up Time Deltas and Parse Time String
            san.arc_print("Constructing Time Delta from input time period string.", True)
            time_magnitude, time_unit = san.alphanumeric_split(str(time_interval))
            time_delta = san.parse_time_units_to_dt(time_magnitude, time_unit)
            san.arc_print(
                "Using datetime fields to generate new feature classes in {0}.".format(
                    str(workSpaceTail)))
            san.arc_print("Getting start and final times in start time field {0}.".format(start_time))
            start_time_min, start_time_max = san.get_min_max_from_field(inFeatureClass, start_time)
            # Establish whether to use end time field or only a start time (Single Date Field)
            if end_time and san.field_exist(inFeatureClass, end_time):
                san.arc_print("Using start and end time to grab feature classes whose bins occur within an events "
                              "start or end time.")
                end_time_min, end_time_max = san.get_min_max_from_field(inFeatureClass, end_time)
                start_time_field = start_time
                end_time_field = end_time
                start_time_range = start_time_min
                end_time_range = end_time_max
            else:
                san.arc_print("Using only first datetime start field to construct time bin ranges.")
                start_time_field = start_time
                end_time_field = start_time
                start_time_range = start_time_min
                end_time_range = start_time_max
            if isinstance(bin_start, (datetime.datetime, datetime.date)):
                start_time_range = bin_start
                san.arc_print("Bin Start Time was selected, using {0} as the bin starting time period."
                              .format(str(bin_start)))
            time_bins = san.construct_time_bin_ranges(start_time_range, end_time_range, time_delta)
            san.arc_print("Constructing queries based on datetime ranges.")
            temporal_queries = san.construct_sql_queries_from_time_bin(time_bins, inFeatureClass, start_time_field,
                                                                       end_time_field)
            # Transition to kernel density creation
            time_counter = 0
            temporal_record_table = []
            san.arc_print("Generating kernel densities based on {0} queries.".format(len(temporal_queries)), True)
            for query in temporal_queries:
                try:
                    time_counter += 1
                    san.arc_print("Determining name and constructing query for new feature class.", True)
                    newFCName = "Bin_{0}_{1}".format(time_counter,
                                                     arcpy.ValidateTableName(inFeatureClassTail, fin_output_workspace))
                    if not workspace_is_geodatabase:
                        newFCName = newFCName[0:13]  # Truncate Name if not workspace.
                    san.arc_print(
                        "Creating Kernel Density {0} with query '{1}' as a new raster in {2}.".format(
                            newFCName, str(query), workSpaceTail), True)
                    temporary_layer = arcpy.MakeFeatureLayer_management(inFeatureClass, newFCName, query)
                    # Fall back to a population field of NONE if the named field does not actually exist.
                    if not san.field_exist(inFeatureClass, kernel_pop_field):
                        kernel_pop_field = "NONE"
                    out_raster = arcpy.sa.KernelDensity(temporary_layer, kernel_pop_field, kernel_cell_size,
                                                        kernel_search_rad,
                                                        kernel_area_unit, kernel_out_values, kernel_method)

                    san.arc_print("Saving output raster: {0}.".format(newFCName))
                    raster_path = os.path.join(fin_output_workspace, str(newFCName))
                    out_raster.save(raster_path)
                    start_date_time = time_bins[time_counter - 1][0]
                    end_date_time = time_bins[time_counter - 1][1]
                    start_bin_time_string = str(start_date_time)
                    end_bin_time_string = str(end_date_time)
                    if not workspace_is_geodatabase:
                        arcpy.AddWarning("DBF tables can only accept date fields, not datetimes."
                                         " Please check string field.")
                        start_date_time = start_date_time.date()
                        end_date_time = end_date_time.date()
                    temporal_record_table.append([newFCName, time_counter, start_date_time, end_date_time,
                                                  start_bin_time_string, end_bin_time_string, query, raster_path])
                    del out_raster
                except Exception as e:
                    san.arc_print(
                        "The raster {0} could not be saved. Check arguments.".format(
                            str(newFCName)))
                    arcpy.AddWarning(str(e.args[0]))
                    pass

            san.arc_print("Adding record values to Temporal Table with an insert cursor.")
            table_fields = san.get_fields(temporal_table_path)
            with arcpy.da.InsertCursor(temporal_table_path, table_fields) as cursor:
                for records in temporal_record_table:
                    cursor.insertRow(records)
            san.arc_print("Finished inserting records into the database.")

            if compactBool:
                try:
                    san.arc_print("Compacting workspace.", True)
                    arcpy.Compact_management(outWorkSpace)
                except:
                    san.arc_print("Not a Compact capable workspace.")
                    pass
            san.arc_print("Tool execution complete.", True)
            pass
        else:
            san.arc_print("The desired workspace does not exist. Tool execution terminated.", True)
            arcpy.AddWarning("The desired workspace does not exist.")

    except arcpy.ExecuteError:
        print(arcpy.GetMessages(2))
    except Exception as e:
        san.arc_print(str(e.args[0]))
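
A hedged usage sketch for temporal_kernel_density(); the geodatabase path, feature class, and field names below are placeholders, and it assumes the san helper module parses an interval string like "7 days".

# Hypothetical call: weekly kernel densities from a point feature class.
temporal_kernel_density(
    inFeatureClass=r"C:\data\events.gdb\incidents",  # placeholder input
    outWorkSpace=r"C:\data\output.gdb",              # geodatabase, so a mosaic is possible
    outTemporalName="KD_TemporalTable",
    start_time="REPORT_DT", end_time=None,           # single datetime field
    time_interval="7 days",
    kernel_pop_field="NONE",
    kernel_cell_size=50,
    kernel_search_rad=500,
    kernel_area_unit="SQUARE_MILES")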
Example 3
def main():
    try:
        outGDBFullPath = arcpy.GetParameterAsText(0)
        ##        outGDBName = arcpy.GetParameterAsText(1)
        inputFC = arcpy.GetParameterAsText(1)
        routeFieldName = arcpy.GetParameterAsText(2)
        fromDateField = arcpy.GetParameterAsText(3)
        toDateField = arcpy.GetParameterAsText(4)
        defaultFromDate = arcpy.GetParameterAsText(5)
        inputCalibrationFC = arcpy.GetParameterAsText(6)
        inputCalibrationMField = arcpy.GetParameterAsText(7)
        lrsNetworks = [arcpy.GetParameterAsText(8)]

        inputDsc = arcpy.Describe(inputFC)
        sr = inputDsc.spatialReference

        #Get the route ID field from the source so we can create the schema using the same field length
        routeField = arcpy.ListFields(inputFC, routeFieldName)[0]
        lrsActivities = [
            'Calibrate Route', 'Cartographic Realignment', 'Create Route',
            'Extend Route', 'Realign Gap Route', 'Realign Overlapping Route',
            'Realign Route', 'Reassign Route', 'Retire Route', 'Reverse Route',
            'Shorten Route'
        ]

        arcpy.AddMessage('Creating Roads and Highways schema')
        GenerateRAndHSchema.makeALRS(outGDBFullPath, sr, lrsNetworks,
                                     lrsActivities, routeField.length)
        tempFC = os.path.join(outGDBFullPath, 'TempInput')
        routeTable = os.path.join(outGDBFullPath, 'Route')
        centerlineFC = os.path.join(outGDBFullPath, 'Centerline')
        centerlineSequence = os.path.join(outGDBFullPath, 'CenterlineSequence')
        calibrationPoint = os.path.join(outGDBFullPath, 'CalibrationPoint')
        #Delete the routeID from the route and calibration point feature classes so they can take on the route ID of the source
        arcpy.DeleteField_management(routeTable, ['ROUTEID'])
        arcpy.AddField_management(centerlineFC, "ROADWAYIDGUID", "GUID")
        arcpy.AddField_management(centerlineSequence, "ROADWAYIDGUID", "GUID")

        upperInputFieldNames = []
        for field in inputDsc.fields:
            upperInputFieldNames += [field.name.upper()]
        if 'ROUTENAME' in upperInputFieldNames:
            arcpy.DeleteField_management(routeTable, ['ROUTENAME'])
            arcpy.AddMessage('Deleted ROUTENAME')
        arcpy.DeleteField_management(calibrationPoint, ['ROUTEID'])

        arcpy.AddMessage('Loading routes')
        routeFields = addMissingFieldsToTarget(inputFC, routeTable,
                                               [fromDateField, toDateField])
        inFeatures = arcpy.SearchCursor(inputFC)
        routeTableCursor = arcpy.InsertCursor(routeTable)
        cCursor = arcpy.InsertCursor(centerlineFC)
        csCursor = arcpy.InsertCursor(centerlineSequence)
        arcpy.AddMessage('Writing routes.')
        x = 1
        try:
            for feature in inFeatures:
                arcpy.AddMessage('Loading route with RouteID ' +
                                 str(feature.getValue(routeFieldName)))
                guid = '{' + str(uuid.uuid4()) + '}'
                newRoute = routeTableCursor.newRow()
                for field in routeFields:
                    newRoute.setValue(field, feature.getValue(field))
                if fromDateField and fromDateField != '#':
                    newRoute.FROMDATE = feature.getValue(fromDateField)
                else:
                    newRoute.FROMDATE = defaultFromDate
                if toDateField and toDateField != '#':
                    newRoute.TODATE = feature.getValue(toDateField)
                routeTableCursor.insertRow(newRoute)
                newCS = csCursor.newRow()
                newCS.ROUTEID = feature.getValue(routeFieldName)
                newCS.FROMDATE = defaultFromDate
                newCS.ROADWAYID = x
                newCS.ROADWAYIDGUID = guid
                newCS.NETWORKID = 1
                csCursor.insertRow(newCS)
                newCenterline = cCursor.newRow()
                newCenterline.Shape = feature.getValue(inputDsc.shapeFieldName)
                newCenterline.RoadwayID = x
                newCenterline.ROADWAYIDGUID = guid
                newCenterline.FROMDATE = defaultFromDate
                cCursor.insertRow(newCenterline)
                x += 1

        finally:
            del routeTableCursor
            del cCursor
            del csCursor
            del inFeatures
            del feature

        #Load calibration points
        arcpy.AddMessage('Loading calibration points')
        calibrationFields = addMissingFieldsToTarget(inputCalibrationFC,
                                                     calibrationPoint,
                                                     [inputCalibrationMField])
        inCalCursor = arcpy.SearchCursor(inputCalibrationFC)
        outCalCursor = arcpy.InsertCursor(calibrationPoint)

        try:
            for calPnt in inCalCursor:
                newCal = outCalCursor.newRow()
                for field in calibrationFields:
                    ##arcpy.AddMessage('Calculating calibration point field: ' + field)
                    newCal.setValue(field, calPnt.getValue(field))
                newCal.MEASURE = calPnt.getValue(inputCalibrationMField)
                newCal.NETWORKID = 1
                newCal.Shape = calPnt.Shape
                outCalCursor.insertRow(newCal)
        finally:
            del inCalCursor
            del outCalCursor

        #Build indexes
        idx.RecreateIndexes(centerlineSequence, calibrationPoint, routeTable,
                            [routeFieldName])

        #Set file geodatabase as output
        arcpy.SetParameterAsText(9, outGDBFullPath)
    except GenerateRAndHSchema.DataExistsError:
        pass
    except arcpy.ExecuteError:
        msgs = arcpy.GetMessages(2)
        arcpy.AddError(msgs)
    except:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]

        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
            sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages(2) + "\n"

        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)
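
The helper addMissingFieldsToTarget() is not part of this excerpt. Based on how it is called (copy onto the target any source fields it lacks, skip the excluded date fields, and return the field names that can be copied row by row), a hypothetical reconstruction might look like this; the type mapping is simplified.

def addMissingFieldsToTarget(sourceFC, targetTable, excludeFields):
    # Hypothetical sketch: mirror missing source fields onto the target and
    # return the names that are safe to copy with row cursors.
    TYPE_MAP = {'String': 'TEXT', 'Integer': 'LONG', 'SmallInteger': 'SHORT',
                'Double': 'DOUBLE', 'Single': 'FLOAT', 'Date': 'DATE'}
    targetNames = [f.name.upper() for f in arcpy.ListFields(targetTable)]
    copied = []
    for field in arcpy.ListFields(sourceFC):
        if field.type not in TYPE_MAP or field.name in excludeFields:
            continue  # skip geometry, OID, GUID and the excluded fields
        if field.name.upper() not in targetNames:
            arcpy.AddField_management(targetTable, field.name,
                                      TYPE_MAP[field.type],
                                      field_length=field.length)
        copied.append(field.name)
    return copied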
Example 4
            # ignore nulls if box checked
            if null_check:
                validate_domains(in_fc, domain_dic, True)
            else:
                validate_domains(in_fc, domain_dic)
            # delete ERROR_MESSAGE field if no problems found
            del_error_field(in_fc)
        else:
            arcpy.AddMessage(
                "The workspace selected does not contain any domains!")

        arcpy.AddMessage("Done!")

    except arcpy.ExecuteError:
        # Get the tool error messages
        msgs = arcpy.GetMessages(1)  # severity of warning or higher
        arcpy.AddError(msgs)

    except:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[-1]
        errinfo = str(sys.exc_info()[1])

        # Concatenate information together concerning the error into a message string
        pymsg = "*PYTHON ERRORS*:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + errinfo
        msgs = "\n*ArcPy ERRORS*:\n" + arcpy.GetMessages(1) + "\n"

        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)
    #Builds a point-based feature class that includes the extracted values from the selected rasters.
    arcpy.sa.ExtractMultiValuesToPoints(geoPoints, targetSet, "NONE")
    message2 = arcpy.GetMessageCount()
    print("ExtractMultiValuesToPoints Tool: " + arcpy.GetMessage(message2 - 1))

    #Extracts the tabular information from the point file into a csv formatted table.
    geographyTable = filename[0] + "_Table.csv"
    arcpy.TableToTable_conversion(geoPoints, outworkspace, geographyTable)
    arcpy.DeleteFeatures_management(geoPoints)
    messageC = arcpy.GetMessageCount()
    print("TableToTable_Conversion Tool: " + arcpy.GetMessage(messageC - 1))
    print("Table saved as csv file: " + outworkspace + geographyTable)

except arcpy.ExecuteError:
    print(arcpy.GetMessages(2))

finally:
    arcpy.CheckInExtension("Spatial")
    print("You have checked in Spatial Analyst Extension.")

# Data cleaning, transformation, and visualization using Python Pandas module
fullPath = outworkspace + geographyTable
#fullPath = outworkspace+"\\"+geographyTable
sd = pd.read_csv(fullPath)  #loads csv file into pandas dataframe

# Transforms column names and create new dataframe to compare primary presidential candidates.
sd.rename(columns={
    'G16PREDCLI': 'Clinton',
    'G16PRERTRU': 'Trump',
    'G16PRELJOH': 'Johnson'
}, inplace=True)

    def execute(self, parameters, messages):

        arcpy.env.overwriteOutput = True

        for param in parameters:
            arcpy.AddMessage("Parameter: %s = %s" % (param.name, param.valueAsText))

        input_bathymetry = parameters[0].valueAsText
        depths = parameters[1].valueAsText
        input_environment = parameters[2].valueAsText
        environment_append = parameters[3].valueAsText
        temporary_directory = parameters[4].valueAsText
        output_raster = parameters[5].valueAsText

        # Define the options the script will use later
        # load depth strings from Includes.py
        depths_list = load_depth_string(depths)
        arcpy.AddMessage(depths_list)

        if not os.path.exists(temporary_directory):
            os.makedirs(temporary_directory)

        arcpy.ResetEnvironments()
        arcpy.env.overwriteOutput = "true"

        # Set environment variables
        env.mask = ""
        arcpy.AddMessage("Mask is: " + str(arcpy.env.mask))
        base_bathymetry = os.path.join(input_bathymetry, "bath" + str(int(float(depths_list[0]))))
        description = arcpy.Describe(base_bathymetry)
        cellsize1 = description.children[0].meanCellHeight
        env.cellSize = cellsize1
        arcpy.AddMessage("Cell size is: " + str(arcpy.env.cellSize))
        arcpy.AddMessage(base_bathymetry)
        extraster = Raster(base_bathymetry)
        extent1 = extraster.extent
        env.extent = extent1
        arcpy.AddMessage("Extent is: " + str(arcpy.env.extent))
        arcpy.env.workspace = temporary_directory
        spf = arcpy.Describe(base_bathymetry).spatialReference
        arcpy.AddMessage("Coord sys is: " + str(spf.name))

        try:
            # loop through the layers
            for item in depths_list:
                depth = int(float(item))
                arcpy.AddMessage("Resizing layer " + str(depth) + " " + input_environment + "/" + environment_append + str(depth))
                arcpy.ProjectRaster_management(os.path.join(input_environment, environment_append + str(depth)),
                                               os.path.join(temporary_directory, environment_append + "a" + str(depth)), spf)
                TempData = arcpy.sa.ApplyEnvironment(os.path.join(temporary_directory, environment_append + "a" + str(depth)))
                arcpy.AddMessage("Extracting " + str(depth) + " to mask")
                outExtractByMask = ExtractByMask(TempData, os.path.join(input_bathymetry, "bath" + str(depth)))
                outExtractByMask.save(temporary_directory + "/clip" + str(depth))
                arcpy.AddMessage("Adding " + str(depth) + " to final layer")
                arcpy.Mosaic_management(temporary_directory + "/clip" + str(depth), temporary_directory + "/clip" + str(int(float(depths_list[0]))),
                                        "LAST")
                if depth == int(float(depths_list[-1])):
                    arcpy.AddMessage("Creating the final layer for you, which will be called " + str(output_raster))
                    arcpy.CopyRaster_management(temporary_directory + "/clip" + str(int(float(depths_list[0]))), output_raster)

                arcpy.AddMessage("next layer " + str(depth))

        except:
            arcpy.AddMessage(arcpy.GetMessages())
            arcpy.AddMessage("Something has gone wrong likely with this: " + str(depth))

        arcpy.AddMessage("Processing complete")
Example 7
def split_line_pt():
    """
    Method name : SplitLineAtPoint method
    Summary     : Splits lines at points
    """
    try:
        arcpy.AddMessage(u"Processing started:")

        in_line_fc = arcpy.GetParameterAsText(0)
        in_point_fc = arcpy.GetParameterAsText(1)
        out_pt_fc = arcpy.GetParameterAsText(2)

        # Workspace type
        wstype = arcpy.Describe(os.path.dirname(out_pt_fc)).workspacetype

        # Check that a feature class with the same name does not already exist in the workspace
        if arcpy.Exists(out_pt_fc):
            raise AlreadyExistError

        # Get the field information used to create the cursors via the create_fieldinfo function
        search_fields_name, search_fields_type, use_fields_name, spref = create_fieldinfo(
            in_line_fc, out_pt_fc)

        # Create a search cursor on the input feature class
        incur = arcpy.da.SearchCursor(in_line_fc, search_fields_name)
        # Create an insert cursor on the output feature class
        outcur = arcpy.da.InsertCursor(out_pt_fc, use_fields_name)

        # List of OIDs of points that could not be cut (collected across all input lines)
        error_list = []

        for inrow in incur:

            newValue = []
            # Shapefile output cannot store NULL values, so store an empty
            # string or 0 according to the field type
            if wstype == "FileSystem":
                for j, value in enumerate(inrow):
                    if value is None:
                        if search_fields_type[j] == "String":
                            newValue.append("")
                        elif search_fields_type[j] in [
                                "Double", "Integer", "Single", "SmallInteger"
                        ]:
                            newValue.append(0)
                        else:
                            newValue.append(value)
                    else:
                        newValue.append(value)
            # A geodatabase can store NULL values
            else:
                newValue = list(inrow)

            cutlines = []
            cutlines.insert(0, newValue[-1])

            # Build the list of input points
            points = [
                row for row in arcpy.da.SearchCursor(in_point_fc,
                                                     ["SHAPE@", "OID@"])
            ]

            # Keep only the points that overlap the line
            overlap_pt = []
            for pt in points:
                if newValue[-1].contains(pt[0]):
                    a = newValue[-1].queryPointAndDistance(pt[0])
                    overlap_pt.append([a, pt[-1]])
            # Sort by distance from the start of the line
            overlap_pt.sort(key=lambda x: x[0][1])

            # If no points overlap the line
            if len(overlap_pt) == 0:
                # Output the input line unchanged
                outcur.insertRow(newValue)
            # If exactly one point overlaps the line, hand off to the cut_line method
            elif len(overlap_pt) == 1:
                cutline1, cutline2 = cut_line(newValue[-1],
                                              overlap_pt[0][0][0])

                # If the cut failed, record the point's OID
                if cutline1.length == 0 or cutline2.length == 0:
                    error_list.append(overlap_pt[0][-1])
                else:
                    newValue[-1] = cutline1
                    outcur.insertRow(newValue)
                    newValue[-1] = cutline2
                    outcur.insertRow(newValue)

            # If two or more points overlap the line, hand off to the recursive recut_line method
            else:
                end = []
                # Recursively cut the line at the points
                end = recut_line(cutlines, overlap_pt, end, error_list)
                # Remove duplicate lines from the final array of lines
                unique_end = []
                for overlap_line in end[0]:
                    if overlap_line not in unique_end:
                        unique_end.append(overlap_line)
                # Write each line from the final array into newValue and insert it
                for out_line in unique_end:
                    newValue[-1] = out_line
                    outcur.insertRow(newValue)

        # Clean up
        del outcur
        del incur

        if len(error_list) != 0:
            for error in error_list:
                arcpy.AddMessage(u"Failed to split at the point with OBJECTID:{1} "
                                 u"from input points {0}. Please split it manually.".format(
                                     os.path.basename(in_point_fc), error))

        arcpy.AddMessage(u"Processing finished:")
    except AlreadyExistError:
        arcpy.AddError(u"{0} already exists".format(out_pt_fc))
    except arcpy.ExecuteError:
        arcpy.AddError(arcpy.GetMessages(2))
    except Exception as e:
        arcpy.AddError(e.args[0])
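
cut_line() is defined elsewhere in this tool. Given that it receives the line geometry plus the snapped point returned by queryPointAndDistance() and must return two pieces (a zero-length piece signals a failed cut to the caller), one possible sketch, assuming arcpy 10.3+ geometry methods, is:

def cut_line(line, point_geom):
    # Hypothetical sketch: split the polyline at the measure of the snapped
    # point; a piece of length 0 tells the caller the cut failed.
    m = line.measureOnLine(point_geom)
    return (line.segmentAlongLine(0.0, m),
            line.segmentAlongLine(m, line.length))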
def RunTest():
    try:
        arcpy.AddMessage("Starting Test: LocalPeaks")
        
        if arcpy.CheckExtension("Spatial") == "Available":
            arcpy.CheckOutExtension("Spatial")
        else:
            # Raise a custom exception
            raise Exception("LicenseError")        
        
        # WORKAROUND
        print "Creating New Scratch Workspace (Workaround)"    
        TestUtilities.createScratch()
            
        # Verify the expected configuration exists
        inputPolygonFC =  os.path.join(TestUtilities.inputGDB, "samplePolygonArea")
        inputSurface =  os.path.join(TestUtilities.defaultGDB, "Jbad_SRTM_USGS_EROS")
        outputPointsFC =  os.path.join(TestUtilities.outputGDB, "LocalPeaks")
        toolbox = TestUtilities.toolbox
        
        # Check For Valid Input
        objects2Check = []
        objects2Check.extend([inputPolygonFC, inputSurface, toolbox])
        for object2Check in objects2Check:
            desc = arcpy.Describe(object2Check)
            if desc is None:
                raise Exception("Bad Input")
            else:
                print("Valid Object: " + desc.Name)
        
        # Set environment settings
        print "Running from: " + str(TestUtilities.currentPath)
        print "Geodatabase path: " + str(TestUtilities.geodatabasePath)
        
        arcpy.env.overwriteOutput = True
        arcpy.env.scratchWorkspace = TestUtilities.scratchGDB
        arcpy.ImportToolbox(toolbox, "VandR")
    
        inputFeatureCount = int(arcpy.GetCount_management(inputPolygonFC).getOutput(0))
        print("Input FeatureClass: " + str(inputPolygonFC))
        print("Input Feature Count: " + str(inputFeatureCount))

        if inputFeatureCount < 1:
            print("Invalid Input Feature Count: " + str(inputFeatureCount))
           
        numberOfPeaks = 3
           
        ########################################################
        # Execute the Model under test:
        arcpy.FindLocalPeaks_VandR(inputPolygonFC, numberOfPeaks, inputSurface, outputPointsFC)
        ########################################################
    
        # Verify the results
        outputFeatureCount = int(arcpy.GetCount_management(outputPointsFC).getOutput(0))
        print("Output FeatureClass: " + str(outputPointsFC))
        print("Output Feature Count: " + str(outputFeatureCount))

        if outputFeatureCount < numberOfPeaks:
            print("Invalid Output Feature Count: " + str(outputFeatureCount))
            raise Exception("Test Failed")
            
        # WORKAROUND: delete scratch db
        print("Deleting Scratch Workspace (Workaround)")
        TestUtilities.deleteScratch()

        print("Test Successful")
                
    except arcpy.ExecuteError: 
        # Get the tool error messages 
        msgs = arcpy.GetMessages() 
        arcpy.AddError(msgs) 
    
        # return a system error code
        sys.exit(-1)
        
    except Exception as e:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
    
        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"
    
        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)
    
        # return a system error code
        sys.exit(-1)
        
    finally:
        # Check in the Spatial Analyst extension
        arcpy.CheckInExtension("Spatial")
Example 9
def calculatePercentAreaOfPolygonAInPolygonB(input_geodatabase, fcPolygon1,
                                             fcPolygon2):

    # Import the necessary modules
    import arcpy
    import sys
    import traceback

    # Set the current workspace
    arcpy.env.workspace = input_geodatabase

    # Define custom exception
    class gdbEmptyError(Exception):
        pass

    # Test if there are any feature classes in the specified folder or geodatabase
    try:
        fclist = []
        # Walk through the input folder or geodatabase and find all feature classes; add their names to the fclist
        checkwalk = arcpy.da.Walk(input_geodatabase, datatype="FeatureClass")
        for dirpath, dirnames, filenames in checkwalk:
            for file in filenames:
                fclist.append(str(file))

        # If fclist is empty, then there were no feature classes in the specified folder or geodatabase
        if not fclist:
            raise gdbEmptyError

    except gdbEmptyError:
        print(
            "gdbEmptyError: Input geodatabase or folder appears to contain no feature classes, or may not exist."
        )

    try:

        # Calculate geodesic area (most accurate) of fcPolygon2 in square kilometers.
        arcpy.AddGeometryAttributes_management(fcPolygon2, "AREA_GEODESIC",
                                               "KILOMETERS")

        # Find areas where both polygon feature classes overlap.
        poly_inter = "polygons_intersect"
        arcpy.Intersect_analysis([fcPolygon1, fcPolygon2], poly_inter)

        # Calculate geodesic area (most accurate) of fcPolygon2 within area of fcPolygon1 in square kilometers.
        arcpy.AddGeometryAttributes_management(poly_inter, "AREA_GEODESIC",
                                               "KILOMETERS")

        # Create a dictionary
        poly_dict = dict()
        # Search the intersect output layer with a cursor
        with arcpy.da.SearchCursor(poly_inter,
                                   ["FIPS", "AREA_GEO"]) as scursor:
            for row in scursor:
                fips = row[0]
                if fips in poly_dict:
                    # If this id has already been read, add the geodesic area
                    # to the area already recorded for that id
                    poly_dict[fips] += row[1]
                else:
                    # If the id has not been read yet, create an entry for it
                    # and record the geodesic area
                    poly_dict[fips] = row[1]

        # Create a new field in fcPolygon2 to store the aggregated geodesic areas of the intersect output
        inter_area = "inter_area"
        arcpy.AddField_management(fcPolygon2, inter_area, "DOUBLE")

        # Use the update cursor to populate the new field
        with arcpy.da.UpdateCursor(fcPolygon2,
                                   ["FIPS", inter_area]) as ucursor:
            for row in ucursor:
                if row[0] in poly_dict:
                    row[1] = poly_dict[row[0]]
                else:
                    row[1] = 0
                ucursor.updateRow(row)

        # Create a new field in fcPolygon2 to store the calculated percent value
        inter_pct = "inter_pct"
        arcpy.AddField_management(fcPolygon2, inter_pct, "DOUBLE")

        # Populate the percent value field by dividing the intersection output area by the fcPolygon2 area
        arcpy.CalculateField_management(fcPolygon2, inter_pct,
                                        "!inter_area!/!AREA_GEO!", "PYTHON3")

    except arcpy.ExecuteError:
        # Get the tool error messages
        msgs = arcpy.GetMessages(2)

        # Return tool error messages for use with a script tool
        arcpy.AddError(msgs)

        # Print tool error messages for use in Python
        print("Tool Error:", msgs)

    except:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]

        # Put error information into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
            sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages(2) + "\n"

        # Return python error messages for use in script tool or Python window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)

        # Print Python error messages for use in Python / Python window
        print(pymsg)
        print(msgs)
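
A hedged example call; the geodatabase and feature class names are placeholders, and both inputs are assumed to carry the FIPS key field that the function hard-codes.

# Hypothetical usage: percent of each county covered by flood zones.
calculatePercentAreaOfPolygonAInPolygonB(
    r"C:\data\analysis.gdb",  # placeholder geodatabase
    "flood_zones",            # fcPolygon1: the overlapping polygons
    "counties")               # fcPolygon2: receives inter_area and inter_pct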
def make_workspace_copy(inputfeatures, theworkspace, dotopologycheck, dosimplify, dosimplify_method, dosimplify_tolerance, thefield):
    """This function tests the input features for the topology error 'Must Be Single Part'
    and returns the origin feature's Object ID of the errant features to the calling module. Beware:
    the origin feature's Object ID is that of the COPY of the input features, and the Object IDs of the copy
    may differ from those of the source "inputfeatures"! This is why the function passes back the name of the COPY, so that
    processing can continue on that feature class, where the topologically errant features will be correctly identified
    by the values from the Export Topology Errors geoprocessing tool."""

##    arcpy.AddMessage("funcs.make_workspace_copy")

    # Process the roads with the Simplify Line tool using the POINT_REMOVE option at a
    # tolerance of 0.001 meters so that redundant vertices on straight lines are removed.
    # If the user specifies their own simplify parameters, that are not POINT_REMOVE and
    # whose tolerance is > 0.001 meters, that simplification is done additionally, afterwards:

    # This section builds the feature dataset, feature class names, and topology name:
    badfids = set()
    fdname = "KDOT_Topology_Check" #the feature dataset name for the topology check
    fdnamepath = theworkspace + "\\" + fdname #the complete pathname of the feature dataset
    tpname = "CheckTopology" #the name of the topology
    topology_name = fdnamepath + "\\" + tpname #the complete pathname of the topology
##    arcpy.AddMessage("make_workspace_copy, fdnamepath: "+fdnamepath)
##    arcpy.AddMessage("make_workspace_copy, topology_name: "+topology_name)
    fcname = arcpy.ParseTableName(inputfeatures, theworkspace) #Split the inputfeatures to find the name from the path.
    namelist = fcname.split(", ") #the feature class name without the path. Used in creating a copy in the feature dataset.
##    arcpy.AddMessage('fcname = '+ namelist[2])
    topology_featureclass = fdnamepath +'\\' + namelist[2] + '_check' #the copy of inputfeatures used for the topology check
    topology_featureclass_errors = namelist[2] + '_errors' # the basename used for the export topology errors tool
##    arcpy.AddMessage(topology_featureclass)
    topology_featureclass_errors_line = fdnamepath +'\\' + namelist[2] + '_errors_line' #the output name of LINE errors from the export topology errors tool

    #Delete the feature dataset if it currently exists:
    doesexistfd = arcpy.Exists(fdnamepath)
    try:
        if doesexistfd:
            arcpy.AddMessage('Previous topology check feature dataset exists. Now deleting it.')
            arcpy.Delete_management(fdnamepath)
    except arcpy.ExecuteError:
        print(arcpy.GetMessages(2))
    except Exception as e:
        print(e.args[0])

    #Re-create the topology feature dataset:
    arcpy.AddMessage('Generating the topology check scratch feature dataset')
    arcpy.CreateFeatureDataset_management(theworkspace, fdname, inputfeatures)

    #Make a copy of the input roads in the feature dataset that contains the topology:
    try:
        arcpy.AddMessage('Generating a copy of the input feature class in the scratch feature dataset')
        #This replaces the function "arcpy.CopyFeatures_management" so that we can retain the original FID:
##        make_copies_of_features(inputfeatures,  topology_featureclass, "Original_OID")
        make_copies_of_features(inputfeatures,  topology_featureclass, thefield)
##        arcpy.CopyFeatures_management(inputfeatures, topology_featureclass)
    except arcpy.ExecuteError:
        print(arcpy.GetMessages(2))
    except Exception as e:
        print(e.args[0])

    #Perform the topology check, if checked ON in input parameters:
##    if(VERBOSE): arcpy.AddMessage('make_workspace_copy, dotopology = ' + str(dotopologycheck))
##    if(dotopologycheck == True):
    if(str(dotopologycheck) == 'true'):
        arcpy.AddMessage('Creating the topology')
        arcpy.CreateTopology_management(fdnamepath, tpname)

        #Add the input roads to the topology
        arcpy.AddMessage('Adding the copy of the input features to the topology')
        arcpy.AddFeatureClassToTopology_management(topology_name, topology_featureclass, 1, 1)
        #Add a rule:
        arcpy.AddMessage('Adding rule "Must Be Single Part" to the topology')
        arcpy.AddRuleToTopology_management(topology_name,"Must Be Single Part (Line)", topology_featureclass)
        #Validate the topology:
        arcpy.AddMessage('Validating the topology')
        arcpy.ValidateTopology_management(topology_name)
        #Export the errant features to a feature class
        arcpy.AddMessage('Exporting the topologically-errant feature to feature class ' + topology_featureclass_errors)
        arcpy.ExportTopologyErrors_management(topology_name,fdnamepath,topology_featureclass_errors)
        arcpy.AddMessage("Completed exporting topology errors")

        #Extract the values from field "OriginObjectID". This is a field generated to identify the OID's of errant features. This is the ORIGINAL OID of the input features. Do NOT confuse this with "OriginalOID":
##        arcpy.AddMessage('Retrieving the object ID''s of the errant features')
        with arcpy.da.SearchCursor(topology_featureclass_errors_line,["OriginObjectID"]) as cursor:
            for row in cursor:
##                arcpy.AddMessage(str(row[0]))
                badfids.add(row[0])

    #Perform at the least, the default line simplification of 0.001 meters or 0.00328084 feet
    #SimplifyLine(mergedFeatures, simplifiedFeatures, dosimplify_method, dosimplify_tolerance, "RESOLVE_ERRORS", "KEEP_COLLAPSED_POINTS", "CHECK")
    simplified_featureclass = fdnamepath +'\\_simplified_roads'
    arcpy.SimplifyLine_cartography(topology_featureclass, simplified_featureclass, dosimplify_method, dosimplify_tolerance, False, False, False )

##    index_OID_Original, fieldlist =  ListFieldData(simplified_featureclass, thefield)
##    if(VERBOSE): arcpy.AddMessage("List of indecies and field names after copy, with original OID, and simplification of line:")
##    if(VERBOSE): arcpy.AddMessage("index for original OID field: "+thefield+" is "+str(index_OID_Original))

    arcpy.AddMessage('completed creating a workspace copy....')
##    arcpy.AddMessage('completed funcs.make_workspace_copy')
    return badfids, simplified_featureclass
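
A hedged invocation sketch for make_workspace_copy(); the paths and ID field name are placeholders. Note that dotopologycheck is compared against the string 'true', as script-tool booleans arrive as text.

# Hypothetical call: topology-check and simplify a roads layer.
bad_oids, simplified_fc = make_workspace_copy(
    r"C:\data\kdot.gdb\roads",  # placeholder input feature class
    r"C:\data\kdot.gdb",
    'true',                     # run the Must Be Single Part check
    True, "POINT_REMOVE", "0.001 Meters",
    "Original_OID")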
Example 11
    def get_usable_area(self, csi_raster="", apply_wua=False):
        # apply_wua = BOOL -- if True, weight the area by the mean cHSI (a value between 0.0 and 1.0)
        self.set_env()
        arcpy.env.workspace = self.cache

        try:
            ras_csi = arcpy.Raster(csi_raster + ".tif")
        except:
            ras_csi = arcpy.Raster(csi_raster)

        self.logger.info(
            "   * snapping to ProjectArea.shp within CHSI raster (%s)..." %
            csi_raster)
        ras4shp = Con(~IsNull(self.ras_project), Con(~IsNull(ras_csi), Int(1)))
        if apply_wua:
            ras4wua = Con(~IsNull(self.ras_project),
                          Con(~IsNull(ras_csi), Float(ras_csi)))

        self.logger.info(
            "   * converting snapped CHSI raster to Polygon shapefile:")
        try:
            shp_name = self.cache + "aua%s.shp" % str(self.cache_count)
            self.logger.info("     " + shp_name)
            arcpy.RasterToPolygon_conversion(ras4shp, shp_name, "NO_SIMPLIFY")
        except arcpy.ExecuteError:
            self.logger.info(
                "ExecuteERROR: (arcpy) in RasterToPolygon_conversion.")
            self.logger.info(arcpy.GetMessages(2))
            arcpy.AddError(arcpy.GetMessages(2))
            return -1
        except Exception as e:
            self.logger.info(
                "ExceptionERROR: (arcpy) in RasterToPolygon_conversion.")
            self.logger.info(e.args[0])
            arcpy.AddError(e.args[0])
            return -1
        except:
            self.logger.info("ERROR: Shapefile conversion failed.")
            return -1

        self.logger.info("   * calculating usable habitat area ... ")
        try:
            arcpy.AddField_management(shp_name, "F_AREA", "FLOAT", 9)
        except:
            pass
        try:
            arcpy.CalculateGeometryAttributes_management(
                shp_name,
                geometry_property=[["F_AREA", "AREA"]],
                area_unit=self.area_unit)
            self.logger.info("   * summing up area ...")
            area = 0.0
            if apply_wua:
                mean_csi = float(
                    arcpy.GetRasterProperties_management(
                        ras4wua, property_type="MEAN")[0])
                self.logger.info("       * weighing area with cHSI = %s ..." %
                                 str(mean_csi))
            else:
                mean_csi = 1.0
            with arcpy.da.UpdateCursor(shp_name, "F_AREA") as cursor:
                for row in cursor:
                    try:
                        area += float(row[0]) * mean_csi
                    except:
                        self.logger.info("       WARNING: Bad value (" +
                                         str(row) + ")")
        except arcpy.ExecuteError:
            self.logger.info(
                "ExecuteERROR: (arcpy) in CalculateGeometryAttributes_management."
            )
            self.logger.info(arcpy.GetMessages(2))
            arcpy.AddError(arcpy.GetMessages(2))
            return -1
        except Exception as e:
            self.logger.info(
                "ExceptionERROR: (arcpy) in CalculateGeometryAttributes_management."
            )
            self.logger.info(e.args[0])
            arcpy.AddError(e.args[0])
            return -1
        except:
            self.logger.info("ERROR: Area calculation failed.")
            return -1

        self.cache_count += 1
        self.result = area * self.ft2ac
        self.logger.info("   * result: " + str(area * self.ft2ac) + " " +
                         self.unit_str)
Example 12
def geoprocess(inFilepathname, outPath, outFilename):
    # Overwrite pre-existing files
    arcpy.env.overwriteOutput = True

    SNODAS_errorMessage = ""
    inFilepath, inFilename = os.path.split(inFilepathname)

    # define file names of temp layers
    unsignedRaster = inFilepath + "/unsigned"
    projectedName = "albers"
    projectedRaster = inFilepath + "/" + projectedName

    #prepare the header file
    hdrFilepathname = inFilepathname[:-3] + "Hdr"

    o = open(hdrFilepathname, 'w')
    o.write("byteorder M\n")
    o.write("layout bil\n")
    o.write("nbands 1\n")
    o.write("nbits 16\n")
    o.write("ncols 6935\n")
    o.write("nrows 3351\n")
    o.write("ulxmap -124.729583333331703\n")
    o.write("ulymap 52.871249516804028\n")
    o.write("xdim 0.00833333333\n")
    o.write("ydim 0.00833333333\n")
    o.close()

    # CONVERT .BIL FILE FROM SIGNED TO UNSIGNED INTEGER
    print("CONVERT .BIL FILE FROM SIGNED TO UNSIGNED INTEGER")
    try:
        # Check out the ArcGIS Spatial Analyst extension license
        arcpy.CheckOutExtension("Spatial")

        # Execute Con to convert raster data from signed integer to unsigned integer
        outCon = Con(
            Raster(inFilepathname) >= 32768,
            Raster(inFilepathname) - 65536, Raster(inFilepathname))
        # replace negative values with 0"
        outCon2 = Con(outCon < 0, 0, outCon)

        # Save the output
        outCon2.save(unsignedRaster)

    except arcpy.ExecuteError:
        # Get the tool error messages
        SNODAS_errorMessage = SNODAS_errorMessage + arcpy.GetMessages() + ";"
        print(SNODAS_errorMessage)

    except:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
        # Concatenate information together concerning the error into a message string
        pymsg = tbinfo + ";" + str(sys.exc_info()[1])
        SNODAS_errorMessage = SNODAS_errorMessage + "Failed at CONVERT SIGNED TO UNSIGNED;"
        print(pymsg)

    # DEFINE PROJECTION FOR CONVERTED FILE
    print("DEFINE PROJECTION FOR CONVERTED FILE")
    try:
        #coordinateSystem = "C:\Program Files (x86)\ArcGIS\Desktop10.0\Coordinate Systems\Geographic Coordinate Systems\World\WGS 1984.prj"
        coordinateSystem = "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984' \
        ,6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]"

        arcpy.DefineProjection_management(unsignedRaster, coordinateSystem)

    except:
        # Get the tool error messages
        SNODAS_errorMessage = SNODAS_errorMessage + "Failed at DEFINE PROJECTION;"
        print(arcpy.GetMessages())

    # REPROJECT CONVERTED FILE FROM WGS84 TO ALBERS EQUAL AREA CONIC USGS
    print(
        "REPROJECT CONVERTED FILE FROM WGS84 TO ALBERS EQUAL AREA CONIC USGS")
    try:
        #arcpy.ProjectRaster_management(inProj, outProj, "USA Contiguous Albers Equal Area Conic USGS.prj" \
        #,"BILINEAR", "#","WGS_1984_(ITRF00)_To_NAD_1983", "#", "#")
        albers_wkt = ("PROJCS['USA_Contiguous_Albers_Equal_Area_Conic_USGS_version',"
                      "GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',"
                      "SPHEROID['GRS_1980',6378137.0,298.257222101]],"
                      "PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],"
                      "PROJECTION['Albers'],PARAMETER['False_Easting',0.0],"
                      "PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',-96.0],"
                      "PARAMETER['Standard_Parallel_1',29.5],PARAMETER['Standard_Parallel_2',45.5],"
                      "PARAMETER['Latitude_Of_Origin',23.0],UNIT['Meter',1.0]]")
        arcpy.ProjectRaster_management(unsignedRaster, projectedRaster, albers_wkt,
                                       "BILINEAR", "#", "WGS_1984_(ITRF00)_To_NAD_1983", "#", "#")
        arcpy.Delete_management(unsignedRaster)

    except:
        # Get the tool error messages
        SNODAS_errorMessage = SNODAS_errorMessage + "Failed at REPROJECTION;"
        print(arcpy.GetMessages())

    # CONVERT REPROJECTED FILE TO IMAGINE .IMG FORMAT
    print("CONVERT REPROJECTED FILE TO IMAGINE .IMG FORMAT")
    try:
        arcpy.RasterToOtherFormat_conversion(projectedRaster, outPath,
                                             "IMAGINE Image")
        arcpy.Delete_management(projectedRaster)
        arcpy.Rename_management(outPath + projectedName + ".img",
                                outPath + outFilename)

    except:
        # Get the tool error messages
        SNODAS_errorMessage = SNODAS_errorMessage + "Failed at CONVERT TO .IMG FORMAT;"
        print(arcpy.GetMessages())

    if SNODAS_errorMessage == "":
        SNODAS_errorMessage = "OK"

    writelog(SNODAS_LOGFILE, outFilename, SNODAS_errorMessage)
    print("File conversion finished. Status: " + SNODAS_errorMessage)
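The Con(...) expressions above reinterpret 16-bit values at or above 32768 as negative two's-complement numbers and then clamp negatives to 0. A minimal numpy sketch of the same arithmetic (array contents are illustrative, not SNODAS data):

import numpy as np

band = np.array([0, 100, 32768, 65535], dtype=np.int32)
signed = np.where(band >= 32768, band - 65536, band)  # reinterpret as signed 16-bit
clamped = np.where(signed < 0, 0, signed)             # replace negative values with 0
print(clamped)  # [  0 100   0   0]
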
Ejemplo n.º 13
        level=logging.DEBUG,
        format='%(asctime)s %(filename)s %(levelname)-8s %(message)s',
        datefmt='%d %b %Y %H:%M:%S',
        filename=LOG_FILENAME)

    logging.info("Script initiating...")
    f = open(MOSAICS_FILENAME, "r")
    mosaics = []
    for x in f.readlines():
        line = x.strip().split(";")
        mosaics.append(line)

    for mosaic in mosaics:
        source_folder = mosaic[0]
        database_name = mosaic[1]
        country_code = database_name[:2]  # IT_2016.gdb --> IT
        database_path = os.path.join(ENV_PATH, country_code, database_name)
        mosaic_name = mosaic[2]
        update_mosaic(database_path, mosaic_name, source_folder)

    f.close()
    logging.info("Script finished.")

except arcpy.ExecuteError:
    logging.debug("Script did not complete.")
    # log errors
    logging.error(arcpy.GetMessages(2))

except:
    logging.info(arcpy.GetMessages())
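This example opens mid-call; a complete logging configuration of the assumed form would look like the sketch below (LOG_FILENAME here is illustrative; the example defines its own constant):

import logging

LOG_FILENAME = "update_mosaics.log"  # illustrative stand-in
logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s %(filename)s %(levelname)-8s %(message)s',
    datefmt='%d %b %Y %H:%M:%S',
    filename=LOG_FILENAME)
logging.info("Script initiating...")
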
Ejemplo n.º 14
                            stats[4]) + "\n" + "R^2: " + str(stats[5]) + "\n\n"
        iterations += 1

    # Generate statistics and provide it to the user
    outlierLog(msg)

    stats = calculateStatistics(tuplesCleaned, differenceCleaned)
    showStats("Cleaned Data", stats, "CleanedStats")

    percent = (1 - (len(tuplesCleaned) / float(len(tuples)))) * 100
    arcpy.AddMessage(str(percent) + "% of rows were removed.")

    arcpy.AddMessage("Initial: " + str(len(tuples)))
    arcpy.AddMessage("Cleaned: " + str(len(tuplesCleaned)))
    # Render scatterplot for cleaned data
    for t in tuplesCleaned:
        orthoListCleaned.append(t[0])
        rasterListCleaned.append(t[1])

    renderScatterplot(orthoListCleaned, rasterListCleaned, "scatterCleaned")

    # Write calculated RMSE and AVG into a text file
    text_file = open("C:/LiDAR1_Plugin_1.4/rmse/rmse.txt", "w")
    text_file.write(str(stats[0]) + "\n" + str(stats[1]))
    text_file.close()

except:

    arcpy.AddError("Fail")
    arcpy.AddMessage(arcpy.GetMessages())
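calculateStatistics() is not shown above, so as a hedged guess at the two values written to rmse.txt (an RMSE and a mean difference), a standalone sketch:

import math

def rmse_and_mean(differences):
    n = len(differences)
    rmse = math.sqrt(sum(d * d for d in differences) / n)
    return rmse, sum(differences) / n

print(rmse_and_mean([0.1, -0.2, 0.05]))  # (rmse, mean)
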
Ejemplo n.º 15
    if AreaCalcUnits.lower() == "hectares":
        print >>f,"Suitability Class, Pixel Count, Total Area (hectares)"
    else:
        print >>f,"Suitability Class, Pixel Count, Total Area (acres)"
    for row in searchCurs:  #iterate and load all the rows for processing
        AreaInSqM = int(getcellsizeX) * int(getcellsizeY) * int(row.getValue('Count')) 
        if AreaCalcUnits.lower() == 'hectares':
            SuitabilityCatsArea = AreaInSqM * 0.0001  # square metres to hectares
        else:
            SuitabilityCatsArea = AreaInSqM * 0.000247105  # square metres to acres
        SuitabilityCatsArea = round(SuitabilityCatsArea,3)  
        arcpy.AddMessage('Suitability Class {classnum} - Area = {area:.3f} {units}'.format(classnum=row.getValue('Value'),area=SuitabilityCatsArea,units=AreaCalcUnits))
        #f.write(str(row.getValue('Value')) + "," + str(int(row.getValue('Count'))) + "," + str(SuitabilityCatsArea) + "\n")
        print >>f,str(row.getValue('Value')) + "," + str(int(row.getValue('Count'))) + "," + str(SuitabilityCatsArea)
    f.close()

except LicenseError:
    arcpy.AddError("Spatial Analysis extension is unavailable")
except WeightCheckError:
    arcpy.AddError("The Weight Values need to be within 0 to 1 AND also add up to 1")
except UnitsError:
    arcpy.AddError("The Area Units need to be in hectares or acres")

except Exception as e:
    arcpy.AddError(traceback.format_exc()) #error unsuppression for debugging
    arcpy.AddError(e) # Adds errors to ArcGIS custom tool
    print "Error: " + str(e) # Prints Python-related errors
    print arcpy.GetMessages() # Prints ArcPy-related errors
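A hedged helper showing the intended square-metre conversions used above (x 0.0001 for hectares, x 0.000247105 for acres); the function name is illustrative, not part of the tool:

def area_from_pixels(cell_x_m, cell_y_m, count, units="hectares"):
    sq_m = cell_x_m * cell_y_m * count
    factor = 0.0001 if units.lower() == "hectares" else 0.000247105
    return round(sq_m * factor, 3)

print(area_from_pixels(30, 30, 1000))           # 90.0 hectares
print(area_from_pixels(30, 30, 1000, "acres"))  # ~222.39 acres
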

Ejemplo n.º 16
                print("{} joined".format(str(p)))
                p.join()

            while len(multiprocessing.active_children()) > 0:
                nProc = len(multiprocessing.active_children())
                sMsg = "Current active processes={}, {}".format(nProc, datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
                arcpy.AddMessage(sMsg) 
                for actProcess in multiprocessing.active_children():
                    dt = (time.clock() - ds)
                    dt = round(dt,2)
                    arcpy.AddMessage("  {} dt={}".format(str(actProcess), dt ))
                time.sleep(interval)    
             
         
    except arcpy.ExecuteError:
        print (str(arcpy.GetMessages(2)))
        arcpy.AddError(str(arcpy.GetMessages(2)))
    except:
        print (trace())
        arcpy.AddError(str(trace()))
        arcpy.AddError(str(arcpy.GetMessages(2)))
    finally:
        if(oProcessor!=None):
            del oProcessor
        arcpy.AddMessage("Total processing time dt={}".format(apwrutils.Utils.GetDSMsg(ddt)))
        dt = datetime.datetime.now()
        print('Finished at ' + dt.strftime("%Y-%m-%d %H:%M:%S"))
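A minimal standalone sketch of the polling pattern above: start workers, then report the remaining active children at a fixed interval (worker body and interval are illustrative):

import multiprocessing
import time

def worker(seconds):
    time.sleep(seconds)

if __name__ == "__main__":
    procs = [multiprocessing.Process(target=worker, args=(n,)) for n in (1, 2)]
    for p in procs:
        p.start()
    # Poll the remaining children, as the tool above does with arcpy.AddMessage
    while multiprocessing.active_children():
        print("active processes:", len(multiprocessing.active_children()))
        time.sleep(0.5)
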



Ejemplo n.º 17
    ubb_fc = arcpy.GetParameterAsText(2)
    sgid_fc = arcpy.GetParameterAsText(3)
    data_round = arcpy.GetParameterAsText(4)
    provider_field = arcpy.GetParameterAsText(5)
    provider_name = arcpy.GetParameterAsText(6)

    try:
        #: Validate and return provider name
        provider = validate_provider_name(ubb_fc, provider_field,
                                          provider_name)
        #: Generate Identifier for new data
        generate_identifiers(new_data_fc)
        #: Archive existing features
        archive_provider(provider, provider_field, ubb_fc, archive_fc,
                         data_round)
        #: Update UBB feature class
        update_features(provider, provider_field, new_data_fc, ubb_fc)
        #: Update SGID feature class
        update_features(provider, provider_field, new_data_fc, sgid_fc)

    except arcpy.ExecuteError:
        arcpy.AddError(arcpy.GetMessages(2))
        arcpy.AddMessage('========')
        arcpy.AddMessage(traceback.format_exc())

    except:
        error_message = sys.exc_info()[1]
        arcpy.AddError(error_message)
        arcpy.AddMessage('========')
        arcpy.AddMessage(traceback.format_exc())
def mainFunction(
    downloadLink, updateMode, geodatabase, featureDataset
):  # Get parameters from ArcGIS Desktop tool by separating by comma e.g. (var1 is 1st parameter,var2 is 2nd parameter,var3 is 3rd parameter)
    try:
        # --------------------------------------- Start of code --------------------------------------- #

        # Download the file from the link
        file = urllib2.urlopen(downloadLink)
        # Download in chunks
        fileChunk = 16 * 1024

        datasetFileName = "Data.zip"
        if ".gdb" in downloadLink:
            datasetFileName = "Data.gdb.zip"
        with open(os.path.join(arcpy.env.scratchFolder, datasetFileName),
                  'wb') as output:
            while True:
                chunk = file.read(fileChunk)
                if not chunk:
                    break
                # Write chunk to output file
                output.write(chunk)

        # Unzip the file to the scratch folder
        arcpy.AddMessage("Extracting zip file...")
        zip = zipfile.ZipFile(os.path.join(arcpy.env.scratchFolder,
                                           datasetFileName),
                              mode="r")
        unzipFolder = arcpy.env.scratchFolder
        if ".gdb" in downloadLink:
            unzipFolder = os.path.join(arcpy.env.scratchFolder, "Data.gdb")
        zip.extractall(unzipFolder)

        # Get the newest unzipped database from the scratch folder
        database = max(glob.iglob(arcpy.env.scratchFolder + r"\*.gdb"),
                       key=os.path.getmtime)

        # Assign the geodatabase workspace and load in the datasets to the lists
        arcpy.env.workspace = database
        featureclassList = arcpy.ListFeatureClasses()
        tableList = arcpy.ListTables()

        arcpy.AddMessage("Copying datasets...")
        # Load the feature classes into the geodatabase if at least one is in the geodatabase provided
        if (len(featureclassList) > 0):
            # Loop through the feature classes
            for eachFeatureclass in featureclassList:
                # Create a Describe object from the dataset
                describeDataset = arcpy.Describe(eachFeatureclass)
                # If feature dataset provided, add that to path
                if featureDataset:
                    outputDataset = os.path.join(
                        geodatabase + "\\" + featureDataset,
                        describeDataset.name)
                else:
                    outputDataset = os.path.join(geodatabase,
                                                 describeDataset.name)
                exportData = "true"
                # If update mode is New then copy, otherwise delete and append records
                if (updateMode == "New"):
                    # Copy feature class into geodatabase using the same dataset name
                    arcpy.CopyFeatures_management(eachFeatureclass,
                                                  outputDataset, "", "0", "0",
                                                  "0")
                else:
                    # If dataset exists in geodatabase, delete features and load in new data
                    if arcpy.Exists(outputDataset):
                        arcpy.DeleteFeatures_management(outputDataset)
                        arcpy.Append_management(
                            os.path.join(arcpy.env.workspace,
                                         eachFeatureclass), outputDataset,
                            "NO_TEST", "", "")
                    else:
                        exportData = "false"
                        # Log warning
                        arcpy.AddWarning(
                            "Warning: " + outputDataset +
                            " does not exist and won't be updated")
                        # Logging
                        if (enableLogging == "true"):
                            logger.warning(
                                outputDataset +
                                " does not exist and won't be updated")
                if (exportData.lower() == "true"):
                    datasetRecordCount = arcpy.GetCount_management(
                        outputDataset)
                    arcpy.AddMessage(
                        str(outputDataset) + " record count - " +
                        str(datasetRecordCount) + "...")
                    # Logging
                    if (enableLogging == "true"):
                        logger.info(
                            str(outputDataset) + " record count - " +
                            str(datasetRecordCount) + "...")
        if (len(tableList) > 0):
            # Loop through of the tables
            for eachTable in tableList:
                # Create a Describe object from the dataset
                describeDataset = arcpy.Describe(eachTable)
                outputDataset = os.path.join(geodatabase, describeDataset.name)
                exportData = "true"

                # If update mode is New then copy, otherwise delete and append records
                if (updateMode == "New"):
                    # Copy feature class into geodatabase using the same dataset name
                    arcpy.TableSelect_analysis(eachTable, outputDataset, "")
                else:
                    # If dataset exists in geodatabase, delete features and load in new data
                    if arcpy.Exists(os.path.join(geodatabase, eachTable)):
                        arcpy.DeleteRows_management(
                            os.path.join(geodatabase, eachTable))
                        arcpy.Append_management(
                            os.path.join(arcpy.env.workspace, eachTable),
                            outputDataset, "NO_TEST", "", "")
                    else:
                        exportData = "false"
                        # Log warning
                        arcpy.AddWarning(
                            "Warning: " + outputDataset +
                            " does not exist and won't be updated")
                        # Logging
                        if (enableLogging == "true"):
                            logger.warning(
                                outputDataset +
                                " does not exist and won't be updated")
                if (exportData.lower() == "true"):
                    datasetRecordCount = arcpy.GetCount_management(
                        outputDataset)
                    arcpy.AddMessage(
                        str(outputDataset) + " record count - " +
                        str(datasetRecordCount) + "...")
                    # Logging
                    if (enableLogging == "true"):
                        logger.info(
                            str(outputDataset) + " record count - " +
                            str(datasetRecordCount) + "...")

        # --------------------------------------- End of code --------------------------------------- #
        # If called from gp tool return the arcpy parameter
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                # If ArcGIS desktop installed
                if (arcgisDesktop == "true"):
                    arcpy.SetParameterAsText(1, output)
                # ArcGIS desktop not installed
                else:
                    return output
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
    # If arcpy error
    except arcpy.ExecuteError:
        # Build and show the error message
        errorMessage = arcpy.GetMessages(2)
        printMessage(errorMessage, "error")
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
    # If python error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message
        # If many arguments
        if (e.args):
            for i in range(len(e.args)):
                if (i == 0):
                    # Python version check
                    if sys.version_info[0] >= 3:
                        # Python 3.x
                        errorMessage = str(
                            e.args[i]).encode('utf-8').decode('utf-8')
                    else:
                        # Python 2.x
                        errorMessage = unicode(e.args[i]).encode('utf-8')
                else:
                    # Python version check
                    if sys.version_info[0] >= 3:
                        # Python 3.x
                        errorMessage = errorMessage + " " + str(
                            e.args[i]).encode('utf-8').decode('utf-8')
                    else:
                        # Python 2.x
                        errorMessage = errorMessage + " " + unicode(
                            e.args[i]).encode('utf-8')
        # Else just one argument
        else:
            errorMessage = e
        printMessage(errorMessage, "error")
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
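The chunked download at the top of this function uses urllib2, which is Python 2 only; a hedged Python 3 equivalent of the same 16 KB chunked copy (URL and file name illustrative):

import shutil
import urllib.request

with urllib.request.urlopen("https://example.com/Data.zip") as response, \
        open("Data.zip", "wb") as out_file:
    shutil.copyfileobj(response, out_file, length=16 * 1024)
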
Ejemplo n.º 19
        pct_time = days / float(all_days)  # float() so Python 2 integer division cannot truncate

        # Send results to a CSV
        with open(out_csv, 'ab') as outFile:
            writer = csv.writer(outFile)
            #flds = ['Area', 'ptt','in_locs','all_locs','pct_locs','in_days','all_days','pct_days']
            #writer.writerow(flds)  # header row (disabled)
            # Write one result row to the CSV
            row = (sel_layer, ptt, count_pts, count_all, pct_locs, days,
                   all_days, pct_time)
            writer.writerow(row)

    else:
        arcpy.AddMessage("No selections in area")

except arcpy.ExecuteError:
    msgs = arcpy.GetMessages(2)
    arcpy.AddError(msgs)
    print msgs
except:
    tb = sys.exc_info()[2]
    tbinfo = traceback.format_tb(tb)[0]
    # Concatenate information together concerning the error into a message string
    pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
        sys.exc_info()[1])
    msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages(2) + "\n"
    # Return python error messages for use in script tool or Python Window
    arcpy.AddError(pymsg)
    arcpy.AddError(msgs)
    # Print Python error messages for use in Python / Python Window
    print pymsg + "\n"
    print msgs
else:
    arcpy.CreateFolder_management(
        "g:\\ATD\\ATD_GIS\\Arterial_Management\\56_Pedestrian_Hybrid_Beacon_PHB\\Data_Driven_PHB_Ranking\\DTS\\",
        "Data")
    arcpy.env.scratchWorkspace = workspace
    newDataGdb = arcpy.env.scratchGDB + "\\"
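The CSV block near the top of this example opens the file in Python 2's binary append mode ('ab'); under Python 3 the csv module expects text mode with newline=''. A hedged equivalent (file name and row values illustrative):

import csv

row = ("sel_layer", "ptt", 10, 20, 0.5, 3, 4, 0.75)  # illustrative values
with open("results.csv", "a", newline="") as out_file:
    csv.writer(out_file).writerow(row)
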

# Working data variables
streetClip = newDataGdb + "Street_Clip_COA"
streetSelect = newDataGdb + "Street_Select_ASMP"
streetSelectPhb = newDataGdb + "Street_Select_PHB"

# Make Feature Layer for TRANSPORTATION.asmp_street_network
arcpy.MakeFeatureLayer_management(sdeStreet, "sdeStreetLayer",
                                  "EX_XS_GENERAL <> 'DNE'", "", "")
print "\n", arcpy.GetMessages()

# Make Feature Layer for BOUNDARIES.jurisdictions
arcpy.MakeFeatureLayer_management(
    sdeJuris, "sdeJurisLayer",
    "CITY_NAME = 'CITY OF AUSTIN' AND JURISDICTION_LABEL = 'AUSTIN FULL PURPOSE'",
    "", "")
print "\n", arcpy.GetMessages()

# Clip asmp_street_network to jurisdictions
arcpy.Clip_analysis("sdeStreetLayer", "sdeJurisLayer",
                    newDataGdb + "Street_Clip_COA", "")
print "\n", arcpy.GetMessages()

# Make Feature Layer for Street_Clip_COA
arcpy.MakeFeatureLayer_management(streetClip, "streetClipLayer", "", "", "")
def do_analysis(inFeatureClass, outFeatureClass, Length, Field,
                referenceFeatureClass):
    """This function will create streets in one location based on the incoming reference centroid for the
    purpose of being used for data driven design applications in CityEngine."""
    try:
        # Delete Existing Output
        arcpy.env.overwriteOutput = True
        if arcpy.Exists(outFeatureClass):
            arc_print("Deleting existing output feature.", True)
            arcpy.Delete_management(outFeatureClass)
        # Copy/Project feature class to get outputFC
        arc_print("Making a copy of input feature class for output.", True)
        OutPut = arcpy.CopyFeatures_management(inFeatureClass)
        arc_print("Gathering feature information.", True)
        # Get feature description and spatial reference information for tool use
        desc = arcpy.Describe(OutPut)
        SpatialRef = desc.spatialReference
        shpType = desc.shapeType
        srName = SpatialRef.name
        arc_print(
            "The shape type is {0}, and the current spatial reference is: {1}".
            format(str(shpType), str(srName)), True)
        # Get mean center of feature class (for pointGeo)
        if arcpy.Exists(
                referenceFeatureClass) and referenceFeatureClass != "#":
            arc_print(
                "Calculating the mean center of the reference feature class.",
                True)
            meanCenter = arcpy.MeanCenter_stats(referenceFeatureClass)
        else:
            arc_print("Calculating the mean center of the copied feature.",
                      True)
            meanCenter = arcpy.MeanCenter_stats(inFeatureClass)

        arc_print("Getting point geometry from copied center.", True)
        pointGeo = copy.deepcopy(
            arcpy.da.SearchCursor(
                meanCenter,
                ["SHAPE@"]).next()[0])  # Only one center, so one record
        # Check if the optional Street Length/ Lot Area field is used.
        if Field and FieldExist(OutPut, Field):
            arc_print("Using size field to create output geometries.", True)
            cursorFields = ["SHAPE@", "OID@", Field]
        else:
            arc_print(
                "Using size input value to create same sized output geometries.",
                True)
            cursorFields = ["SHAPE@", "OID@"]

        with arcpy.da.UpdateCursor(OutPut, cursorFields) as cursor:
            arc_print("Replacing existing input geometry.", True)
            count = 1
            if desc.shapeType == "Polyline":
                for row in cursor:
                    # Nested try/except so one failed row update is handled without aborting the cursor loop
                    count += 1
                    try:
                        print("A Line at OID: {0}.".format(str(row[1])))
                        row[0] = CreateMainStreetCEGeometry(
                            pointGeo,
                            lineLength(row, Field, Length, cursorFields))
                        cursor.updateRow(row)
                    except:
                        handleFailedStreetUpdate(
                            cursor, row, pointGeo,
                            lineLength(row, Field, Length, cursorFields))
            else:
                arc_print("Input geometry is not a polyline. Check arguments.",
                          True)
                arcpy.AddError(
                    "Input geometry is not a polyline. Check arguments.")

            arc_print(
                "Projecting data into Web Mercator Auxiliary Sphere (a CityEngine compatible projection).",
                True)
            webMercatorAux = arcpy.SpatialReference(3857)
            arcpy.Project_management(
                OutPut, outFeatureClass,
                webMercatorAux)  # No preserve shape, keeps 2 vertices
            arc_print("Cleaning up intermediates.", True)
            arcpy.Delete_management(meanCenter)
            arcpy.Delete_management(OutPut)
            del SpatialRef, desc, cursor, webMercatorAux

    except arcpy.ExecuteError:
        print(arcpy.GetMessages(2))
    except Exception as e:
        print(e.args[0])
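The deepcopy-from-cursor step above calls cursor.next(), which is Python 2 style; in Python 3, next(cursor) is used instead. A hedged sketch of pulling the single mean-center geometry (dataset name illustrative):

import arcpy

with arcpy.da.SearchCursor("mean_center_fc", ["SHAPE@"]) as cursor:
    point_geometry = next(cursor)[0]  # only one center, so one record
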
Ejemplo n.º 22
    # Loop through to cursor to get the first row

    # Get the geometry of the selected point

    # Break the loop

    # Create an XYZ coord array based on the XY of the current source layer's row and the Z from the selected point

    # Add the new coordinate array to new geometry object

    # Set the feature's geometry to the new geometry object

except:

    tb = sys.exc_info()[2]
    tbinfo = traceback.format_tb(tb)[0]
    pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n     " + str(
        sys.exc_info()[1])

    msgs = "ARCPY ERRORS:\n" + arcpy.GetMessages(2) + "\n"

    arcpy.AddError(msgs)
    arcpy.AddError(pymsg)

    print msgs
    print pymsg

    arcpy.AddMessage(arcpy.GetMessages(1))
    print arcpy.GetMessages(1)
Ejemplo n.º 23
import arcpy
try:
    arcpy.env.workspace = r"C:\Users\Administrator\Desktop\GIS Programming\Training\Data"
    searchCurs = arcpy.SearchCursor("Schools.shp","\"TYPE\" = \'Primary School\'")
    for row in searchCurs:
        print row.SCHOOL
except:
    print arcpy.GetMessages()
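A hedged modern equivalent of this example using arcpy.da.SearchCursor, which is faster than the classic cursor and releases locks via the with-block; the path, dataset, and field names are taken from the example itself:

import arcpy

arcpy.env.workspace = r"C:\Users\Administrator\Desktop\GIS Programming\Training\Data"
where = "\"TYPE\" = 'Primary School'"
with arcpy.da.SearchCursor("Schools.shp", ["SCHOOL"], where) as rows:
    for school, in rows:
        print(school)
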
Ejemplo n.º 24
 def init(self, config):
     mdType = self.getXMLNodeValue(self.m_base.m_doc,
                                   'MosaicDatasetType').lower()
     isDerived = mdType == 'derived'
     Nodelist = self.m_base.m_doc.getElementsByTagName("MosaicDataset")
     if (Nodelist.length == 0):
         self.log(
             "Error: <MosaicDataset> node is not found! Invalid schema.",
             self.const_critical_text)
         return False
     try:
         for node in Nodelist[0].childNodes:
             node = node.nextSibling
             if (node is not None
                     and node.nodeType == minidom.Node.ELEMENT_NODE):
                 if (node.nodeName == 'Name'):
                     mosasicDataset = self.m_base.m_mdName
                     if (mosasicDataset == ''):
                         mosasicDataset = node.firstChild.nodeValue
                     mosasicDataset = mosasicDataset.strip()
                     self.sMdNameList[mosasicDataset] = {
                         'md': mosasicDataset
                     }
                     self.sMdNameList[mosasicDataset]['addraster'] = []
                     self.sMdNameList[mosasicDataset][
                         'type'] = self.getXMLNodeValue(
                             self.m_base.m_doc, "MosaicDatasetType")
                 elif (node.nodeName == 'dataset_id'):
                     if (mosasicDataset in self.sMdNameList.keys()):
                         idValue = node.firstChild.nodeValue
                         self.sMdNameList[mosasicDataset][
                             'Dataset_ID'] = idValue.strip()
                 elif (node.nodeName == 'AddRasters'):
                     rasterType = False
                     if (len(mosasicDataset) == 0):
                         self.log(
                             "Error: <Name> should be the first child-element in <MosaicDataset>",
                             self.const_critical_text)
                         return False
                     for node in node.childNodes:
                         if (node is not None and node.nodeType
                                 == minidom.Node.ELEMENT_NODE):
                             nodeName = node.nodeName.lower()
                             if (nodeName == 'addraster'):
                                 hshAddRasters = {}
                                 for node in node.childNodes:
                                     if (node is not None and node.nodeType
                                             == minidom.Node.ELEMENT_NODE):
                                         nodeName = node.nodeName.lower()
                                         nodeValue = ''
                                         if (node.childNodes.length > 0):
                                             nodeValue = node.firstChild.nodeValue
                                         if (nodeName == 'sources'):
                                             dataPaths = ''
                                             keyFound = False
                                             nodeName = 'data_path'  # only <DataPath> nodes can exist under <Sources>
                                             if (self.m_base.m_sources == ''):
                                                 for cNode in node.childNodes:
                                                     if (cNode is not None and
                                                             cNode.nodeType == minidom.Node.ELEMENT_NODE):
                                                         name_ = cNode.nodeName.lower()
                                                         if (name_ == nodeName):
                                                             if (cNode.childNodes.length > 0):
                                                                 _file = cNode.firstChild.nodeValue.strip()
                                                                 if (isDerived):
                                                                     _p, _f = os.path.split(_file)
                                                                     if (_p == ''):
                                                                         _flist = _f.split(';')
                                                                         indata = ''
                                                                         for _fl in range(len(_flist)):
                                                                             indata = indata + ";" + os.path.join(
                                                                                 self.m_base.m_geoPath, _flist[_fl])
                                                                         if indata[0] == ';':
                                                                             _file = indata[1:len(indata)]
                                                                         else:
                                                                             _file = indata
                                                                 dataPaths = dataPaths + _file + ';'
                                                                 keyFound = True
                                             else:
                                                 dataPaths = self.m_base.m_sources
                                             nodeValue = dataPaths
                                         elif (nodeName == 'raster_type'):
                                             nodeName = 'art'
                                             if (nodeValue.lower().find(
                                                     '.art') >= 0):
                                                 nodeValue = self.prefixFolderPath(
                                                     nodeValue, self.m_base.
                                                     const_raster_type_path_
                                                 )
                                         hshAddRasters[nodeName] = nodeValue
                                 if (mosasicDataset
                                         in self.sMdNameList.keys()):
                                     try:
                                         self.sMdNameList[mosasicDataset][
                                             'addraster'].append(
                                                 hshAddRasters)
                                     except:
                                         Warning_ = True
                                         # print "Warning: empty value for: MosaicDataset/" + nodeName
     except Exception as inst:
         self.log("Err. Reading MosaicDataset nodes.",
                  self.const_critical_text)
         self.log(str(inst), self.const_critical_text)
         return False
     if not arcpy.Exists(self.m_base.m_workspace):
         self.log("Err. Workspace not found!:" + self.m_base.m_workspace,
                  self.const_critical_text)
         self.log(arcpy.GetMessages(), self.const_critical_text)
         return False
     return True
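A minimal standalone sketch of the minidom traversal pattern this method relies on: walk the element children of <MosaicDataset> and read their text values (the XML string here is illustrative):

from xml.dom import minidom

doc = minidom.parseString(
    "<root><MosaicDataset><Name>md1</Name></MosaicDataset></root>")
for node in doc.getElementsByTagName("MosaicDataset")[0].childNodes:
    if node.nodeType == minidom.Node.ELEMENT_NODE and node.firstChild:
        print(node.nodeName, node.firstChild.nodeValue)  # Name md1
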
Ejemplo n.º 25
if optSortBy == '':
    optSortBy = 'CODE'

optSortOrder = ''
if optSortOrder == '':
    optSortOrder = 'ASCENDING'

##Set up connection for deleting locks if needed
egdb_conn = arcpy.ArcSDESQLExecute(adminWS)

##Add Value to domain----------------------------------
try:
    arcpy.AddCodedValueToDomain_management(inWS, inDomain, addCode, addDesc)
    print 'Domain value: ' + str(addCode) + ' - ' + str(addDesc) + " added to domain: " + str(inDomain)
except arcpy.ExecuteError:
    if arcpy.GetMessages(2)[0:12] == 'ERROR 000464':
        if DeleteSharedLocksOnDomains(inDomain) is True:
            print "Shared locks deleted, trying again"
            try:
                arcpy.AddCodedValueToDomain_management(inWS, inDomain, addCode, addDesc)
                print 'Domain value: ' + str(addCode) + ' - ' + str(addDesc) + " added to domain: " + str(inDomain)
            except:
                print 'Adding domain failed'
except Exception, e:
    print e

##Sort domain if specified-----------------------------
if sortVal is True:
    try:
        arcpy.SortCodedValueDomain_management(inWS, inDomain, optSortBy, optSortOrder)
        print 'Domain: ' + str(inDomain) + ' sorted by ' + str(optSortBy) + ' ' + str(optSortOrder)
Ejemplo n.º 26
 def AddRasters(self):
     self.log("Adding rasters:", self.const_general_text)
     ArcGISVersion = self.m_base.getDesktopVersion()
     enabledARTEdit = True
     if (len(ArcGISVersion) >= 2):
         # no editing of (ART) if ArcGIS version is >= 10.4;
         # tuple comparison so e.g. a hypothetical 11.0 also counts as >= 10.4
         if ((ArcGISVersion[0], ArcGISVersion[1]) >= (10, 4)):
             enabledARTEdit = False
     for sourceID in self.sMdNameList:
         MDName = self.sMdNameList[sourceID]['md']
         fullPath = os.path.join(self.m_base.m_geoPath, MDName)
         if (arcpy.Exists(fullPath) == False):
             self.log("Path doesn't exist: %s" % (fullPath),
                      self.const_critical_text)
             return False
         self.m_base.m_last_AT_ObjectID = self.getLastObjectID(
             self.m_base.m_geoPath, MDName)
         for hshAddRaster in self.sMdNameList[sourceID]['addraster']:
             try:
                 self.log(
                     "\tUsing mosaic dataset/ID:" + MDName + '/' +
                     hshAddRaster['dataset_id'], self.const_general_text)
                 rasterType = 'Raster Dataset'
                 name_toupper = MDName.upper()
                 if ('art' in hshAddRaster.keys()):
                     rasterType = hshAddRaster['art']
                     self.log(
                         "\tUsing ART for " + name_toupper + ': ' +
                         rasterType, self.const_general_text)
                     if (self.m_base.m_art_apply_changes == True
                             and enabledARTEdit):
                         art_doc = minidom.parse(rasterType)
                         if (self.m_base.updateART(
                                 art_doc, self.m_base.m_art_ws,
                                 self.m_base.m_art_ds) == True):
                             self.log(
                                 "\tUpdating ART (Workspace, RasterDataset) values with (%s, %s) respectively."
                                 % (self.m_base.m_art_ws,
                                    self.m_base.m_art_ds),
                                 self.const_general_text)
                             c = open(rasterType, "w")
                             c.write(art_doc.toxml())
                             c.close()
                 set_filter = ''
                 if ('filter' in hshAddRaster.keys()):
                     set_filter = hshAddRaster['filter']
                     if (set_filter == '*'):
                         set_filter = ''
                 set_spatial_reference = ''
                 if ('spatial_reference' in hshAddRaster.keys()):
                     set_spatial_reference = hshAddRaster[
                         'spatial_reference']
                 objID = self.getLastObjectID(self.m_base.m_geoPath, MDName)
                 self.sMdNameList[sourceID][
                     'pre_AddRasters_record_count'] = objID
                 self.sMdNameList[sourceID]['Dataset_ID'] = hshAddRaster[
                     'dataset_id']
                 self.log('Adding items..')
                 args = []
                 args.append(fullPath)
                 args.append(rasterType)
                 args.append(self.GetValue(hshAddRaster, 'data_path'))
                 args.append(
                     self.GetValue(hshAddRaster, 'update_cellsize_ranges'))
                 args.append(self.GetValue(hshAddRaster, 'update_boundary'))
                 args.append(self.GetValue(hshAddRaster,
                                           'update_overviews'))
                 args.append(
                     self.GetValue(hshAddRaster, 'maximum_pyramid_levels'))
                 args.append(
                     self.GetValue(hshAddRaster, 'maximum_cell_size'))
                 args.append(
                     self.GetValue(hshAddRaster, 'minimum_dimension'))
                 args.append(
                     self.GetValue(hshAddRaster, 'spatial_reference'))
                 args.append(set_filter)
                 args.append(self.GetValue(hshAddRaster, 'sub_folder'))
                 args.append(
                     self.GetValue(hshAddRaster, 'duplicate_items_action'))
                 args.append(self.GetValue(hshAddRaster, 'build_pyramids'))
                 args.append(
                     self.GetValue(hshAddRaster, 'calculate_statistics'))
                 args.append(self.GetValue(hshAddRaster,
                                           'build_thumbnails'))
                 args.append(
                     self.GetValue(hshAddRaster, 'operation_description'))
                 args.append(
                     self.GetValue(hshAddRaster, 'force_spatial_reference'))
                 args.append(
                     self.GetValue(hshAddRaster, 'estimate_statistics'))
                 args.append(self.GetValue(hshAddRaster, 'aux_inputs'))
                 if (self.m_base.m_art_apply_changes
                         and not enabledARTEdit):
                     args[len(args) - 1] += ';DEM {}'.format(
                         os.path.join(self.m_base.m_art_ws,
                                      self.m_base.m_art_ds))
                 AddRaster = Base.DynaInvoke(
                     'arcpy.AddRastersToMosaicDataset_management', args,
                     None, self.m_base.m_log.Message)
                 if (AddRaster.init() == False):
                     return False
                 AddRaster.invoke()
                 newObjID = self.getLastObjectID(self.m_base.m_geoPath,
                                                 MDName)
                 if (newObjID <= objID):
                     self.log(
                         'No new mosaic dataset item was added for Dataset ID (%s)'
                         % (hshAddRaster['dataset_id']))
                     continue
                 for callback_fn in self.callback_functions:
                     if (callback_fn(self.m_base.m_geoPath, sourceID,
                                     self.sMdNameList[sourceID]) == False):
                         return False
             except Exception as e:
                 self.log(str(e), self.const_warning_text)
                 self.log(arcpy.GetMessages(), self.const_warning_text)
                 Warning = True
         newObjID = self.getLastObjectID(self.m_base.m_geoPath, MDName)
         if (newObjID <= self.m_base.m_last_AT_ObjectID):
             self.log(
                 'No new mosaic dataset items added to dataset (%s). Verify the input data path/raster type is correct'
                 % (MDName), self.const_critical_text)
             self.log(arcpy.GetMessages(), self.const_critical_text)
             return False
     return True
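The args list assembled above follows the positional signature of arcpy.AddRastersToMosaicDataset_management; for reference, a direct keyword-argument call looks like the hedged sketch below (paths are illustrative, and only a few of the tool's optional parameters are shown):

import arcpy

arcpy.AddRastersToMosaicDataset_management(
    in_mosaic_dataset=r"C:\data\imagery.gdb\md1",
    raster_type="Raster Dataset",
    input_path=r"C:\data\rasters",
    update_cellsize_ranges="UPDATE_CELL_SIZES",
    duplicate_items_action="ALLOW_DUPLICATES")
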
Ejemplo n.º 27
 def writeGPToLog(self):
     # Purpose: Append the current Esri geoprocessing messages to the text log file written by the writeToLog() function.
     # Input: None
     # Output: The current geoprocessing messages plus a separator line, appended to the log file created by writeToLog().
     self.writeToLog(arcpy.GetMessages())
     self.writeToLog('~~~~~~~~~~~~~~~~~~~~~~~~')
Ejemplo n.º 28
def temporal_aggregate_field(inFeatureClass,
                             outFeatureClass,
                             start_time,
                             end_time,
                             time_interval,
                             weight_field="#",
                             case_field="#",
                             summary_field="#",
                             bin_start=None):
    """ This tool will split a feature class into multiple kernel densities based on a datetime field and a
    a set time interval. The result will be a time enabled moasic with Footprint. """
    try:
        splitOutPath = os.path.split(outFeatureClass)
        outWorkSpace = splitOutPath[0]
        outFCTail = splitOutPath[1]
        fin_output_workspace = outWorkSpace
        if arcpy.Exists(fin_output_workspace):
            arcpy.env.workspace = fin_output_workspace
            arcpy.env.overwriteOutput = True
            arcPrint(
                "The current work space is: {0}.".format(fin_output_workspace),
                True)
            # Set up Work Space Environments
            out_workspace_path_split = os.path.split(fin_output_workspace)
            workSpaceTail = out_workspace_path_split[1]
            inFeatureClassTail = os.path.split(inFeatureClass)[1]
            ws_desc = arcpy.Describe(fin_output_workspace)
            workspace_is_geodatabase = ws_desc.dataType == "Workspace"
            arcPrint(
                "Gathering describe object information from fields and input feature class."
            )
            fc_desc = arcpy.Describe(inFeatureClass)
            summary_field_type = arcpy.Describe(weight_field).type

            try:
                arcPrint(
                    "Attempting to create Temporal Table in output workspace.")
                arcpy.CreateFeatureclass_management(outWorkSpace, outFCTail,
                                                    'POINT')
                AddNewField(
                    outFeatureClass,
                    arcpy.ValidateFieldName("Unique_ID", fin_output_workspace),
                    "TEXT")
                AddNewField(
                    outFeatureClass,
                    arcpy.ValidateFieldName("Bin_Number",
                                            fin_output_workspace), "LONG")
                AddNewField(outFeatureClass,
                            arcpy.ValidateFieldName("DT_Start_Bin",
                                                    fin_output_workspace),
                            "DATE",
                            field_alias="Start Bin Datetime")
                AddNewField(outFeatureClass,
                            arcpy.ValidateFieldName("DT_End_Bin",
                                                    fin_output_workspace),
                            "DATE",
                            field_alias="End Bin Datetime")
                AddNewField(outFeatureClass,
                            arcpy.ValidateFieldName("TXT_Start_Bin",
                                                    fin_output_workspace),
                            "TEXT",
                            field_alias="Start Bin String")
                AddNewField(outFeatureClass,
                            arcpy.ValidateFieldName("TXT_End_Bin",
                                                    fin_output_workspace),
                            "TEXT",
                            field_alias="End Bin String")
                AddNewField(
                    outFeatureClass,
                    arcpy.ValidateFieldName("Extract_Query",
                                            fin_output_workspace), "TEXT")
                AddNewField(
                    outFeatureClass,
                    arcpy.ValidateFieldName("Bin_Count", fin_output_workspace),
                    "DOUBLE")
                AddNewField(
                    outFeatureClass,
                    arcpy.ValidateFieldName("Bin_Mean", fin_output_workspace),
                    "DOUBLE")
                AddNewField(
                    outFeatureClass,
                    arcpy.ValidateFieldName("Bin_Median",
                                            fin_output_workspace), "DOUBLE")
                AddNewField(
                    outFeatureClass,
                    arcpy.ValidateFieldName("Bin_Sum", fin_output_workspace),
                    "DOUBLE")
                AddNewField(
                    outFeatureClass,
                    arcpy.ValidateFieldName("Bin_StdDev",
                                            fin_output_workspace), "DOUBLE")
                AddNewField(
                    outFeatureClass,
                    arcpy.ValidateFieldName("Bin_Min", fin_output_workspace),
                    "DOUBLE")
                AddNewField(
                    outFeatureClass,
                    arcpy.ValidateFieldName("Bin_Max", fin_output_workspace),
                    "DOUBLE")
            except:
                arcpy.AddWarning(
                    "Could not create temporal table. Time enablement is not possible."
                )
                pass
            try:
                arcpy.RefreshCatalog(outWorkSpace)
            except:
                arcPrint("Could not refresh catalog.")
                pass
            # Set up Time Deltas and Parse Time String
            arcPrint("Constructing Time Delta from input time period string.",
                     True)
            arcPrint(str(time_interval))
            time_magnitude, time_unit = alphanumeric_split(str(time_interval))
            time_delta = parse_time_units_to_dt(time_magnitude, time_unit)
            arcPrint(
                "Using datetime fields to generate new feature classes in {0}."
                .format(str(workSpaceTail)))
            arcPrint("Getting start and final times in start time field {0}.".
                     format(start_time))
            start_time_min, start_time_max = get_min_max_from_field(
                inFeatureClass, start_time)
            # Establish whether to use end time field or only a start time (Single Date Field)
            if FieldExist(inFeatureClass, end_time) and end_time:
                arcPrint(
                    "Using start and end time to grab feature classes whose bins occur within an events "
                    "start or end time.")
                end_time_min, end_time_max = get_min_max_from_field(
                    inFeatureClass, end_time)
                start_time_field = start_time
                end_time_field = end_time
                start_time_range = start_time_min
                end_time_range = end_time_max
            else:
                arcPrint(
                    "Using only first datetime start field to construct time bin ranges."
                )
                start_time_field = start_time
                end_time_field = start_time
                start_time_range = start_time_min
                end_time_range = start_time_max
            if isinstance(bin_start, datetime.datetime) or isinstance(
                    bin_start, datetime.date):
                start_time_range = bin_start
                arcPrint(
                    "Bin Start Time was selected, using {0} as bin starting time period."
                    .format(str(bin_start)))
            time_bins = construct_time_bin_ranges(start_time_range,
                                                  end_time_range, time_delta)
            arcPrint("Constructing queries based on datetime ranges.")
            temporal_queries = construct_sql_queries_from_time_bin(
                time_bins, inFeatureClass, start_time_field, end_time_field)
            temporary_fc_name = "Temp_{0}".format(
                arcpy.ValidateTableName(inFeatureClassTail,
                                        fin_output_workspace)[0:13])
            temporary_fc_path = os.path.join(fin_output_workspace,
                                             temporary_fc_name)
            # Transition to kernel density creation
            time_counter = 0
            temporal_record_table = []
            arcPrint(
                "Generating mean centers based on {0} queries.".format(
                    len(temporal_queries)), True)
            for query in temporal_queries:
                try:
                    time_counter += 1
                    arcPrint(
                        "Determining name and constructing query for new feature class.",
                        True)
                    # Fall back to no weight field if the supplied field does not actually exist.

                    temporary_layer = arcpy.MakeFeatureLayer_management(
                        inFeatureClass, temporary_fc_name, query)
                    temporary_dataframe = ArcGISTabletoDataFrame()
                    arcPrint(
                        "Created Mean Center {0} with query [{1}], appending to master feature class."
                        .format(temporary_fc_name, str(query)), True)
                    arcpy.MeanCenter_stats(temporary_layer, temporary_fc_path,
                                           weight_field, case_field)
                    start_date_time = time_bins[time_counter - 1][0]
                    end_date_time = time_bins[time_counter - 1][1]
                    start_bin_time_string = str(start_date_time)
                    end_bin_time_string = str(end_date_time)
                    if not workspace_is_geodatabase:
                        arcpy.AddWarning(
                            "DBF tables can only accept date fields, not datetimes."
                            " Please check string field.")
                        start_date_time = start_date_time.date()
                        end_date_time = end_date_time.date()
                    temporal_record_table.append([
                        time_counter, start_date_time, end_date_time,
                        start_bin_time_string, end_bin_time_string, query
                    ])

                except Exception as e:
                    arcPrint(
                        "The feature bin ID {0}, could not be processed. Check arguments"
                        .format(str(query)))
                    arcpy.AddWarning(str(e.args[0]))
                    pass

            # arc_print("Adding record values to Temporal Table with an insert cursor.")
            # table_fields= get_fields(outFeatureClass)
            # with arcpy.da.InsertCursor(outFeatureClass,table_fields) as cursor:
            #     for records in temporal_record_table:
            #         cursor.insertRow(records)
            #     arc_print("Finished inserting records for database.")
            #     del cursor
            # arc_print("Tool execution complete.", True)
            pass
        else:
            arcPrint(
                "The desired workspace does not exist. Tool execution terminated.",
                True)
            arcpy.AddWarning("The desired workspace does not exist.")

    except arcpy.ExecuteError:
        print(arcpy.GetMessages(2))
    except Exception as e:
        arcPrint(str(e.args[0]))
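construct_time_bin_ranges() is one of this tool's helpers and is not shown above; a hedged sketch of the time-bin construction it is assumed to perform:

import datetime

def construct_bins(start, end, delta):
    """Return (bin_start, bin_end) pairs covering [start, end)."""
    bins, cursor = [], start
    while cursor < end:
        bins.append((cursor, min(cursor + delta, end)))
        cursor += delta
    return bins

bins = construct_bins(datetime.datetime(2020, 1, 1),
                      datetime.datetime(2020, 1, 4),
                      datetime.timedelta(days=1))
print(len(bins))  # 3 daily bins
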
Ejemplo n.º 29
def RunTest():
    try:
        arcpy.AddMessage("Starting Test: CreateCIBMosaicDataset")

        toolbox = TestUtilities.toolbox
        arcpy.ImportToolbox(toolbox, "DefenseImagery")
        arcpy.env.overwriteOutput = True

        # Set environment settings
        print "Running from: " + str(TestUtilities.currentPath)
        print "Geodatabase path: " + str(TestUtilities.geodatabasePath)

        arcpy.env.overwriteOutput = True

        webMercator = arcpy.SpatialReference(
            r"WGS 1984 Web Mercator (Auxiliary Sphere)")

        inputName = "Imagery_Test"
        inputMosaicDatasetFullPath = os.path.join(TestUtilities.inputGDB,
                                                  inputName)

        if arcpy.Exists(inputMosaicDatasetFullPath):
            print "deleting: " + inputMosaicDatasetFullPath
            arcpy.Delete_management(inputMosaicDatasetFullPath)

        ########################################################
        # Execute:
        arcpy.CreateCIBMosaicDataset_DefenseImagery(TestUtilities.inputGDB,
                                                    inputName, webMercator)
        ########################################################

        #Verify Results
        inputFeatureCount = int(
            arcpy.GetCount_management(inputMosaicDatasetFullPath).getOutput(0))
        print "Input FeatureClass: " + str(inputMosaicDatasetFullPath)
        print "Input Feature Count: " + str(inputFeatureCount)

        if inputFeatureCount > 0:
            print "Mosaic Dataset has already been created and populated"

        print "Test Successful"

    except arcpy.ExecuteError:
        # Get the tool error messages
        msgs = arcpy.GetMessages()
        arcpy.AddError(msgs)

        # return a system error code
        sys.exit(-1)

    except Exception as e:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]

        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
            sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"

        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)

        # return a system error code
        sys.exit(-1)
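The traceback-reporting pattern used in this and several other examples, as a hedged standalone sketch (the forced ZeroDivisionError is only there to exercise the handler):

import sys
import traceback

try:
    1 / 0
except Exception:
    tb = sys.exc_info()[2]
    tbinfo = traceback.format_tb(tb)[0]
    pymsg = ("PYTHON ERRORS:\nTraceback info:\n" + tbinfo +
             "\nError Info:\n" + str(sys.exc_info()[1]))
    print(pymsg)
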
Ejemplo n.º 30
def computeCropRotations(state, out_dir, crp_rot_files, local_first_yr,
                         local_last_yr, min_area_rot, min_perc_of_rot,
                         first_loop):
    # yrs_in_rot is 3 in this case.
    yrs_in_rot = len(crp_rot_files)  # the number of years for which we have crop rotation data
    print('yrs_in_rot = ', yrs_in_rot)
    # crp_rot_files is the same as state_ras_files.
    # Extract the simplified raster data
    all_inp_rasters = '"'
    logger.info('Processing ' + str(yrs_in_rot) + ' rasters located in ' +
                inp_dir)

    prev_out_dir = out_dir
    if (USE_EXISTING_DATA):
        if (first_loop):
            global existing_inp_dir
            existing_inp_dir = output_dir
        else:
            if (local_first_yr > 2006):
                out_dir = existing_inp_dir + os.sep + str(state)
            else:
                out_dir = existing_inp_dir

    base_yr_data = crp_rot_files[base_yr - local_first_yr]
    #print('base_yr_data = ',base_yr_data)
    comb_rasters = out_dir + os.sep + COMB + str(state) + '_' + str(
        yrs_in_rot) + YRS
    cdl_comb = []
    forest_data = out_dir + os.sep + 'forest' + str(base_yr)
    grasslands_data = out_dir + os.sep + 'grass' + str(base_yr)
    urban_data = out_dir + os.sep + 'urb' + str(base_yr)
    wout_urban = out_dir + os.sep + BASE_YR_WOUT_URBAN + str(state)
    rot_and_frst = out_dir + os.sep + FRST + str(state) + '_' + str(
        yrs_in_rot) + YRS
    rot_frst_grass = out_dir + os.sep + ROT_FRST_GRASS + str(
        state) + '_' + str(yrs_in_rot) + YRS
    rot_frst_grass_urb = out_dir + os.sep + ROT_FRST_GRASS_URB + str(
        state) + '_' + str(yrs_in_rot) + YRS
    comb_final = base_dir + os.sep + str(state) + '_' + str(yrs_in_rot) + YRS

    if not USE_EXISTING_DATA or first_loop:
        # For all years of CDL data, extract the pixels with crops
        all_inp_rasters = []
        for i in range(yrs_in_rot):
            print(i)
            all_inp_rasters.append(out_dir + os.sep + OUT_RAS + str(state) +
                                   str(i))
            #print(all_inp_rasters)
            logger.info('\tExtracting attributes for raster ' +
                        os.path.split(crp_rot_files[i])[1])
            # Simplify each of the rasters: i.e.extract the crop information
            #crop_cdl_2_str = returnLandUses(analysis_dir+os.sep+'cultivos_'+str(2012+i)+'.txt')
            crop_cdl_2_str = returnLandUses(analysis_dir + os.sep + str(CROPS))
            #print(crop_cdl_2_str)
            dir_path = os.path.dirname(crp_rot_files[i])
            file_name = os.path.basename(crp_rot_files[i])[:-11][-7:]
            #print(i, crp_rot_files[i])
            # Copy the TIF file to a GRID file to avoid an ArcGIS bug
            try:
                arcpy.CopyRaster_management(crp_rot_files[i],
                                            out_dir + os.sep + file_name,
                                            nodata_value=0)
                crp_rot_files[i] = out_dir + os.sep + file_name
                rasters_to_delete.append(out_dir + os.sep + file_name)
            except Exception:
                logger.info('Copy raster ' + crp_rot_files[i] + ' failed')
                print('Copy raster ' + crp_rot_files[i] + ' failed')
            # Save output of extraction as ras_<state name>_<yr of rotation>
            att_extract = ExtractByAttributes(crp_rot_files[i], crop_cdl_2_str)
            att_extract.save(out_dir + os.sep + RAS + str(state) + str(i))
            rasters_to_delete.append(out_dir + os.sep + OUT_RAS + str(state) +
                                     str(i))
            rasters_to_delete.append(out_dir + os.sep + RAS + str(state) +
                                     str(i))
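
        # Note: returnLandUses is not shown in this excerpt. Judging by its
        # use with ExtractByAttributes and SetNull, it presumably reads a list
        # of CDL class codes from the given text file and builds an attribute
        # query such as '"VALUE" IN (1, 5, 24)' (hypothetical codes).
        # ExtractByAttributes keeps only the cells satisfying the clause and
        # sets every other cell to NoData.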

        # Remove the urban areas from raster for base_yr
        if remove_urban_and_wtlnd:
            logger.info('\tRemoving urban areas for ' +
                        os.path.split(base_yr_data)[1])
            urban_str = returnLandUses(analysis_dir + os.sep + URBAN_WETLANDS)
            out_set_null = SetNull(base_yr_data, base_yr_data, urban_str)
            out_set_null.save(wout_urban)
            rasters_to_delete.append(wout_urban)

        # For each raster, replace the NoData cells (here with the placeholder
        # value 299; an earlier, commented-out version used the base_yr raster)
        for i in range(yrs_in_rot):
            if i != ((base_yr - local_first_yr) +
                     ((yrs_in_rot - 1) - (local_last_yr - local_first_yr))):
                logger.info('\tReplacing the NoData cells for ' +
                            os.path.split(crp_rot_files[i])[1])
                try:
                    ###out_con = Con(IsNull(out_dir+os.sep+RAS+state+str(i)),base_yr_data,out_dir+os.sep+RAS+state+str(i))
                    out_con = Con(
                        IsNull(out_dir + os.sep + RAS + str(state) + str(i)),
                        299, out_dir + os.sep + RAS + str(state) + str(i))
                    out_con.save(out_dir + os.sep + OUT_RAS + str(state) +
                                 str(i))
                except Exception:
                    logger.info(arcpy.GetMessages())
            else:
                try:
                    arcpy.CopyRaster_management(
                        out_dir + os.sep + RAS + str(state) + str(i),
                        out_dir + os.sep + OUT_RAS + str(state) + str(i),
                        nodata_value=0)
                except Exception:
                    logger.info(arcpy.GetMessages())
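
        # Con(IsNull(r), 299, r) writes the placeholder code 299 into every
        # NoData cell of r and keeps all other cells unchanged, so Combine
        # below sees a complete grid for every year. The value 299 appears to
        # act as a "no crop observed" sentinel here; it is not a CDL class.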

        # Combine the simplified rasters
        logger.info('Merging all CDLs')
        #        out_combine = Combine([inRaster01,inRaster02,inRaster03])
        #        out_combine.save(env.workspace+os.sep+'outcombine2')
        out_combine = Combine(all_inp_rasters)
        out_combine.save(comb_rasters)
        #print('comb_rasters = ', comb_rasters)
        # Find the forested land in base_yr
        if (append_for_grs_urb):
            logger.info('Extracting forest land in ' + str(base_yr))
            cdl_f_str = returnLandUses(analysis_dir + os.sep +
                                       'forests_all.txt')
            att_extract = ExtractByAttributes(base_yr_data, cdl_f_str)
            att_extract.save(forest_data)
            # Find the grassland/pasture/herbaceous vegetation land in base_yr
            logger.info('Extracting grassland in ' + str(base_yr))
            cdl_g_str = returnLandUses(analysis_dir + os.sep + GRASSLANDS)
            att_extract = ExtractByAttributes(base_yr_data, cdl_g_str)
            att_extract.save(grasslands_data)
            # Find the urban/wetland land in base_yr
            logger.info('Extracting urban and wetland in ' + str(base_yr))
            cdl_u_str = returnLandUses(analysis_dir + os.sep + URBAN_WETLANDS)
            att_extract = ExtractByAttributes(base_yr_data, cdl_u_str)
            att_extract.save(urban_data)
    #################################################################################
    try:
        ras_cell_size_x = int(
            arcpy.GetRasterProperties_management(out_combine,
                                                 'CELLSIZEX').getOutput(0))
        ras_cell_size_y = int(
            arcpy.GetRasterProperties_management(out_combine,
                                                 'CELLSIZEY').getOutput(0))
        print('Pixel size (x, y) = (', ras_cell_size_x, ',',
              ras_cell_size_y, ')')
    except Exception:
        logger.info(arcpy.GetMessages())
    area_raster = ras_cell_size_x * ras_cell_size_y * (
        m2_to_ha
    )  # Area is computed in ha (convert cell_size_x*cell_size_y into ha)
    print('Pixel area in hectares:', area_raster)
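    # For example, with 30 m cells: 30 * 30 = 900 m2 per pixel and
    # 900 * 0.0001 = 0.09 ha (assuming m2_to_ha = 0.0001; the constant is
    # defined elsewhere in the script).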
    out_dir = prev_out_dir  # Revert to the old out_dir
    cdl_comb = combineCDLs(comb_rasters, yrs_in_rot, state)

    # Sort the data based on the COUNT which is in column index yrs_in_rot (0 based counting)
    cdl_comb = sorted(cdl_comb,
                      key=operator.itemgetter(yrs_in_rot),
                      reverse=True)
    writeCDLData(cdl_comb,
                 out_dir + os.sep + 'cdl_comb_' + str(state) + '.csv')

    logger.info('Simplifying crop rotations')
    # Also create the reclassification table
    recl_1 = open(out_dir + os.sep + 'recl_1.csv', 'w')
    state_stats = open(
        output_dir + os.sep + STATS + '.csv',
        'a')  # Contains information on how many pixels each rotation occupies

    total_pixels = 0
    for i in range(len(cdl_comb)):
        total_pixels += cdl_comb[i][yrs_in_rot]
    print('total_pixels', total_pixels)
    print('Usable area (ha):', total_pixels * area_raster)
    ###########################################################################
    # ALGORITHM: greedily select the dominant rotations, then merge every
    # remaining rotation into its closest selected rotation (by normalized
    # Levenshtein distance)
    ###########################################################################
    # The first max_crop_rot rotations are selected by default
    cur_num_rot = 0
    cur_perc_land_in_rot = 0.0
    perc_simplified_land = 0.0
    perc_of_nxt_rot = 100.0
    cur_sum_land_in_rot = 0.0
    cur_index = 0
    cont_loop = False

    while ((perc_of_nxt_rot > min_perc_of_rot
            and cur_perc_land_in_rot < min_area_rot)):
        cont_loop = False
        cur_num_rot += 1  # increase number of selected rotations by 1
        cur_sum_land_in_rot += float(cdl_comb[cur_index][yrs_in_rot])

        # find the % of pixels in all rotations till now
        #rate_of_increase = cur_perc_land_in_rot - prev_perc_land_in_rot # find how many pixels are in current rotation
        #prev_perc_land_in_rot = cur_perc_land_in_rot
        cur_index += 1
        try:
            perc_of_nxt_rot = float(
                cdl_comb[cur_index][yrs_in_rot]) / float(cur_sum_land_in_rot)
            #print (cur_perc_land_in_rot, perc_of_nxt_rot,float(cdl_comb[cur_index][yrs_in_rot]),float(cur_sum_land_in_rot))
        except (IndexError, ZeroDivisionError):
            perc_of_nxt_rot = 0.0  # If we end up here, we have probably run out of rotations
        cur_perc_land_in_rot = cur_sum_land_in_rot / float(total_pixels)
        if (perc_of_nxt_rot < min_perc_of_rot
                and cur_perc_land_in_rot < min_area_rot):
            cont_loop = True
        if (perc_of_nxt_rot >= min_perc_of_rot
                and cur_perc_land_in_rot >= min_area_rot):
            cont_loop = True
        if (perc_of_nxt_rot >= 0.0 and cur_perc_land_in_rot >= 1.0):
            cont_loop = False
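
    # Worked example with hypothetical counts: given rotations of
    # [500, 300, 120, 40, ...] pixels out of 1000 in total, with
    # min_perc_of_rot = 0.1 and min_area_rot = 0.9, the next rotation
    # contributes 300/500 = 0.6 of the land selected so far after one pick,
    # 120/800 = 0.15 after two, and 40/920 = 0.043 < 0.1 after three, so the
    # loop stops with cur_num_rot = 3 dominant rotations covering 92% of the
    # cropped pixels.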

    for i in range(cur_num_rot, len(cdl_comb)):
        cur_diff = 1.0
        rot_to_match_with = 0
        for j in range(cur_num_rot):
            # Compute the levenshtein distance between crop rotation types i and j
            # TODO!!! If we have 299 in crop rotation, then it does not compare well with the CDL because
            # that does not have 299
            diff_btwn_rot = float(
                lev(cdl_comb[i][:yrs_in_rot],
                    cdl_comb[j][:yrs_in_rot])) / float(yrs_in_rot)

            # Keep the selected rotation j with the smallest normalized distance
            if diff_btwn_rot < cur_diff:
                rot_to_match_with = j
                cur_diff = diff_btwn_rot

        cdl_comb[rot_to_match_with][yrs_in_rot] += cdl_comb[i][yrs_in_rot]
        # Mark the merged rotation i as no longer used
        cdl_comb[i][yrs_in_rot + 1] = 0
        # In the reclassification file, map rotation i's ID to the ID of the
        # rotation it was merged into
        writeReClassFile(recl_1, cdl_comb[i][yrs_in_rot + 2],
                         cdl_comb[rot_to_match_with][yrs_in_rot + 2])
        cdl_comb[i][yrs_in_rot + 2] = cdl_comb[rot_to_match_with][yrs_in_rot +
                                                                  2]
        perc_simplified_land += (
            1.0 - cur_diff) * cdl_comb[i][yrs_in_rot] / total_pixels
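
    # The merge above assumes lev is a standard Levenshtein edit distance over
    # the year-by-year crop codes: e.g. rotations (24, 61, 24) and (24, 61, 61)
    # differ in one of three years, giving diff_btwn_rot = 1/3. Each
    # non-selected rotation is folded into the selected rotation with the
    # smallest such distance, and (1 - distance) of its pixels are credited as
    # correctly simplified land.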

    float_str1 = "%.2f" % (cur_perc_land_in_rot * 100)
    float_str2 = "%.2f" % (perc_simplified_land * 100)
    float_str3 = "%.2f" % ((perc_simplified_land + cur_perc_land_in_rot) * 100)
    logger.info('% of rotations which match CDL data exactly: ' +
                str(float_str1) + '%')
    logger.info('% of rotations which have been simplified: ' +
                str(float_str2) + '%')
    logger.info('% net accuracy: ' + str(float_str3) + '%')
    stat_writer.write(
        str(state) + ', ' + str(cur_num_rot) + ', ' + str(local_first_yr) +
        ', ' + str(local_last_yr) + ', ' + str(float_str1) + ', ' +
        str(float_str3) + '\n')
    stat_writer.flush()

    cdl_comb = sorted(cdl_comb,
                      key=operator.itemgetter(yrs_in_rot + 1),
                      reverse=True)
    writeCDLData(cdl_comb, out_dir + os.sep + CDL_ROT + str(state) + '.csv')
    recl_1.close()

    sort_csv(out_dir + os.sep + 'recl_1.csv', (int, int, int), 0)

    # Reclassify
    rot_data = prev_out_dir + os.sep + ROT + str(state) + '_' + str(
        yrs_in_rot) + YRS
    num_lines = sum(1 for line in open(out_dir + os.sep + 'recl_1.csv'))
    if (num_lines > 1):
        try:
            out_reclass = ReclassByTable(comb_rasters,
                                         out_dir + os.sep + 'recl_1.csv',
                                         "FROM", "TO", "VALUE", "DATA")
            out_reclass.save(rot_data)
        except Exception:
            logger.info(arcpy.GetMessages())
    else:
        rot_data = comb_rasters
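
    # ReclassByTable reads the remap rows from recl_1.csv: "FROM" and "TO"
    # bound the old value range (presumably written as identical values, as in
    # recl_cur_to_higher below) and "VALUE" holds the new rotation ID; the
    # final "DATA" argument tells ArcGIS to leave any raster value not listed
    # in the table unchanged.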

    # Read the existing rotations ('a+' creates the file if it is missing;
    # rewind before reading, since append mode positions at end-of-file)
    prev_rot_path = (base_dir + os.sep + 'salida' + os.sep + tag + os.sep +
                     EXISTING_ROTATIONS)
    prev_rot_fobj = open(prev_rot_path, 'a+')
    prev_rot_fobj.seek(0)
    prev_rot_file = csv.reader(prev_rot_fobj)
    add_more_rot = []
    max_rot_id = MIN_ID
    prev_rot = {}
    if os.path.getsize(prev_rot_path) > 0:
        for str_row in prev_rot_file:
            row = [int(x) for x in str_row]
            prev_rot[row[0]] = row[1:]
            if row[0] > max_rot_id:
                max_rot_id = row[0]
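    # For example, a hypothetical row '305, 24, 61, 24' in the existing
    # rotations file yields prev_rot[305] = [24, 61, 24]: a rotation ID
    # followed by one crop code per year.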

    # Reclassify the existing rotations so that they do not overlap with the other CDL IDs
    recl_cur_to_higher = open(out_dir + os.sep + 'recl_cur_to_higher.csv', 'w')
    for i in range(len(cdl_comb)):
        if (cdl_comb[i][yrs_in_rot + 1] > 0):
            if (cdl_comb[i][yrs_in_rot + 2] < max_rot_id):
                max_rot_id += 1
                recl_cur_to_higher.write(
                    str(cdl_comb[i][yrs_in_rot + 2]) + ', ' +
                    str(cdl_comb[i][yrs_in_rot + 2]) + ', ' + str(max_rot_id) +
                    '\n')
                cdl_comb[i][yrs_in_rot + 2] = max_rot_id

    recl_cur_to_higher.close()
    sort_csv(out_dir + os.sep + 'recl_cur_to_higher.csv', (int, int, int), 0)

    higher_rot_data = out_dir + os.sep + HIGH + str(state) + '_' + str(
        yrs_in_rot) + YRS

    num_lines = sum(
        1 for line in open(out_dir + os.sep + 'recl_cur_to_higher.csv'))
    if (num_lines > 1):
        try:
            out_reclass = ReclassByTable(
                rot_data, out_dir + os.sep + 'recl_cur_to_higher.csv', "FROM",
                "TO", "VALUE", "DATA")
            out_reclass.save(higher_rot_data)
        except Exception:
            logger.info(arcpy.GetMessages())
    else:
        higher_rot_data = rot_data
    # Extract the existing rotations
    cur_rot = {}
    for i in range(len(cdl_comb)):
        if (cdl_comb[i][yrs_in_rot + 1] > 0):
            #for j in range(yrs_in_rot):
            sync_vec = synchronizeRotation(cdl_comb[i][0:yrs_in_rot],
                                           yrs_in_rot,
                                           local_last_yr - local_first_yr + 1,
                                           local_first_yr, prev_first_yr)
            cur_rot[cdl_comb[i][yrs_in_rot + 2]] = sync_vec
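
    # synchronizeRotation is not shown in this excerpt; given its arguments it
    # presumably phase-aligns the crop sequence to the reference year
    # prev_first_yr (e.g. cyclically shifting a rotation recorded from 2013 so
    # it lines up with one recorded from 2012), so the equality test against
    # prev_rot below compares like with like.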

    # Compare the newly created rotations with the existing rotations and reclassify
    # 1. Iterate through cur_rot and compare each cur_rot with the prev_rot
    # 2. If a cur_rot matches a prev_rot then put an entry in the reclass file
    # 3. If not, then add the cur_rot to the prev_rot file
    recl_cur_to_prev = open(out_dir + os.sep + 'recl_cur_to_prev.csv', 'w')

    remove_cur_rot_items = []
    for k1, v1 in cur_rot.items():
        for k2, v2 in prev_rot.items():
            if (v1 == v2):
                # write in reclass file
                recl_cur_to_prev.write(
                    str(k1) + ', ' + str(k1) + ', ' + str(k2) + '\n')
                remove_cur_rot_items.append(k1)
                # Iterate through cdl_comb and replace occurrences of crop rotation type k1 with k2
                for i in range(len(cdl_comb)):
                    if (cdl_comb[i][yrs_in_rot + 1] > 0
                            and (cdl_comb[i][yrs_in_rot + 2] == int(k1))):
                        cdl_comb[i][yrs_in_rot + 2] = int(k2)

    for i in range(len(remove_cur_rot_items)):
        cur_rot.pop(remove_cur_rot_items[i])

    #global MIN_ID
    for k1, v1 in cur_rot.items():
        add_cur_rot = True
        for k2, v2 in prev_rot.items():
            if (k1 == k2):
                tmp = max_rot_id + 1
                max_rot_id += 1
                lis = tmp, v1
                recl_cur_to_prev.write(
                    str(k1) + ', ' + str(k1) + ', ' + str(tmp) + '\n')

                add_more_rot.append(lis)
                add_cur_rot = False
                #MIN_ID += 300
        if add_cur_rot:
            lis = k1, v1
            recl_cur_to_prev.write(
                str(k1) + ', ' + str(k1) + ', ' + str(k1) + '\n')
            add_more_rot.append(lis)

    recl_cur_to_prev.close()
    sort_csv(out_dir + os.sep + 'recl_cur_to_prev.csv', (int, int, int), 0)
    append_to_prev_rot_file = open(output_dir + os.sep + EXISTING_ROTATIONS,
                                   'a+')
    for j in range(len(add_more_rot)):
        # e.g. add_more_rot = (23, [24, 61, 61, 24])
        tmp = add_more_rot[j]
        append_to_prev_rot_file.write(str(tmp[0]) + ', ')
        for i in range(len(tmp[1])):
            append_to_prev_rot_file.write(str(tmp[1][i]))
            if i < (len(tmp[1]) - 1):
                append_to_prev_rot_file.write(', ')
            else:
                append_to_prev_rot_file.write('\n')
    append_to_prev_rot_file.close()
    shutil.copyfile(output_dir + os.sep + EXISTING_ROTATIONS,
                    base_dir + os.sep + EXISTING_ROTATIONS)

    # Reclassify
    final_rot_data = out_dir + os.sep + PRODUCT + str(state) + '_' + str(
        yrs_in_rot)
    num_lines = sum(
        1 for line in open(out_dir + os.sep + 'recl_cur_to_prev.csv'))
    if (num_lines > 1):
        try:
            out_reclass = ReclassByTable(
                higher_rot_data, out_dir + os.sep + 'recl_cur_to_prev.csv',
                "FROM", "TO", "VALUE", "DATA")
            out_reclass.save(final_rot_data)
        except Exception:
            logger.info(arcpy.GetMessages())
    else:
        final_rot_data = higher_rot_data

    # In the statename_stats.csv file print the crop rotation ID and the number of pixels it contains
    state_stats.write(str(state) + ', Rotation, Num_Pixels, Area_ha\n')
    for i in range(len(cdl_comb)):
        if (cdl_comb[i][yrs_in_rot + 1] > 0):
            sync_vec = synchronizeRotation(cdl_comb[i][:yrs_in_rot],
                                           yrs_in_rot,
                                           local_last_yr - local_first_yr + 1,
                                           local_first_yr, prev_first_yr)
            state_stats.write(str(cdl_comb[i][yrs_in_rot + 2]) + ', ' +
                              str(sync_vec).strip('[]') + ', ' +
                              str(cdl_comb[i][yrs_in_rot]) + ', ' +
                              str(cdl_comb[i][yrs_in_rot] * area_raster) + '\n')

    state_stats.close()

    # Append the forest data to the remaining set
    if append_for_grs_urb and first_loop:
        logger.info('Appending forest, grassland and urban data')
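        # Con(IsNull(layer), background, layer) keeps `layer` wherever it has
        # data and falls back to `background` elsewhere, so the forest,
        # grassland and urban extracts successively overwrite the rotation
        # raster.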
        out_con = Con(IsNull(forest_data), final_rot_data, forest_data)
        out_con.save(rot_and_frst)
        # Append the grasslands data to the remaining set
        out_con = Con(IsNull(grasslands_data), rot_and_frst, grasslands_data)
        out_con.save(rot_frst_grass)
        # Append the urban data to the remaining set
        out_con = Con(IsNull(urban_data), rot_frst_grass, urban_data)
        out_con.save(rot_frst_grass_urb)
        out_con.save(comb_final)
        rasters_to_delete.append(forest_data)
        rasters_to_delete.append(grasslands_data)
        rasters_to_delete.append(urban_data)
        rasters_to_delete.append(rot_and_frst)
        rasters_to_delete.append(rot_frst_grass)
    else:
        try:
            arcpy.CopyRaster_management(final_rot_data,
                                        rot_frst_grass_urb,
                                        nodata_value=0)
            arcpy.CopyRaster_management(final_rot_data,
                                        comb_final,
                                        nodata_value=0)
        except Exception:
            logger.info(arcpy.GetMessages())

    # Add temporary files to the delete files set
    rasters_to_delete.append(rot_data)
    rasters_to_delete.append(higher_rot_data)
    rasters_to_delete.append(comb_rasters)
    rasters_to_delete.append(final_rot_data)

    return rot_frst_grass_urb
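
The lev helper used in the merge step is not shown in this excerpt. A minimal sketch of a standard Levenshtein edit distance over sequences, consistent with how it is called above (two lists of yearly crop codes, the result divided by yrs_in_rot):

def lev(a, b):
    # Classic single-row dynamic-programming edit distance; a sketch only,
    # the original implementation is not part of this excerpt.
    # dp[j] holds the distance between a[:i] and b[:j] for the current i.
    dp = list(range(len(b) + 1))
    for i, ca in enumerate(a, start=1):
        prev, dp[0] = dp[0], i
        for j, cb in enumerate(b, start=1):
            prev, dp[j] = dp[j], min(
                dp[j] + 1,         # deletion
                dp[j - 1] + 1,     # insertion
                prev + (ca != cb)  # substitution (free if the codes match)
            )
    return dp[-1]

# For example, lev([24, 61, 24], [24, 61, 61]) == 1, so the normalized
# distance over this 3-year rotation is 1/3.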