def make_volume_diff_rasters(self):
    """Create excavation and fill difference Rasters for every applied reach
    and save them as GeoTIFFs in the Output/Rasters/vol_name folder.

    Reads:  self.reach_ids_applied, self.reader, self.reaches, self.cache,
            self.vol_name, self.orig_raster, self.modified_raster,
            self.volume_threshold, self.output_ras_dir, self.logger
    Writes: self.rasters_for_neg_vol / self.rasters_for_pos_vol (reach -> Raster),
            self.volume_neg_dict / self.volume_pos_dict (reach -> 0.0 placeholder),
            self.rasters (list of output raster file names)
    """
    # Writes Raster Dataset to Output/Rasters/vol_name folder
    self.logger.info("")
    self.logger.info(" * creating volume difference Rasters ...")
    for rn in self.reach_ids_applied:
        # NOTE(review): this branch looks inverted -- the "ras" prefix is only
        # applied when rn == "none" (producing "rasn"); confirm intent.
        if not (rn == "none"):
            reach_name = str(rn)
        else:
            reach_name = "ras" + str(rn)[0]
        arcpy.gp.overwriteOutput = True
        arcpy.env.workspace = self.cache
        try:
            extents = self.reader.get_reach_coordinates(self.reaches.dict_id_int_id[rn])
        except:
            # Fall back to arcpy's "MAXOF" extent keyword (a plain string).
            extents = "MAXOF"
            self.logger.info("ERROR: Could not retrieve reach coordinates.")
        if not (type(extents) == str):
            try:
                # XMin, YMin, XMax, YMax
                arcpy.env.extent = arcpy.Extent(extents[0], extents[1], extents[2], extents[3])
            except:
                # Without a valid extent the outputs would be wrong; skip reach.
                self.logger.info("ERROR: Failed to set reach extents -- output is corrupted.")
                continue
        else:
            # extents is the "MAXOF" keyword string set in the except above.
            arcpy.env.extent = extents
        # Truncate the volume name to 5 chars to keep the raster name short.
        if str(self.vol_name).__len__() > 5:
            ras_name = reach_name + "_" + str(self.vol_name)[0:5]
        else:
            ras_name = reach_name + "_" + str(self.vol_name)
        self.logger.info(" * making excavation Raster ... ")
        # Excavation raster: where the modified DEM is at or below the original,
        # keep |orig - modified| if it meets the volume threshold, else 0.
        try:
            excav_ras = Con(Float(self.modified_raster) <= Float(self.orig_raster),
                            Con(Float(Abs(self.orig_raster - self.modified_raster)) >= self.volume_threshold,
                                Float(Abs(self.orig_raster - self.modified_raster)),
                                Float(0.0)),
                            Float(0.0))
        except arcpy.ExecuteError:
            self.logger.info(arcpy.GetMessages(2))
            arcpy.AddError(arcpy.GetMessages(2))
        except Exception as e:
            self.logger.info(e.args[0])
            arcpy.AddError(e.args[0])
        except:
            # NOTE(review): unreachable for ordinary exceptions after the
            # "except Exception" above; only BaseException subclasses land here.
            self.logger.info("ERROR: (arcpy).")
            self.logger.info(arcpy.GetMessages())
        # NOTE(review): if the Con() above raised, excav_ras is undefined here
        # and the resulting NameError is silently swallowed by the bare except.
        try:
            self.rasters_for_neg_vol.update({rn: excav_ras})
            self.volume_neg_dict.update({rn: -0.0})  # placeholder volume value
            self.rasters.append(ras_name + "exc.tif")
            excav_ras.save(self.output_ras_dir + ras_name + "exc.tif")
        except:
            self.logger.info("ERROR: Raster could not be saved.")
        self.logger.info(" * making fill Raster ... ")
        # Fill raster: where the modified DEM is above the original, keep
        # |modified - orig| if it meets the volume threshold, else 0.
        try:
            fill_ras = Con(Float(self.modified_raster) > Float(self.orig_raster),
                           Con(Float(Abs(self.modified_raster - self.orig_raster)) >= self.volume_threshold,
                               Float(Abs(self.modified_raster - self.orig_raster)),
                               Float(0.0)),
                           Float(0.0))
        except arcpy.ExecuteError:
            self.logger.info(arcpy.GetMessages(2))
            arcpy.AddError(arcpy.GetMessages(2))
        except Exception as e:
            self.logger.info(e.args[0])
            arcpy.AddError(e.args[0])
        except:
            self.logger.info("ERROR: (arcpy).")
            self.logger.info(arcpy.GetMessages())
        # NOTE(review): same undefined-name hazard as excav_ras above.
        try:
            self.rasters_for_pos_vol.update({rn: fill_ras})
            self.volume_pos_dict.update({rn: +0.0})  # placeholder volume value
            self.rasters.append(ras_name + "fill.tif")
            fill_ras.save(self.output_ras_dir + ras_name + "fill.tif")
        except:
            self.logger.info("ERROR: Raster could not be saved.")
def execute_task(in_extentDict):
    """Extract per-pixel CDL time series for one fishnet tile and load it
    into the refinement_tiles_2011 schema.

    in_extentDict: (tile_id, (XMin, YMin, XMax, YMax)) for one fishnet tile.

    Relies on module-level globals: data, traj_list, pd, engine,
    addTrajArrayField, arcpy, np.
    """
    stco_atlas = "tile_" + str(in_extentDict[0])
    procExt = in_extentDict[1]
    # print procExt
    XMin = procExt[0]
    YMin = procExt[1]
    XMax = procExt[2]
    YMax = procExt[3]
    # Set environments: snapping, cell size and CRS are all taken from the
    # trajectory raster, and the extent is restricted to this tile -- so the
    # full rasters can be read directly without clipping them to the fishnet.
    arcpy.env.snapRaster = data['pre']['traj']['path']
    arcpy.env.cellsize = data['pre']['traj']['path']
    arcpy.env.outputCoordinateSystem = data['pre']['traj']['path']
    arcpy.env.extent = arcpy.Extent(XMin, YMin, XMax, YMax)
    # Tile dimensions in cells (columns x rows).
    cls = 3577
    rws = 13789
    # outData = numpy.zeros((rows,cols), numpy.int16)
    # NOTE(review): outData is never used below, and np.int was removed in
    # NumPy 1.24 (use int / np.int_).
    outData = np.zeros((rws, cls), dtype=np.int)
    ### create numpy arrays for input datasets cdls, traj and traj_state
    cdls = {
        2008: arcpy.RasterToNumPyArray(
            in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2008',
            lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2009: arcpy.RasterToNumPyArray(
            in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2009',
            lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2010: arcpy.RasterToNumPyArray(
            in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2010',
            lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2011: arcpy.RasterToNumPyArray(
            in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2011',
            lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2012: arcpy.RasterToNumPyArray(
            in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2012',
            lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2013: arcpy.RasterToNumPyArray(
            in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2013',
            lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2014: arcpy.RasterToNumPyArray(
            in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2014',
            lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2015: arcpy.RasterToNumPyArray(
            in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2015',
            lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2016: arcpy.RasterToNumPyArray(
            in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2016',
            lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2017: arcpy.RasterToNumPyArray(
            in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2017',
            lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls)
    }
    arr_traj = arcpy.RasterToNumPyArray(
        in_raster=data['pre']['traj_rfnd']['path'],
        lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls)
    traj_state = arcpy.RasterToNumPyArray(
        in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\conf.gdb\\traj_state',
        lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls)
    # Find the location of each pixel labeled with a specific arbitrary value
    # from the trajectory list; build one output row per matching pixel.
    mainlist = []
    for traj in traj_list:
        # Return the indices of the pixels whose value equals this
        # trajectory's arbitrary value.
        indices = (arr_traj == traj[0]).nonzero()
        # Stack indices so they are easier to work with: (row, col) pairs.
        stacked_indices = np.column_stack((indices[0], indices[1]))
        # Get the row/col location of each selected pixel.
        for pixel_location in stacked_indices:
            row = pixel_location[0]
            col = pixel_location[1]
            # Assemble the row: trajectory value, conversion year, state
            # code at this pixel, tile id, then the pixel position.
            trajectory = traj[0]
            year = traj[1]
            state = str(traj_state[row][col])
            tile = str(in_extentDict[0])
            templist = [trajectory, year, state, tile, row, col]
            # Append the CDL value at this pixel for every year.
            # NOTE(review): this loop reuses the name "year" -- harmless
            # because templist is already built, but worth renaming.
            for year in data['global']['years']:
                templist.append(cdls[year][row][col])
            mainlist.append(templist)
    arcpy.ClearEnvironment("extent")
    # Only write a table when the tile actually contained matching pixels.
    if len(mainlist) > 0:
        to_string = map(str, data['global']['years'])
        year_columns = ["cdl_" + to_string for to_string in to_string]
        df = pd.DataFrame(
            mainlist,
            columns=['traj', 'year', 'state', 'tile', 'rows', 'cols'] + year_columns)
        df.to_sql(stco_atlas, engine, schema='refinement_tiles_2011')
        addTrajArrayField('refinement_tiles_2011', stco_atlas, year_columns)
def skrivuttema(Tema):
    """Export KU (impact assessment) theme maps as JPEGs, one per bookmark.

    Tema is the theme name (Norwegian). It selects which .mxd map document is
    opened, which legends are repositioned, which layers are switched on, and
    it becomes the printed map title. Output JPEGs go to the KU_verdikart
    export folder. (Python 2 / arcpy.mapping.)
    """
    print ('AApner dokumentet')
    strekningsLabel = ""
    # Pick the map document (and its data frame) matching the requested theme,
    # and grab the "DSTittel" layout text element used for the stretch label.
    if Tema == "Oversikt":
        mxd = arcpy.mapping.MapDocument(r"O:\616937\01\08_GIS\09_Mxd\KU_Delstrekning_Parsell.mxd")
        df = arcpy.mapping.ListDataFrames(mxd, "Delstrekningramme")[0]
        for elm in arcpy.mapping.ListLayoutElements(mxd, "TEXT_ELEMENT"):
            if elm.name == "DSTittel":
                strekningsLabel = elm
    elif Tema == "Anbefalte linjer og eksempellinjer":
        mxd = arcpy.mapping.MapDocument(r"O:\616937\01\08_GIS\09_Mxd\KU_MASTER_Parsell_RodtPaaBlaatt.mxd")
        df = arcpy.mapping.ListDataFrames(mxd, "Verdikartramme")[0]
        for elm in arcpy.mapping.ListLayoutElements(mxd, "TEXT_ELEMENT"):
            if elm.name == "DSTittel":
                strekningsLabel = elm
    else:
        mxd = arcpy.mapping.MapDocument(r"O:\616937\01\08_GIS\09_Mxd\KU_MASTER_Parsell.mxd")
        df = arcpy.mapping.ListDataFrames(mxd, "Verdikartramme")[0]
    # Find the Y positions of the legend when placed inside vs. outside the
    # map frame (taken from the VerdiForklaring / KulturForklaring graphics).
    TegnforklaringY = 0
    PlassertUtenforY = 0
    for elm in arcpy.mapping.ListLayoutElements(mxd, "GRAPHIC_ELEMENT"):
        if (elm.name == "VerdiForklaring"):
            print "Funnet " + elm.name
            TegnforklaringY = elm.elementPositionY
        if (elm.name == "KulturForklaring"):
            print "Funnet " + elm.name
            PlassertUtenforY = elm.elementPositionY
    # Move the theme-specific legend to the "inside" position.
    for elm in arcpy.mapping.ListLayoutElements(mxd, "GRAPHIC_ELEMENT"):
        if (Tema == "Kulturmiljø"):
            if (elm.name == "KulturForklaring"):
                print "Plasserer " + elm.name
                elm.elementPositionY = TegnforklaringY
        if (Tema == "Friuftsliv, by og bygdeliv") and (elm.name == "ByBygdelivForklaring"):
            print "Plasserer " + elm.name
            elm.elementPositionY = TegnforklaringY
        if (Tema == "Løsmasser"):
            if (elm.name == "LosmasserForklaring"):
                print "Plasserer " + elm.name
                elm.elementPositionY = 1.5
        if (Tema == "Innspill"):
            if (elm.name == "Innspillforklaring"):
                print "Plasserer " + elm.name
                elm.elementPositionY = TegnforklaringY
        # Move the value legend to the "outside" position for these themes.
        if (elm.name == "VerdiForklaring") and ((Tema == "Kulturmiljø") or (Tema == "Friuftsliv, by og bygdeliv") or (Tema == "Løsmasser") or (Tema == "Innspill")):
            print "Plasserer utenfor " + elm.name
            elm.elementPositionY = PlassertUtenforY
    # Remember the X position of the "Verdi" text element for the title.
    PositionX = 0.00000
    for elm in arcpy.mapping.ListLayoutElements(mxd, "TEXT_ELEMENT"):
        if elm.name == "Verdi":
            print elm.name
            PositionX = elm.elementPositionX
    for elm in arcpy.mapping.ListLayoutElements(mxd, "TEXT_ELEMENT"):
        if elm.name == "DSTittel":
            strekningsLabel = elm
        if elm.name == "Tittel":
            print elm.name
            elm.text = Tema
            elm.elementPositionX = PositionX
    # Move the "Verdi" text line outside the frame if necessary.
    # NOTE(review): (Tema == "Innspill") is tested twice in the condition
    # below; one of them was possibly meant to be a different theme.
    for elm in arcpy.mapping.ListLayoutElements(mxd, "TEXT_ELEMENT"):
        if elm.name == "Verdi":
            print elm.name
            PositionX = elm.elementPositionX
            if (Tema == "Vannmiljø sårbarhet") or (Tema == "Innspill") or (Tema == "Løsmasser") or (Tema == "Innspill"):
                elm.elementPositionY = PlassertUtenforY
    # Switch on the layers belonging to the requested theme.
    for lyr in arcpy.mapping.ListLayers(mxd, "*", df):
        # print lyr.name
        if (lyr.name == "KUfase ByOgBygdelivRegistreringer") and (Tema == "Friuftsliv, by og bygdeliv"):
            lyr.visible = True
        if (lyr.name == "N50_Arealdekke_omrade (ByBygdeliv)") and (Tema == "Friuftsliv, by og bygdeliv"):
            lyr.visible = True
        if (lyr.name == "KUfase LandskapsbildeRgistreringer") and (Tema == "Landskapsbilde"):
            lyr.visible = True
        if (lyr.name == "KUfase Naturressursregistreringer Mineralressurser") and (Tema == "Mineralressurser"):
            lyr.visible = True
        if (lyr.name == "KUfase DyrketMarkregistreringer") and (Tema == "Dyrket mark"):
            lyr.visible = True
        if (lyr.name == "KUfase Naturressursregistreringer (vann) DS") and (Tema == "Vannressurs"):
            lyr.visible = True
        if (lyr.name == "KUfase Kulturmiljoregistreringer") and (Tema == "Kulturmiljø"):
            lyr.visible = True
        if (lyr.name.find("flate SOSI-koder") > -1) and (Tema == "Løsmasser"):
            lyr.visible = True
        if (lyr.name == "TilbakemeldingerEksternt") and (Tema == "Innspill"):
            lyr.visible = True
        if (lyr.name == "TilbakemeldingerEksterntRedigert") and (Tema == "Innspill"):
            lyr.visible = True
    # For each bookmark: zoom in by a quarter on every side, set the stretch
    # label, and export the layout as a high-resolution JPEG.
    for bkmk in arcpy.mapping.ListBookmarks(mxd, data_frame=df):
        extentFlytt = bkmk.extent
        BreddeJustering = (extentFlytt.XMax - extentFlytt.XMin) / 4
        HoydeJustering = (extentFlytt.YMax - extentFlytt.YMin) / 4
        Xmax = extentFlytt.XMax - BreddeJustering
        Xmin = extentFlytt.XMin + BreddeJustering
        Ymax = extentFlytt.YMax - HoydeJustering
        Ymin = extentFlytt.YMin + HoydeJustering
        nyExtent = arcpy.Extent(Xmin,Ymin,Xmax,Ymax)
        df.extent = nyExtent
        kartnavn = ""
        if bkmk.name == "AG":
            kartnavn = "Delstrekning Arendal - Grimstad"
        else:
            kartnavn = "Delstrekning Dørdal - Tvedestrand"
        print "Skriver ut " + kartnavn + " " + Tema
        # NOTE(review): strekningsLabel stays "" (a str) when no "DSTittel"
        # element was found above; .text would then raise AttributeError.
        strekningsLabel.text = kartnavn
        arcpy.mapping.ExportToJPEG(mxd, r"O:\616937\01\08_GIS\99_Eksport\KU_verdikart\KU_" + Tema.replace("ø","o") + "_Delstrekning_" + bkmk.name + ".jpg", None, 4178, 6000, 400, world_file=False, color_mode="24-BIT_TRUE_COLOR", jpeg_quality=100)
    del mxd
"UTM32_clc06_c331_StraendeDuenenSandflaechen_140807", "UTM32_clc06_c332_Felsflaechen_140807", "UTM32_clc06_c333_spaerlicheVegetation_140807", "UTM32_clc06_c335_GletscherDauerschneegebiete_140807", "UTM32_clc06_c411_Suempfe_140807", "UTM32_clc06_c412_Torfmoore_140807", "UTM32_clc06_c421_Salzwiesen_140807", "UTM32_clc06_c423_Gezeitenzonenflaechen_140807", "UTM32_clc06_c511_Gewaesserlaeufe_140807", "UTM32_clc06_c512_Wasserflaechen_140807", "UTM32_clc06_c521_Lagunen_140807", "UTM32_clc06_c522_Muendungsgebiete_140807", "UTM32_clc06_c523_MeerOzean_140807" ] #Verarbeitungsausdehnung festlegen arcpy.env.extent = arcpy.Extent(xmin, ymin, xmax, ymax, "ETRS_1989_UTM_Zone_32N") #Mergen in aktueller Ausdehnung arcpy.Merge_management(rauhList, "mergetemp", "") #FeatureClass und Feature zum Ausschneiden erstellen arcpy.CreateFeatureclass_management( r"X:\05_Basisdaten\Rauhigkeiten\Corine2006_V17.gdb", "cliptemp", "POLYGON", "", "", "", sr) #Punkte zu Array array = arcpy.Array([ arcpy.Point(xmin, ymin), arcpy.Point(xmin, ymax), arcpy.Point(xmax, ymax), arcpy.Point(xmax, ymin) ])
def execute_task(args):
    """Tally NLCD classes for converted pixels in one fishnet tile and write
    a per-tile classification raster.

    args: (in_extentDict, data, traj_list) where in_extentDict is
    (tile_id, (XMin, YMin, XMax, YMax)).

    NOTE(review): rws and cls are used below but never defined in this
    function or unpacked from args -- they must exist as module globals or
    this raises NameError.
    """
    in_extentDict, data, traj_list = args
    fc_count = in_extentDict[0]
    procExt = in_extentDict[1]
    # print procExt
    XMin = procExt[0]
    YMin = procExt[1]
    XMax = procExt[2]
    YMax = procExt[3]
    # Set environments from the trajectory raster, restricted to this tile.
    arcpy.env.snapRaster = data['pre']['traj']['path']
    arcpy.env.cellsize = data['pre']['traj']['path']
    arcpy.env.outputCoordinateSystem = data['pre']['traj']['path']
    arcpy.env.extent = arcpy.Extent(XMin, YMin, XMax, YMax)
    # outData = numpy.zeros((rows,cols), numpy.int16)
    outData = np.zeros((rws, cls), dtype=np.uint8)
    ### create numpy arrays for input datasets nlcds and traj
    # NOTE(review): the 1992 raster is loaded but never read below.
    nlcds = {
        1992: arcpy.RasterToNumPyArray(
            in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\nlcd.gdb\\nlcd30_1992',
            lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2001: arcpy.RasterToNumPyArray(
            in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\nlcd.gdb\\nlcd30_2001',
            lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2006: arcpy.RasterToNumPyArray(
            in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\nlcd.gdb\\nlcd30_2006',
            lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2011: arcpy.RasterToNumPyArray(
            in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\nlcd.gdb\\nlcd30_2011',
            lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
    }
    arr_traj = arcpy.RasterToNumPyArray(
        in_raster=data['pre']['traj']['path'],
        lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls)
    #### find the location of each pixel labeled with specific arbitray value in the rows list
    #### note the traj_list is derived from the sql query above
    nlcd_list = []
    i = 0
    for row in traj_list:
        print 'i', i
        traj = row[0]       # trajectory value
        ytc = row[1]        # year converted to crop (or None)
        print 'ytc', ytc
        yfc = row[2]        # year converted from crop (or None)
        print 'yfc', yfc
        # Return the indices of the pixels whose value matches this trajectory.
        indices = (arr_traj == row[0]).nonzero()
        # Stack the indices so they are easier to work with: (row, col) pairs.
        stacked_indices = np.column_stack((indices[0], indices[1]))
        # Get the row/col location of each selected pixel.
        # NOTE(review): the inner loop rebinds "row" (outer loop variable);
        # harmless here because traj/ytc/yfc were extracted first.
        for pixel_location in stacked_indices:
            row = pixel_location[0]
            col = pixel_location[1]
            #### depending on year of conversion, append the value of the two previous nlcds
            if ytc != None:
                if ytc < 2012:
                    nlcd_list.append(nlcds[2001][row][col])
                    nlcd_list.append(nlcds[2006][row][col])
                else:
                    nlcd_list.append(nlcds[2006][row][col])
                    nlcd_list.append(nlcds[2011][row][col])
            elif yfc != None:
                if yfc < 2012:
                    nlcd_list.append(nlcds[2001][row][col])
                    nlcd_list.append(nlcds[2006][row][col])
                else:
                    nlcd_list.append(nlcds[2006][row][col])
                    nlcd_list.append(nlcds[2011][row][col])
            # #get the length of nlcd list containing only the value 81 and 82
            # ##81 = pasture/hay
            # ##82 = cultivated crop
            # NOTE(review): nlcd_list is never reset, so these counts are
            # cumulative over all pixels/trajectories seen so far -- confirm
            # that is intended (a per-pixel count seems more likely).
            count_81 = nlcd_list.count(81)
            count_82 = nlcd_list.count(82)
            count_81_82 = count_81 + count_82
            ####create masks for both ytc and yfc ######################
            ##label the pixel ############################################################
            # NOTE(review): the labelPixel write below is commented out, so
            # outData remains all zeros and the saved raster is entirely
            # NoData (value_to_nodata=0).
            # outData[row,col] = labelPixel(data, ytc, yfc, count_82, count_81_82)
        i = i + 1
    arcpy.ClearEnvironment("extent")
    outname = "tile_" + str(fc_count) + '.tif'
    # #create
    outpath = os.path.join("C:/Users/Bougie/Desktop/Gibbs/data/", r"tiles", outname)
    # NumPyArrayToRaster (in_array, {lower_left_corner}, {x_cell_size}, {y_cell_size}, {value_to_nodata})
    myRaster = arcpy.NumPyArrayToRaster(outData, lower_left_corner=arcpy.Point(XMin, YMin), x_cell_size=30, y_cell_size=30, value_to_nodata=0)
    ##free memory from outdata array!!
    outData = None
    myRaster.save(outpath)
    myRaster = None
def execute_task(args): in_extentDict, data = args ######### Execute Nibble ##################### filter_combos = { 'n4h': ["FOUR", "HALF"], 'n4m': ["FOUR", "MAJORITY"], 'n8h': ["EIGHT", "HALF"], 'n8m': ["EIGHT", "MAJORITY"] } filter_key = data['core']['filter'] rg_combos = { '4w': ["FOUR", "WITHIN"], '8w': ["EIGHT", "WITHIN"], '4c': ["FOUR", "CROSS"], '8c': ["EIGHT", "CROSS"] } rg_instance = rg_combos[data['core']['rg']] # for count in masks_list: cond = "Count < " + str( gen.getPixelCount(str(data['global']['res']), int( data['core']['mmu']))) print 'cond: ', cond fc_count = in_extentDict[0] # print fc_count procExt = in_extentDict[1] # print procExt XMin = procExt[0] YMin = procExt[1] XMax = procExt[2] YMax = procExt[3] #set environments arcpy.env.extent = arcpy.Extent(XMin, YMin, XMax, YMax) if data['core']['route'] == 'r1': raster_mtr = Reclassify(Raster(data['pre']['traj_rfnd']['path']), "Value", RemapRange(createReclassifyList(data)), "NODATA") raster_filter = MajorityFilter(raster_mtr, filter_combos[filter_key][0], filter_combos[filter_key][1]) raster_rg = RegionGroup(raster_filter, rg_instance[0], rg_instance[1], "NO_LINK") raster_mask = SetNull(raster_rg, 1, cond) raster_nbl = arcpy.sa.Nibble(raster_filter, raster_mask, "DATA_ONLY") #clear out the extent for next time arcpy.ClearEnvironment("extent") outname = "tile_" + str(fc_count) + '.tif' outpath = os.path.join("C:/Users/Bougie/Desktop/Gibbs/", r"tiles", outname) raster_nbl.save(outpath) elif data['core']['route'] == 'r2': raster_filter = MajorityFilter( Raster(data['pre']['traj_rfnd']['path']), filter_combos[filter_key][0], filter_combos[filter_key][1]) raster_mtr = Reclassify(raster_filter, "Value", RemapRange(createReclassifyList(data)), "NODATA") raster_rg = RegionGroup(raster_mtr, rg_instance[0], rg_instance[1], "NO_LINK") raster_mask = SetNull(raster_rg, raster_mtr, cond) filled_1 = Con( IsNull(raster_mask), FocalStatistics(raster_mask, NbrRectangle(3, 3, "CELL"), 'MAJORITY'), raster_mask) t1 = 
Con( IsNull(filled_1), FocalStatistics(filled_1, NbrRectangle(10, 10, "CELL"), 'MAJORITY'), filled_1) # t2 = Con(IsNull(filled_1),FocalStatistics(filled_1,NbrRectangle(3, 3, "CELL"),'MAJORITY'), filled_1) # final = SetNull(path_mtr, filled_2, "VALUE <> 3") # raster_mask = SetNull(raster_rg, 1, cond) # raster_nbl = arcpy.sa.Nibble(raster_mtr, raster_mask, "DATA_ONLY") #clear out the extent for next time arcpy.ClearEnvironment("extent") outname = "tile_" + str(fc_count) + '.tif' outpath = os.path.join("C:/Users/Bougie/Desktop/Gibbs/", r"tiles", outname) # raster_shrink.save(outpath) t1.save(outpath) if data['core']['route'] == 'r3': raster_filter = MajorityFilter( Raster(data['pre']['traj_rfnd']['path']), filter_combos[filter_key][0], filter_combos[filter_key][1]) raster_rg = RegionGroup(raster_filter, rg_instance[0], rg_instance[1], "NO_LINK") raster_mask = SetNull(raster_rg, 1, cond) raster_nbl = arcpy.sa.Nibble(raster_filter, raster_mask, "DATA_ONLY") raster_mtr = Reclassify(raster_nbl, "Value", RemapRange(createReclassifyList(data)), "NODATA") #clear out the extent for next time arcpy.ClearEnvironment("extent") outname = "tile_" + str(fc_count) + '.tif' outpath = os.path.join("C:/Users/Bougie/Desktop/Gibbs/", r"tiles", outname) raster_mtr.save(outpath)
def BrownianBridge(features, rasterName, dateField, groupingFields,
                   cellSizeConstant, intervalsConstant,
                   locationVarianceField, locationVarianceConstant,
                   mobilityVarianceField, mobilityVarianceConstant,
                   spatialReference):
    """Features can be a line or point feature.  They must be projected, or a
    projection must be provided.  They must have a time field, x field, y field.
    Locational variance must be provided as a field name, or as a constant value.
    Mobility variance can be provided as a field name, or as a constant value,
    if neither is provided, then a mobility variance is estimated and applied
    at all fixes.  A group of fields can be provided to group the features.
    There will be one raster per group.

    (Python 2 code: print statements, raise X, "msg", dict.iteritems().)
    Each fix is assumed to be a mutable sequence
    [time, x, y, locationVariance, mobilityVariance] -- indices 0..4 are read
    and 3/4 are written below.
    """
    try:
        start = datetime.datetime.now()
        print "Begin processing Brownian Bridge", start
        utils.info("Reading features...")
        #Get Intervals
        if utils.IsInt(intervalsConstant):
            intervals = int(intervalsConstant)
        else:
            intervals = 10
        #get a collection of fixes, one for each grouping.
        desc = arcpy.Describe(features)
        shapeFieldName = desc.shapeFieldName
        shapeType = desc.shapeType
        if shapeType.lower() == 'polyline':
            fixSets = BuildFixesFromLines(features, shapeFieldName, dateField, groupingFields, locationVarianceField, mobilityVarianceField, spatialReference)
        elif shapeType.lower() == 'point':
            fixSets = BuildFixesFromPoints(features, shapeFieldName, dateField, groupingFields, locationVarianceField, mobilityVarianceField, spatialReference)
        else:
            raise TypeError, "Features must be points or lines."
        utils.info("Got Fix Sets " + str(datetime.datetime.now() - start))
        start = datetime.datetime.now()
        for groupName, fixes in fixSets.iteritems():
            #print "groupName",groupName, "len(fixes)",len(fixes)
            #print fixes[0]
            #print fixes[1]
            if len(fixes) < 2:
                # NOTE(review): "name" is not defined anywhere in this
                # function -- this branch would raise NameError; "groupName"
                # was almost certainly intended.  Also note the missing space
                # before "does" in the warning message below.
                if name == '':
                    raise ValueError, "The feature class does not have two or more fix locations."
                else:
                    utils.warn("The group named " + groupName + "does not have enough fix locations. Skipping")
                    continue
            #Get Extents - based on selection of fixes, not feature class.
            xmin = xmax = fixes[0][1]
            ymin = ymax = fixes[0][2]
            prevTime = False
            maxTime = 0
            for fix in fixes:
                if fix[1] < xmin:
                    xmin = fix[1]
                if fix[1] > xmax:
                    xmax = fix[1]
                if fix[2] < ymin:
                    ymin = fix[2]
                if fix[2] > ymax:
                    ymax = fix[2]
                # NOTE(review): this gap computation looks inverted -- delta
                # is only computed when prevTime is falsy (i.e. on the first
                # fix, against the initial False), so maxTime never reflects
                # the largest gap between consecutive fixes.  Likely intended:
                # "if prevTime: delta = fix[0] - prevTime; ..." -- confirm
                # against the original source before changing.
                if not prevTime:
                    delta = fix[0] - prevTime
                prevTime = fix[0]
                if delta > maxTime:
                    maxTime = delta
            extents = arcpy.Extent(xmin, ymin, xmax, ymax)
            #print "Got extents",xmin, ymin, xmax, ymax
            #Get LocationalVariance
            if not locationVarianceField:
                # No per-fix field: use the constant, or fall back to an estimate,
                # and stamp the same value onto every fix (index 3).
                if utils.IsFloat(locationVarianceConstant):
                    locationVariance = float(locationVarianceConstant)
                else:
                    locationVariance = EstimateLocationVariance()
                for fix in fixes:
                    fix[3] = locationVariance
                maxLocationVariance = locationVariance
            else:
                # Per-fix field supplied: validate each value and track the max.
                maxLocationVariance = 0
                for fix in fixes:
                    if fix[3] == None or fix[3] <= 0:
                        raise ValueError, "Invalid location variance in data. Use constant or none for default."
                    if fix[3] > maxLocationVariance:
                        maxLocationVariance = fix[3]
            #print "maxLocationVariance",maxLocationVariance
            #Get MobilityVariance
            if not mobilityVarianceField:
                if utils.IsFloat(mobilityVarianceConstant):
                    mobilityVariance = float(mobilityVarianceConstant)
                else:
                    utils.info(" Calculating most likely mobility variance...")
                    #FIXME, guess and scalefactor should depend on fix data,
                    # i.e number of fixes, estimate of Vm, max velocity or such.
                    # The method Brownian.MobilityVariance() will validate and adjust
                    # as necessary, but using good estimates reduces computation.
                    guess = 1000000.0
                    scaleFactor = 1e10
                    mobilityVariance = Brownian.MobilityVariance(fixes, guess, scaleFactor)
                    utils.info(" Got mobility variance of %.2f" % mobilityVariance)
                for fix in fixes:
                    fix[4] = mobilityVariance
                maxMobilityVariance = mobilityVariance
            else:
                maxMobilityVariance = 0
                for fix in fixes:
                    if fix[4] == None or fix[4] <= 0:
                        raise ValueError, "Invalid mobility variance in data. Use constant or none for default."
                    if fix[4] > maxMobilityVariance:
                        maxMobilityVariance = fix[4]
            print " Got Variances: Vl", maxLocationVariance, "Vm", maxMobilityVariance, datetime.datetime.now() - start
            start = datetime.datetime.now()
            #Buffer the extents to include the variance
            extents = GetGridExtents(extents, maxLocationVariance, maxMobilityVariance * maxTime)
            #Get cellSize
            # Empirically measured per-gridpoint timing constants used to pick
            # a cell size that keeps the runtime bounded.
            overhead = 0.000000429 #per gridpoint
            secondsPerCheck = 0.00051443 #per gridpoint (includes overhead)
            secondsPerCalculation = 0.000002756 #236
            if utils.IsFloat(cellSizeConstant):
                cellSize = float(cellSizeConstant) # all parameters from ArcToolbox are text
            else:
                #by default, limit the size of the grid to what can be processed in 1 minute.
                totalTime = 60 #seconds
                cellSize = CalcCellSize(extents, len(fixes), intervals, secondsPerCalculation, totalTime)
            print " Got extents and cellsize", cellSize, datetime.datetime.now() - start
            start = datetime.datetime.now()
            searchArea = GetBufferForPath(extents, cellSize, fixes, maxTime)
            utils.info(" Got buffer " + str(datetime.datetime.now() - start))
            start = datetime.datetime.now()
            PrintCellSizeEvaluation(cellSize, maxLocationVariance, maxMobilityVariance)
            PrintRunTime(extents, cellSize, searchArea, len(fixes), intervals, secondsPerCheck, secondsPerCalculation)
            # Skip the search-area optimization when it would not pay off.
            if not ShouldUseSearchArea(extents, cellSize, searchArea, len(fixes), intervals, secondsPerCheck, secondsPerCalculation):
                searchArea = None
                utils.info(" Not using a search buffer")
            start = datetime.datetime.now()
            raster = CreateBBRaster(extents, cellSize, searchArea, fixes, intervals, spatialReference)
            newName = GetFixedRasterName(rasterName, groupName)
            if arcpy.Exists(newName):
                arcpy.Delete_management(newName)
            raster.save(newName)
            utils.info("Done " + str(datetime.datetime.now() - start))
    except:
        utils.die(sys.exc_info()[1])
def execute_task(in_extentDict):
    """Refine 2009 conversion labels for one fishnet tile using the 2007
    resampled CDL crop/non-crop binary, and save a per-tile raster.

    in_extentDict: (tile_id, (XMin, YMin, XMax, YMax)) for one fishnet tile.

    Relies on module-level globals: data, traj_list, arcpy, np, os.
    """
    fc_count = in_extentDict[0]
    procExt = in_extentDict[1]
    # print procExt
    XMin = procExt[0]
    YMin = procExt[1]
    XMax = procExt[2]
    YMax = procExt[3]
    # Set environments from the trajectory raster, restricted to this tile.
    arcpy.env.snapRaster = data['pre']['traj']['path']
    arcpy.env.cellsize = data['pre']['traj']['path']
    arcpy.env.outputCoordinateSystem = data['pre']['traj']['path']
    arcpy.env.extent = arcpy.Extent(XMin, YMin, XMax, YMax)
    # Tile dimensions in cells.
    # NOTE(review): cls/rws are defined but the literals 13789/21973 are used
    # directly below -- use the variables to keep them in sync.
    cls = 21973
    rws = 13789
    # outData = numpy.zeros((rows,cols), numpy.int16)
    # NOTE(review): np.int was removed in NumPy 1.24 (use int / np.int_).
    outData = np.zeros((13789, 21973), dtype=np.int)
    ### create numpy arrays for input datasets nlcds and traj
    # arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\binaries.gdb\\cdl30_b_2007_resampled', lower_left_corner = arcpy.Point(XMin,YMin), nrows = 13789, ncols = 21973),
    cdls = {
        2007: arcpy.RasterToNumPyArray(
            in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\binaries.gdb\\cdl30_b_2007_resampled',
            lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973, nodata_to_value=255)
    }
    arr_traj = arcpy.RasterToNumPyArray(
        in_raster=data['pre']['traj']['path'],
        lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973)
    #### find the location of each pixel labeled with specific arbitray value in the rows list
    #### note the traj_list is derived from the sql query above
    for row in traj_list:
        #trajectory value
        traj = row[0]
        #conversion year ytc (year to crop)
        ytc = row[1]
        # print 'ytc', ytc
        # yfc: year from crop
        yfc = row[2]
        # print 'yfc', yfc
        # Return the indices of the pixels whose value matches this trajectory.
        indices = (arr_traj == row[0]).nonzero()
        # Stack the indices so they are easier to work with: (row, col) pairs.
        stacked_indices = np.column_stack((indices[0], indices[1]))
        # Get the row/col location of each selected pixel.
        # NOTE(review): the inner loop rebinds "row" (outer loop variable);
        # harmless because traj/ytc/yfc were extracted first.
        for pixel_location in stacked_indices:
            row = pixel_location[0]
            col = pixel_location[1]
            # 2009 conversions are cross-checked against the 2007 binary:
            # a to-crop pixel already crop in 2007 (==1) is flagged, as is a
            # from-crop pixel already non-crop in 2007 (==0).
            if ytc == 2009 and cdls[2007][row][col] == 1:
                # print cdls[2007][row][col]
                outData[row, col] = data['refine']['arbitrary_crop']
            elif yfc == 2009 and cdls[2007][row][col] == 0:
                # print cdls[2007][row][col]
                outData[row, col] = data['refine']['arbitrary_noncrop']
    arcpy.ClearEnvironment("extent")
    outname = "tile_" + str(fc_count) + '.tif'
    # #create
    outpath = os.path.join("C:/Users/Bougie/Desktop/Gibbs/", r"tiles", outname)
    # NumPyArrayToRaster (in_array, {lower_left_corner}, {x_cell_size}, {y_cell_size}, {value_to_nodata})
    myRaster = arcpy.NumPyArrayToRaster(outData, lower_left_corner=arcpy.Point(XMin, YMin), x_cell_size=30, y_cell_size=30, value_to_nodata=0)
    myRaster.save(outpath)
def main(workspace_location, scratch_location, dem_name, inundation_polygon_name):
    """
    Floodwater Depth Estimation Tool (FwDET)

    Calculate water depth from a flood extent polygon (e.g. from remote
    sensing analysis) based on an underlying DEM.

    Program procedure:
    1. Flood extent polygon to polyline
    2. Polyline to Raster - DEM extent and resolution (Env)
    3. Con - DEM values to Raster
    4. Focal Statistics loop
    5. Water depth calculation - difference between Focal Statistics output
       and DEM

    See:
    Cohen, S., G. R. Brakenridge, A. Kettner, B. Bates, J. Nelson,
    R. McDonald, Y. Huang, D. Munasinghe, and J. Zhang (2017), Estimating
    Floodwater Depths from Flood Inundation Maps and Topography. Journal of
    the American Water Resources Association (JAWRA):1-12.

    Created by Sagy Cohen, Surface Dynamics Modeling Lab, University of
    Alabama; web: http://sdml.ua.edu; June 30, 2016.
    Updated by Austin Raney September 16, 2019.

    Copyright (C) 2017 Sagy Cohen
    Developer can be contacted at Box 870322, Tuscaloosa AL 35487 USA

    This program is free software; you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by the
    Free Software Foundation; either version 2 of the License, or (at your
    option) any later version.

    This program is distributed in the hope that it will be useful, but
    WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
    Public License for more details.

    You should have received a copy of the GNU General Public License along
    with this program; if not, write to the Free Software Foundation, Inc.,
    51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
    """
    # Require an ArcGIS Spatial Analyst extension
    arcpy.CheckOutExtension("Spatial")
    arcpy.env.overwriteOutput = True

    # Location of workspace (preferably a GeoDatabase)
    WS = arcpy.env.workspace = r'{}'.format(workspace_location)
    # Location of the Scratch GeoDatabase (optional but highly recommended)
    arcpy.env.scratchWorkspace = r'{}'.format(scratch_location)
    # Name of the input DEM (within the Workspace)
    DEMname = dem_name
    # Name of the input Inundation extent polygon layer (within the Workspace)
    InundPolygon = inundation_polygon_name
    # Name of the output clipped DEM (clipped by the inundation polygon extent)
    ClipDEM = 'dem_clip'

    dem = arcpy.Raster(DEMname)
    cellSize = dem.meanCellHeight
    # Raster of DEM elevations along the inundation polygon boundary.
    boundary = CalculateBoundary(dem, InundPolygon, cellSize, WS)

    # Clip the DEM to the inundation polygon ("XMin YMin XMax YMax" string).
    extent = str(dem.extent.XMin) + " " + str(dem.extent.YMin) + " " + str(dem.extent.XMax) + " " + str(dem.extent.YMax)
    print(extent)
    arcpy.Clip_management(DEMname, extent, ClipDEM, InundPolygon, cellSize,
                          'ClippingGeometry', 'NO_MAINTAIN_EXTENT')
    print(arcpy.GetMessages())
    arcpy.env.extent = arcpy.Extent(dem.extent.XMin, dem.extent.YMin,
                                    dem.extent.XMax, dem.extent.YMax)

    print('First focal ')
    OutRas = FocalStatistics(boundary, 'Circle 3 CELL', "MAXIMUM", "DATA")

    # Focal Statistics loop - the number of iterations needed depends on the
    # flood inundation extent and DEM resolution.
    # TO CHANGE NUMBER OF FOCAL STATS ITERATIONS CHANGE VARIABLE ITER_NUM.
    # NOTE, python's range function is an exclusive range.
    ITER_NUM = 50
    # BUG FIX: the upper bound was previously hard-coded as range(3, 50), so
    # changing ITER_NUM (as instructed above) silently had no effect.
    for i in range(3, ITER_NUM):
        print(i)
        # Growing circular neighborhood (radius i cells) for each pass.
        neighborhood = 'Circle ' + str(i) + ' CELL'
        OutRasTemp = FocalStatistics(boundary, neighborhood, "MAXIMUM", "DATA")
        # Assure that only 'empty' (NoData) cells are assigned a value in
        # each iteration; previously filled cells are preserved.
        OutRas = Con(IsNull(OutRas), OutRasTemp, OutRas)
    print('Focal loop done!')
    OutRas.save('Focafin10m')  # name of output final focal statistics raster

    # Calculate floodwater depth: water-surface estimate minus the clipped
    # DEM, clamped at zero so there are no negative depths.
    waterDepth = Minus(OutRas, ClipDEM)
    waterDepth = Con(waterDepth < 0, 0, waterDepth)
    # name of output floodwater depth raster
    waterDepth.save('WaterDepth10m')

    # Smooth the depth raster with a low-pass filter.
    waterDepthFilter = Filter(waterDepth, "LOW", "DATA")
    # name of output floodwater depth raster after low-pass filter
    waterDepthFilter.save('WaterDep10mf')
    print('Done')
def execute_task(args):
    """Process one fishnet tile: label each qualifying pixel with its year of
    conversion (value 61 transitions) and write the result as a GeoTIFF tile.

    args is a tuple:
        in_extentDict -- (tile_id, (XMin, YMin, XMax, YMax)) for this tile
        data          -- project configuration dict
        traj_list     -- list of (traj_value, mtr, traj_array) tuples from a SQL query
        noncroplist   -- CDL codes considered non-crop
        croplist      -- CDL codes considered crop (unused here)
        cls, rws      -- tile width/height in pixels
    """
    in_extentDict, data, traj_list, noncroplist, croplist, cls, rws = args
    fc_count = in_extentDict[0]
    ###get the extent from the SPECIFIC tile
    procExt = in_extentDict[1]
    XMin = procExt[0]
    YMin = procExt[1]
    XMax = procExt[2]
    YMax = procExt[3]
    #set environments (snap/cellsize/CRS all taken from the trajectory raster)
    arcpy.env.snapRaster = data['pre']['traj']['path']
    arcpy.env.cellsize = data['pre']['traj']['path']
    arcpy.env.outputCoordinateSystem = data['pre']['traj']['path']
    ###set the extent for this SPECIFIC tile
    arcpy.env.extent = arcpy.Extent(XMin, YMin, XMax, YMax)
    print 'rws==================================',rws
    print 'cls==================================',cls
    ##create an empty matrix that will be filled with values using the code below if conditions are met
    outData = np.zeros((rws, cls), dtype=np.uint16)
    print 'outdata', outData
    ### create numpy arrays for input datasets cdls and traj, windowed to this tile
    cdls = {
        2008:arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2008', lower_left_corner = arcpy.Point(XMin,YMin), nrows = rws, ncols = cls),
        2009:arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2009', lower_left_corner = arcpy.Point(XMin,YMin), nrows = rws, ncols = cls),
        2010:arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2010', lower_left_corner = arcpy.Point(XMin,YMin), nrows = rws, ncols = cls),
        2011:arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2011', lower_left_corner = arcpy.Point(XMin,YMin), nrows = rws, ncols = cls),
        2012:arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2012', lower_left_corner = arcpy.Point(XMin,YMin), nrows = rws, ncols = cls),
        2013:arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2013', lower_left_corner = arcpy.Point(XMin,YMin), nrows = rws, ncols = cls),
        2014:arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2014', lower_left_corner = arcpy.Point(XMin,YMin), nrows = rws, ncols = cls),
        2015:arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2015', lower_left_corner = arcpy.Point(XMin,YMin), nrows = rws, ncols = cls),
        2016:arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2016', lower_left_corner = arcpy.Point(XMin,YMin), nrows = rws, ncols = cls),
        2017:arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2017', lower_left_corner = arcpy.Point(XMin,YMin), nrows = rws, ncols = cls)
    }
    ###this is the trajectory that is referenced with the specific trajectory values we are looking for from the sql statement above
    arr_traj = arcpy.RasterToNumPyArray(in_raster=data['pre']['traj']['path'], lower_left_corner = arcpy.Point(XMin,YMin), nrows = rws, ncols = cls)
    #### find the location of each pixel labeled with specific arbitrary value in the rows list
    #### note the traj_list is derived from the sql query above
    for traj in traj_list:
        ###
        traj_value = traj[0]
        mtr = traj[1]
        traj_array = traj[2]
        # Indices of the pixels that carry this trajectory's arbitrary value.
        indices = (arr_traj == traj_value).nonzero()
        # stack the indices variable above so easier to work with (N x 2 array of row/col)
        stacked_indices=np.column_stack((indices[0],indices[1]))
        ##### get the x and y location of each pixel that has been selected from above
        for pixel_location in stacked_indices:
            row = pixel_location[0]
            col = pixel_location[1]
            ### attach the cdl values to each binary trajectory at each pixel location
            entirelist = []
            for year in data['global']['years']:
                entirelist.append(cdls[year][row][col])
            ### if 61 in the entirelist AND the entirelist does not start with 61 then process the entirelist otherwise skip processing of pixel
            if(61 in entirelist):
                # print 'entirelist---', entirelist
                # Split the CDL time series at the FIRST occurrence of 61 (fallow).
                current_index=entirelist.index(61)
                beforelist=np.array(entirelist[:current_index])
                # print 'beforelist---', beforelist
                afterlist=np.array(entirelist[current_index:])
                # print 'afterlist---', afterlist
                ### truncate traj_array to the years before the first 61
                traj_array_before = traj_array[:current_index]
                ##make sure that there are at least two elements in the beforelist(i.e 2 elements before the first 61)-----length of 2 means yfc is 2010
                if(beforelist.size >= 2):
                    #### Conditions ################################################
                    ## only unique elements -1 and 0 in numpy diff list to make sure change is only in one direction (ie 1 to 0)
                    cond1 = np.isin(np.diff(traj_array_before), [-1,0]).all()
                    ## make sure that the first two elements in beforelist are crop ----- this works with cond1
                    cond2 = traj_array_before[0] == 1 and traj_array_before[1] == 1
                    ## make sure that afterlist contains only noncrop and 61
                    cond3 = np.isin(afterlist, noncroplist + [61]).all()
                    ## make sure that the afterlist length is greater than 1
                    cond4 = afterlist.size > 1
                    if(cond1 and cond2 and cond3 and cond4):
                        ##### label the pixel with conversion year ##############################################
                        outData[row,col] = data['global']['years'][getIndex(traj_array_before)]
    # Reset the extent so later tasks in the same process are not constrained.
    arcpy.ClearEnvironment("extent")
    outname = "tile_" + str(fc_count) +'.tif'
    outpath = os.path.join("C:/Users/Bougie/Desktop/Gibbs/data/", r"tiles", outname)
    # value_to_nodata=0: unlabeled pixels become NoData in the output tile.
    myRaster = arcpy.NumPyArrayToRaster(outData, lower_left_corner=arcpy.Point(XMin, YMin), x_cell_size=30, y_cell_size=30, value_to_nodata=0)
    ##free memory from outdata array
    outData = None
    myRaster.save(outpath)
    myRaster = None
def execute_task(args):
    """Clip the 2015 non-crop CLU dataset to one region's fishnet cell and
    write the result as a per-region shapefile.

    args is (st_abbrev, (XMin, YMin, XMax, YMax)) for the region/tile.
    """
    # in_extentDict, data, traj_list = args
    in_extentDict = args
    st_abbrev = in_extentDict[0]
    print st_abbrev
    procExt = in_extentDict[1]
    print procExt
    XMin = procExt[0]
    YMin = procExt[1]
    XMax = procExt[2]
    YMax = procExt[3]
    #set environments
    arcpy.env.extent = arcpy.Extent(XMin, YMin, XMax, YMax)
    # Fishnet feature class whose OID is matched in the where_clause below.
    yo='D:\\projects\\intact_land\\intact\\refine\\archive\\mask\\misc.gdb\\fishnet_region'
    ####### clip ##########################################################################################
    # Candidate mask inputs (currently unused; see commented inputslist below).
    rails = 'D:\\projects\\intact_land\\intact\\refine\\layers\\development\\road\\roads.gdb\\region_rails_buff25m'
    roads = 'D:\\projects\\intact_land\\intact\\refine\\layers\\development\\road\\roads.gdb\\region_roads_buff25m'
    cdl_urban = 'D:\\projects\\intact_land\\intact\\refine\\layers\\development\\urban\\urban.gdb\\region_cdl_2015_dev_5mmu'
    urban_500k = 'D:\\projects\\intact_land\\intact\\refine\\layers\\development\\urban\\urban.gdb\\region_urban_500k'
    water = 'D:\\projects\\intact_land\\intact\\refine\\layers\\water\\water.gdb\\region_tiger_water'
    main = 'D:\\projects\\intact_land\\intact\\main\\years\\2015.gdb\\clu_2015_noncrop_c'
    merged = 'D:\\projects\\intact_land\\intact\\refine\\archive\\mask\\final.gdb\\region_merged_masks_t2'
    output = 'D:\\projects\\intact_land\\intact\\refine\\final\\merged_{}.shp'.format(st_abbrev)
    # inputslist = [rails, roads, cdl_urban, urban_500k, water]
    # for inputs in inputslist:
    current_layer='layer_{}'.format(st_abbrev)
    # "\"FIELD\" = \'121\'"
    # Make a layer from the feature class
    arcpy.MakeFeatureLayer_management(in_features=main, out_layer=current_layer)
    # arcpy.SelectLayerByAttribute_management("cities_lyr", "SUBSET_SELECTION", "POPULATION > 10000")
    # NOTE(review): st_abbrev appears to be a string abbreviation, but it is
    # interpolated into the where_clause without quotes ("oid_yo={}"); if
    # oid_yo is a text field this SQL will fail -- confirm field type.
    arcpy.SelectLayerByAttribute_management (in_layer_or_view=current_layer, selection_type="SUBSET_SELECTION", where_clause="oid_yo={}".format(st_abbrev))
    # # Select all cities that overlap the chihuahua polygon
    # arcpy.SelectLayerByLocation_management("cities_lyr", "INTERSECT", "c:/data/mexico.gdb/chihuahua", "", "NEW_SELECTION")
    # # Within the selection (done above) further select only those cities that have a population >10,000
    # arcpy.SelectLayerByAttribute_management(st_abbrev, "SUBSET_SELECTION", "POPULATION > 10000")
    # # Write the selected features to a new featureclass
    # arcpy.CopyFeatures_management("cities_lyr", "c:/data/mexico.gdb/chihuahua_10000plus")
    # Clip 'main' by its own selected subset (the layer created above).
    arcpy.Clip_analysis(in_features=main, clip_features=current_layer, out_feature_class=output)
def execute_task(args):
    """Filter / region-group / nibble one fishnet tile of the refined
    trajectory raster, using one of two processing routes:

    r1: reclassify -> majority filter -> region group -> set-null small
        regions -> Nibble the gaps.
    r2: majority filter -> reclassify -> region group -> set-null small
        regions -> fill gaps with progressively larger focal majorities
        -> trim to the CDL footprint.

    args is (in_extentDict, data, traj_list) where in_extentDict is
    (tile_id, (XMin, YMin, XMax, YMax)).
    """
    in_extentDict, data, traj_list = args
    # Lookup tables mapping short config keys to MajorityFilter / RegionGroup argument pairs.
    filter_combos = {'n4h':["FOUR", "HALF"],'n4m':["FOUR", "MAJORITY"],'n8h':["EIGHT", "HALF"],'n8m':["EIGHT", "MAJORITY"]}
    filter_key = data['core']['filter']
    rg_combos = {'4w':["FOUR", "WITHIN"], '8w':["EIGHT", "WITHIN"], '4c':["FOUR", "CROSS"], '8c':["EIGHT", "CROSS"]}
    rg_instance = rg_combos[data['core']['rg']]
    # for count in masks_list:
    # SQL condition selecting regions smaller than the minimum mapping unit (mmu).
    cond = "Count < " + str(gen.getPixelCount(str(data['global']['res']), int(data['core']['mmu'])))
    print 'cond: ',cond
    fc_count = in_extentDict[0]
    # print fc_count
    procExt = in_extentDict[1]
    # print procExt
    XMin = procExt[0]
    YMin = procExt[1]
    XMax = procExt[2]
    YMax = procExt[3]
    #set environments
    arcpy.env.extent = arcpy.Extent(XMin, YMin, XMax, YMax)
    # 2012 CDL used at the end of route r2 to trim filled patches to the CDL footprint.
    cdl=Raster('C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2012')
    if data['core']['route'] == 'r1':
        raster_yxc = Reclassify(Raster(data['pre']['traj_rfnd']['path']), "Value", RemapRange(traj_list), "NODATA")
        raster_filter = MajorityFilter(raster_yxc, filter_combos[filter_key][0], filter_combos[filter_key][1])
        raster_rg = RegionGroup(raster_filter, rg_instance[0], rg_instance[1],"NO_LINK")
        # NoData out the regions below the mmu threshold ...
        raster_mask = SetNull(raster_rg, 1, cond)
        # ... then nibble the filtered raster through that mask.
        raster_nbl = arcpy.sa.Nibble(raster_filter, raster_mask, "DATA_ONLY")
        #clear out the extent for next time
        arcpy.ClearEnvironment("extent")
        outname = "tile_" + str(fc_count) +'.tif'
        outpath = os.path.join("C:/Users/Bougie/Desktop/Gibbs/data/", r"tiles", outname)
        raster_nbl.save(outpath)
    elif data['core']['route'] == 'r2':
        ##perform a majority filter on the refined trajectory
        raster_filter = MajorityFilter(Raster(data['pre']['traj_rfnd']['path']), filter_combos[filter_key][0], filter_combos[filter_key][1])
        ####reclassify the filtered raster to the MTR labels
        raster_yxc = Reclassify(raster_filter, "Value", RemapRange(traj_list), "NODATA")
        raster_filter=None  # release intermediate raster references as we go
        ### perform region group on the raster_yxc to get the number of pixels for each region
        raster_rg = RegionGroup(raster_yxc, rg_instance[0], rg_instance[1], "NO_LINK")
        ### set null the regions that are less than the mmu threshold
        raster_mask = SetNull(raster_rg, raster_yxc, cond)
        raster_yxc=None
        raster_rg=None
        ### fill in the regions that were below the mmu threshold. Perform these in series starting small (taking advantage of spatial autocorrelation) and expanding out to get
        ### past the equilibrium sink of a certain kernel size
        filled_1 = Con(IsNull(raster_mask),FocalStatistics(raster_mask,NbrRectangle(3, 3, "CELL"),'MAJORITY'), raster_mask)
        raster_mask=None
        filled_2 = Con(IsNull(filled_1),FocalStatistics(filled_1,NbrRectangle(5, 5, "CELL"),'MAJORITY'), filled_1)
        filled_1=None
        filled_3 = Con(IsNull(filled_2),FocalStatistics(filled_2,NbrRectangle(10, 10, "CELL"),'MAJORITY'), filled_2)
        filled_2=None
        filled_4 = Con(IsNull(filled_3),FocalStatistics(filled_3,NbrRectangle(20, 20, "CELL"),'MAJORITY'), filled_3)
        filled_3=None
        ##### trim off the patches that have expanded past the cdl map boundaries #####################
        cond = "Value = 0"
        raster_mask = SetNull(cdl, filled_4, cond)
        filled_4=None
        ##### create a tiff file from the raster object #######################################
        #clear out the extent for next time
        arcpy.ClearEnvironment("extent")
        outname = "tile_" + str(fc_count) +'.tif'
        outpath = os.path.join("C:/Users/Bougie/Desktop/Gibbs/data/", r"tiles", outname)
        raster_mask.save(outpath)
        raster_mask=None
try: FileSnap = 'Y:\\ECMWF_Runoff\\ERA_Interim\\Daily_GeoTIFF\\{0}\\{1}\\{0}-{1}-{2}.tif'.format( year, month, day) except: FileSnap = 'Y:\\ECMWF_Runoff\\ERA_Interim\\Daily_GeoTIFF\\{0}\\{1}\\1980-01-01.tif'.format( year, month, day) # NetCDF Raster layer fileInput = folder_Input + "\\" + year + "\\" + month + "\\" + year + "-" + month + "-" + day + ".tif" fileOutput = folder_Output + "\\" + year + "\\" + month + "\\" + year + "-" + month + "-" + day + ".tif" # Change XY Resolution to GeoTiFF # Adjust the grid cell to ERA5 #arcpy.env.cellSize = FileSnap #arcpy.env.extent = FileSnap #arcpy.env.extent = arcpy.Extent(-0.175781, -90.125, 360.074219, 90.125) arcpy.env.extent = arcpy.Extent(-0.375, -90.375, 360.375, 90.375) #arcpy.env.snapRaster = FileSnap filled_raster = arcpy.sa.Con( arcpy.sa.IsNull(arcpy.sa.Raster(fileInput)), arcpy.sa.FocalStatistics(arcpy.sa.Raster(fileInput), arcpy.sa.NbrRectangle(3, 3), 'MEAN'), arcpy.sa.Raster(fileInput)) filled_raster.save(folder_Output + "\\" + year + "\\" + month + "\\" + year + "-" + month + "-" + day + ".tif") #arcpy.Resample_management (fileInput, fileOutput, "0.25 0.25", "BILINEAR") FECHA = year + "-" + month + "-" + day print(FECHA) # Finaliza Fin = '------------TRANSFORMACION TERMINADA------------'
def execute_task(args): in_extentDict, data, nc_list = args fc_count = in_extentDict[0] procExt = in_extentDict[1] # print procExt XMin = procExt[0] YMin = procExt[1] XMax = procExt[2] YMax = procExt[3] #set environments #The brilliant thing here is that using the extents with the full dataset!!!!!! DONT EVEN NEED TO CLIP THE FULL RASTER TO THE FISHNET BECASUE arcpy.env.snapRaster = data['pre']['traj']['path'] arcpy.env.cellsize = data['pre']['traj']['path'] arcpy.env.outputCoordinateSystem = data['pre']['traj']['path'] arcpy.env.extent = arcpy.Extent(XMin, YMin, XMax, YMax) cls = 21973 rws = 13789 # outData = numpy.zeros((rows,cols), numpy.int16) outData = np.zeros((13789, 21973), dtype=np.int) ### create numpy arrays for input datasets cdls and traj cdls = { 2008: arcpy.RasterToNumPyArray( in_raster= 'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2008', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973), 2009: arcpy.RasterToNumPyArray( in_raster= 'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2009', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973), 2010: arcpy.RasterToNumPyArray( in_raster= 'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2010', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973), 2011: arcpy.RasterToNumPyArray( in_raster= 'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2011', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973), 2012: arcpy.RasterToNumPyArray( in_raster= 'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2012', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973), 2013: arcpy.RasterToNumPyArray( in_raster= 'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2013', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973), 2014: 
arcpy.RasterToNumPyArray( in_raster= 'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2014', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973), 2015: arcpy.RasterToNumPyArray( in_raster= 'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2015', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973), 2016: arcpy.RasterToNumPyArray( in_raster= 'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2016', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973), 2017: arcpy.RasterToNumPyArray( in_raster= 'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2017', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973) } arr_traj = arcpy.RasterToNumPyArray(in_raster=data['pre']['traj']['path'], lower_left_corner=arcpy.Point( XMin, YMin), nrows=13789, ncols=21973) # find the location of each pixel labeled with specific arbitray value in the rows list for row in createReclassifyList(data): #year of conversion for either expansion or abandonment fc = data['global']['years_conv'] print 'fc', fc #year before conversion for either expansion or abandonment bfc = data['global']['years_conv'] - 1 print 'bfc', bfc #Return the indices of the pixels that have values of the fc arbitrsy values of the traj. 
indices = (arr_traj == row[0]).nonzero() #stack indices so easier to work with stacked_indices = np.column_stack((indices[0], indices[1])) #get the x and y location of each pixel that has been selected from above for pixel_location in stacked_indices: row = pixel_location[0] col = pixel_location[1] #get the pixel value for fc pixel_value_fc = cdls[fc][row][col] #get the pixel value for bfc pixel_value_bfc = cdls[bfc][row][col] ##### create dev mask ################################################################################## if pixel_value_bfc in [122, 123, 124]: outData[row, col] = data['refine']['arbitrary_noncrop'] ##### create 36_61 mask ################################################################################ if pixel_value_fc in [36, 61]: #find the years still left in the time series for this pixel location yearsleft = [i for i in data['global']['years'] if i > fc] #create templist to hold the rest of the cld values for the time series. initiaite it with the first cdl value templist = [pixel_value_fc] for year in yearsleft: templist.append(cdls[year][row][col]) #if the templist values are esentailly all "noncrop" then realbel as noncrop if len(set(np.isin(templist, nc_list))) == 1: outData[row, col] = data['refine']['arbitrary_noncrop'] arcpy.ClearEnvironment("extent") outname = "tile_" + str(fc_count) + '.tif' # #create outpath = os.path.join("C:/Users/Bougie/Desktop/Gibbs/", r"tiles", outname) # NumPyArrayToRaster (in_array, {lower_left_corner}, {x_cell_size}, {y_cell_size}, {value_to_nodata}) myRaster = arcpy.NumPyArrayToRaster(outData, lower_left_corner=arcpy.Point( XMin, YMin), x_cell_size=30, y_cell_size=30, value_to_nodata=0) myRaster.save(outpath)
rec = recs.next() while rec: if not rec.VehicleID in lstVehicleID: lstVehicleID.append(rec.VehicleID) timeStamp = string.split(rec.Timestamp, "T")[1] timeStamp = string.split(timeStamp, ".")[0] timeStamp = string.split(timeStamp, "+")[0] rec.Timestamp = timeStamp recs.updateRow(rec) rec = recs.next() del rec del recs ###todo:make KML file print "\nConvert EMU Layers from Map to KML" extent = arcpy.Extent() extent.XMin = 0 extent.XMax = 180 extent.YMin = -90 extent.YMax = 0 #todo: loop through layers for v in lstVehicleID: sql = "\"VehicleId\" = \'" + v + "\'" print sql arcpy.MakeFeatureLayer_management(in_features="EMU", out_layer=v, where_clause=sql) kmlFile = outFolder + os.path.sep + v + ".kmz" vfc = outFolder + os.path.sep + "tmp" arcpy.Select_analysis(in_features="EMU",
def execute_task(in_extentDict): fc_count = in_extentDict[0] procExt = in_extentDict[1] # print procExt XMin = procExt[0] YMin = procExt[1] XMax = procExt[2] YMax = procExt[3] #set environments #The brilliant thing here is that using the extents with the full dataset!!!!!! DONT EVEN NEED TO CLIP THE FULL RASTER TO THE FISHNET BECASUE arcpy.env.snapRaster = data['pre']['traj']['path'] arcpy.env.cellsize = data['pre']['traj']['path'] arcpy.env.outputCoordinateSystem = data['pre']['traj']['path'] arcpy.env.extent = arcpy.Extent(XMin, YMin, XMax, YMax) cls = 21973 rws = 13789 # outData = numpy.zeros((rows,cols), numpy.int16) outData = np.zeros((13789, 21973), dtype=np.int) ### create numpy arrays for input datasets cdls and traj change = arcpy.RasterToNumPyArray( in_raster= 'D:\\projects\\usxp\\series\\s14\\core\\core.gdb\\s14_traj_cdl30_b_2008to2016_rfnd_n8h_mtr_8w_mmu5', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973), conf = arcpy.RasterToNumPyArray( in_raster= 'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\conf.gdb\\National_average_cdl_30m_r_2008to2016_albers_confidence', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973) # np.nonzero(change) # # arr_traj = arcpy.RasterToNumPyArray(in_raster=data['pre']['traj']['path'], lower_left_corner = arcpy.Point(XMin,YMin), nrows = 13789, ncols = 21973) # # find the location of each pixel labeled with specific arbitray value in the rows list # for row in location_list: # #year of conversion for either expansion or abandonment # ytx = row[1] # print 'ytx', ytx # #year before conversion for either expansion or abandonment # ybx = row[1]-1 # print 'ybx', ybx #Return the indices of the pixels that have values of the ytc arbitrsy values of the traj. 
indices = np.nonzero(change) print indices # #stack indices so easier to work with # stacked_indices=np.column_stack((indices[0],indices[1])) # #get the x and y location of each pixel that has been selected from above # for pixel_location in stacked_indices: # row = pixel_location[0] # col = pixel_location[1] # print 'row', row # print 'col', col # #get the pixel value for ytx # pixel_value_ytx = cdls[ytx][row][col] # #get the pixel value for ybx # pixel_value_ybx = cdls[ybx][row][col] # print row # ##### create dev mask componet # # cdls # if np.isnan(change[row,col]): # print 'null' # break # else: # outData[row,col] = conf[row,col] # ##### create 36_61 mask componet # if pixel_value_ytx in [36,61]: # #find the years stil left in the time series for this pixel location # yearsleft = [i for i in data['global']['years'] if i > ytx] # #only focus on the extended series ---dont care about 2012 # if len(yearsleft) > 1: # #create templist to hold the rest of the cld values for the time series. initiaite it with the first cdl value # templist = [pixel_value_ytx] # for year in yearsleft: # # print 'year', year # # print 'cdls[year][row][col] :', cdls[year][row][col] # templist.append(cdls[year][row][col]) # #check if all elements in array are the same # if len(set(templist)) == 1: # outData[row,col] = data['refine']['mask_dev_alfalfa_fallow']['arbitrary'] arcpy.ClearEnvironment("extent") outname = "tile_" + str(fc_count) + '.tif' # #create outpath = os.path.join("C:/Users/Bougie/Desktop/Gibbs/", r"tiles", outname) # NumPyArrayToRaster (in_array, {lower_left_corner}, {x_cell_size}, {y_cell_size}, {value_to_nodata}) myRaster = arcpy.NumPyArrayToRaster(outData, lower_left_corner=arcpy.Point( XMin, YMin), x_cell_size=30, y_cell_size=30, value_to_nodata=0) myRaster.save(outpath)
def execute_task(in_extentDict):
    """Build the dev/alfalfa/fallow refinement mask for one fishnet tile.

    For each (arbitrary_value, conversion_year) pair in the module-level
    location_list, relabels pixels with the project's arbitrary mask value when
    (a) the year-before-conversion CDL value is developed (122/123/124), or
    (b) the conversion-year value is 36/61 and the remaining series is uniform
    (or near-uniform including codes 176/152). Writes the tile as a GeoTIFF.

    in_extentDict is (tile_id, (XMin, YMin, XMax, YMax)). Relies on
    module-level 'data' and 'location_list'.
    """
    fc_count = in_extentDict[0]
    procExt = in_extentDict[1]
    # print procExt
    XMin = procExt[0]
    YMin = procExt[1]
    XMax = procExt[2]
    YMax = procExt[3]
    #set environments
    #The brilliant thing here is that using the extents with the full dataset!!!!!! DONT EVEN NEED TO CLIP THE FULL RASTER TO THE FISHNET BECAUSE
    arcpy.env.snapRaster = data['pre']['traj']['path']
    arcpy.env.cellsize = data['pre']['traj']['path']
    arcpy.env.outputCoordinateSystem = data['pre']['traj']['path']
    arcpy.env.extent = arcpy.Extent(XMin, YMin, XMax, YMax)
    # Tile dimensions in pixels (note: the literals below duplicate these values).
    cls = 21973
    rws = 13789
    # outData = numpy.zeros((rows,cols), numpy.int16)
    outData = np.zeros((13789, 21973), dtype=np.int)
    ### create numpy arrays for input datasets cdls and traj, windowed to this tile
    cdls = {
        2008: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2008', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973),
        2009: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2009', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973),
        2010: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2010', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973),
        2011: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2011', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973),
        2012: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2012', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973),
        2013: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2013', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973),
        2014: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2014', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973),
        2015: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2015', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973),
        2016: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2016', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973),
        2017: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2017', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973)
    }
    arr_traj = arcpy.RasterToNumPyArray(in_raster=data['pre']['traj']['path'], lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973)
    # find the location of each pixel labeled with specific arbitrary value in the rows list
    # NOTE(review): the outer loop variable 'row' (a tuple from location_list)
    # is clobbered by the inner per-pixel 'row = pixel_location[0]' below.
    # Behavior is unaffected because row[0]/row[1] are read before the inner
    # loop, but the shadowing is fragile -- consider renaming.
    for row in location_list:
        #year of conversion for either expansion or abandonment
        ytx = row[1]
        print 'ytx', ytx
        #year before conversion for either expansion or abandonment
        ybx = row[1] - 1
        print 'ybx', ybx
        #Return the indices of the pixels that have values of the ytc arbitrary values of the traj.
        indices = (arr_traj == row[0]).nonzero()
        #stack indices so easier to work with
        stacked_indices = np.column_stack((indices[0], indices[1]))
        #get the x and y location of each pixel that has been selected from above
        for pixel_location in stacked_indices:
            row = pixel_location[0]
            col = pixel_location[1]
            #get the pixel value for ytx
            pixel_value_ytx = cdls[ytx][row][col]
            #get the pixel value for ybx
            pixel_value_ybx = cdls[ybx][row][col]
            ##### create dev mask component: developed before conversion (CDL 122/123/124)
            if pixel_value_ybx in [122, 123, 124]:
                # print 'ybx:', pixel_value_ybx
                outData[row, col] = data['refine']['mask_dev_alfalfa_fallow'][
                    'arbitrary']
            ##### create 36_61 mask component: conversion year is alfalfa (36) or fallow (61)
            if pixel_value_ytx in [36, 61]:
                #find the years still left in the time series for this pixel location
                yearsleft = [i for i in data['global']['years'] if i > ytx]
                #only focus on the extended series ---dont care about terminal year
                if len(yearsleft) > 1:
                    #create templist to hold the rest of the cdl values for the time series. Initialize it with the first cdl value
                    templist = [pixel_value_ytx]
                    for year in yearsleft:
                        templist.append(cdls[year][row][col])
                    # Special-case codes (presumably grass/pasture-like CDL classes -- confirm).
                    z = [176, 152]
                    #check if all elements in array are the same
                    if len(set(templist)) == 1:
                        outData[row, col] = data['refine'][
                            'mask_dev_alfalfa_fallow']['arbitrary']
                    elif [x for x in templist if x in z]:
                        # [x for x in item if x not in z]
                        print 'templist', templist
                        # Also accept a 2- or 3-value series when one of the z codes is present.
                        if len(set(templist)) == 2 or len(set(templist)) == 3:
                            print 'templist len == 2', templist
                            outData[row, col] = data['refine'][
                                'mask_dev_alfalfa_fallow']['arbitrary']
    # Reset the extent so later tasks in the same process are not constrained.
    arcpy.ClearEnvironment("extent")
    outname = "tile_" + str(fc_count) + '.tif'
    # #create
    outpath = os.path.join("C:/Users/Bougie/Desktop/Gibbs/", r"tiles", outname)
    # NumPyArrayToRaster (in_array, {lower_left_corner}, {x_cell_size}, {y_cell_size}, {value_to_nodata})
    myRaster = arcpy.NumPyArrayToRaster(outData, lower_left_corner=arcpy.Point(XMin, YMin), x_cell_size=30, y_cell_size=30, value_to_nodata=0)
    myRaster.save(outpath)
def execute_task(in_extentDict):
    """QA/QC task for one fishnet tile: read per-year CDL confidence rasters
    for the 'nd' state and print the 2008 confidence value at each non-zero
    ytc pixel. Exploratory/diagnostic code -- produces no output dataset.

    in_extentDict is (tile_id, (XMin, YMin, XMax, YMax)). Relies on the
    module-level 'data' configuration dict.
    """
    fc_count = in_extentDict[0]
    procExt = in_extentDict[1]
    # print procExt
    XMin = procExt[0]
    YMin = procExt[1]
    XMax = procExt[2]
    YMax = procExt[3]
    #set environments
    #The brilliant thing here is that using the extents with the full dataset!!!!!! DONT EVEN NEED TO CLIP THE FULL RASTER TO THE FISHNET BECAUSE
    arcpy.env.snapRaster = data['pre']['traj']['path']
    arcpy.env.cellsize = data['pre']['traj']['path']
    arcpy.env.outputCoordinateSystem = data['pre']['traj']['path']
    arcpy.env.extent = arcpy.Extent(XMin, YMin, XMax, YMax)
    # Tile dimensions in pixels (currently unused -- the literals are repeated below).
    cls = 21973
    rws = 13789
    # outData = numpy.zeros((rows,cols), numpy.int16)
    # outData is allocated but never written or saved in this diagnostic task.
    outData = np.zeros((13789, 21973), dtype=np.int)
    state = 'nd'
    ### create numpy arrays for the per-year confidence rasters, windowed to this tile
    conf = {
        2008: arcpy.RasterToNumPyArray(in_raster='D:\\projects\\usxp\\qaqc\\{}.gdb\\cdl_30m_r_{}_2008_albers_confidence'.format(state, state), lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973),
        2009: arcpy.RasterToNumPyArray(in_raster='D:\\projects\\usxp\\qaqc\\{}.gdb\\cdl_30m_r_{}_2009_albers_confidence'.format(state, state), lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973),
        2010: arcpy.RasterToNumPyArray(in_raster='D:\\projects\\usxp\\qaqc\\{}.gdb\\cdl_30m_r_{}_2010_albers_confidence'.format(state, state), lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973)
        # 2011:arcpy.RasterToNumPyArray(in_raster='D:\\projects\\usxp\\qaqc\\al.gdb\\cdl30_2011', lower_left_corner = arcpy.Point(XMin,YMin), nrows = 13789, ncols = 21973),
        # 2012:arcpy.RasterToNumPyArray(in_raster='D:\\projects\\usxp\\qaqc\\al.gdb\\cdl30_2012', lower_left_corner = arcpy.Point(XMin,YMin), nrows = 13789, ncols = 21973),
        # 2013:arcpy.RasterToNumPyArray(in_raster='D:\\projects\\usxp\\qaqc\\al.gdb\\cdl30_2013', lower_left_corner = arcpy.Point(XMin,YMin), nrows = 13789, ncols = 21973),
        # 2014:arcpy.RasterToNumPyArray(in_raster='D:\\projects\\usxp\\qaqc\\al.gdb\\cdl30_2014', lower_left_corner = arcpy.Point(XMin,YMin), nrows = 13789, ncols = 21973),
        # 2015:arcpy.RasterToNumPyArray(in_raster='D:\\projects\\usxp\\qaqc\\al.gdb\\cdl30_2015', lower_left_corner = arcpy.Point(XMin,YMin), nrows = 13789, ncols = 21973),
        # 2016:arcpy.RasterToNumPyArray(in_raster='D:\\projects\\usxp\\qaqc\\al.gdb\\cdl30_2016', lower_left_corner = arcpy.Point(XMin,YMin), nrows = 13789, ncols = 21973)
    }
    # ytc (year-to-cropland) raster used to locate converted pixels.
    arr_traj = arcpy.RasterToNumPyArray(in_raster='D:\\projects\\usxp\\series\\s14\\post\\ytc.gdb\\s14_ytc30_2008to2016_mmu5_nbl', lower_left_corner=arcpy.Point(XMin, YMin), nrows=13789, ncols=21973)
    # # find the location of each pixel labeled with specific arbitrary value in the rows list
    # for row in location_list:
    #     #year of conversion for either expansion or abandonment
    #     ytx = row[1]
    #     #year before conversion for either expansion or abandonment
    #     ybx = row[1]-1
    #Return the indices of the pixels that have non-zero (converted) ytc values.
    indices = (arr_traj != 0).nonzero()
    print indices
    #stack indices so easier to work with
    stacked_indices = np.column_stack((indices[0], indices[1]))
    #get the x and y location of each pixel that has been selected from above
    # NOTE(review): printing inside this per-pixel loop will be extremely slow
    # on a full tile -- presumably left in for debugging; confirm before running at scale.
    for pixel_location in stacked_indices:
        row = pixel_location[0]
        # print 'row', row
        col = pixel_location[1]
        # print 'col', col
        # #get the pixel value for ytx
        pixel_value_ytx = conf[2008][row][col]
        print 'pixel_value_ytx', pixel_value_ytx
cellSizeConstant = "#" IntegrationIntervalConstant = "#" locationVarianceFieldName = "#" locationVarianceConstant = None mobilityVarianceFieldName = "" mobilityVarianceConstant = "" spatialReference = arcpy.SpatialReference() spatialReference.loadFromString("PROJCS['NAD_1983_Alaska_Albers',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Albers'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',-154.0],PARAMETER['Standard_Parallel_1',55.0],PARAMETER['Standard_Parallel_2',65.0],PARAMETER['Latitude_Of_Origin',50.0],UNIT['Meter',1.0]];-13752200 -8948200 10000;-100000 10000;-100000 10000;0.001;0.001;0.001;IsHighPrecision") #arcpy.env.outputCoordinateSystem = spatialReference arcpy.env.outputCoordinateSystem = None spatialReference = None testHorne = False if testHorne: rasterName = r"C:\tmp\kd_test\bb_h_smallv.tif" extents = arcpy.Extent(-90,-180,370,180) cellSize = 1.0 intervals = 10 searchArea = None fixes = [ #time, x, y, locational_variance, mobility_variance [ 0.0, 0, 0, 8.32, 6.42], [ 20.0, 280, 0, 8.32, 6.42], ] print "Horne Test", extents, cellSize, searchArea, fixes, intervals raster = CreateBBRaster(extents, cellSize, searchArea, fixes, intervals) #arcpy.Delete_management(rasterName) raster.save(rasterName) sys.exit() #
def execute_task(args):
    """Tile worker: fill NoData in the 2010 non-crop CDL raster and mask to class 61.

    args unpacks to (in_extentDict, data, yxc, subtype, traj_list) where
    in_extentDict[0] is the tile number and in_extentDict[1] the tile's
    (XMin, YMin, XMax, YMax).  Saves the result as tile_<n>.tif; returns None.
    """
    in_extentDict, data, yxc, subtype, traj_list = args
    yxc_dict = {'ytc': 3, 'yfc': 4}
    fc_count = in_extentDict[0]
    # print fc_count
    procExt = in_extentDict[1]
    # print procExt
    XMin = procExt[0]
    YMin = procExt[1]
    XMax = procExt[2]
    YMax = procExt[3]
    path_traj_rfnd = data['pre']['traj_rfnd']['path']
    print 'path_traj_rfnd:', path_traj_rfnd
    ## the finished mtr product datset
    path_mtr = Raster(data['core']['path'])
    ##dataset to create tha mask 61
    # path_yfc_fnc = Raster(data['post']['yfc']['fnc_61_path'])
    # NOTE(review): hard-coded path overrides the commented config lookup above.
    path_yfc_fnc = 'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\sa\\r2\\s35\\post\\yfc_s35.gdb\\s35_yfc_fnc_61_w_mask_k50'
    # mask_61 = SetNull(path_yfc_fnc, path_yfc_fnc, "VALUE <> 61")
    # path_mask61 = Raster('C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\sa\\r2\\s35\\post\\yfc_s35.gdb\\s35_yfc30_2008to2017_mmu5_fnc_61_mask61')
    ##dataset that will be used to fill the mask layer
    conus_nc_2010 = Raster(
        'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_noncrop_2010'
    )
    #set environments (snap/cell size to the mtr product, extent to this tile)
    arcpy.env.snapRaster = path_mtr
    arcpy.env.cellsize = path_mtr
    arcpy.env.extent = arcpy.Extent(XMin, YMin, XMax, YMax)
    # arcpy.env.mask = mask_61
    # arcpy.env.mask = path_mask61
    ## Execute the three functions #####################
    ####fill in the null values ####################################
    # Progressive gap-filling: majority filters with growing windows
    # (3x3 -> 5x5 -> 10x10 -> 20x20), then a 25x25 MINIMUM pass.  Each
    # intermediate is released (= None) to free memory before the next step.
    filled_1 = FocalStatistics(conus_nc_2010, NbrRectangle(3, 3, "CELL"), 'MAJORITY')
    conus_nc_2010 = None
    filled_2 = FocalStatistics(filled_1, NbrRectangle(5, 5, "CELL"), 'MAJORITY')
    filled_1 = None
    filled_3 = FocalStatistics(filled_2, NbrRectangle(10, 10, "CELL"), 'MAJORITY')
    filled_2 = None
    filled_4 = FocalStatistics(filled_3, NbrRectangle(20, 20, "CELL"), 'MAJORITY')
    filled_3 = None
    filled_5 = FocalStatistics(filled_4, NbrRectangle(25, 25, "CELL"), 'MINIMUM')
    filled_4 = None
    # Keep the filled values only where the fnc raster equals 61.
    final = SetNull(path_yfc_fnc, filled_5, "VALUE <> 61")
    filled_5 = None
    outname = "tile_" + str(fc_count) + '.tif'
    outpath = os.path.join("C:/Users/Bougie/Desktop/Gibbs/data/", r"tiles", outname)
    # Clear the tile extent so the next task starts from a clean environment.
    arcpy.ClearEnvironment("extent")
    final.save(outpath)
    outpath = None
    final = None
def __init__(self, cellsize, xmin, ymin):
    """Raster-like stub: square cells of *cellsize* anchored at (xmin, ymin).

    NOTE(review): the extent's upper-right corner is left as NaN -- confirm
    downstream code only reads the lower-left anchor and cell sizes.
    """
    # Square cells: width and height share one size.
    self.meanCellWidth = self.meanCellHeight = cellsize
    self.extent = arcpy.Extent(xmin, ymin, numpy.nan, numpy.nan)
def execute_task(args):
    """Tile worker: flag likely false conversions per pixel into a uint8 code raster.

    args unpacks to (in_extentDict, data, traj_list, noncroplist, croplist,
    cls, rws); cls/rws are the tile's column/row counts.  For every pixel
    matching a trajectory value, the before/after CDL sequences are compared
    against several land-cover lists and a code (101/105/201/204/206) is
    written to outData, which is saved as tile_<n>.tif.  Returns None.
    """
    in_extentDict, data, traj_list, noncroplist, croplist, cls, rws = args
    fc_count = in_extentDict[0]
    procExt = in_extentDict[1]
    XMin = procExt[0]
    YMin = procExt[1]
    XMax = procExt[2]
    YMax = procExt[3]
    #set environments parameters
    arcpy.env.snapRaster = data['pre']['traj']['path']
    arcpy.env.cellsize = data['pre']['traj']['path']
    arcpy.env.outputCoordinateSystem = data['pre']['traj']['path']
    arcpy.env.extent = arcpy.Extent(XMin, YMin, XMax, YMax)
    print 'rws==================================', rws
    print 'cls==================================', cls
    ###create empty object to append to below in script
    outData = np.zeros((rws, cls), dtype=np.uint8)
    ### create numpy arrays for input datasets cdls and traj
    # One CDL tile per year, all clipped to this tile's lower-left corner/size.
    cdls = {
        2008: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2008', lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2009: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2009', lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2010: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2010', lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2011: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2011', lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2012: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2012', lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2013: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2013', lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2014: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2014', lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2015: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2015', lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2016: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2016', lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2017: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2017', lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls)
    }
    arr_traj = arcpy.RasterToNumPyArray(in_raster=data['pre']['traj_yfc']['path'], lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls)
    # find the location of each pixel labeled with specific arbitray value in the rows list
    # NOTE(review): the loop variable 'row' is reassigned by the inner pixel
    # loop below; harmless in Python (the outer for fetches the next item),
    # but confusing -- a rename would help.
    for row in traj_list:
        traj_value = row[0]  # NOTE(review): unused below ((arr_traj == row[0]) is used instead)
        #year of conversion for either expansion or abandonmen
        ytx = row[1]
        #year before conversion for either expansion or abandonment
        ybx = row[1] - 1
        #Return the indices of the pixels that have values of the ytc arbitrsy values of the traj.
        indices = (arr_traj == row[0]).nonzero()
        #stack indices so easier to work with
        stacked_indices = np.column_stack((indices[0], indices[1]))
        #get the x and y location of each pixel that has been selected from above
        for pixel_location in stacked_indices:
            row = pixel_location[0]
            col = pixel_location[1]
            #get the pixel value for ybx
            pixel_value_ybx = cdls[ybx][row][col]
            #get the pixel value for ytx
            pixel_value_ytx = cdls[ytx][row][col]  # NOTE(review): unused below
            #### find the years stil before in the time series for this pixel location
            years_before_list = [i for i in data['global']['years'] if i < ytx]
            list_before = []
            for year in years_before_list:
                list_before.append(cdls[year][row][col])
            #### find the value of the years after cy
            years_after_list = [i for i in data['global']['years'] if i >= ytx]
            list_after = []
            for year in years_after_list:
                list_after.append(cdls[year][row][col])
            list_entire = list_before + list_after  # NOTE(review): unused below
            #### define lists that contain lcc that pixel will be compared to
            fuzzylist = [36, 37, 61, 152, 176]  # NOTE(review): unused below
            fuzzycroplist = [58, 59, 60]
            fruitlist = [
                66, 67, 68, 69, 71, 72, 74, 75, 76, 77, 204, 210, 211, 212,
                218, 220, 223
            ]
            #### dev/fallow ######################################################
            # All pre-conversion years non-crop/fallow and never developed after.
            if (np.isin(list_before, noncroplist + [61]).all()) and ((np.isin(
                    list_after, [121, 122, 123, 124])).any() == False):
                outData[row, col] = 101
            ########### fuzzycroplist ############################
            if (pixel_value_ybx in fuzzycroplist):
                outData[row, col] = 105
            #### fruit mask ##############################################################################################
            if (np.isin(list_before, fruitlist).any()
                    and np.isin(list_after, fruitlist).any()):
                outData[row, col] = 201
            ### rice ####################################
            #####needs to have rice before AND after to be considered false conversion
            if (np.isin(list_before, [3]).any()) and (np.isin(list_after,
                                                              [3]).any()):
                outData[row, col] = 204
            ### 36_to_37 ###########################################
            if (np.isin(list_before, [36, 37]).all()) and (np.isin(
                    list_after, croplist + [37]).all()):
                outData[row, col] = 206
    arcpy.ClearEnvironment("extent")
    outname = "tile_" + str(fc_count) + '.tif'
    outpath = os.path.join("C:/Users/Bougie/Desktop/Gibbs/data/", r"tiles", outname)
    # value_to_nodata=0: unflagged pixels become NoData in the output raster.
    myRaster = arcpy.NumPyArrayToRaster(outData, lower_left_corner=arcpy.Point(
        XMin, YMin), x_cell_size=30, y_cell_size=30, value_to_nodata=0)
    ##free memory from outdata array!!
    outData = None
    myRaster.save(outpath)
    myRaster = None
def execute(self, parameters, messages):
    """The source code of the tool.

    Applies chlorophyll_a values from yearly MODIS .nc files to dark-target
    feature classes: for each feature class lacking the focal field, feature
    centroids are extracted as points, the matching NetCDF raster is loaded,
    point and focal-mean values are extracted, and both are joined back.
    parameters[0] is the working folder; parameters[1] the focal cell size.
    """
    # Set log configuration
    logPath = os.path.join(parameters[0].valueAsText, "logs")
    if not os.path.exists(logPath):
        os.makedirs(logPath)
    logFile = os.path.join(logPath, "chloro.log")
    logging.basicConfig(filename=logFile,
                        format='%(asctime)s -- %(message)s',
                        datefmt='%d/%m/%Y %H:%M:%S',
                        level=logging.INFO)
    arcpy.AddMessage(
        "\nApplying available chlorophyll_a values to dark targets...")
    logging.info("Starting applyChloro.py script...")
    arcpy.CheckOutExtension("Spatial")
    logging.info(
        "Check Out Extension: Spatial Analyst extension checked out\n")
    # Define variables from parameters
    working_folder = parameters[0].valueAsText
    chloro_folder = os.path.join(os.path.dirname(working_folder), "Auxiliary",
                                 "Chlorophyll")
    if not os.path.exists(chloro_folder):
        os.makedirs(chloro_folder)
    cell_size = parameters[1].value
    # Field name encodes the focal window, e.g. "chlor_a_3x3".
    focal_field = "chlor_a_" + str(cell_size) + "x" + str(cell_size)
    # Determine list of yearly GDBs in workspace
    arcpy.env.workspace = working_folder
    gdbList = arcpy.ListWorkspaces("*", "FileGDB")
    arcpy.AddMessage("Workspace contains the following " + str(len(gdbList)) +
                     " GDBs: " + str(gdbList))
    # Iterate through yearly GDBs (GDB base name doubles as the year)
    for gdb in gdbList:
        arcpy.AddMessage("\nProcessing " + str(gdb))
        logging.info("Processing '%s' geodatabase\n", gdb)
        gdbDesc = arcpy.Describe(gdb)
        gdbYear = gdbDesc.baseName
        # Determine list of .nc files in corresponding yearly chlorophyll folder
        chloro_year = os.path.join(chloro_folder, gdbYear)
        arcpy.env.workspace = chloro_year
        ncList = arcpy.ListFiles('*.nc')
        # Determine list of feature classes in current GDB
        arcpy.env.workspace = gdb
        fcList = arcpy.ListFeatureClasses()
        arcpy.AddMessage("\nGDB contains the following " + str(len(fcList)) +
                         " feature classes: " + str(fcList))
        # Iterate through feature classes in GDB
        for fc in fcList:
            # Check if chlorophyll_a has already been added to current feature class
            arcpy.AddMessage("\nVerifying " + fc + "...")
            logging.info("Processing '%s' feature class", fc)
            fldList = arcpy.ListFields(fc)
            fldNames = []
            for fld in fldList:
                fldNames.append(fld.name)
            # If no chlorophyll_a data already in feature class, proceed with applying values
            if not focal_field in fldNames:
                # Create points feature class from current feature class to extract chlorophyll raster values
                arcpy.AddMessage("Creating points feature class...")
                targetLyr = "targetLyr"
                arcpy.MakeFeatureLayer_management(fc, targetLyr)
                logging.info(
                    "Make Feature Layer: '%s' layer created from '%s' feature class",
                    targetLyr, fc)
                pointFC = fc + "_point"
                arcpy.FeatureToPoint_management(targetLyr, pointFC, "CENTROID")
                logging.info(
                    "Feature To Point: '%s' points feature class created from centroid of features in '%s' layer",
                    pointFC, targetLyr)
                # Determine year and day of year to load appropriate .nc file as raster
                # (fc name's second "_" token carries the date; yearDay converts it).
                yDay = self.yearDay(fc.split("_")[1])
                chloro_file = "A" + gdbYear + yDay
                # Iterate through list of year's .nc files to find corresponding file to current feature class
                for ncFile in ncList:
                    # Check for .nc file and feature class match
                    if ncFile.startswith(chloro_file):
                        # Make NetCDF raster layer from .nc file
                        arcpy.AddMessage(
                            "Preparing chlorophyll_a raster layer...")
                        ncFilePath = os.path.join(chloro_year, ncFile)
                        arcpy.MakeNetCDFRasterLayer_md(
                            ncFilePath, "chlor_a", "lon", "lat", chloro_file)
                        logging.info(
                            "Make NetCDF Raster Layer: '%s' raster layer created from '%s'",
                            chloro_file, ncFilePath)
                        # Apply extent to raster layer (to limit processing to pertinent region)
                        chloro_extent = arcpy.Extent(
                            -160.0, 40.0, -40.0, 89.989002)
                        chloro_rectExtract = arcpy.sa.ExtractByRectangle(
                            chloro_file, chloro_extent, "INSIDE")
                        logging.info(
                            "Extract By Rectangle: Extent (-160 (W), 40 (S), -40 (E), 89.989002 (N)) applied to '%s'",
                            chloro_file)
                        # Calculate focal statistics (mean value of focal window)
                        arcpy.AddMessage("Calculating focal statistics...")
                        neighborhood = arcpy.sa.NbrRectangle(
                            cell_size, cell_size, "CELL")
                        chloro_focal = arcpy.sa.FocalStatistics(
                            chloro_rectExtract, neighborhood, "MEAN", "DATA")
                        logging.info(
                            "Focal Statistics: '%s' raster created by calculating mean value of '%s'x'%s' neighbourhood calculated for cells from '%s'",
                            chloro_focal, str(cell_size), str(cell_size),
                            chloro_file)
                        if not "chlor_a" in fldNames:
                            # Extract point values from raster
                            arcpy.AddMessage(
                                "Extracting raster chlorophyll_a values to points..."
                            )
                            extractFC = fc + "_extract"
                            arcpy.sa.ExtractValuesToPoints(
                                pointFC, chloro_rectExtract, extractFC)
                            arcpy.AlterField_management(
                                extractFC, "RASTERVALU", "chlor_a")
                            logging.info(
                                "Extract Values to Points: '%s' feature class created with point values calculated from '%s' raster layer with '%s' feature class",
                                extractFC, chloro_file, pointFC)
                            # Extract focal values from raster
                            # NOTE(review): this focal-extract section consumes
                            # extractFC, which only exists inside this branch;
                            # if it is meant to run when "chlor_a" already
                            # exists, extractFC would be undefined -- confirm
                            # the intended nesting.
                            arcpy.AddMessage(
                                "Extracting raster chlorophyll_a mean values to points..."
                            )
                            finalExtractFC = fc + "_final_extract"
                            arcpy.sa.ExtractValuesToPoints(
                                extractFC, chloro_focal, finalExtractFC)
                            ## focal_field = "chlor_a_" + str(cell_size) + "x" + str(cell_size)
                            arcpy.AlterField_management(
                                finalExtractFC, "RASTERVALU", focal_field)
                            logging.info(
                                "Extract Values to Points: '%s' feature class created with point values calculated from '%s' raster layer with '%s' feature class",
                                finalExtractFC, chloro_focal, extractFC)
                            # Join point and focal values to feature class
                            arcpy.AddMessage(
                                "Joining values to feature class...")
                            self.join_field(fc, "OBJECTID", finalExtractFC,
                                            "ORIG_FID",
                                            "chlor_a;" + focal_field)
                            logging.info(
                                "Join Field: chlor_a and chlor_a focal values joined to '%s' feature class from '%s' table",
                                fc, finalExtractFC)
                        # Break iteration through .nc files once processing with corresponding .nc file and feature class is complete
                        break
            # If chlorophyll_a values found in feature class, no further processing required for current feature class
            else:
                arcpy.AddMessage(
                    "Chlorophyll_a values already applied to feature class. Continuing..."
                )
                logging.info("Values already applied")
            logging.info("Processing for '%s' feature class complete\n", fc)
        # Delete extra feature classes used during geoprocessing
        self.cleanWorkspace(gdb)
    arcpy.CheckInExtension("Spatial")
    logging.info(
        "Check In Extension: Spatial Analyst extension checked back in")
    logging.info("applyChloro.py script finished\n\n")
    return
def crs_check_data_extent(args):
    """Delete features that fall outside a configured bounding extent.

    args is a sequence of:
        [0] gdb          -- path of the file geodatabase to check
        [1] extentString -- "xMin,yMin,xMax,yMax" (comma-separated integers)
        [2] itemsToCheck -- feature-class names to restrict the check to
                            (empty means check every feature class)

    Returns None on success, or an error-message string on failure.
    """
    # script parameters
    gdb = args[0]
    extentString = args[1]
    itemsToCheck = args[2]
    # log = args[3]
    # workspace
    arcpy.env.workspace = gdb
    # script name
    script_name = os.path.basename(__file__)
    # variables
    err_message = None
    print('calling {}'.format(script_name))
    try:
        extentValues = extentString.split(',')
        if len(extentValues) != 4:
            # (typo "pamaremter" fixed)
            err_message = "missing parameter in extent config"
            return err_message
        xMin = int(extentValues[0])
        yMin = int(extentValues[1])
        xMax = int(extentValues[2])
        yMax = int(extentValues[3])
        extent = arcpy.Extent(xMin, yMin, xMax, yMax)
        # Ring closes back at lowerLeft so the polygon boundary is complete.
        extentArray = arcpy.Array(
            i for i in (extent.lowerLeft, extent.lowerRight, extent.upperRight,
                        extent.upperLeft, extent.lowerLeft))
        # create a extent polygon
        # NOTE(review): 'sr' is not defined in this function -- presumably a
        # module-level spatial reference; confirm it exists at import time.
        extentPolygon = arcpy.Polygon(extentArray, sr)
        # go through feature classes in the gdb and delete features outside the bound
        fcs = arcpy.ListFeatureClasses()
        if len(itemsToCheck) > 0:
            fcs = list(set(fcs).intersection(set(itemsToCheck)))
        for fc in fcs:
            name = arcpy.Describe(fc).name
            print('checking {0}...'.format(name))
            # Make a layer, select features intersecting the extent polygon,
            # then switch the selection to get the features OUTSIDE it.
            lyr = 'lyr_{}'.format(name)
            delete_layer(lyr)
            arcpy.MakeFeatureLayer_management(fc, lyr)
            arcpy.SelectLayerByLocation_management(lyr, "INTERSECT",
                                                   extentPolygon, "",
                                                   "NEW_SELECTION",
                                                   "NOT_INVERT")
            arcpy.SelectLayerByLocation_management(lyr, "", "", "",
                                                   "SWITCH_SELECTION")
            # (the pre-selection GetCount of the original was dead code --
            # its value was overwritten here before ever being read)
            count = int(arcpy.GetCount_management(lyr)[0])
            # delete features outside the bound
            if count > 0:
                print('deleting features in {0}: {1}'.format(name, count))
                arcpy.DeleteFeatures_management(lyr)
    except Exception as e:
        err_message = "ERROR while running {0}: {1}".format(script_name, e)
    return err_message
print "Adding: " + serviceLayer.name if baseURL is None or baseURL == "": baseURL = "https://www.arcgis.com/sharing/rest" sh = arcrest.AGOLTokenSecurityHandler(username='******', password='******') agol = arcrest.manageorg.Administration(url=baseURL, securityHandler=sh) usercontent = agol.content.usercontent('ScottMoorePNW') if isinstance( usercontent, arcrest.manageorg.administration._content.UserContent): pass #create an ArcPy Extent Object so we can reproject it to 4326 (lat/long) layerExtent = arcpy.Extent(serviceLayer.extent.get('xmin'), serviceLayer.extent.get('ymin'), serviceLayer.extent.get('xmax'), serviceLayer.extent.get('ymax')) try: layerExtent.spatialReference = arcpy.SpatialReference( serviceLayer.extent["spatialReference"]["wkid"]) except KeyError, e: print "WKT is not supported" break #project the extent of the layer to 4326 layerExtentPrj = layerExtent.projectAs( arcpy.SpatialReference(4326)) itemURL = url + "/" + str(serviceLayer.id) itemParams = arcrest.manageorg.ItemParameter() itemParams.title = serviceLayer.name
# --- Script setup: reset state, load settings, and resume from the saved log ---
from Settings import Settings
from Reset import reset
import time

# Wall-clock start of this run.
t = time.time()
# Clear any leftover state from a previous (interrupted) run.
reset()
# setting the paths
# NOTE(review): this rebinds the name 'Settings' from the class to an
# instance, so the class itself is unreachable below this point.
Settings = Settings()
arcpy.env.workspace = Settings.output
fc = Settings.fc
output = Settings.output
log_path = Settings.log_path
result_path = Settings.result_path
# set the environment extent (hard-coded lon/lat box over south-eastern
# Australia -- presumably matching the input data's CRS; confirm)
arcpy.env.extent = arcpy.Extent(140.428953, -39.136649, 150.033447,
                                -34.056565)
# Open the log file to get the information of the previous operation
# (layout per usage below: save[0] = last processed ID, save[1] = saved time)
with open(log_path) as saving:
    save = json.load(saving)
start_ID = save[0]
count = save[0]
save_time = save[1]
# Resume from the record after the last one processed.
count += 1
# indicator of how many times the loop goes
loop_time = 1
# set a list to store the field name needed
# put all items into one list which is cursor
fields = ['objectid', 'Shape', 'count']
def execute_task(args):
    """Tile worker: convert year-of-change (yxc) codes to CDL crop values.

    args unpacks to (in_extentDict, data, yxc, subtype, traj_list,
    croplist_subset) where yxc is 'ytc' or 'yfc'.  Reclassifies the refined
    trajectory raster to years, masks it to mtr pixels of the matching class,
    substitutes each year value with that year's CDL value, refines 61s,
    gap-fills NoData, and saves the tile as tile_<n>.tif.  Returns None.
    """
    in_extentDict, data, yxc, subtype, traj_list, croplist_subset = args
    # mtr class codes: ytc (expansion) pixels are 3, yfc (abandonment) are 4.
    yxc_dict = {'ytc':3, 'yfc':4}
    fc_count = in_extentDict[0]
    # print fc_count
    procExt = in_extentDict[1]
    # print procExt
    XMin = procExt[0]
    YMin = procExt[1]
    XMax = procExt[2]
    YMax = procExt[3]
    path_traj_rfnd = data['pre']['traj_rfnd']['path']
    print 'path_traj_rfnd:', path_traj_rfnd
    ## the finished mtr product datset
    path_mtr = Raster(data['core']['path'])
    #set environments (snap/cell size to the mtr product, extent to this tile)
    arcpy.env.snapRaster = path_mtr
    arcpy.env.cellsize = path_mtr
    arcpy.env.extent = arcpy.Extent(XMin, YMin, XMax, YMax)
    ## Execute the three functions #####################
    ## this is the base yxc dataset derived from trajectory (speckles)
    raster_yxc_initial = Reclassify(Raster(path_traj_rfnd), "Value", RemapRange(traj_list), "NODATA")
    ## clean the speckles so only left with yxc where the mtr regions fullfilling mmu requirement are
    raster_yxc = Con((path_mtr == yxc_dict[yxc]) & (raster_yxc_initial >= 2008), raster_yxc_initial)
    ##delete object from memory
    del raster_yxc_initial
    ###create raster_yxc_cdl to b eoverwritten in script below
    raster_yxc_cdl = raster_yxc
    ###get the cdl path object from current instance to loop through each of the cdl paths in the object
    for year, cdlpath in data['post'][yxc][subtype]['cdlpaths'].iteritems():
        print year, cdlpath
        # allow raster to be overwritten
        arcpy.env.overwriteOutput = True
        print "overwrite on? ", arcpy.env.overwriteOutput
        #establish the condition
        # NOTE(review): requires 'year' to be a string -- string + int raises
        # TypeError; confirm the cdlpaths keys are strings.
        cond = "Value = " + year
        print 'cond: ', cond
        ## replace the yxc year value with the appropriate cdl value for that given year
        raster_yxc_cdl = Con(raster_yxc_cdl, cdlpath, raster_yxc_cdl, cond)
    # ##### refine the raster to replace 61 to first crop not 61 after conversion
    refined_raster_mask = replace_61_w_hard_crop.run(data, raster_yxc, subtype, raster_yxc_cdl, XMin, YMin, XMax, YMax, croplist_subset)
    del raster_yxc, raster_yxc_cdl
    ####fill in the null values ####################################
    # Progressive gap-filling: only NoData cells receive the focal majority,
    # with growing windows (3x3 -> 5x5 -> 10x10 -> 20x20); each intermediate
    # is deleted to free memory.
    filled_1 = Con(IsNull(refined_raster_mask),FocalStatistics(refined_raster_mask,NbrRectangle(3, 3, "CELL"),'MAJORITY'), refined_raster_mask)
    del refined_raster_mask
    filled_2 = Con(IsNull(filled_1),FocalStatistics(filled_1,NbrRectangle(5, 5, "CELL"),'MAJORITY'), filled_1)
    del filled_1
    filled_3 = Con(IsNull(filled_2),FocalStatistics(filled_2,NbrRectangle(10, 10, "CELL"),'MAJORITY'), filled_2)
    del filled_2
    filled_4 = Con(IsNull(filled_3),FocalStatistics(filled_3,NbrRectangle(20, 20, "CELL"),'MAJORITY'), filled_3)
    del filled_3
    # Keep cells only where the mtr product equals this run's class code.
    final = SetNull(path_mtr, filled_4, "VALUE <> {}".format(str(yxc_dict[yxc])))
    del filled_4
    outname = "tile_" + str(fc_count)+'.tif'
    outpath = os.path.join("C:/Users/Bougie/Desktop/Gibbs/data/", r"tiles", outname)
    # Clear the tile extent so the next task starts from a clean environment.
    arcpy.ClearEnvironment("extent")
    final.save(outpath)
    del outpath, final
def execute_task(args):
    """Tile worker: tabulate trajectory-value counts per mtr patch.

    args unpacks to (in_extentDict, data, croplist).  For each patch id in
    the s22_mtr4_id raster tile, collects the trajectory values of the
    patch's pixels and builds a (value, count) table.

    NOTE(review): 'rws' and 'cls' are used below but are NOT unpacked from
    args and not defined locally -- this raises NameError unless they exist
    as module globals; confirm.  'outData', 'cdls' and 'croplist' are also
    never used after creation in the visible body.
    """
    in_extentDict, data, croplist = args
    fc_count = in_extentDict[0]
    procExt = in_extentDict[1]
    # ##print procExt
    XMin = procExt[0]
    YMin = procExt[1]
    XMax = procExt[2]
    YMax = procExt[3]
    #set environments
    #The brilliant thing here is that using the extents with the full dataset!!!!!! DONT EVEN NEED TO CLIP THE FULL RASTER TO THE FISHNET BECASUE
    arcpy.env.snapRaster = data['pre']['traj']['path']
    arcpy.env.cellsize = data['pre']['traj']['path']
    arcpy.env.outputCoordinateSystem = data['pre']['traj']['path']
    arcpy.env.extent = arcpy.Extent(XMin, YMin, XMax, YMax)
    # outData = numpy.zeros((rows,cols), numpy.int16)
    outData = np.zeros((rws, cls), dtype=np.uint8)
    ### create numpy arrays for input datasets cdls and traj
    cdls = {
        2008: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2008', lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2009: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2009', lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2010: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2010', lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2011: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2011', lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2012: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2012', lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2013: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2013', lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2014: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2014', lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2015: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2015', lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2016: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2016', lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls),
        2017: arcpy.RasterToNumPyArray(in_raster='C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\ancillary\\raster\\cdl.gdb\\cdl30_2017', lower_left_corner=arcpy.Point(XMin, YMin), nrows=rws, ncols=cls)
    }
    arr_traj = arcpy.RasterToNumPyArray(in_raster=data['pre']['traj']['path'],
                                        lower_left_corner=arcpy.Point(
                                            XMin, YMin),
                                        nrows=rws,
                                        ncols=cls)
    # Patch-id raster (region-grouped mtr4 patches) for this tile.
    rg = arcpy.RasterToNumPyArray(
        in_raster=
        'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\sa\\r2\\s22\\core\\core_s22.gdb\\s22_mtr4_id',
        lower_left_corner=arcpy.Point(XMin, YMin),
        nrows=rws,
        ncols=cls)
    # ##find the location of each pixel labeled with specific arbitray value in the rows list
    # print np.unique(rg)
    # [1:] skips the first unique value (the background / fill value).
    for patch in np.unique(rg)[1:]:
        # NOTE(review): the next line is a bare string literal -- a no-op,
        # apparently a leftover of a commented-out print.
        "print test---"
        print '---------------patch-----out------------------', patch
        # #Return the indices of the pixels that have values of the ytc arbitrsy values of the traj.
        indices = (rg == patch).nonzero()
        # print indices
        # #stack indices so easier to work with
        stacked_indices = np.column_stack((indices[0], indices[1]))
        patch_list = []
        #get the x and y location of each pixel that has been selected from above
        for pixel_location in stacked_indices:
            row = pixel_location[0]
            col = pixel_location[1]
            patch_list.append(arr_traj[row][col])
        # Count occurrences of each trajectory value within this patch.
        # NOTE(review): for integer trajectory values np.isnan is always
        # False, so 'keep' is all-True -- confirm NaNs can actually occur.
        keep = ~np.isnan(patch_list)
        y = np.bincount(keep)
        ii = np.nonzero(y)[0]
        # yo: two-column table of (value, count) pairs.
        yo = np.vstack((ii, y[ii])).T
def create_input_nc(start_date, years, cellsize, basin_shp, p_path, et_path, eto_path, lai_path, swi_path, swio_path, swix_path, qratio_path, rainydays_path, thetasat_ras, rootdepth_ras, input_nc, epsg=4326, bbox=None): """ Creates the input netcdf file required to run waterpix """ # Script parameters print "Variable\tRaster" arcpy.CheckOutExtension('spatial') if bbox: latlim = [bbox[1], bbox[3]] lonlim = [bbox[0], bbox[2]] else: shp_extent = arcpy.Describe(basin_shp).extent latlim = [shp_extent.YMin, shp_extent.YMax] lonlim = [shp_extent.XMin, shp_extent.XMax] arcpy.env.extent = arcpy.Extent(lonlim[0], latlim[0], lonlim[1], latlim[1]) arcpy.env.cellSize = cellsize time_range = pd.date_range(start_date, periods=12 * years, freq='MS') time_ls = [d.strftime('%Y%m') for d in time_range] time_dt = [pd.to_datetime(i, format='%Y%m') for i in time_ls] time_n = len(time_ls) years_ls = set() years_ls = [ i.year for i in time_dt if i.year not in years_ls and not years_ls.add(i.year) ] time_indeces = {} for j, item in enumerate(years_ls): temp_ls = [ int(i.strftime('%Y%m')) for i in pd.date_range( str(item) + '0101', str(item) + '1231', freq='MS') ] time_indeces[item] = [time_ls.index(str(i)) for i in temp_ls] for key in time_indeces.keys(): if time_indeces[key] != range(time_indeces[key][0], time_indeces[key][-1] + 1): raise Exception('The year {0} in the netcdf file is incomplete' ' or the dates are non-consecutive') all_paths = { 'p': p_path, 'et': et_path, 'eto': eto_path, 'lai': lai_path, 'swi': swi_path, 'swio': swio_path, 'swix': swix_path, 'qratio': qratio_path, 'rainydays': rainydays_path } # Latitude and longitude lat_ls = pd.np.arange(latlim[0] + 0.5 * cellsize, latlim[1] + 0.5 * cellsize, cellsize) lat_ls = lat_ls[::-1] # ArcGIS numpy lon_ls = pd.np.arange(lonlim[0] + 0.5 * cellsize, lonlim[1] + 0.5 * cellsize, cellsize) lat_n = len(lat_ls) lon_n = len(lon_ls) spa_ref = arcpy.SpatialReference(epsg) projection = spa_ref.exportToString() ll_corner = 
arcpy.Point(lonlim[0], latlim[0]) # Snap raster temp_ras = arcpy.NumPyArrayToRaster(pd.np.zeros((lat_n, lon_n)), ll_corner, cellsize, cellsize) scratch_ras = arcpy.CreateScratchName('ras_', '.tif', '', arcpy.env.scratchFolder) temp_ras.save(scratch_ras) arcpy.management.DefineProjection(scratch_ras, spa_ref) arcpy.env.snapRaster = scratch_ras # Basin mask basin_ras = arcpy.CreateScratchName('bas_', '.tif', '', arcpy.env.scratchFolder) buff_shp = arcpy.CreateScratchName('bas_', '.shp', '', arcpy.env.scratchFolder) arcpy.analysis.Buffer(basin_shp, buff_shp, 2 * cellsize, 'FULL', 'ROUND', 'NONE', '#', 'PLANAR') arcpy.conversion.FeatureToRaster(buff_shp, "FID", basin_ras, cellsize) # Create NetCDF file nc_file = netCDF4.Dataset(input_nc, 'w', format="NETCDF4") nc_file.set_fill_on() # Create dimensions lat_dim = nc_file.createDimension('latitude', lat_n) lon_dim = nc_file.createDimension('longitude', lon_n) month_dim = nc_file.createDimension('time_yyyymm', time_n) year_dim = nc_file.createDimension('time_yyyy', len(years_ls)) # Create NetCDF variables crs_var = nc_file.createVariable('crs', 'i', (), fill_value=-9999) crs_var.standard_name = 'crs' crs_var.grid_mapping_name = 'latitude_longitude' crs_var.crs_wkt = projection lat_var = nc_file.createVariable('latitude', 'f8', ('latitude'), fill_value=-9999) lat_var.units = 'degrees_north' lat_var.standard_name = 'latitude' lon_var = nc_file.createVariable('longitude', 'f8', ('longitude'), fill_value=-9999) lon_var.units = 'degrees_east' lon_var.standard_name = 'longitude' month_var = nc_file.createVariable('time_yyyymm', 'l', ('time_yyyymm'), fill_value=-9999) month_var.standard_name = 'time' month_var.format = 'YYYYMM' year_var = nc_file.createVariable('time_yyyy', 'l', ('time_yyyy'), fill_value=-9999) year_var.standard_name = 'time' year_var.format = 'YYYY' # Variables p_var = nc_file.createVariable('Precipitation_M', 'f8', ('time_yyyymm', 'latitude', 'longitude'), fill_value=-9999) p_var.long_name = 'Precipitation' 
p_var.units = 'mm/month' py_var = nc_file.createVariable('Precipitation_Y', 'f8', ('time_yyyy', 'latitude', 'longitude'), fill_value=-9999) py_var.long_name = 'Precipitation' py_var.units = 'mm/year' et_var = nc_file.createVariable('Evapotranspiration_M', 'f8', ('time_yyyymm', 'latitude', 'longitude'), fill_value=-9999) et_var.long_name = 'Evapotranspiration' et_var.units = 'mm/month' ety_var = nc_file.createVariable('Evapotranspiration_Y', 'f8', ('time_yyyy', 'latitude', 'longitude'), fill_value=-9999) ety_var.long_name = 'Evapotranspiration' ety_var.units = 'mm/year' eto_var = nc_file.createVariable('ReferenceET_M', 'f8', ('time_yyyymm', 'latitude', 'longitude'), fill_value=-9999) eto_var.long_name = 'Reference Evapotranspiration' eto_var.units = 'mm/month' lai_var = nc_file.createVariable('LeafAreaIndex_M', 'f8', ('time_yyyymm', 'latitude', 'longitude'), fill_value=-9999) lai_var.long_name = 'Leaf Area Index' lai_var.units = 'm2/m2' swi_var = nc_file.createVariable('SWI_M', 'f8', ('time_yyyymm', 'latitude', 'longitude'), fill_value=-9999) swi_var.long_name = 'Soil Water Index - Monthly mean' swi_var.units = '%' swio_var = nc_file.createVariable('SWIo_M', 'f8', ('time_yyyymm', 'latitude', 'longitude'), fill_value=-9999) swio_var.long_name = 'Soil water index - First day of the month' swio_var.units = '%' swix_var = nc_file.createVariable('SWIx_M', 'f8', ('time_yyyymm', 'latitude', 'longitude'), fill_value=-9999) swix_var.long_name = 'Soil water index - Last day of the month' swix_var.units = '%' qratio_var = nc_file.createVariable('RunoffRatio_Y', 'f8', ('time_yyyy', 'latitude', 'longitude'), fill_value=-9999) qratio_var.long_name = 'Runoff ratio' qratio_var.units = '-' rainydays_var = nc_file.createVariable( 'RainyDays_M', 'f8', ('time_yyyymm', 'latitude', 'longitude'), fill_value=-9999) rainydays_var.long_name = 'Number of rainy days per month' rainydays_var.units = 'No. 
rainy days/month' thetasat_var = nc_file.createVariable('SaturatedWaterContent', 'f8', ('latitude', 'longitude'), fill_value=-9999) thetasat_var.long_name = 'Saturated water content (top soil)' thetasat_var.units = 'cm3/cm3' rootdepth_var = nc_file.createVariable('RootDepth', 'f8', ('latitude', 'longitude'), fill_value=-9999) rootdepth_var.long_name = 'Root depth' rootdepth_var.units = 'mm' basinmask_var = nc_file.createVariable('BasinBuffer', 'l', ('latitude', 'longitude'), fill_value=0) basinmask_var.long_name = 'Basin buffer' # Load data lat_var[:] = lat_ls lon_var[:] = lon_ls month_var[:] = time_ls year_var[:] = years_ls # Static variables temp_dir = tempfile.mkdtemp() # Theta sat print "{0}\t{1}".format('thetasat', thetasat_ras) thetasat_temp = os.path.join(temp_dir, 'thetasat.tif') arcpy.management.Resample(thetasat_ras, thetasat_temp, cellsize) inp_ras = arcpy.Raster(thetasat_temp) array = arcpy.RasterToNumPyArray(inp_ras, ll_corner, lon_n, lat_n, -9999) thetasat_var[:, :] = array[:, :] # Root depth print "{0}\t{1}".format('rootdepth', rootdepth_ras) rootdepth_temp = os.path.join(temp_dir, 'rootdepth.tif') arcpy.management.Resample(rootdepth_ras, rootdepth_temp, cellsize) inp_ras = arcpy.Raster(rootdepth_temp) array = arcpy.RasterToNumPyArray(inp_ras, ll_corner, lon_n, lat_n, -9999) rootdepth_var[:, :] = array[:, :] # Basin mask inp_ras = arcpy.sa.Con(arcpy.sa.IsNull(arcpy.Raster(basin_ras)), 0, 1) array = arcpy.RasterToNumPyArray(inp_ras, ll_corner, lon_n, lat_n, -9999) basinmask_var[:, :] = array[:, :] # Dynamic variables for var in ['p', 'et', 'eto', 'lai', 'swi', 'swio', 'swix', 'rainydays']: # Make temp directory temp_dir2 = tempfile.mkdtemp() for yyyymm in time_ls: yyyy = yyyymm[:4] mm = yyyymm[-2:] ras = all_paths[var].format(yyyy=yyyy, mm=mm) print "{0}\t{1}".format(var, ras) arcpy.management.Resample( ras, os.path.join(temp_dir2, os.path.basename(ras)), cellsize, 'NEAREST') inp_ras = arcpy.Raster( os.path.join(temp_dir2, os.path.basename(ras))) 
array = arcpy.RasterToNumPyArray(inp_ras, ll_corner, lon_n, lat_n, pd.np.nan) t_index = time_ls.index(yyyymm) exec('{0}_var[t_index, :, :] = array[:, :]'.format(var)) # Runoff ratio temp_dir2 = tempfile.mkdtemp() for yyyy in years_ls: ras = all_paths['qratio'].format(yyyy=yyyy) print "{0}\t{1}".format('qratio', ras) arcpy.management.Resample( ras, os.path.join(temp_dir2, os.path.basename(ras)), cellsize, 'NEAREST') inp_ras = arcpy.Raster(os.path.join(temp_dir2, os.path.basename(ras))) array = arcpy.RasterToNumPyArray(inp_ras, ll_corner, lon_n, lat_n, pd.np.nan) y_index = years_ls.index(yyyy) qratio_var[y_index, :, :] = array[:, :] # Calculate yearly rasters for yyyy in years_ls: yyyyi = years_ls.index(yyyy) ti1 = time_indeces[yyyy][0] ti2 = time_indeces[yyyy][-1] + 1 py_var[yyyyi, :, :] = pd.np.sum(p_var[ti1:ti2, :, :], axis=0) ety_var[yyyyi, :, :] = pd.np.sum(et_var[ti1:ti2, :, :], axis=0) # Close file arcpy.env.extent = None arcpy.env.snapRaster = None arcpy.env.cellSize = 'MAXOF' nc_file.close() # Return return input_nc
def execute_task(args): arcpy.env.workspace = 'C:\\Users\\Bougie\\Desktop\\Gibbs\\data\\usxp\\sa\\r2\\s21\\core\\core_s21.gdb' in_extentDict = args cond = 'Value <> 4' fc_count = in_extentDict[0] # print fc_count procExt = in_extentDict[1] # print procExt XMin = procExt[0] YMin = procExt[1] XMax = procExt[2] YMax = procExt[3] path_mtr = Raster('s21_v5_traj_cdl30_b_2008to2010_rfnd_v4_n8h_mtr_8w_mmu5') print path_mtr #set environments arcpy.env.snapRaster = path_mtr arcpy.env.cellsize = path_mtr arcpy.env.extent = arcpy.Extent(XMin, YMin, XMax, YMax) ## Execute the three functions ##################### raster_1 = 's21_v5_traj_cdl30_b_2008to2010_rfnd_v4_n8h_mtr_8w_mmu5' mask_1 = SetNull(raster_1, '2009', cond) raster_2 = 's21_v5_traj_cdl30_b_2008to2011_rfnd_v4_n8h_mtr_8w_mmu5' mask_2 = SetNull(raster_2, '2010', cond) raster_3 = 's21_v5_traj_cdl30_b_2009to2012_rfnd_v4_n8h_mtr_8w_mmu5' mask_3 = SetNull(raster_3, '2011', cond) raster_4 = 's21_v5_traj_cdl30_b_2010to2013_rfnd_v4_n8h_mtr_8w_mmu5' mask_4 = SetNull(raster_4, '2012', cond) raster_5 = 's21_v5_traj_cdl30_b_2011to2014_rfnd_v4_n8h_mtr_8w_mmu5' mask_5 = SetNull(raster_5, '2013', cond) raster_6 = 's21_v5_traj_cdl30_b_2012to2015_rfnd_v4_n8h_mtr_8w_mmu5' mask_6 = SetNull(raster_6, '2014', cond) raster_7 = 's21_v5_traj_cdl30_b_2013to2016_rfnd_v4_n8h_mtr_8w_mmu5' mask_7 = SetNull(raster_7, '2015', cond) raster_8 = 's21_v5_traj_cdl30_b_2014to2017_rfnd_v4_n8h_mtr_8w_mmu5' mask_8 = SetNull(raster_8, '2016', cond) #clear out the extent for next time arcpy.ClearEnvironment("extent") # print fc_count outname = "tile_" + str(fc_count) + '.tif' filelist = [mask_8, mask_7, mask_6, mask_5, mask_4, mask_3, mask_2, mask_1] print 'filelist:', filelist folder = "C:/Users/Bougie/Desktop/Gibbs/tiles" outname = "tile_" + str(fc_count) + '.tif' ##### mosaicRasters(): arcpy.MosaicToNewRaster_management(filelist, folder, outname, path_mtr.spatialReference, '16_BIT_UNSIGNED', 30, "1", "LAST", "FIRST")