def copyRas():
    arcpy.CopyRaster_management(mosaicRaster, outputRaster)
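# A minimal, self-contained sketch (not from the snippet above) of a plain
# CopyRaster_management call with an explicit pixel type and NoData value;
# the input/output paths are hypothetical placeholders.
import arcpy

arcpy.env.overwriteOutput = True
arcpy.CopyRaster_management(in_raster=r"C:/temp/in_raster.tif",
                            out_rasterdataset=r"C:/temp/out_raster.tif",
                            nodata_value="0",
                            pixel_type="16_BIT_UNSIGNED")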
########################################################################################################################
# CREATE HEATMAPS
# of speed limit (for Seattle), raw AADT (for Seattle), functional class-based AADT (for Puget Sound) and
# BING (see Bing_format.py)
# Use a decay function to 'simulate' the pollution spread of various levels of traffic volume, speed, and congestion
########################################################################################################################
# Seattle SPEED LIMIT
arcpy.env.snapRaster = template_ras
arcpy.PolylineToRaster_conversion(roadstraffic_avg, value_field='SPEEDLIMIT', out_rasterdataset='Seattle_spdlm',
                                  priority_field='SPEEDLIMIT', cellsize=restemplate)
heat_spdlm = FocalStatistics(os.path.join(gdb, 'Seattle_spdlm'),
                             neighborhood=NbrWeight('C:/Mathis/ICSL/stormwater/results/logkernel100.txt'),
                             statistics_type='SUM', ignore_nodata='DATA')  # It seems that full paths are needed to make this work
heat_spdlm.save('heat_spdlm')
heat_spdlm_int = Int(Raster('heat_spdlm') + 0.5)  # Constantly results in an overall python crash?
heat_spdlm_int.save('heat_spdlm_int')
arcpy.CopyRaster_management('heat_spdlm_int', os.path.join(rootdir, 'results/heatspdlm_int'))

# Seattle AADT
arcpy.PolylineToRaster_conversion(roadstraffic_avg, value_field='AADT_interp', out_rasterdataset='Seattle_AADT',
                                  priority_field='AADT_interp', cellsize=restemplate)
customheatmap(kernel_dir=os.path.join(rootdir, 'results/bing'), in_raster=os.path.join(gdb, 'Seattle_AADT'),
              out_gdb=gdb, out_var='AADT', divnum=100, keyw='')

# OSM functional class-based AADT
arcpy.PolylineToRaster_conversion(PSOSM_all, value_field='fclassADT', out_rasterdataset=OSM_AADT,
                                  priority_field='fclassADT', cellsize=restemplate)
customheatmap(kernel_dir=os.path.join(rootdir, 'results/bing'), in_raster=OSM_AADT,
              out_gdb=PSgdb, out_var='OSMAADT', divnum=100, keyw='')

# OSM functional class-based SPEED LIMIT
arcpy.PolylineToRaster_conversion(PSOSM_all, value_field='fclassSPD', out_rasterdataset=OSM_SPD,
def tail():
    tupDateNow = datetime.now()
    while(1):
        # open the CSV file to find out which scenes have already been processed
        #arcpy.env.workspace = config.gdbPath
        log = pd.read_csv("logComplete.csv")
        liScene = log["scene"].tolist()
        liDate = log["dateComplete"].tolist()
        msg = str(datetime.now()) + '\t' + "Importing Library ... \n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)
        arcpy.CheckOutExtension("spatial")
        # pass the list of completed scenes to the FTP download
        filenameNow, scene, boolScene, year, month = ft.downloadFile(liScene)
        del log
        del liScene
        del liDate
        if(boolScene == False):
            print "Data hari ini selesai diproses"
            tupDateLoop = datetime.now()
            # wait until the date changes before looking for new data
            while (tupDateNow.day == tupDateLoop.day):
                print "menunggu hari berganti :)"
                time.sleep(10)
                tupDateLoop = datetime.now()
            tupDateNow = tupDateLoop
            print "hari telah berganti"
        # define the name of the file to be processed
        filename = filenameNow
        # define the name of the raw classified output file
        filenameOut = filenameNow + "_classified.TIF"
        # define the location of the downloaded ERS file on the workstation
        dataPath = config.dataPath + scene + "/" + filename
        # define the location of the .pkl model trained on the sample data
        modelPath = config.modelPath
        # define the Indonesia shapefile used to crop to administrative boundaries
        shpPath = config.shpPath
        # define the output folder for the results
        outFolder = config.outputPath + filename.split(".")[0]
        # if the folder already exists, delete it
        if(os.path.exists(outFolder)):
            shutil.rmtree(outFolder)
        # create the folder defined above
        os.makedirs(outFolder)
        # define the output file path
        outputPath = outFolder + "/" + filenameOut

        ##################### CONVERT ERS DATA TO INDIVIDUAL BANDS ######################################
        print ("converting b3")
        if(os.path.exists(dataPath + "TOA_B3" + ".TIF")):
            os.remove(dataPath + "TOA_B3" + ".TIF")
        # Take only band 3 and turn it into a raster
        try:
            b_green = arcpy.Raster(dataPath + "/B3") * 1.0
        except:
            b_green = arcpy.Raster(dataPath + "/Band_3") * 1.0
        print ("saving b3")
        msg = str(datetime.now()) + '\t' + "saving b3 \n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)
        # save the band 3 raster to the input data folder
        b_green.save(dataPath + "TOA_B3" + ".TIF")
        del b_green

        print ("converting b5")
        if(os.path.exists(dataPath + "TOA_B5" + ".TIF")):
            os.remove(dataPath + "TOA_B5" + ".TIF")
        # Take only band 5 and turn it into a raster
        try:
            b_nir = arcpy.Raster(dataPath + "/B5") * 1.0
        except:
            b_nir = arcpy.Raster(dataPath + "/Band_5") * 1.0
        print ("saving b5")
        msg = str(datetime.now()) + '\t' + "saving b5 \n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)
        # save the band 5 raster to the input data folder
        b_nir.save(dataPath + "TOA_B5" + ".TIF")
        del b_nir

        print ("converting b6")
        if(os.path.exists(dataPath + "TOA_B6" + ".TIF")):
            os.remove(dataPath + "TOA_B6" + ".TIF")
        # Take only band 6 and turn it into a raster
        try:
            b_swir1 = arcpy.Raster(dataPath + "/B6") * 1.0
        except:
            b_swir1 = arcpy.Raster(dataPath + "/Band_6") * 1.0
        msg = str(datetime.now()) + '\t' + "saving b6 \n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)
        print ("saving b6")
        # save the band 6 raster to the input data folder
        b_swir1.save(dataPath + "TOA_B6" + ".TIF")
        del b_swir1
        ####################### DATA CONVERSION FINISHED #######################################

        #################### CONVERT RASTERS TO DATAFRAME FORMAT ###############################
        msg = str(datetime.now()) + '\t' + "Processing file " + filename + "\n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)
        # load all rasters converted earlier
        rasterarrayband6 = arcpy.RasterToNumPyArray(dataPath + "TOA_B3.TIF")
        rasterarrayband6 = np.array(rasterarrayband6, dtype=np.uint32)
        rasterarrayband5 = arcpy.RasterToNumPyArray(dataPath + "TOA_B5.TIF")
        rasterarrayband5 = np.array(rasterarrayband5, dtype=np.uint32)
        rasterarrayband3 = arcpy.RasterToNumPyArray(dataPath + "TOA_B6.TIF")
        rasterarrayband3 = np.array(rasterarrayband3, dtype=np.uint32)
        print rasterarrayband6.dtype
        print("Change raster format to numpy array")
        # stack the 3 data arrays horizontally
        data = np.array([rasterarrayband6.ravel(), rasterarrayband5.ravel(), rasterarrayband3.ravel()], dtype=np.int16)
        # transpose to vertical for the .pkl prediction
        data = data.transpose()
        # immediately delete variables that are no longer used
        del rasterarrayband6
        del rasterarrayband5
        del rasterarrayband3
        print("Change to dataframe format")
        msg = str(datetime.now()) + '\t' + "Change to dataframe format \n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)
        #time.sleep(1)
        # define the dataframe column names
        columns = ['band3', 'band5', 'band6']
        # convert the vertical array into a dataframe
        df = pd.DataFrame(data, columns=columns)
        # delete the vertical array
        del data
        ###################### DONE ####################################################

        print("Split data to 20 chunks ")
        msg = str(datetime.now()) + '\t' + "Split data to 20 chunks \n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)
        #time.sleep(1)
        # split the data into 20 chunks because the program cannot predict everything at once
        df_arr = np.array_split(df, 20)
        # delete the dataframe
        del df
        # load the trained classifier (.pkl model)
        clf = joblib.load(modelPath)
        # define a list to hold the integer prediction results
        kelasAll = []
        # repeat for each data chunk
        for i in range(len(df_arr)):
            print ("predicting data chunk-%s\n" % i)
            msg = str(datetime.now()) + '\t' + "predicting data chunk-%s\n" % i
            redis.rpush(config.MESSAGES_KEY, msg)
            redis.publish(config.CHANNEL_NAME, msg)
            msg2 = i
            redis.rpush(config.MESSAGES_KEY_2, msg2)
            redis.publish(config.CHANNEL_NAME_2, msg2)
            #time.sleep(1)
            # predict new data using chunk i
            kelas = clf.predict(df_arr[i])
            # create an empty dataframe
            dat = pd.DataFrame()
            # put the predictions for chunk i into the class column
            dat['kel'] = kelas
            print ("mapping to integer class")
            msg = str(datetime.now()) + '\t' + "mapping to integer class \n"
            redis.rpush(config.MESSAGES_KEY, msg)
            redis.publish(config.CHANNEL_NAME, msg)
            #time.sleep(1)
            # define a dictionary to map class strings to integer class codes
            # (awan = cloud, air = water, tanah = soil, vegetasi = vegetation)
            mymap = {'awan': 1, 'air': 2, 'tanah': 3, 'vegetasi': 4}
            # map using the dictionary
            dat['kel'] = dat['kel'].map(mymap)
            # convert the dataframe column to an array
            band1Array = dat['kel'].values
            # convert to a numpy array with unsigned 8-bit type to avoid memory errors
            band1Array = np.array(band1Array, dtype=np.uint8)
            print ("extend to list")
            msg = str(datetime.now()) + '\t' + "extend to list \n"
            redis.rpush(config.MESSAGES_KEY, msg)
            redis.publish(config.CHANNEL_NAME, msg)
            #time.sleep(1)
            #kelasAllZeros[] = band1Array
            # append the numpy array to the prediction list
            kelasAll.extend(band1Array.tolist())
            # spot-check the prediction array
            print(kelasAll[1:10])
        # delete all variables that are no longer used
        del df_arr
        del clf
        del kelas
        del dat
        del band1Array

        print ("change list to np array")
        msg = str(datetime.now()) + '\t' + "change list to np array \n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)
        # convert the prediction list to a numpy array
        kelasAllArray = np.array(kelasAll, dtype=np.uint8)
        # delete the prediction list
        del kelasAll
        print ("reshaping np array")
        msg = str(datetime.now()) + '\t' + "reshaping np array \n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)
        rasterarrayband6 = arcpy.RasterToNumPyArray(dataPath + "TOA_B3.TIF")
        # reshape the 1-D numpy array to 2-D to match the raster layout
        band1 = np.reshape(kelasAllArray, (-1, rasterarrayband6[0].size))
        # cast to unsigned integer
        band1 = band1.astype(np.uint8)
        del rasterarrayband6
        # load the band 6 raster for projection and raster extent information
        raster = arcpy.Raster(dataPath + "TOA_B6.TIF")
        inputRaster = dataPath + "TOA_B6.TIF"
        # get the spatial reference
        spatialref = arcpy.Describe(inputRaster).spatialReference
        # get the raster cell height and width
        cellsize1 = raster.meanCellHeight
        cellsize2 = raster.meanCellWidth
        # define the raster extent and its lower-left point
        extent = arcpy.Describe(inputRaster).Extent
        pnt = arcpy.Point(extent.XMin, extent.YMin)
        # delete what is no longer used
        del raster
        del kelasAllArray

        # save the raster
        print ("numpy array to raster ..")
        msg = str(datetime.now()) + '\t' + "numpy array to raster .. \n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)
        # convert the numpy array to a raster using the attributes defined above
        out_ras = arcpy.NumPyArrayToRaster(band1, pnt, cellsize1, cellsize2)
        arcpy.CheckOutExtension("Spatial")
        print ("define projection ..")
        msg = str(datetime.now()) + '\t' + "define projection ..\n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)
        # save the converted raster to the defined path
        arcpy.CopyRaster_management(out_ras, outputPath)
        # define the projection using the spatial reference
        arcpy.DefineProjection_management(outputPath, spatialref)

        print ("Majority Filter..")
        msg = str(datetime.now()) + '\t' + "majority filter..\n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)
        # enable overwriting of outputPath
        arcpy.env.workspace = config.outputPath
        arcpy.env.overwriteOutput = True
        # majority filter
        arcpy.CheckOutExtension("Spatial")
        outMajFilt = MajorityFilter(outputPath, "FOUR", "MAJORITY")
        # Save the output
        outMajFilt.save(outputPath)
        # delete what is no longer used
        del out_ras
        del band1
        del spatialref
        del cellsize1
        del cellsize2
        del extent
        del pnt

        ########################### MASKING CLOUD AND BORDER #########################
        print("Masking Cloud")
        # load the downloaded cloud-mask (cm) file that is provided with the scene
        mask = Raster(os.path.dirname(dataPath) + "/" + filename.split(".")[0] + "_cm.ers")
        # load the raw classification raster
        inRas = Raster(outputPath)
        # in the cm file 1 = cloud, 2 = shadow, 11 = border;
        # set those values to 1 and everything else to 0
        #inRas_mask = Con((mask == 1), 1, Con((mask == 2), 1, Con((mask == 11), 1, 0)))
        inRas_mask = Con((mask == 1), 1, Con((mask == 2), 1, Con((mask == 11), 1, Con((mask == 3), 1, Con((mask == 4), 1, Con((mask == 5), 1, Con((mask == 6), 1, Con((mask == 7), 1, 0))))))))
        # build a raster of the NoData cells from the condition above (NoData becomes 1);
        # I also do not understand why the non-cloud cells end up as NoData
        mask2 = IsNull(inRas_mask)
        # where that raster equals 1 set it to 0, otherwise keep the conditional raster value
        inRas2 = Con((mask2 == 1), 0, inRas_mask)
        # save the raster in which all cells equal to 1 will be removed from the classification
        inRas2.save(os.path.dirname(outputPath) + "/" + filenameOut.split(".")[0] + "_mask.TIF")
        # where the raster equals 1 set NoData, otherwise keep the original classification raster
        inRas_mask2 = SetNull(inRas2 == 1, inRas)
        # save the raster cleaned of clouds and bad borders
        inRas_mask2.save(os.path.dirname(outputPath) + "/" + filenameOut.split(".")[0] + "_maskCloud.TIF")
        # delete the conditional variables that are no longer used
        del mask
        del mask2
        del inRas
        del inRas2
        del inRas_mask
        del inRas_mask2
        ############################## DONE ###########################################

        ####################### MASKING WITH THE INDONESIA SHAPEFILE ##############################
        print("Masking with shp indonesia")
        arcpy.CheckOutExtension("Spatial")
        # open the Indonesia shapefile
        inMaskData = os.path.join(shpPath, "INDONESIA_PROP.shp")
        # open the cloud/border-masked raster
        inRasData = Raster(os.path.dirname(outputPath) + "/" + filenameOut.split(".")[0] + "_maskCloud.TIF")
        # apply the mask using the shapefile
        try:
            outExtractByMask = ExtractByMask(inRasData, inMaskData)
            print("Saving in: " + str(os.path.dirname(outputPath) + "/" + filenameOut.split(".")[0] + "_maskShp.TIF"))
            # save the masked result
            outExtractByMask.save(os.path.dirname(outputPath) + "/" + filenameOut.split(".")[0] + "_maskShp.TIF")
            finalPath = config.finalOutputPath + year + "/" + month + "/" + filenameNow.split(".")[0]
            print finalPath
            if(os.path.exists(finalPath)):
                shutil.rmtree(finalPath)
            os.makedirs(finalPath)
            arcpy.CopyRaster_management(outExtractByMask, finalPath + "/" + filenameOut)
            # again delete variables that are no longer used
            del inMaskData
            del inRasData
            del outExtractByMask
        except:
            print "diluar indonesia shp"
            finalPath = config.finalOutputPath + year + "/" + month + "/" + filenameNow.split(".")[0]
            print finalPath
            if(os.path.exists(finalPath)):
                shutil.rmtree(finalPath)
            os.makedirs(finalPath)
            arcpy.CopyRaster_management(inRasData, finalPath + "/" + filenameOut)
            pass
        ########################## DONE ################################################

        ####################### SAVE LOG OF DATA THAT HAS FINISHED PROCESSING ########################################
        log = pd.read_csv("logComplete.csv")
        liScene = log["scene"].tolist()
        liDate = log["dateComplete"].tolist()
        liScene.append(scene)
        liDate.append(str(datetime.now()))
        print(liScene)
        print(liDate)
        serScene = pd.Series(liScene)
        serDate = pd.Series(liDate)
        print(serScene)
        print(serDate)
        log2 = pd.DataFrame()
        log2["scene"] = serScene
        log2["dateComplete"] = serDate
        print(log2.head(5))
        log2.to_csv("logComplete.csv", index=False)
        del liScene
        del liDate
        del serScene
        del serDate
        del log
        del log2
        ##########################################################################################################

        # delete downloaded data in workstation
        dataFolder = os.listdir(config.dataPath)
        print dataFolder
        if(len(dataFolder) > 1):
            print config.dataPath + dataFolder[0]
            shutil.rmtree(config.dataPath + dataFolder[0])
        hasilFolder = os.listdir(config.outputPath)
        print hasilFolder
        if(len(hasilFolder) > 1):
            print config.outputPath + hasilFolder[0]
            shutil.rmtree(config.outputPath + hasilFolder[0])

        print ("Finished ..")
        msg = str(datetime.now()) + '\t' + "Finished ... \n"
        redis.rpush(config.MESSAGES_KEY, msg)
        redis.publish(config.CHANNEL_NAME, msg)
        redis.delete(config.MESSAGES_KEY)
        redis.delete(config.MESSAGES_KEY_2)
        # local variables to list
        dictLocal = locals()
        # delete all local variables, hoping this will free some space
        for key in dictLocal.keys():
            del key
        clear_all()
        # other cleanup
        gc.collect()
        #shutil.rmtree(config.gdbPath)
        arcpy.Delete_management(config.gdbPathDefault)
        arcpy.Delete_management("in_memory")
        arcpy.env.overwriteOutput = True
if not os.path.exists(sweDir):
    os.makedirs(sweDir)
snwpkDir = os.path.join(outputDir, 'snwpk')
if not os.path.exists(snwpkDir):
    os.makedirs(snwpkDir)
print(jjdate + ' is processing in output folder ' + str(outputDir))

ppt3 = Raster(ppt1)
ppt3.save(os.path.join(pptDir, 'ppt' + str(jjdate)))  # + '.tif')
ppt = os.path.join(pptDir, 'ppt' + str(jjdate))  # + '.tif'
arcpy.env.snapRaster = os.path.join(workDir, 'USA_data', 'NDVI_snapgrid_1km.tif')

print('Effective PPT = ' + str(ppt) + '* (1 - (Igrid/100)')
effppt = ppt * (1 - (Igrid / 100.0))
arcpy.CopyRaster_management(effppt, os.path.join(effDir, 'effppt' + str(jjdate)),
                            "#", "#", "#", "NONE", "NONE", "#")

print('Intercepted PPT = ' + str(ppt) + '* (Igrid/100)')
intcep = ppt * Float(Igrid / 100.0)
arcpy.env.compression = 'LZW'
arcpy.CopyRaster_management(intcep, os.path.join(intcepDir, 'intcep' + str(jjdate)),
                            "#", "#", "#", "NONE", "NONE", "#")

print('Created eff Rainfall and Interception')
log_file.write('Created eff Rainfall and Interception' + '\n')
log_file.write('\n')

# Snowpack
print('Snow component of Precipitation....' + '\n')
print('Calculating what portion is rain or snow based on average temperature')
snow_melt_fac = arcpy.sa.Con(tavg <= 6, 0, melt_rate)
snow_melt_fac.save(os.path.join(snwpkDir, 'swmeltfac' + str(jjdate) + '.tif'))
arcpy.AddMessage("Image Max value=" + str(imageMax)) imageSTDEV = float( arcpy.GetRasterProperties_management(inRaster, "STD").getOutput(0)) arcpy.AddMessage("Image STDEV value=" + str(imageSTDEV)) # Process: calculates a linear stretch with right tail trim arcpy.AddMessage("Enhancing image.....") enhImage = ((myRasterObj - imageMin) / ( (imageMean + float(No_STDEV) * imageSTDEV) - imageMin)) * 255 enhImage.save(enhImage1) # Process: Trimming vales >255 and converting to tif arcpy.AddMessage("Converting to tif for final output...") outCon1 = Con(enhImage1, enhImage1, 255, "VALUE < 255") outCon1.save(enhImage1) arcpy.CopyRaster_management(enhImage1, outputRaster, "", "", "", "NONE", "NONE", "8_BIT_UNSIGNED") except arcpy.ExecuteError: #Return Geoprocessing tool specific errors line, filename, err = trace() arcpy.AddError("Geoprocessing error on " + line + " of " + filename + " :") for msg in range(0, arcpy.GetMessageCount()): if arcpy.GetSeverity(msg) == 2: arcpy.AddReturnMessage(msg) except: #Returns Python and non-tool errors line, filename, err = trace() arcpy.AddError("Python error on " + line + " of " + filename) arcpy.AddError(err)
# You can use the rasters in the Exercise09 folder as an example.
import arcpy
from arcpy import env

# Overwrite in case it fails
arcpy.env.overwriteOutput = True

# Work area
env.workspace = "C:\\EsriPress\\Python\\Data\\Exercise09"

# Path for my new GDB
Out_Raster = "C:\\EsriPress\\Python\\Data\\Exercise09\\MyRadRasters.gdb"

# Creates the GDB
arcpy.CreateFileGDB_management("C:\\EsriPress\\Python\\Data\\Exercise09", "MyRadRasters.gdb")

# Variable that looks for all rasters and lists them.
# The parentheses are empty because I want to copy all files no matter the type.
listras = arcpy.ListRasters()

# Loop that takes the list of rasters and copies them to the new GDB
for raster in listras:
    # here is exactly where the copying happens
    arcpy.CopyRaster_management(raster, Out_Raster + '/' + arcpy.Describe(raster).basename, "Defaults")

print " Task complete! "
    for jpg in jpgs:
        # Output geotiff name, inputName_suffix
        output_name = arcpy.Describe(jpg).baseName + "_copyRaster.tif"

        # Execute the CopyRaster tool
        # Check if jpg is 3-band RGB or single-band grayscale
        if arcpy.Describe(jpg).bandCount != 3:
            RGB_to_Colormap = "NONE"
        else:
            RGB_to_Colormap = "RGBToColormap"

        arcpy.CopyRaster_management(in_raster=jpg,
                                    out_rasterdataset=os.path.join(workspace, output_name),
                                    config_keyword="",
                                    background_value="",
                                    nodata_value="256",
                                    onebit_to_eightbit="NONE",
                                    colormap_to_RGB="NONE",
                                    pixel_type="",
                                    scale_pixel_value="NONE",
                                    RGB_to_Colormap=RGB_to_Colormap,
                                    format="TIFF",
                                    transform="NONE")

        # Print messages when the tool runs successfully
        print(arcpy.GetMessages(0))

except arcpy.ExecuteError:
    print(arcpy.GetMessages(2))
except Exception as ex:
    print(ex.args[0])
### D - LOAD FILLER OUTLINE MASK for the filling process.
# NOTE: This is a blank raster that is used for filling "missing data"
# in each daily raster.

# Make the raster file path:
filling_mask_rasterfile = os.path.join(coreobjectpath, r'fillraster_resampled.tif')

# Load filling mask raster, copy into global scratch memory.
arcpy.MakeRasterLayer_management(filling_mask_rasterfile, r'fillingrasterlayer')

# Put in scratch environment, since we may want to clear IN_MEMORY environment:
arcpy.CopyRaster_management(r'fillingrasterlayer',
                            os.path.join(scratch_gdb, 'fillingrasterlayer'))

### E - MAIN LOOP: PROCESSES ALL DAILY FILES FOR EACH YEAR.
startyear = 1898

# Make iterable for the year range of the project.
yearlist_iterable = iter(range(startyear, endyear))

# Loop over each year.
for fileyear in yearlist_iterable:
    # Skip forward if the year is 1899.
    try:
        # E.1 - Grab list of full tiff files for current year of loop.
        annual_raster_list = getrasters(fileyear, all_raster_list)
arcpy.env.overwriteOutput = True

# think of as the input directory
feature_dir = r'M:\git-annex\globalprep\prs_oa\v2015\working\annual_oa_rescaled'
# feature directory is where Arc considers the 'home' for this script
# the r above means treat the following as a raw string (don't escape the backslashes)

output_raster_dir = r'M:\git-annex\globalprep\prs_oa\v2015\working\annual_oa_rescaled_int'

arcpy.env.workspace = feature_dir

if not os.path.exists(output_raster_dir):
    os.mkdir(output_raster_dir)  # if output folder doesn't already exist, create it!

# for multiple rasters (ocean acid)
rasters = glob.glob('M:/git-annex/globalprep/prs_oa/v2015/working/annual_oa_rescaled/*.tif')

for raster_in in rasters:
    # get the basename of the input without extension
    basename = os.path.splitext(raster_in)[0]  # indent this once for ocean acid
    outname = os.path.join(output_raster_dir, basename + "_int.tif")  # indent this once for ocean acid
    arcpy.env.compression = "LZW"
    input_raster_1000_int = arcpy.sa.Int(arcpy.sa.Raster(raster_in) * 1000)
    input_raster_1000_int_nonull = arcpy.sa.Con(arcpy.sa.IsNull(input_raster_1000_int), 0, input_raster_1000_int)
    input_raster_1000_int_nonull_nibble = arcpy.sa.Nibble(input_raster_1000_int_nonull, input_raster_1000_int, "DATA_ONLY")
    input_raster_1000_int_nonull_nibble_float = input_raster_1000_int_nonull_nibble / 1000.0
    arcpy.CopyRaster_management(input_raster_1000_int_nonull_nibble_float, outname)
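# A minimal, self-contained sketch of the same fill-NoData pattern used above:
# scale a float raster to integers, replace NoData with nibbled neighbour values,
# then rescale. The input/output paths here are hypothetical placeholders.
import arcpy
from arcpy.sa import Raster, Int, IsNull, Con, Nibble

arcpy.CheckOutExtension("Spatial")
in_path = r"C:/temp/float_raster.tif"           # hypothetical input
out_path = r"C:/temp/float_raster_filled.tif"   # hypothetical output

scaled_int = Int(Raster(in_path) * 1000)                # preserve 3 decimals as integers
zero_filled = Con(IsNull(scaled_int), 0, scaled_int)    # give NoData cells a value so Nibble can fill them
nibbled = Nibble(zero_filled, scaled_int, "DATA_ONLY")  # fill former NoData from nearest data cells
arcpy.CopyRaster_management(nibbled / 1000.0, out_path)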
def ContributingArea(output_workspace, dem, processes): # Set environment variables arcpy.env.overwriteOutput = True arcpy.env.workspace = output_workspace # List parameter values arcpy.AddMessage("Workspace: {}".format(arcpy.env.workspace)) arcpy.AddMessage("DEM: {}".format(arcpy.Describe(dem).baseName)) arcpy.AddMessage("Processes: {}".format(str(processes))) # Export DEM to dem.tif file for use by TauDEM ____________________________ # TauDEM needs an uncompressed dem. Create in GDB because CopyRaster # cannot control compression when exporting to .tif arcpy.env.compression = "NONE" dem_nocompression = os.path.join(output_workspace, os.path.basename(dem) + "_nocompression") arcpy.CopyRaster_management(in_raster=dem, out_rasterdataset=dem_nocompression) arcpy.AddMessage("Uncompressed DEM created") # Create .tif version of dem (TauDEM only accepts .tif input). Stored at # the folder above the output_workspace demfile = os.path.join(os.path.dirname(output_workspace), "dem.tif") arcpy.CopyRaster_management(in_raster=dem_nocompression, out_rasterdataset=demfile) arcpy.AddMessage("Temporary `dem.tif` created") # TauDEM Remove pits - PitRemove __________________________________________ # output elevation with pits filled felfile = os.path.join(os.path.dirname(output_workspace), "dem_fel.tif") # Construct the taudem command line cmd = 'mpiexec -n ' + str( processes ) + ' pitremove -z ' + '"' + demfile + '"' + ' -fel ' + '"' + felfile + '"' arcpy.AddMessage("\nTauDEM command: " + cmd) # Submit command to operating system os.system(cmd) # Capture contents of shell and print it to the arcgis dialog box process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE) arcpy.AddMessage('\nProcess started:\n') for line in process.stdout.readlines(): arcpy.AddMessage(line) arcpy.AddMessage("Pits Removed Calculated") # TauDEM D Infinity flow direction - DinfFlowDir __________________________ # output flow direction (ang) and slope (slp) rasters angfile = os.path.join(os.path.dirname(output_workspace), "dem_ang.tif") slpfile = os.path.join(os.path.dirname(output_workspace), "dem_slp.tif") # Construct command cmd = 'mpiexec -n ' + str( processes ) + ' DinfFlowDir -fel ' + '"' + felfile + '"' + ' -ang ' + '"' + angfile + '"' + ' -slp ' + '"' + slpfile + '"' arcpy.AddMessage("\nTauDEM command: " + cmd) # Submit command to operating system os.system(cmd) # Capture contents of shell and print it to the arcgis dialog box process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE) arcpy.AddMessage('\nProcess started:\n') for line in process.stdout.readlines(): arcpy.AddMessage(line) arcpy.AddMessage("Flow Direction Calculated") # TauDEM D-infinity Contributing Area - AreaDinf __________________________ # output specific area (sca) scafile = os.path.join(os.path.dirname(output_workspace), "sca.tif") # Construct command # No outlet file, weight file, or edge contanimation checking cmd = 'mpiexec -n ' + str( processes ) + ' AreaDinf -ang ' + '"' + angfile + '"' + ' -sca ' + '"' + scafile + '"' + ' -nc ' arcpy.AddMessage("\nTauDEM command: " + cmd) # Submit command to operating system os.system(cmd) # Capture contents of shell and print it to the arcgis dialog box process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE) arcpy.AddMessage('\nProcess started:\n') for line in process.stdout.readlines(): arcpy.AddMessage(line) # Copy contributing area raster to output_workspace contributing_area = os.path.join(output_workspace, "contributing_area") arcpy.env.compression = "LZ77" 
    arcpy.CopyRaster_management(in_raster=scafile,
                                out_rasterdataset=contributing_area)
    arcpy.AddMessage("Contributing Area Calculated")

    # Return
    arcpy.SetParameter(3, contributing_area)

    # Cleanup
    arcpy.Delete_management(in_data=dem_nocompression)
    arcpy.Delete_management(in_data=demfile)
    arcpy.Delete_management(in_data=felfile)
    arcpy.Delete_management(in_data=angfile)
    arcpy.Delete_management(in_data=slpfile)
    arcpy.Delete_management(in_data=scafile)
    arcpy.AddMessage("Temp datasets deleted")
def StreamNetwork(output_workspace, contrib_area, threshold, processes): # Check out the ArcGIS Spatial Analyst extension license arcpy.CheckOutExtension("Spatial") # Set environment variables arcpy.env.overwriteOutput = True arcpy.env.workspace = output_workspace # List parameter values arcpy.AddMessage("Workspace: {}".format(arcpy.env.workspace)) arcpy.AddMessage("Contributing Area: " "{}".format(arcpy.Describe(contrib_area).baseName)) arcpy.AddMessage("Threshold: {}".format(str(threshold))) arcpy.AddMessage("Processes: {}".format(str(processes))) # Convert the GDB contrib_area raster to .tif _____________________________ # TauDEM needs an uncompressed raster. Create in GDB because CopyRaster # cannot control compression when exporting to .tif arcpy.env.compression = "NONE" contrib_area_nocompression = os.path.join( output_workspace, os.path.basename(contrib_area) + "_nocompression") arcpy.CopyRaster_management(in_raster=contrib_area, out_rasterdataset=contrib_area_nocompression) arcpy.AddMessage("Uncompressed contrib_area created") contrib_area_tif = os.path.join(os.path.dirname(output_workspace), "contrib_area.tif") arcpy.CopyRaster_management(in_raster=contrib_area_nocompression, out_rasterdataset=contrib_area_tif) arcpy.AddMessage("Uncompressed contrib_area_tif created") # TauDEM Stream definition by threshold - Threshold _______________________ # output thresholded stream raster stream_grid = os.path.join(os.path.dirname(output_workspace), "stream_grid.tif") # Construct command cmd = 'mpiexec -n ' + str( processes ) + ' Threshold -ssa ' + '"' + contrib_area_tif + '"' + ' -src ' + '"' + stream_grid + '"' + ' -thresh ' + str( threshold) arcpy.AddMessage("\nTauDEM command: " + cmd) # Submit command to operating system os.system(cmd) # Capture contents of shell and print it to the arcgis dialog box process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE) arcpy.AddMessage('\nProcess started:\n') for line in process.stdout.readlines(): arcpy.AddMessage(line) # Thin stream network - arcpy.sa.Thin _____________________________________ stream_thin = arcpy.sa.Thin(in_raster=stream_grid, corners="SHARP") stream_thin_path = os.path.join(os.path.dirname(output_workspace), "stream_thin.tif") arcpy.CopyRaster_management(in_raster=stream_thin, out_rasterdataset=stream_thin_path) # Convert raster stream to polyline _______________________________________ # output vector stream network stream_network = os.path.join(output_workspace, "stream_network") # Convert the `stream_thin` raster to a polyline arcpy.RasterToPolyline_conversion(in_raster=stream_thin_path, out_polyline_features=stream_network) arcpy.AddMessage("Stream network created") # Add the `ReachName` field # Check if the field already exists and if not add it field_names = [f.name for f in arcpy.ListFields(stream_network)] if "ReachName" not in field_names: arcpy.AddField_management(in_table=stream_network, field_name="ReachName", field_type="TEXT") # Return arcpy.SetParameter(4, stream_network) # Cleanup arcpy.Delete_management(in_data=contrib_area_nocompression) arcpy.Delete_management(in_data=contrib_area_tif) arcpy.Delete_management(in_data=stream_grid) arcpy.Delete_management(in_data=stream_thin_path) arcpy.AddMessage("Temp datasets deleted")
arcpy.env.compression = "JPEG 50"
arcpy.env.resamplingMethod = "BILINEAR"
start = time.time()
i = 0
while i < len(os.listdir(orthos)):
    file = os.listdir(orthos)[i]
    if file.endswith(".tif") or file.endswith(".jpg"):
        end = time.time()
        print(str(i) + " / " + str(len(os.listdir(orthos))))
        if i > 0:  # avoid dividing by zero on the first raster
            print("Remaining time: " + str(datetime.timedelta(seconds=int(end - start) / i) * (len(os.listdir(orthos)) - i)))
        # Expecting to have a .tfw file first.
        if i == 0 or i == 1:
            arcpy.RasterToGeodatabase_conversion(orthos + "/" + file, gdb)
            if file.endswith(".tif"):
                arcpy.CopyRaster_management(gdb + "/T" + file[:file.index(".")], gdb + "/" + name)
                arcpy.Delete_management(gdb + "/T" + file[:file.index(".")])
            else:
                arcpy.CopyRaster_management(gdb + "/" + file[:file.index(".")], gdb + "/" + name)
                arcpy.Delete_management(gdb + "/" + file[:file.index(".")])
            start = time.time()
        else:
            arcpy.Mosaic_management(inputs=orthos + "/" + file,
                                    target=gdb + "/" + name,
                                    mosaic_type="LAST",
                                    colormap="FIRST",
                                    background_value="",
                                    nodata_value="",
                                    onebit_to_eightbit="NONE",
                                    mosaicking_tolerance="0",
                                    MatchingMethod="NONE")
    i += 1

print("BUILDING PYRAMIDS")
arcpy.BuildPyramids_management(
    in_raster_dataset=gdb + "/" + name,
# This moves rasters from OpennessGrids to MetroOpenness.gdb
for fn in OPEN_Metro:
    infile = fn[:-1]
    print(infile)
    outfile = fn[:-1]
    print(outfile)
    outfilegdb = outfile[:-4]
    print(outfilegdb)
    outraster = out_workspace1 + "\\" + outfilegdb
    print(outraster)
    outcopy = arcpy.CopyRaster_management(infile, outraster, pixel_type="32_bit_float")

# This moves rasters from OpennessGrids to NortheastOpenness.gdb
for fn in OPEN_Northeast:
    infile = fn[:-1]
    print(infile)
    outfile = fn[:-1]
    print(outfile)
    outfilegdb = outfile[:-4]
    print(outfilegdb)
    outraster = out_workspace2 + "\\" + outfilegdb
    print(outraster)
out_name = "%s%s_%i_sp%i.tif" % (results_dir, out_str, i, sp_code) if arcpy.Exists(out_name): print "raster " + str(i) + " species " + str( sp_code) + " already done" else: if i == 1: jnk = arcpy.Raster(os.path.join(rootdir, f)) #jnk=jnk>0 jnk0 = arcpy.sa.IsNull(jnk) jnk = arcpy.sa.Con(jnk0, 0, 1) ensemble = jnk else: past_ensemble_loc = "%s%s_%i_sp%i.tif" % ( results_dir, out_str, i - 1, past_sp) past_ensemble = arcpy.Raster(past_ensemble_loc) jnk = arcpy.Raster(os.path.join(rootdir, f)) jnk0 = arcpy.sa.IsNull(jnk) jnk = arcpy.sa.Con(jnk0, 0, 1) ensemble = jnk + past_ensemble arcpy.CopyRaster_management(ensemble, out_name, "", "", "", "", "", "32_BIT_UNSIGNED") #ensemble.save(out_name) i = i + 1 past_sp = sp_code else: print "species " + str(sp_code) + " not in quantile" arcpy.CheckInExtension("Spatial")
PROTECTED_AREAS_DISSOLVED_shp = "%scratchworkspace%\\PROTECTED_AREAS_DISSOLVED.shp"
composition = "%workspace%\\composition"
connectivity = "%workspace%\\connectivity"
bookhab_mv = "%workspace%\\bookhab_mv"
PROTECTED_AREAS_DISSOLVED_FirstIteration_shp = "%workspace%\\PROTECTED_AREAS_DISSOLVED_FirstIteration.shp"
connectivity1 = "%workspace%\\connectivity1"
composition1 = "%workspace%\\composition1"
bookhab_mv1 = "%workspace%\\bookhab_mv1"

# Check to see what iteration number the model is on, and act accordingly
if IterationNumber == "0":
    gp.AddMessage("This is the first iteration")
    gp.makesummaryshapefile()

    # Process: Copy Features
    arcpy.CopyFeatures_management(PROTECTED_AREAS_DISSOLVED_shp, PROTECTED_AREAS_DISSOLVED_FirstIteration_shp, "", "0", "0", "0")

    # Process: Copy Raster
    arcpy.CopyRaster_management(connectivity, connectivity1, "", "", "", "NONE", "NONE", "")

    # Process: Copy Raster (2)
    arcpy.CopyRaster_management(composition, composition1, "", "", "", "NONE", "NONE", "")

    # Process: Copy Raster (3)
    arcpy.CopyRaster_management(bookhab_mv, bookhab_mv1, "", "", "", "NONE", "NONE", "")
else:
    gp.AddMessage("This is not the first iteration, no shapefile being created")

# have an output variable so that the script tool can be linked as a precondition to another model
gp.AddMessage("OutputMessage")
def mosaic(dnight, sets, filter): ''' This module creates the mosaic of full-resolution images for each data set. ''' #set arcpy environment variables part 2/2 arcpy.CheckOutExtension("Spatial") arcpy.env.workspace = filepath.rasters + 'scratch_fullres/' arcpy.env.scratchWorkspace = filepath.rasters + 'scratch_fullres' #filter paths F = {'V': '', 'B': 'B/'} f = {'V': '', 'B': 'b'} for s in sets: #file paths calsetp = filepath.calibdata + dnight + '/S_0%s/%s' % (s[0], F[filter]) gridsetp = filepath.griddata + dnight + '/S_0%s/%sfullres/' % ( s[0], F[filter]) if os.path.exists(gridsetp): shutil.rmtree(gridsetp) os.makedirs(gridsetp) #read in the registered images coordinates file = filepath.calibdata + dnight + '/pointerr_%s.txt' % s[0] Obs_AZ, Obs_ALT = n.loadtxt(file, usecols=(3, 4)).T Obs_AZ[n.where(Obs_AZ > 180)] -= 360 Obs_AZ[35] %= 360 #read in the best-fit zeropoint and plate scale file = filepath.calibdata + dnight + '/extinction_fit_%s.txt' % filter zeropoint, platescale, exptime = n.loadtxt(file, usecols=(2, 8, 9), unpack=True, ndmin=2) #loop through each file in the set for w in range(len(Obs_AZ) + 1): v = w + 1 if w == 45: w = 35 Obs_AZ[w] -= 360 if v in range(0, 50, 5): print 'Generating fullres image %i/45' % v arcpy.CopyRaster_management(calsetp + '/tiff/ib%03d.tif' % (w + 1), 'ib%03d.tif' % v, "DEFAULTS", "", "", "", "", "16_BIT_UNSIGNED") #re-define projection to topocentric coordinates arcpy.DefineProjection_management("ib%03d.tif" % v, tc(Obs_AZ[w], Obs_ALT[w])) #warp image to remove barrel distortion image arcpy.Warp_management("ib%03d.tif" % v, source_pnt, target_pnt, 'ibw%03d.tif' % v, "POLYORDER3", "BILINEAR") #reproject into GCS arcpy.ProjectRaster_management('ibw%03d.tif' % v, 'fwib%03d.tif' % v, geogcs, "BILINEAR", "0.0261") #clip to image boundary rectangle = clip_envelope(Obs_AZ, Obs_ALT, w) arcpy.Clip_management("fwib%03d.tif" % v, rectangle, "fcib%03d" % v) #mosaic raster list must start with an image with max pixel value > 256 v = 1 mstart = 1 while v < (len(Obs_AZ) + 1): im = imread(filepath.rasters + 'scratch_fullres/ib%03d.tif' % v) if n.max(im) > 255: mstart = v break v += 1 #mosaic raster list R1 = ';'.join(['fcib%03d' % i for i in range(mstart, 47)]) R2 = ';'.join(['fcib%03d' % i for i in range(1, mstart)]) R = R1 + ';' + R2 #mosaic to topocentric coordinate image; save in Griddata\ print "Mosaicking into all sky full-resolution image" arcpy.MosaicToNewRaster_management(R, gridsetp, 'skytopo', geogcs, "32_BIT_FLOAT", "0.0261", "1", "BLEND", "FIRST") #convert to magnitudes per square arc second print "Converting the mosaic to mag per squard arcsec" psa = 2.5 * n.log10( (platescale[int(s[0]) - 1] * 60)**2) # platescale adjustment stm1 = arcpy.sa.Raster(gridsetp + os.sep + 'skytopo') stm2 = stm1 / exptime[0] stm3 = arcpy.sa.Log10(stm2) stm4 = 2.5 * stm3 skytopomags = zeropoint[int(s[0]) - 1] + psa - stm4 #save mags mosaic to disk skytopomags.save(gridsetp + os.sep + 'skytopomags') print "Creating layer files for full-resolution mosaic" layerfile = filepath.griddata + dnight + '/skytopomags%s%s.lyr' % ( f[filter], s[0]) arcpy.MakeRasterLayer_management( gridsetp + 'skytopomags', dnight + '_%s_fullres%s' % (s[0], f[filter])) arcpy.SaveToLayerFile_management( dnight + '_%s_fullres%s' % (s[0], f[filter]), layerfile, "RELATIVE") #Set layer symbology to magnitudes layer symbologyLayer = filepath.rasters + 'magnitudes.lyr' arcpy.ApplySymbologyFromLayer_management(layerfile, symbologyLayer) lyrFile = arcpy.mapping.Layer(layerfile) lyrFile.replaceDataSource(gridsetp, 
'RASTER_WORKSPACE', 'skytopomags', 'FALSE') lyrFile.save()
# IAU2000:49900 Mars encoding?
output_raster.SetProjection(srs.ExportToWkt())  # Exports the coordinate system to the file
output_raster.GetRasterBand(1).WriteArray(grid_z)  # Writes my array to the raster
output_raster.FlushCache()
output_raster = None

arcpy.Clip_management('VOD_%s_%s_%s.tif' % (year, date, pass_type), "#",
                      'VOD_%s_%s_%s_clip.tif' % (year, date, pass_type),
                      Dir_CA + '/' + "CA.shp", "0", "ClippingGeometry")

## map algebra * 10000
inRaster = 'VOD_%s_%s_%s_clip.tif' % (year, date, pass_type)
arcpy.CheckOutExtension("Spatial")
outRaster = Raster(inRaster) * scale_factor
outRaster.save(Dir_fig + '/' + 'VOD_%s_%s_%s_clip_map.tif' % (year, date, pass_type))

## copy raster
inRaster = outRaster
pixel_type = '16_BIT_UNSIGNED'
arcpy.CopyRaster_management(inRaster, 'VOD_%s_%s_%s_clip_map_copy.tif' % (year, date, pass_type),
                            pixel_type='16_BIT_UNSIGNED', nodata_value='0')

## make raster table
inRaster = 'VOD_%s_%s_%s_clip_map_copy.tif' % (year, date, pass_type)
arcpy.BuildRasterAttributeTable_management(inRaster, "Overwrite")
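# A minimal sketch of the cast-then-table pattern above: BuildRasterAttributeTable
# only works on integer rasters, so the float grid is scaled, copied to an unsigned
# integer pixel type, and only then given an attribute table. Paths are hypothetical.
import arcpy
from arcpy.sa import Raster

arcpy.CheckOutExtension("Spatial")
scaled = Raster(r"C:/temp/vod_float.tif") * 10000  # hypothetical float input
arcpy.CopyRaster_management(scaled, r"C:/temp/vod_int.tif",
                            pixel_type="16_BIT_UNSIGNED", nodata_value="0")
arcpy.BuildRasterAttributeTable_management(r"C:/temp/vod_int.tif", "Overwrite")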
def freq(city, inDir, workFld): import traceback, time, arcpy, os from arcpy import env arcpy.CheckOutExtension('Spatial') #-------- DIRECTORY SETUP ------------------------------------------------ """ Report File Directory """ try: os.makedirs(str(workFld) + '/Logs') except: pass reportfileDir = str(workFld) + '/Logs' """ Frequent Directory """ try: arcpy.CreateFileGDB_management(str(workFld), str(city) + '_Freq.gdb') except: pass freqDir = str(workFld) + '/' + city + '_Freq.gdb' """ Current Workspace """ workDir = freqDir """ Final Geodatabase """ try: arcpy.CreateFileGDB_management(str(workFld), str(city) + '_Final.gdb') except: pass finDir = str(workFld) + '/' + city + '_Final.gdb' """ Dasymetric Directory """ dasyDir = str(inDir) + '/Input.gdb/Dasy_10232015' """ Projection File Directory """ prjDir = str(inDir) + '/Prj' """ Set Workspace Environments """ arcpy.env.workspace = workDir arcpy.env.scratch = str(inDir) + '/Scratch.gdb' arcpy.env.overwriteOutput = True #----------------------------------------------------------------------------- # BEGIN ANALYSIS #----------------------------------------------------------------------------- try: #-------- LOGFILE CREATION --------------------------------------------- """ Create report file for each metric """ tmpName = city + '_BG__' + time.strftime('%Y%m%d_%H-%M') reportfileName = reportfileDir + '/' + tmpName + '.txt' BGRF = open(reportfileName, 'w') tmpName = city + '_BG_Pop_' + time.strftime('%Y%m%d_%H-%M') reportfileName = reportfileDir + '/' + tmpName + '.txt' BG_PopRF = open(reportfileName, 'w') tmpName = city + '_Bnd_' + time.strftime('%Y%m%d_%H-%M') reportfileName = reportfileDir + '/' + tmpName + '.txt' BndRF = open(reportfileName, 'w') try: loglist = sorted(f for f in os.listdir(reportfileDir) if f.startswith(str(city) + '_Reuse')) tmpName = loglist[-1] except: tmpName = city + '_Reuse_' + time.strftime('%Y%m%d_%H-%M') + '.txt' reportfileName = reportfileDir + '/' + tmpName try: ReuseRF = open(reportfileName, 'a') except: ReuseRF = open(reportfileName, 'w') print 'Creating Reuse Log' """ Write out first lines of report files """ print 'Frequent Start Time: ' + time.asctime() BGRF.write( "Obtain 2010 Urban Areas Polygon File, 2000 Urban Areas Polygon File, 2010 Block Groups, and 2010 Blocks from the US Census Bureau along with associated population tables.--201203--\n" ) BGRF.write( "Join the population tables with the associated blocks and block groups.--201203--\n" ) BGRF.write( "Clip blocks to the 2010 Urban Area for the EnviroAtlas city.--201203--\n" ) BGRF.write( "Summarize the block population by block group in a new table; urban areas are defined using blocks, so this will determine the amount of people within each block group who are within the defined urban area.--201203--\n" ) BGRF.write( "Join the summarized block population table with the block groups polygon file.--201203--\n" ) BGRF.write( "Calculate the percentage of the block group population that is within the urban area: [summarized block population by block group]/[total block group population] * 100--201203--\n" ) BGRF.write( "Extract the block groups with greater than or equal to 50% of their population within the urban area to a new feature class.--201203--\n" ) BGRF.write( "Append all block groups to the new feature class that will fill in any holes in the community boundary.--201203--\n" ) BGRF.write( "Delete any block groups that only touch the main body of the community boundary at one corner or are islands set apart from the main body of the community 
boundary.--201203--\n" ) BG_PopRF.write( "Begin with EnviroAtlas community block groups.--201203--\n") BG_PopRF.write( "Append select census data from 2010 US Census SF1 Tables to block groups.--201203--\n" ) BndRF.write( "Begin with the EnviroAtlas Community Block Groups.--201203--\n") BndRF.write( "Dissolve all the EnviroAtlas Community Block Groups into one polygon.--201203--\n" ) #-------- COPY INPUT DATA -------------------------------------------- """ Copy LC to Frequent if needed """ if arcpy.Exists(str(workDir) + '/LC') == False: arcpy.CopyRaster_management( str(inDir) + '/LC/' + city + '_LC.tif', str(workDir) + '/LC', '', '', '', '', 'NONE', '', '', 'NONE') else: pass """ Set Environment Variables """ arcpy.env.extent = 'LC' arcpy.env.snapRaster = 'LC' """ Copy BGs to Frequent if needed """ if arcpy.Exists(str(workDir) + '/BG_Alb') == False: arcpy.FeatureClassToFeatureClass_conversion( str(inDir) + '/Bnd_Final.gdb/' + city + '_BG_Alb', str(workDir), 'BG_Alb') arcpy.DeleteField_management('BG_Alb', ['Include', 'PopWithin']) else: pass #-------- PROCESS BOUNDARIES ----------------------------------------- """ Set Environment Variables """ arcpy.env.extent = 'BG_Alb' arcpy.env.snapRaster = dasyDir """ Get Projection Information """ descLC = arcpy.Describe(str(workDir) + '/LC') """ Project BG into UTM """ arcpy.Project_management('BG_Alb', 'BG', descLC.spatialReference) """ Copy Counties to Frequent Dir and Project to UTM """ arcpy.MakeFeatureLayer_management( str(inDir) + '/Input.gdb/Counties_Alb', 'Cty') arcpy.SelectLayerByLocation_management('Cty', 'CONTAINS', 'BG_Alb', '', 'NEW_SELECTION') arcpy.FeatureClassToFeatureClass_conversion( str(inDir) + '/Input.gdb/Counties_Alb', str(workDir), 'Counties_Alb') arcpy.SelectLayerByAttribute_management('Cty', 'CLEAR_SELECTION') arcpy.Project_management('Counties_Alb', 'Counties', descLC.spatialReference) """ Create Boundary and Buffer files """ arcpy.Dissolve_management('BG_Alb', 'Bnd_Alb') arcpy.Dissolve_management('BG', 'Bnd') arcpy.Buffer_analysis('Bnd', 'Bnd_1km', '1 kilometer') arcpy.Buffer_analysis('Bnd', 'Bnd_5km', '5 kilometers') arcpy.Clip_analysis('Bnd', 'Counties', 'Bnd_Cty') arcpy.Buffer_analysis('Bnd_Cty', 'Bnd_Cty_500m', '500 meters') ReuseRF.write("Bnd_Cty--" + time.strftime('%Y%m%d--%H%M%S') + '--\n') """ Remove Holes from Buffer files """ for buf in ('Bnd_1km', 'Bnd_5km', 'Bnd_Cty_500m'): arcpy.EliminatePolygonPart_management(buf, buf + '_EP', 'PERCENT', '', '30', 'CONTAINED_ONLY') arcpy.Delete_management(buf) arcpy.Rename_management(buf + '_EP', buf) #-------- MANIPULATE RASTER INPUTS ------------------------------------------- """ Set Environment Variables """ arcpy.env.extent = freqDir + '/LC' arcpy.env.snapRaster = freqDir + '/LC' """ Create a polygon version of the LC Area """ ReC = arcpy.sa.Reclassify( str(workDir) + '/LC', 'Value', arcpy.sa.RemapValue([[0, 0], [10, 1], [20, 1], [21, 1], [22, 1], [30, 1], [40, 1], [52, 1], [70, 1], [80, 1], [82, 1], [91, 1], [92, 1]])) ReC.save('AreaIO') arcpy.RasterToPolygon_conversion( str(freqDir) + '/AreaIO', str(freqDir) + '/LC_Poly', 'SIMPLIFY') arcpy.EliminatePolygonPart_management( str(freqDir) + '/LC_Poly', str(freqDir) + '/LC_Poly_EP', 'PERCENT', '', '5', 'CONTAINED_ONLY') arcpy.Delete_management(str(freqDir) + '/LC_Poly') arcpy.Rename_management( str(freqDir) + '/LC_Poly_EP', str(freqDir) + '/LC_Poly') """ Set Environments """ arcpy.env.extent = 'BG_Alb' arcpy.env.snapRaster = dasyDir """ Extract the dasymetrics for the Atlas Area """ arcpy.env.extent = 'Bnd_Alb' 
outExtractByMask = arcpy.sa.ExtractByMask(dasyDir, 'Bnd_Alb') outExtractByMask.save('Dasy') ReuseRF.write("Dasy--" + time.strftime('%Y%m%d--%H%M%S') + '--\n') """ Create a raster with the same cells as the dasymetric but unique values """ arcpy.RasterToPoint_conversion('Dasy', 'Dasy_Pts', 'VALUE') arcpy.PointToRaster_conversion('Dasy_Pts', 'pointid', 'Dasy_Cells', '', '', '30') ReuseRF.write("Dasy_Cells--" + time.strftime('%Y%m%d--%H%M%S') + '--\n') """ Calculate Dasy_Pop """ arcpy.sa.ZonalStatisticsAsTable('BG_Alb', 'bgrp', 'Dasy', 'Dasy_ZS', '', 'SUM') arcpy.AddField_management('BG_Alb', 'Dasy_Pop', 'LONG') arcpy.JoinField_management('BG_Alb', 'bgrp', 'Dasy_ZS', 'bgrp', ['SUM']) arcpy.CalculateField_management('BG_Alb', 'Dasy_Pop', '!SUM!', 'PYTHON_9.3') arcpy.DeleteField_management('BG_Alb', ['SUM']) arcpy.JoinField_management('BG', 'bgrp', 'BG_Alb', 'bgrp', ['Dasy_Pop']) ReuseRF.write("Dasy_Pop--" + time.strftime('%Y%m%d--%H%M%S') + '--\n') """ Add Field to BG to use as the value for rasterization """ arcpy.AddField_management('BG', 'EAID', 'SHORT') arcpy.CalculateField_management( "BG", "EAID", "autoIncrement()", "PYTHON_9.3", "rec=0\\ndef autoIncrement():\\n global rec\\n pStart = 1 #adjust start value, if req'd \\n pInterval = 1 #adjust interval value, if req'd\\n if (rec == 0): \\n rec = pStart \\n else: \\n rec = rec + pInterval \\n return rec" ) """ Convert the block groups into raster format """ arcpy.env.snapRaster = 'LC' arcpy.env.extent = 'LC' arcpy.PolygonToRaster_conversion('BG', 'EAID', 'BG_Rlc', 'MAXIMUM_AREA', '', 1) ReuseRF.write("BG_Rlc--" + time.strftime('%Y%m%d--%H%M%S') + '--\n') #-------- CREATE FINAL FILES ---------------------------------------------- """ Create Final BG File """ try: arcpy.Delete_management(finDir + '/' + str(city) + '_BG') except: pass arcpy.FeatureClassToFeatureClass_conversion('BG_Alb', finDir, city + '_BG') allFields = [ f.name for f in arcpy.ListFields(finDir + '/' + city + '_BG') ] for field in allFields: if field not in [ 'bgrp', 'OBJECTID', 'Shape', 'Shape_Area', 'Shape_Length' ]: arcpy.DeleteField_management(finDir + '/' + city + '_BG', [field]) BGRF.write( "Create a final version of the feature class for use in EnviroAtlas, removing all unnecessary attributes.--" + time.strftime('%Y%m%d--%H%M%S') + '--\n') """ Create Final Bnd File """ try: arcpy.Delete_management(finDir + '/' + str(city) + '_Bnd') except: pass arcpy.FeatureClassToFeatureClass_conversion('Bnd_Alb', finDir, city + '_Bnd') BndRF.write( "Copy polygon to final geodatabase for display in EnviroAtlas removing any unnecessary attributes.--" + time.strftime('%Y%m%d--%H%M%S') + '--\n') """ Create Final BG_Pop File """ try: arcpy.Delete_management(finDir + '/' + str(city) + '_BG_Pop') except: pass arcpy.TableToTable_conversion('BG', finDir, city + '_BG_Pop') allFields = [ f.name for f in arcpy.ListFields(finDir + '/' + city + '_BG_Pop') ] for field in allFields: if field not in [ 'bgrp', 'OBJECTID', 'SUM_HOUSIN', 'SUM_POP10', 'under_1', 'under_1pct', 'under_13', 'under_13pc', 'over_70', 'over_70pct', 'NonWhite', 'NonWt_Pct', 'PLx2_Pop', 'PLx2_Pct' ]: arcpy.DeleteField_management(finDir + '/' + city + '_BG_Pop', [field]) BG_PopRF.write( "Copy records to final table for display in EnviroAtlas, removing any unnecessary attributes.--" + time.strftime('%Y%m%d--%H%M%S') + '--\n') print 'Frequent End Time: ' + time.asctime() + '\n' #-------- COMPELETE LOGFILES --------------------------------------------- BGRF.close() BndRF.close() BG_PopRF.close() ReuseRF.close() 
    #-----------------------------------------------------------------------------
    # END ANALYSIS
    #-----------------------------------------------------------------------------
    except:
        """ This part of the script executes if anything went wrong in the main script above """
        #-------- PRINT ERRORS ---------------------------------------------------
        print "\nSomething went wrong.\n\n"
        print "Python Traceback Message below:"
        print traceback.format_exc()
        print "\nArcMap Error Messages below:"
        print arcpy.GetMessages(2)
        print "\nArcMap Warning Messages below:"
        print arcpy.GetMessages(1)

        #-------- COMPLETE LOGFILE ------------------------------------------------
        BGRF.write("\nSomething went wrong.\n\n")
        BGRF.write("Python Traceback Message below:")
        BGRF.write(traceback.format_exc())
        BGRF.write("\nArcMap Error Messages below:")
        BGRF.write(arcpy.GetMessages(2))
        BGRF.write("\nArcMap Warning Messages below:")
        BGRF.write(arcpy.GetMessages(1))
        BGRF.write("\n\nEnded at " + time.asctime() + '\n')
        BGRF.write("\n---End of Log File---\n")
        if BGRF:
            BGRF.close()
def main(in_raster=None, neighborhood_size=None, out_workspace=None, out_stats_raw=None, verbose=True, window_type='Rectangle'): """ Compute depth statisitcs, averaging values over a defined neighborhood of cells. Can compute mean, standard deviation, and variance. """ out_stats = out_stats_raw.replace("'", '').split(";") out_stats = list(set(out_stats) - set(['Terrain Ruggedness (VRM)'])) arcpy.env.rasterStatistics = "STATISTICS" arcpy.env.compression = 'LZW' # compress output rasters # neighborhood is integer n_size = int(neighborhood_size) # convert our data to sets for easy comparison mean_set = set(['Mean Depth', 'Difference to Mean']) std_dev_set = set(['Standard Deviation', 'Variance']) iqr_set = set(['Interquartile Range']) kurt_set = set(['Kurtosis']) # list stats to be computed if verbose: utils.msg("The following stats will be computed: " + "{}".format(";".join(out_stats))) # these two tools both use block processing which requires NetCDF4 if 'Interquartile Range' in out_stats or 'Kurtosis' in out_stats: if not utils.NETCDF4_EXISTS: utils.msg("The interquartile range and kurtosis tools require " "the NetCDF4 Python library is installed. NetCDF4 " "is included in ArcGIS 10.3 and later.", "error") return if 'Kurtosis' in out_stats and not utils.SCIPY_EXISTS: utils.msg("The kurtosis calculation requires the SciPy library " "is installed. SciPy is included in ArcGIS 10.4 and " "later versions.", "error") return # get output name prefix and suffix parts = output_parts(in_raster, out_workspace, n_size) utils.workspace_exists(out_workspace) # set geoprocessing environments arcpy.env.scratchWorkspace = out_workspace arcpy.env.workspace = out_workspace # validate nbr type if window_type not in ('Rectangle', 'Circle'): utils.msg("Unknown window type `{}`".format(window_type), "error") try: # initialize our neighborhood if verbose: utils.msg("Calculating neighborhood...") if window_type == 'Circle': neighborhood = arcpy.sa.NbrCircle(n_size, "CELL") else: neighborhood = arcpy.sa.NbrRectangle(n_size, n_size, "CELL") overlap = int((n_size / 2.0) - 0.5) if mean_set.intersection(out_stats): mean_requested = 'Mean Depth' in out_stats if verbose and mean_requested: utils.msg("Calculating mean depth...") mean_depth = FocalStatistics(in_raster, neighborhood, "MEAN", "NODATA") mean_raster = output_name(parts, 'mean') if verbose and mean_requested: utils.msg("saving mean depth to {}".format(mean_raster)) arcpy.CopyRaster_management(mean_depth, mean_raster) if 'Difference to Mean' in out_stats: if verbose: utils.msg("Calculating relative difference to mean...") range_depth = FocalStatistics(in_raster, neighborhood, "RANGE", "NODATA") mean_diff = -(mean_depth - in_raster) / range_depth mean_diff_raster = output_name(parts, 'mean_diff') if verbose: utils.msg("saving relative different to mean to {}".format( mean_diff_raster)) arcpy.CopyRaster_management(mean_diff, mean_diff_raster) if not mean_requested: arcpy.Delete_management(mean_raster) # compute stdev in ths case if std_dev_set.intersection(out_stats): std_dev_requested = 'Standard Deviation' in out_stats if verbose and std_dev_requested: utils.msg("Calculating depth standard deviation...") std_dev_depth = FocalStatistics(in_raster, neighborhood, "STD", "NODATA") std_dev_raster = output_name(parts, 'sdev') if verbose and std_dev_requested: utils.msg("saving standard deviation depth to \ {}".format(std_dev_raster)) arcpy.CopyRaster_management(std_dev_depth, std_dev_raster) # no direct variance focal stat, have to stdev^2 if 'Variance' in out_stats: 
if verbose: utils.msg("Calculating depth variance...") var_depth = Power(std_dev_depth, 2) var_raster = output_name(parts, 'var') if verbose: utils.msg("saving depth variance to {}".format(var_raster)) arcpy.CopyRaster_management(var_depth, var_raster) if not std_dev_requested: arcpy.Delete_management(std_dev_raster) # limit 3D blocksize to 10^8 elements (.4GB) on 32-bit, 10^10 on 64-bit if utils.ARCH == '32-bit': limit = 10**8 else: limit = 10**10 blocksize = int(np.sqrt((limit) / (n_size**2)) - overlap * 2) # define numpy-based calculations np_sets = ((iqr_set, "interquartile range", "iqr", iqr), (kurt_set, "kurtosis", "kurt", kurtosis)) for np_set in np_sets: (in_set, label, out_label, funct) = np_set if in_set.intersection(out_stats): if verbose: utils.msg("Calculating depth {}...".format(label)) out_raster = output_name(parts, out_label) bp = utils.BlockProcessor(in_raster) bp.computeBlockStatistics(funct, blocksize, out_raster, overlap) except Exception as e: utils.msg(e, mtype='error')
# Input data source
path = "C:\\Work\\DrMutemi\\Evaluation_Runs"
out = "\\raster"
arcpy.env.workspace = path
arcpy.env.overwriteOutput = True

# Set output folder
OutputFolder = path + out

# Loop through a list of files in the workspace
# for now I only need a single file
NCfiles = arcpy.ListFiles("gpcp_ond_1997.nc*")
for filename in NCfiles:
    print("Processing: " + filename)
    inNCfiles = path + "\\" + filename
    fileroot = filename[0:(len(filename) - 3)]
    inVariable = "pr"
    outRaster = OutputFolder + "\\" + fileroot

    # Process: Make NetCDF Raster Layer
    arcpy.MakeNetCDFRasterLayer_md(inNCfiles, inVariable, "lon", "lat", inVariable, "", "", "BY_VALUE")

    # Process: Copy Raster
    arcpy.CopyRaster_management(inVariable, outRaster + ".tif", "", "", "", "NONE", "NONE", "")

    print arcpy.GetMessages()
def raster_project(inraster, prj_file, prj_folder, snap, region, outraster, cellsize):
    start_raster = datetime.datetime.now()
    in_raster = Raster(inraster)
    arcpy.Delete_management("snap")
    arcpy.MakeRasterLayer_management(snap, "snap")
    arcpy.env.snapRaster = "snap"

    # location of prj files
    wgs_coord_file = prj_folder + os.sep + 'WGS_1984.prj'
    nad83_coord_file = prj_folder + os.sep + 'NAD_1983.prj'
    prj_file_path = prj_folder + os.sep + prj_file

    current_raster_dsc = arcpy.Describe(in_raster)
    current_sr = current_raster_dsc.spatialReference
    current_datum = current_sr.GCS.datumName
    ORGprj = current_sr.name

    # extract spatial information from prj files
    dsc_wgs = arcpy.Describe(wgs_coord_file)
    wgs_coord_sys = dsc_wgs.spatialReference
    dsc_nad83 = arcpy.Describe(nad83_coord_file)
    nad83_coord_sys = dsc_nad83.spatialReference
    dsc_prj = arcpy.Describe(prj_file_path)
    prj_sr = dsc_prj.spatialReference
    prj_datum = prj_sr.GCS.datumName
    prj_name = prj_file.replace('.prj', '')
    OUTprj = prj_sr.name
    print ORGprj, OUTprj

    # No re-projection - raster is copied and file name is updated
    if ORGprj == OUTprj:
        if not arcpy.Exists(outraster):
            print 'Copying {0}'.format(inraster)
            print inraster, outraster
            arcpy.CopyRaster_management(inraster, outraster)
        else:
            print str(outraster) + " already exists"
    # Check Datum then re-project
    elif prj_datum == "D_WGS_1984":
        if current_datum != "D_WGS_1984":
            if not arcpy.Exists(outraster):
                print 'Projecting {0} into {1}'.format(inraster, prj_name)
                arcpy.ProjectRaster_management(in_raster, outraster, prj_sr, 'NEAREST', cellsize,
                                               "NAD_1983_To_WGS_1984_1")
            else:
                print str(outraster) + " already exists"
        else:
            if not arcpy.Exists(outraster):
                print 'Projecting {0} into {1}'.format(inraster, prj_name)
                arcpy.ProjectRaster_management(in_raster, outraster, prj_sr, 'NEAREST', cellsize)
            else:
                print str(outraster) + " already exists"
    else:
        if prj_datum == "D_North_American_1983":
            if current_datum != "D_North_American_1983":
                print 'Projecting {0} into {1}'.format(inraster, prj_name)
                arcpy.ProjectRaster_management(in_raster, outraster, prj_sr, 'NEAREST', cellsize,
                                               "NAD_1983_To_WGS_1984_1")
            else:
                if not arcpy.Exists(outraster):
                    print 'Projecting {0} into {1}'.format(inraster, prj_name)
                    arcpy.ProjectRaster_management(in_raster, outraster, prj_sr, 'NEAREST', cellsize)
                else:
                    print str(outraster) + " already exists"

    print 'Completed projection in: {0}\n'.format((datetime.datetime.now() - start_raster))
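# A hypothetical usage sketch of the function above; every path and the cell size
# below are placeholder assumptions, not values from the original project.
if __name__ == '__main__':
    raster_project(inraster=r"C:/temp/dem_in.tif",
                   prj_file="NAD_1983.prj",
                   prj_folder=r"C:/temp/prj",
                   snap=r"C:/temp/snap_grid.tif",
                   region="test_region",
                   outraster=r"C:/temp/dem_out.tif",
                   cellsize=30)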
def function(outputFolder, inputData, aggregationColumn):
    try:
        # Set temporary variables
        prefix = os.path.join(arcpy.env.scratchGDB, "extent_")
        zones = prefix + "aggZones"
        outZonal = prefix + "outZonal"
        rasData = prefix + "rasData"

        # Define field names
        extentName = "area_km2"
        percName = "percentCov"

        # Define output files
        outTable = os.path.join(outputFolder, 'statExtentTable.csv')

        # Ensure the input data is in a projected coordinate system
        spatialRef = arcpy.Describe(inputData).spatialReference
        unit = str(spatialRef.linearUnitName)
        if spatialRef.Type == "Geographic":
            log.error('The input data has a Geographic Coordinate System. It must have a Projected Coordinate System.')
            sys.exit()

        # Check input data type
        dataFormat = arcpy.Describe(inputData).dataType
        if dataFormat in ['ShapeFile', 'FeatureClass']:
            inputType = 'Shp'
        elif dataFormat in ['RasterDataset', 'RasterLayer']:
            inputType = 'Ras'
        else:
            log.error('Input data is neither shapefile/feature class nor raster')
            log.error('Ensure data is one of these types')
            sys.exit()

        # If the input type is a shapefile
        if inputType == 'Shp':

            # Check if the aggregation column exists
            zoneFields = arcpy.ListFields(inputData)
            zoneFound = False
            for field in zoneFields:
                fieldName = str(field.name)
                if fieldName == str(aggregationColumn):
                    zoneFound = True

            if zoneFound == False:
                log.error('Aggregation column (' + str(aggregationColumn) + ') not found in zone shapefile')
                log.error('Please ensure this field is present')
                sys.exit()

            # Dissolve aggregation zone based on aggregation column
            arcpy.Dissolve_management(inputData, zones, aggregationColumn)
            log.info("Dissolved aggregation zones based on: " + str(aggregationColumn))

            # If extent field already exists in the shapefile, delete it here
            inputFields = arcpy.ListFields(zones)
            for field in inputFields:
                if field.name == extentName:
                    arcpy.DeleteField_management(zones, extentName)

            # Calculate geometry
            arcpy.AddField_management(zones, extentName, "FLOAT")
            exp = "!SHAPE.AREA@SQUAREKILOMETERS!"
            arcpy.CalculateField_management(zones, extentName, exp, "PYTHON_9.3")
            log.info("Area calculated for input data classes")

            # Calculate the total area
            totalArea = 0.0
            fields = [str(aggregationColumn), str(extentName)]
            with arcpy.da.SearchCursor(zones, fields) as cursor:
                for row in cursor:
                    name = row[0]
                    area = row[1]
                    totalArea += area

            # Calculate percent coverage
            arcpy.AddField_management(zones, percName, "FLOAT")
            fieldsPerc = [str(extentName), str(percName)]
            with arcpy.da.UpdateCursor(zones, fieldsPerc) as updateCursor:
                for row in updateCursor:
                    area = row[0]
                    percentCoverage = (float(area) / float(totalArea)) * 100.0
                    row[1] = percentCoverage

                    # Update row with percent coverage
                    try:
                        updateCursor.updateRow(row)
                    except Exception:
                        pass

            # Write to output table
            outFields = [aggregationColumn, extentName, percName]
            outLabels = ['Classes', 'Area (sq km)', 'Area (percent)']
            with open(outTable, 'wb') as csv_file:
                writer = csv.writer(csv_file)
                writer.writerow(outLabels)
                with arcpy.da.SearchCursor(zones, outFields) as cursor:
                    for row in cursor:
                        writer.writerow(row)
            log.info('Extent csv table created')
            csv_file.close()

        elif inputType == 'Ras':
            # If the user has input a raster file
            # Check if the raster is type integer
            rasType = arcpy.GetRasterProperties_management(inputData, "VALUETYPE")
            rasterTypes = [3, 4, 5, 6, 7, 8]
            if int(str(rasType)) in rasterTypes:
                log.info('Input raster is integer type, proceeding...')
            else:
                log.error('Input raster is not integer type')
                log.error('Please ensure input raster is integer type')
                sys.exit()

            # Check if COUNT column exists
            inputFields = arcpy.ListFields(inputData)
            countFound = False
            for field in inputFields:
                if field.name == 'COUNT':
                    countFound = True

            if countFound == False:
                log.error('COUNT column not found')
                log.error('Please ensure your raster has a COUNT column')
                sys.exit()

            # Get cell size of the raster
            cellSize = float(arcpy.GetRasterProperties_management(inputData, "CELLSIZEX").getOutput(0))

            # Check units of raster
            if unit != 'Meter':
                log.error('Spatial reference units are not in metres')
                log.error('Please use a spatial reference that is in metres')
                sys.exit()

            # Copy raster to temporary file
            arcpy.CopyRaster_management(inputData, rasData)

            # Copy raster table to scratch GDB
            arcpy.TableToTable_conversion(inputData, arcpy.env.scratchGDB, "extent_table")
            dbfTable = os.path.join(arcpy.env.scratchGDB, "extent_table")

            # Add new fields to the dbfTable
            arcpy.AddField_management(dbfTable, extentName, "FLOAT")
            arcpy.AddField_management(dbfTable, percName, "FLOAT")

            # Calculate total area and area of each class
            totalArea = 0.0
            fields = [str(aggregationColumn), 'COUNT', extentName]
            with arcpy.da.UpdateCursor(dbfTable, fields) as updateCursor:
                for row in updateCursor:
                    name = row[0]
                    count = row[1]

                    # Calculate area in km2
                    area = float(count) * float(cellSize) * float(cellSize) / 1000000.0
                    row[2] = area
                    totalArea += area

                    # Update row with area
                    try:
                        updateCursor.updateRow(row)
                    except Exception:
                        pass

            # Calculate percent coverage of each class
            fieldsPerc = [str(extentName), str(percName)]
            with arcpy.da.UpdateCursor(dbfTable, fieldsPerc) as updateCursor:
                for row in updateCursor:
                    area = row[0]
                    percentCoverage = (float(area) / float(totalArea)) * 100.0
                    row[1] = percentCoverage

                    # Update row with percent coverage
                    try:
                        updateCursor.updateRow(row)
                    except Exception:
                        pass

            log.info('Percent coverage calculated for each class')

            # Write output to CSV file
            outFields = [aggregationColumn, extentName, percName]
            outLabels = ['Classes', 'Area (sq km)', 'Area (percent)']
            with open(outTable, 'wb') as csv_file:
                writer = csv.writer(csv_file)
                writer.writerow(outLabels)
                with arcpy.da.SearchCursor(dbfTable, outFields) as cursor:
                    for row in cursor:
                        writer.writerow(row)
            log.info('Extent csv table created')
            csv_file.close()

        log.info("Extent statistics function completed successfully")

    except Exception:
        arcpy.AddError("Extent statistics function failed")
        raise

    finally:
        # Remove feature layers from memory
        try:
            for lyr in common.listFeatureLayers(locals()):
                arcpy.Delete_management(locals()[lyr])
                exec(lyr + ' = None') in locals()
        except Exception:
            pass
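# The function above measures class extents two ways: dissolved polygons use
# !SHAPE.AREA@SQUAREKILOMETERS!, while integer rasters use COUNT multiplied by
# the squared cell size. A hypothetical invocation (the path and field name are
# placeholders, not values from the original tool):
function(outputFolder=r'C:\temp\extent_stats',
         inputData=r'C:\data\landcover.shp',
         aggregationColumn='LC_CLASS')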
## Description: Convert the raw raster data (.TIF) to grid and save the grid in the folder "inter_output"
print "\nStep 1 starts at", datetime.datetime.now().strftime("%A, %B %d %Y %I:%M:%S%p")

outFolder = "C:\\GIS_RGB\\Geodatabase\\Biophysical\\7_landuse\\US_nass\\inter_output\\"
tifList = arcpy.ListRasters("*", "TIF")
gridList = []

## Loop on the list and export tif to grid
for i in range(len(tifList)):
    year = tifList[i][-13:-11]
    oName = "popdens" + year
    outRaster = os.path.join(interFolder, oName)
    arcpy.CopyRaster_management(tifList[i], outRaster, "", "", "", "NONE", "NONE",
                                "8_BIT_UNSIGNED", "NONE", "NONE")
    print "Export tif to grid", tifList[i], "completed at", datetime.datetime.now().strftime("%I:%M:%S%p")
    gridList.append(outRaster)

print "Step 1 Tif to Grid completed at", datetime.datetime.now().strftime("%I:%M:%S%p")

## ---------------------------------------------------------------------------
## 2. Extract by Mask
## Description: Extracts the cells of the population density raster dataset that correspond to the study area (mask).
print "\nStep 2 starts at", datetime.datetime.now().strftime("%A, %B %d %Y %I:%M:%S%p")

# Mask
folderShapefiles = "C:\\GIS_RGB\\Geodatabase\\rgb_bound\\"
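## A minimal sketch of the Step 2 "Extract by Mask" loop introduced above.
## The mask shapefile name is an assumption for illustration (the real file
## sits in folderShapefiles), and Spatial Analyst must be checked out.
maskShp = folderShapefiles + "rgb_bound.shp"  # assumed mask file name
for grid in gridList:
    extracted = arcpy.sa.ExtractByMask(grid, maskShp)
    extracted.save(grid + "_msk")
    print "Extract by Mask", grid, "completed at", datetime.datetime.now().strftime("%I:%M:%S%p")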
        ag2 = arcpy.sa.Aggregate(layer2, DegradeFactor, 'MEAN', 'TRUNCATE', 'NODATA')
        ag2.save("tempag2.img")
        ag3 = arcpy.sa.Aggregate(layer3, DegradeFactor, 'MEAN', 'TRUNCATE', 'NODATA')
        ag3.save("tempag3.img")
        ag4 = arcpy.sa.Aggregate(layer4, DegradeFactor, 'MEAN', 'TRUNCATE', 'NODATA')
        ag4.save("tempag4.img")
        arcpy.AddMessage("aggregation complete")

        arcpy.CompositeBands_management("tempag1.img;tempag2.img;tempag3.img;tempag4.img",
                                        composite_name)
        arcpy.CopyRaster_management(composite_name, integer_name, '', '', '', '', '',
                                    '16_BIT_UNSIGNED')

        ## take out trash
        arcpy.Delete_management("tempag1.img")
        arcpy.Delete_management("tempag2.img")
        arcpy.Delete_management("tempag3.img")
        arcpy.Delete_management("tempag4.img")
        arcpy.Delete_management(composite_name)
        if os.path.isfile(clippedname):
            arcpy.Delete_management(clippedname)
    else:
        print rast, 'is not in your study area'

print 'done with all the rasters in:', workspace
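# A loop-based sketch of the aggregate/composite step above, wrapped in a
# helper so it can be reused per image. Parameter names mirror the script's
# variables (layers would be [layer1, layer2, layer3, layer4]); this is an
# illustration only, not the original implementation.
def aggregate_and_composite(layers, DegradeFactor, composite_name, integer_name):
    agg_paths = []
    for i, lyr in enumerate(layers, start=1):
        ag = arcpy.sa.Aggregate(lyr, DegradeFactor, 'MEAN', 'TRUNCATE', 'NODATA')
        tmp = "tempag%d.img" % i
        ag.save(tmp)
        agg_paths.append(tmp)
    arcpy.CompositeBands_management(";".join(agg_paths), composite_name)
    arcpy.CopyRaster_management(composite_name, integer_name, '', '', '', '', '', '16_BIT_UNSIGNED')
    # take out trash
    for tmp in agg_paths:
        arcpy.Delete_management(tmp)
    arcpy.Delete_management(composite_name)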
arcpy.AddMessage("Calculating unvegetated surface from NDWI...") unvegetated_surface = Con(july_ndwi, 0, 100, "VALUE < -1000") # Add the sample representation and the unvegetated surface arcpy.AddMessage("Identifying areas of unvegetated surface in sample representation...") sample_withwater = conditional_sample + unvegetated_surface # Set the unvegetated surfaces to no data arcpy.AddMessage("Removing unvegetated surfaces from sample representation...") sample_nowater = SetNull(sample_withwater, sample_withwater, "VALUE > 1") # Calculate raster mean within a 1.5 km grid arcpy.AddMessage("Calculating spatial certainty of sample representation...") sample_zonal = FocalStatistics(sample_nowater, NbrRectangle(50, 50, "CELL"), "MEAN", "DATA" ) extract_zonal = ExtractByMask(sample_zonal, area_of_interest) arcpy.CopyRaster_management(extract_zonal, spatial_certainty, "", "", -9999, "NONE", "NONE", "32_BIT_FLOAT", "NONE", "NONE", "TIFF", "NONE") # Resample spatial certainty to 1 km grid arcpy.AddMessage("Resampling spatial certainty to 1 km grid...") arcpy.Resample_management(spatial_certainty, certainty_resample, "1000", "BILINEAR") # Set the values below a threshold to null arcpy.AddMessage("Converting spatial certainty to study area raster...") threshold = int(threshold)/100 resample_null = SetNull(certainty_resample, 1, "VALUE < %f" % threshold) # Convert raster to polygon arcpy.AddMessage("Converting raster to polygon...") arcpy.RasterToPolygon_conversion(resample_null, initial_studyarea, "SIMPLIFY", "VALUE", "SINGLE_OUTER_PART", "") # Simplify the polygon
def export_frame(imagedict, ordergeometry, buffergeometry):
    for image_year in imagedict.keys():
        image_per_year = 0
        for image in imagedict[image_year]:
            image_source = image['IMAGE_SOURCE']
            image_collection = image['IMAGE_COLLECTION_TYPE']
            auid = str(image['AUI_ID'])
            imagepath = image['ORIGINAL_IMAGE_PATH']
            image_per_year += 1
            if FactoryCode == '':
                sr = arcpy.SpatialReference(4326)
            elif FactoryCode == 'UTM':
                sr = arcpy.GetUTMFromLocation(centroidX, centroidY)
            else:
                sr = arcpy.SpatialReference(int(FactoryCode))
            fin_image_name = os.path.join(
                job_fin, image_year + '_' + image_source + '_' + str(image_per_year) + '.tif')
            if image_collection == 'DOQQ':
                arcpy.env.overwriteOutput = True  # set on arcpy.env so the setting takes effect
                mxd = arcpy.mapping.MapDocument(mxdexport_template)
                df = arcpy.mapping.ListDataFrames(mxd, '*')[0]
                df.SpatialReference = sr
                lyrpath = os.path.join(scratch, str(auid) + '.lyr')
                arcpy.MakeRasterLayer_management(imagepath, lyrpath)
                image_lyr = arcpy.mapping.Layer(lyrpath)
                geo_lyr = arcpy.mapping.Layer(ordergeometry)
                arcpy.mapping.AddLayer(df, geo_lyr, 'TOP')
                arcpy.mapping.AddLayer(df, image_lyr, 'TOP')
                image_layer = arcpy.mapping.ListLayers(mxd, "", df)[0]
                geometry_layer = arcpy.mapping.ListLayers(mxd, 'OrderGeometry', df)[0]
                geometry_layer.visible = False
                image_extent = image_layer.getExtent()
                geo_extent = geometry_layer.getExtent()
                df.extent = geo_extent
                print df.scale
                df.extent = geo_extent
                if df.scale < 6000:
                    df.scale = 6000
                print df.scale
                df.scale = ((df.scale / 100) + 1) * 100  # very important setting as it defines how much of the image will be displayed to FE
                w_res = 7140
                h_res = int((geo_extent.height / geo_extent.width) * w_res)
                arcpy.RefreshActiveView()
                desc = arcpy.Describe(lyrpath)
                bandcount = desc.bandcount
                arcpy.env.compression = "LZW"
                arcpy.env.pyramid = "NONE"
                #arcpy.Clip_management(imagepath,'%s %s %s %s'%(geo_extent.XMin,geo_extent.YMin,geo_extent.YMax,geo_extent.XMax),os.path.join(job_fin,image_year + '_' + image_source + '_' +str(image_per_year) + '.tif'), maintain_clipping_extent = 'NO_MAINTAIN_EXTENT')
                arcpy.Clip_management(imagepath, '#', fin_image_name,
                                      in_template_dataset=buffergeometry,
                                      clipping_geometry="NONE",
                                      maintain_clipping_extent='NO_MAINTAIN_EXTENT')
                #arcpy.ProjectRaster_management(os.path.join(job_fin,image_year + '_' + image_source + '_' +str(image_per_year) + '.tif'),os.path.join(job_fin,image_year + '_' + image_source + '_' +str(image_per_year) + '2.tif'),sr,'BILINEAR')
                # if bandcount == 1:
                #     arcpy.mapping.ExportToTIFF(mxd,os.path.join(job_fin,image_year + '_' + image_source + '_' +str(image_per_year) + '.tif'),df,df_export_width=w_res,df_export_height=h_res,world_file=True,color_mode = '8-BIT_GRAYSCALE',tiff_compression = 'NONE')
                # else:
                #     arcpy.mapping.ExportToTIFF(mxd,os.path.join(job_fin,image_year + '_' + image_source + '_' +str(image_per_year) + '.tif'),df,df_export_width=w_res,df_export_height=h_res,world_file=True,color_mode = '24-BIT_TRUE_COLOR',tiff_compression = 'NONE')
                # arcpy.DefineProjection_management(os.path.join(job_fin,image_year + '_' + image_source + '_' +str(image_per_year) + '.tif'),sr)
                mxd.saveACopy(os.path.join(scratch, auid + '_export.mxd'))
                del mxd
            else:
                arcpy.env.pyramid = "NONE"
                img_sr = arcpy.Describe(imagepath).spatialReference
                arcpy.env.overwriteOutput = True
                if img_sr.name == 'Unknown' or img_sr.name == 'GCS_Unknown':
                    arcpy.DefineProjection_management(imagepath, 4326)
                    arcpy.CopyRaster_management(imagepath, fin_image_name,
                                                background_value=0, nodata_value=0,
                                                transform=True)
                else:
                    arcpy.ProjectRaster_management(imagepath, fin_image_name, sr, 'CUBIC')
                    arcpy.DefineProjection_management(fin_image_name, sr)
            set_raster_background(fin_image_name, 'white')
            raster_desc = arcpy.Describe(fin_image_name)
            extent = raster_desc.extent
            centerlong = round(extent.XMin + (extent.XMax - extent.XMin) / 2, 7)
            centerlat = round(extent.YMin + (extent.YMax - extent.YMin) / 2, 7)
            set_imagedetail(extent, centerlat, centerlong,
                            image_year + '_' + image_source + '_' + str(image_per_year) + '.tif')
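# Hypothetical call illustrating the inputs export_frame expects; the dictionary
# keys mirror the fields read above, and every value here is a placeholder. The
# function also relies on module-level names (FactoryCode, centroidX, centroidY,
# job_fin, scratch, mxdexport_template) being set before it runs.
imagedict = {
    '1998': [{'IMAGE_SOURCE': 'NAPP',
              'IMAGE_COLLECTION_TYPE': 'DOQQ',
              'AUI_ID': 12345,
              'ORIGINAL_IMAGE_PATH': r'C:\imagery\example_1998.tif'}],
}
export_frame(imagedict,
             ordergeometry=r'C:\job\OrderGeometry.shp',
             buffergeometry=r'C:\job\OrderGeometryBuffer.shp')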
XDimension = "lon" YDimension = "lat" bandDimmension = "" dimensionValues = "" valueSelectionMethod = "" for file in files: if not os.path.isdir(file): if re_shp.search(file): ncfiles.append(file) if not ncfiles: exit for ncfile in ncfiles: inNetCDFFile = files_path + '\\' + ncfile outRasterLayer = raster_path + '\\' + ncfile[:-3] + '_nc' save_raster = raster_path + '\\ASPS_' + ncfile[-29:-25] # Execute MakeNetCDFRasterLayer arcpy.MakeNetCDFRasterLayer_md(inNetCDFFile, variable, XDimension, YDimension, outRasterLayer, bandDimmension, dimensionValues, valueSelectionMethod) # Execute make raster # make raster without nodata value arcpy.CopyRaster_management(outRasterLayer, save_raster) # delete temp file arcpy.Delete_management(outRasterLayer) print 'Finish file: ' + ncfile
def function(params):
    try:
        ###################
        ### Read inputs ###
        ###################

        pText = common.paramsAsText(params)

        outputFolder = pText[1]
        inputDEM = common.fullPath(pText[2])
        inputStudyAreaMask = pText[3]
        inputStreamNetwork = pText[4]
        streamAccThresh = pText[5]
        riverAccThresh = pText[6]
        smoothDropBuffer = pText[7]
        smoothDrop = pText[8]
        streamDrop = pText[9]
        rerun = common.strToBool(pText[10])

        log.info('Inputs read in')

        ###########################
        ### Tool initialisation ###
        ###########################

        # Create Baseline folder
        if not os.path.exists(outputFolder):
            os.mkdir(outputFolder)

        # Set up logging output to file
        log.setupLogging(outputFolder)

        # Run system checks
        common.runSystemChecks(outputFolder, rerun)

        # Set up progress log file
        progress.initProgress(outputFolder, rerun)

        # Write input params to XML
        common.writeParamsToXML(params, outputFolder, 'PreprocessDEM')

        log.info('Tool initialised')

        ########################
        ### Define filenames ###
        ########################

        studyAreaMask = os.path.join(outputFolder, "studyAreaMask.shp")

        ###############################
        ### Set temporary variables ###
        ###############################

        prefix = os.path.join(arcpy.env.scratchGDB, 'base_')

        DEMTemp = prefix + 'DEMTemp'
        clippedDEM = prefix + 'clippedDEM'
        clippedStreamNetwork = prefix + 'clippedStreamNetwork'

        studyAreaMaskTemp = prefix + "studyAreaMaskTemp"
        studyAreaMaskBuff = prefix + "studyAreaMaskBuff"
        studyAreaMaskDiss = prefix + "studyAreaMaskDiss"

        log.info('Temporary variables set')

        ###################
        ### Data checks ###
        ###################

        codeBlock = 'Data checks 1'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            # Check DEM has a coordinate system specified
            DEMSpatRef = arcpy.Describe(inputDEM).SpatialReference
            if DEMSpatRef.Name == "Unknown":
                log.error("LUCI does not permit calculations without the spatial reference for the DEM being defined.")
                log.error("Please define a projection for your DEM and try again.")
                sys.exit()

            # Reproject DEM if it has a geographic coordinate system
            if DEMSpatRef.type == "Geographic":
                baseline.reprojectGeoDEM(inputDEM, outputDEM=DEMTemp)
                arcpy.CopyRaster_management(DEMTemp, inputDEM)

            # Set environment variables
            arcpy.env.snapRaster = inputDEM

            # Get spatial references of DEM and study area mask
            DEMSpatRef = arcpy.Describe(inputDEM).SpatialReference
            maskSpatRef = arcpy.Describe(inputStudyAreaMask).SpatialReference

            # Reproject study area mask if it does not have the same coordinate system as the DEM
            if not common.equalProjections(DEMSpatRef, maskSpatRef):
                warning = "Study area mask does not have the same coordinate system as the DEM"
                log.warning(warning)
                common.logWarnings(outputFolder, warning)

                warning = "Mask coordinate system is " + maskSpatRef.Name + " while DEM coordinate system is " + DEMSpatRef.Name
                log.warning(warning)
                common.logWarnings(outputFolder, warning)

                warning = "Reprojecting study area mask"
                log.warning(warning)
                common.logWarnings(outputFolder, warning)

                arcpy.Project_management(inputStudyAreaMask, studyAreaMaskTemp, DEMSpatRef)
                arcpy.CopyFeatures_management(studyAreaMaskTemp, studyAreaMask)
            else:
                arcpy.CopyFeatures_management(inputStudyAreaMask, studyAreaMask)

            # If DEM is large, clip it to a large buffer around the study area mask (~5km)
            inputDEM = baseline.clipLargeDEM(inputDEM, studyAreaMask)

            # Check if input stream network contains data
            baseline.checkInputFC(inputStreamNetwork, outputFolder)

        ###############################
        ### Tidy up study area mask ###
        ###############################

        codeBlock = 'Tidy up study area mask'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):

            # Check how many polygons are in the mask shapefile
            numPolysInMask = int(arcpy.GetCount_management(studyAreaMask).getOutput(0))
            if numPolysInMask > 1:

                # Reduce multiple features where possible
                arcpy.Union_analysis(studyAreaMask, studyAreaMaskDiss, "ONLY_FID", "", "NO_GAPS")
                arcpy.Dissolve_management(studyAreaMaskDiss, studyAreaMask, "", "",
                                          "SINGLE_PART", "DISSOLVE_LINES")

            progress.logProgress(codeBlock, outputFolder)

        # Buffer study area mask
        baseline.bufferMask(inputDEM, studyAreaMask, outputStudyAreaMaskBuff=studyAreaMaskBuff)
        log.info('Study area mask buffered')

        #######################
        ### Clip input data ###
        #######################

        codeBlock = 'Clip inputs'
        if not progress.codeSuccessfullyRun(codeBlock, outputFolder, rerun):
            baseline.clipInputs(outputFolder, studyAreaMaskBuff, inputDEM, inputStreamNetwork,
                                outputDEM=clippedDEM, outputStream=clippedStreamNetwork)
            progress.logProgress(codeBlock, outputFolder)

        ###########################
        ### Run HydTopo process ###
        ###########################

        log.info("*** Preprocessing DEM ***")
        preprocess_dem.function(outputFolder, clippedDEM, studyAreaMask, clippedStreamNetwork,
                                streamAccThresh, riverAccThresh, smoothDropBuffer, smoothDrop,
                                streamDrop, rerun)

    except Exception:
        arcpy.SetParameter(0, False)
        log.exception("Preprocessing DEM functions did not complete")
        raise
def multip_direction_hillshade(raster_layer, z_factor, altitude, output):
    # To allow overwriting outputs change overwriteOutput option to True.
    arcpy.env.overwriteOutput = True
    z_factor = int(z_factor)
    altitude = int(altitude)

    # Process: Hillshade (sa) - one hillshade per azimuth; the eight azimuths
    # below reproduce the original eight Hillshade steps.
    azimuths = [315, 0, 45, 90, 135, 180, 225, 270]
    hillshade_paths = []
    hillshade_rasters = []
    for azimuth in azimuths:
        raster_name = "hs_" + str(randint(0, 999999))
        hs_path = "%scratchFolder%/" + raster_name
        hs_raster = arcpy.sa.Hillshade(in_raster=raster_layer,
                                       azimuth=azimuth,
                                       altitude=altitude,
                                       model_shadows="NO_SHADOWS",
                                       z_factor=z_factor)
        hs_raster.save(hs_path)
        hillshade_paths.append(hs_path)
        hillshade_rasters.append(hs_raster)

    # Process: Raster Calculator - average the eight hillshades
    raster_name = "hs_" + str(randint(0, 999999))
    Raster_Calculator = "%scratchFolder%/" + raster_name
    rastercalc = hillshade_rasters[0]
    for hs_raster in hillshade_rasters[1:]:
        rastercalc = rastercalc + hs_raster
    rastercalc = rastercalc / 8
    rastercalc.save(Raster_Calculator)
    arcpy.CopyRaster_management(in_raster=Raster_Calculator,
                                out_rasterdataset=output,
                                pixel_type="8_BIT_UNSIGNED")

    # # Adding Result Raster Layer to Arcmap
    # mxd = arcpy.mapping.MapDocument("CURRENT")
    # # df = arcpy.mapping.ListDataFrames(mxd)
    # df = mxd.activeDataFrame
    # # Name the raster layer added to the mxd
    # arcpy.AddMessage(os.path.basename(output))
    # raster_lyr = os.path.basename(output) + "_layer"
    # # TODO adding the layer by name alone does not work in 10.6
    # result = arcpy.MakeRasterLayer_management(output, raster_lyr)
    # layer = result.getOutput(0)
    # arcpy.mapping.AddLayer(data_frame=df, add_layer=layer)

    # Delete Processing Raster Files
    for hs_path in hillshade_paths:
        arcpy.Delete_management(hs_path)
    arcpy.Delete_management(Raster_Calculator)
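# Hypothetical call (paths are placeholders): averaging the eight azimuths at a
# fixed altitude approximates a multidirectional hillshade of the input DEM.
multip_direction_hillshade(raster_layer=r'C:\data\dem.tif',
                           z_factor=1,
                           altitude=45,
                           output=r'C:\data\dem_multidir_hs.tif')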
def copyRas2(): arcpy.CopyRaster_management(currentRaster, mosaicRaster)