import arcpy
from arcpy import env

path_band_7 = 'G:/CA_P4342_R34/Bands/Band_7'
path_cfmask = 'G:/CA_P4342_R34/Bands/cfmask'
path_dem = 'G:/CA_P4342_R34/ancillary'
path_remap = 'G:/CA_P4342_R34/ancillary'

# Set the overwrite output environment
env.overwriteOutput = True

# Check out the ArcGIS Spatial Analyst extension license
arcpy.CheckOutExtension('spatial')

# Set the input environment for the Band_2 rasters
# (input_dir and path_band_1/3/4/5 are defined earlier in the full script)
env.workspace = input_dir

# Get the list of Band_2 rasters from the input environment
inputRaster = arcpy.ListRasters("*", "img")

# Get bands 1, 3, 4, 5 and 7
for raster in inputRaster:
    # Define file names from the 25-character scene ID
    B1 = path_band_1 + '\\%s_band_1.img' % raster[0:25]
    B3 = path_band_3 + '\\%s_band_3.img' % raster[0:25]
    B4 = path_band_4 + '\\%s_band_4.img' % raster[0:25]
    B5 = path_band_5 + '\\%s_band_5.img' % raster[0:25]
    B7 = path_band_7 + '\\%s_band_7.img' % raster[0:25]
    CF = path_cfmask + '\\%s_cfmask.img' % raster[0:25]
    perslp = path_dem + '\\perslp.img'
    espa_recode = path_remap + '\\ESPA_recode.rmp'
    out_name = 'DSWE_P3_V3'
import arcpy
import os

arcpy.env.workspace = "//166.2.126.25/teui1/4_Derek/R6/Kfac/R6Predictors"
filePath = arcpy.env.workspace

rasterList = arcpy.ListRasters("*_clip_clip_clip.tif", "ALL")
for name in rasterList:
    desc = arcpy.Describe(os.path.join(filePath, name))
    print name
    print desc.extent
import arcpy
from arcpy import env

env.workspace = r"G:\School\Personal Drive\Fall2014\Python\Python\Data\Exercise09"

rasterlist = arcpy.ListRasters()
for raster in rasterlist:
    print raster
# coding=utf-8
import arcpy
import os
from arcpy.sa import *

arcpy.CheckOutExtension("Spatial")
arcpy.env.workspace = r'E:\pybook\MeteoGrid\tmean_year'
# arcpy.env.scratchWorkspace = r'E:\pybook\MeteoGrid' + os.sep + 'tmean_year'

rasterlist = arcpy.ListRasters('TAVG' + '*', 'tif')
rasterlistout = r'E:\pybook\MeteoGrid\tmean_year' + os.sep + 'TAVG_MEAN.tif'
outCellStatistics = CellStatistics(rasterlist, 'MEAN', "DATA")
outCellStatistics.save(rasterlistout)
print(rasterlistout)

fvars = ['tmean_year', 'prcp_mean']
for v in fvars:
    if v == 'tmean_year':
        i = 'TAVG_MEAN'
        x = 'TAVG_county'
        func = 'MEAN'
    else:
        i = 'prcp_meanMEAN'
        x = 'prcp_county'
        func = 'MEAN'
    arcpy.env.workspace = r'E:\pybook\MeteoGrid' + os.sep + v
    inValueRaster = r'E:\pybook\MeteoGrid' + os.sep + v + os.sep + i + '.tif'
    outTable = r'E:\pybook\MeteoGrid' + os.sep + x + os.sep + x + func + '.dbf'
    # Set local variables
    # inZoneData = "zones.shp"
    # zoneField = "Classes"
    # inValueRaster = "valueforzone"
    # outTable = "zonalstattblout02.dbf"
import arcpy, os, sys
from arcpy import env
from arcpy.sa import *

# Set the workspace and list all of the raster datasets
#env.workspace = r'D:\Analysis\Greenland_Analysis\GreenlandHighRes\StreamExtraction\multioutput\watermask\toprocess'
env.workspace = r'F:\Courtney\Greenland_Code\Courtney_Stream_Extraction\multioutput\watermask'  #r'D:\2012Images\WorldView\geotiff\730\resample\watermask'
env.overwriteOutput = True

#output = r'D:\Analysis\Greenland_Analysis\GreenlandHighRes\StreamExtraction\multioutput\watermask\toprocess\output'
img_output = r'F:\Courtney\Greenland_Code\Courtney_Stream_Extraction\multioutput\thinned_img'
shp_output = r'F:\Courtney\Greenland_Code\Courtney_Stream_Extraction\multioutput\thinned_shapefile'  #r'D:\2012Images\WorldView\geotiff\730\resample\watermask\output'

arcpy.CheckOutExtension("Spatial")

tiffs = arcpy.ListRasters("*", "img")
print tiffs

for tiff in tiffs:
    print "start process " + tiff
    outThinnedRaster = img_output + "\\" + tiff.split('.')[0] + "thin.img"
    outFeatureClass = shp_output + "\\" + tiff.split('.')[0] + ".shp"
    # Thin the water mask, then vectorize the thinned raster
    thinOut = Thin(tiff, "ZERO", "NO_FILTER", "ROUND")
    thinOut.save(outThinnedRaster)
    arcpy.RasterToPolyline_conversion(outThinnedRaster, outFeatureClass, "ZERO", 50, "NO_SIMPLIFY", "")
suffix="tif" #栅格数据后缀 mask_pt=r"H:\VI_LP_simu_infat\Budyko校正\smbaisn_tif" #栅格mask数据目录 mask_suffix='tif' # mask数据后缀 out_path=r"H:\VI_LP_simu_infat\Budyko校正\smbs02" #提取结果文件夹 #计算部分 print "Processing......" import arcpy from arcpy.sa import * arcpy.CheckOutExtension("Spatial") arcpy.env.workspace=mask_pt msksras=arcpy.ListRasters('*',mask_suffix) arcpy.env.workspace=ras_path ras=arcpy.ListRasters('*',suffix) arcpy.env.snapRaster = ras[0].encode('cp936') arcpy.env.extent = ras[0].encode('cp936') print "共有"+'%d'%len(ras)+"个栅格数据" for rs in ras: rstnm=rs.encode('cp936') print "共有"+str(len(ras))+"个raster数据,正在处理第"+str(ras.index(rs)+1)+"个:"+rstnm for msk in msksras: msknm=msk.encode('cp936')
import arcpy
import os

# ## User input variables
# Sub-directory folder where the shapefile lives
in_sum_file = r'E:\Workspace\StreamLine\ESA\Boundaries.gdb\Counties_all_overlap'
region = 'VI'
temp_file = 'table'
run_group = 'UseLayers'

# Location of use sites to run
use_location = r'E:\Workspace\StreamLine\ESA\ByProjection' + os.sep + str(region) + "_" + run_group + ".gdb"
use_list = []  # runs the specified layers in the use location; leave empty to run all
arcpy.env.workspace = use_location
if len(use_list) == 0:
    use_list = arcpy.ListRasters()  # run all rasters in the input gdb

# Location of results
out_results = r'E:\Workspace\StreamLine\ESA\Carbamates\Tabulated_HUCAB\PolBoundaries\Agg_layers'

# STATIC variables
# Symbology layer so that the unique values can be applied to the use layer before running zonal stats
# Snap raster dictionary lookup
snap_raster_dict = {
    'CONUS': r'E:\Workspace\StreamLine\ESA\ByProjection\SnapRasters.gdb'
             r'\Albers_Conical_Equal_Area_cultmask_2016',
    'HI': r'E:\Workspace\StreamLine\ESA\ByProjection\SnapRasters.gdb\NAD_1983_UTM_Zone_4N_HI_Ag',
    'AK':
ws = r"D:\research_project_data_samples\clip_mosaic\input" Features_polygon_shp = r"D:\research_project_data_samples\clip_mosaic\shp\Features-polygon.shp" v_name = "clipped_{0}" geometries = arcpy.CopyFeatures_management(Features_polygon_shp, arcpy.Geometry()) extent_str = "" extent_str += str(geometries[0].extent.XMin) + " " extent_str += str(geometries[0].extent.YMin) + " " extent_str += str(geometries[0].extent.XMax) + " " extent_str += str(geometries[0].extent.YMax) arcpy.env.overwriteOutput = True arcpy.env.workspace = ws ras_names = arcpy.ListRasters() i = 1 for inRas in ras_names: print("\n{0}] Clipping {1} @ {2}".format(i, inRas, str(datetime.now())[1:19])) arcpy.Clip_management(inRas, extent_str, v_name.format(inRas), Features_polygon_shp, "-3.402823e+038", "ClippingGeometry", "NO_MAINTAIN_EXTENT") print("\t" + v_name.format(inRas)) i += 1 else: print("\n\n\tFinishing Rasters Clipping Process ...") time_end = datetime.now() # time object print("\nEnding Rasters Clipping Process @ " + str(time_end)[1:19]) elapsed_time = time_end - time_start
# Tool to calculate species richness as the average of all SDMs for a given month
import arcpy
from arcpy import env
import os
from arcpy.sa import *

env.workspace = "F:\\SDM_paper\\maxent\\Maxent_run\\extracted_rasters\\"
out_workspace = "F:\\SDM_paper\\maxent\\Maxent_run\\Species_richness\\"

for file in arcpy.ListFiles():
    # Each subfolder holds one month's SDM rasters
    env.workspace = "F:\\SDM_paper\\maxent\\Maxent_run\\extracted_rasters\\" + file
    rasters = arcpy.ListRasters()
    cellstats = CellStatistics(rasters, "MEAN", "DATA")
    outfile = out_workspace + file + "_SR"
    cellstats.save(outfile)
import arcpy
import time
# Helper functions make_gdb(), unique_values(), and garbagePickup() are defined elsewhere in the full script.


def makeServiceAreas(outGDB, accFeat, costRastLoc, costRastHwy, rampPts, rampPtsID,
                     grpFld=None, maxCost=None, attFld=None):
    # Checks on attFld
    if attFld:
        if not maxCost:
            print('Must specify a `maxCost` value if using `attFld`, exiting...')
            return
        if isinstance(attFld, str) and attFld not in [a.name for a in arcpy.ListFields(accFeat)]:
            print('Field ' + attFld + ' not found in access features, exiting...')
            return

    arcpy.env.snapRaster = costRastLoc
    arcpy.env.cellSize = costRastLoc
    arcpy.env.extent = costRastLoc
    arcpy.env.outputCoordinateSystem = costRastLoc

    make_gdb(outGDB)
    arcpy.env.workspace = outGDB
    arcpy.SetLogHistory(False)

    # Copy access points to gdb
    accFeat = arcpy.CopyFeatures_management(accFeat, 'accFeat_orig')
    if not grpFld:
        # Add a field to assign all rows to one group
        grpFld = 'serviceArea_group'
        arcpy.CalculateField_management(accFeat, grpFld, "1", field_type="SHORT")
    grps = unique_values(accFeat, grpFld)

    # Assign max costs
    if maxCost:
        if isinstance(maxCost, str):
            arcpy.CalculateField_management(accFeat, 'minutes_SA', '!' + maxCost + '!', field_type="FLOAT")
        else:
            arcpy.CalculateField_management(accFeat, 'minutes_SA', maxCost, 'PYTHON', field_type="FLOAT")
        # Dictionary: grps: minutes
        grp_min = {a[0]: a[1] for a in arcpy.da.SearchCursor(accFeat, [grpFld, 'minutes_SA'])}

    for i in grps:
        n = grps.index(i) + 1
        if isinstance(i, str):
            rastout = "grp_" + i + "_servArea"
            cdpts = "grp_" + i + "_inputFeat"
            i_q = "'" + i + "'"
        else:
            rastout = "grp_" + str(int(i)) + "_servArea"
            cdpts = "grp_" + str(int(i)) + "_inputFeat"
            i_q = i
        if arcpy.Exists(rastout):
            # Skip already existing
            continue

        print("working on group " + str(i) + " (" + str(n) + " of " + str(len(grps)) + ")...")
        arcpy.env.extent = costRastLoc  # reset extent prior to every run
        t0 = time.time()
        c = 1  # counter

        arcpy.Select_analysis(accFeat, cdpts, grpFld + " = " + str(i_q))
        print('Number of access pts: ' + arcpy.GetCount_management(cdpts)[0])

        # Get service area in minutes
        if maxCost is not None:
            grpMaxCost = grp_min[i]
            # Make a buffer to set a smaller extent, to reduce processing time.
            # Buffer set to straight-line distance at ~60 mph (1 mile per minute)
            buffd = str(int(grpMaxCost * 1609)) + ' METERS'
            print('Cost in minutes: ' + str(grpMaxCost))
            arcpy.Buffer_analysis(cdpts, "buffext", buffd)
            arcpy.env.extent = "buffext"
        else:
            grpMaxCost = None

        # Local cost distance
        cd1 = arcpy.sa.CostDistance(cdpts, costRastLoc, grpMaxCost)
        nm = "cd" + str(c)
        cd1.save(nm)
        cds = [nm]

        # Values to ramps
        rp1 = arcpy.sa.ExtractValuesToPoints(rampPts, cd1, "rp1", "NONE", "VALUE_ONLY")
        rp1s = arcpy.MakeFeatureLayer_management(rp1, where_clause="RASTERVALU IS NOT NULL")

        if int(arcpy.GetCount_management(rp1s)[0]) == 0:
            # No ramps reached: just output the local-roads-only service area
            if attFld is not None:
                if isinstance(attFld, str):
                    areaval = unique_values(cdpts, attFld)[0]
                    area = arcpy.sa.Con("cd1", areaval, "", "Value <= " + str(grpMaxCost))
                    area.save(rastout)
                elif isinstance(attFld, int):
                    area = arcpy.sa.Con("cd1", attFld, "", "Value <= " + str(grpMaxCost))
                    area.save(rastout)
            else:
                cd1.save(rastout)
        else:
            # Some ramps reached: run the highways/local loop until there is no improvement in travel time.
            notin = [1]
            while len(notin) != 0:
                print('Limited-access cost distance run # ' + str(int((c + 1) / 2)) + '...')
                arcpy.CopyFeatures_management(rp1s, "rp1s")

                # Highway cost distance
                cd2 = arcpy.sa.CostDistance("rp1s", costRastHwy, grpMaxCost, source_start_cost="RASTERVALU")
                c += 1
                nm = "cd" + str(c)
                cd2.save(nm)
                cds = cds + [nm]

                rp2 = arcpy.sa.ExtractValuesToPoints(rampPts, cd2, "rp2", "NONE", "VALUE_ONLY")
                # Change the name to avoid confusion with local ramp points
                arcpy.AlterField_management(rp2, "RASTERVALU", "costLAH", clear_field_alias=True)
                rp2s = arcpy.MakeFeatureLayer_management(rp2, where_clause="costLAH IS NOT NULL")

                # Check for new ramps or ramps reached at least one minute faster after the latest run (LAH)
                notin = []
                lahr = {a[0]: a[1] for a in arcpy.da.SearchCursor(rp2s, [rampPtsID, 'costLAH'])}
                locr = {a[0]: a[1] for a in arcpy.da.SearchCursor('rp1s', [rampPtsID, 'RASTERVALU'])}
                for a in lahr:
                    if a not in locr:
                        notin.append(a)
                    else:
                        if lahr[a] - locr[a] < -1:
                            notin.append(a)
                if len(notin) == 0:
                    print('No new ramps reached after LAH, moving on...')
                    break

                # Back to local
                arcpy.CopyFeatures_management(rp2s, "rp2s")
                cd3 = arcpy.sa.CostDistance("rp2s", costRastLoc, grpMaxCost, source_start_cost="costLAH")
                c += 1
                nm = "cd" + str(c)
                cd3.save(nm)
                cds = cds + [nm]

                rp1 = arcpy.sa.ExtractValuesToPoints(rampPts, cd3, "rp1", "NONE", "VALUE_ONLY")
                rp1s = arcpy.MakeFeatureLayer_management(rp1, where_clause="RASTERVALU IS NOT NULL")

                # Check for new ramps or ramps reached at least one minute faster after the latest run (Local)
                # Similar to the process above, but with names reversed
                notin = []
                locr = {a[0]: a[1] for a in arcpy.da.SearchCursor(rp1s, [rampPtsID, 'RASTERVALU'])}
                lahr = {a[0]: a[1] for a in arcpy.da.SearchCursor('rp2s', [rampPtsID, 'costLAH'])}
                for a in locr:
                    if a not in lahr:
                        notin.append(a)
                    else:
                        if locr[a] - lahr[a] < -1:
                            notin.append(a)
            # End while loop

            if attFld is not None:
                if isinstance(attFld, str):
                    # Cell statistics
                    areaval = round(unique_values(cdpts, attFld)[0], 3)
                    area = arcpy.sa.Con(arcpy.sa.CellStatistics(cds, "MINIMUM", "DATA"),
                                        areaval, "", "Value <= " + str(grpMaxCost))
                    area.save(rastout)
                elif isinstance(attFld, int):
                    area = arcpy.sa.Con(arcpy.sa.CellStatistics(cds, "MINIMUM", "DATA"),
                                        attFld, "", "Value <= " + str(grpMaxCost))
                    area.save(rastout)
            else:
                arcpy.sa.CellStatistics(cds, "MINIMUM", "DATA").save(rastout)

        print("Done with group: " + str(i))
        t1 = time.time()
        print('That took ' + str(int(t1 - t0)) + ' seconds.')

        # Garbage pickup every 10 runs, and on the last run
        if n == round(n, -1) or n == len(grps):
            print("Deleting files...")
            r = arcpy.ListRasters("cd*")
            fc = arcpy.ListFeatureClasses("rp*")
            fc.append("buffext")
            garbagePickup(r)
            garbagePickup(fc)

    # Reset extent
    arcpy.env.extent = costRastLoc
    arcpy.BuildPyramids_management(rastout)
    return rastout


# General usage
#
# # Environment settings
# arcpy.env.parallelProcessingFactor = "100%"  # Adjust to some percent (e.g. 100%) for large extent analyses (e.g. maxCost = None)
# arcpy.env.mask = r'L:\David\projects\RCL_processing\RCL_processing.gdb\VA_Buff50mi_wgs84'
# arcpy.env.overwriteOutput = True
#
# # Cost surface variables
# costRastLoc = r'E:\RCL_cost_surfaces\Tiger_2019\cost_surfaces.gdb\costSurf_no_lah'
# costRastHwy = r'E:\RCL_cost_surfaces\Tiger_2019\cost_surfaces.gdb\costSurf_only_lah'
# rampPts = r'E:\RCL_cost_surfaces\Tiger_2019\cost_surfaces.gdb\rmpt_final'
# rampPtsID = 'UniqueID'  # unique ramp segment ID attribute field, since some ramps have multiple points
#
# # Facilities features and settings
# accFeat = r'accessFeatures'
# outGDB = r'serviceAreas.gdb'
# # Attributes
# grpFld = 'facil_code'
# maxCost = 30
# attFld = None
# makeServiceAreas(outGDB, accFeat, costRastLoc, costRastHwy, rampPts, rampPtsID, grpFld, maxCost, attFld)
wrk = "C:\\Users\\Public\\2015Pros\\Hypergrid\\PROs2015_Plants.gdb" #slf = "D:/PRO2014/workspace/SF_UA_2014.gdb/sf_ua_121014_dissolve_buff100m_grid" #slf= "C:\\Users\\Public\\2015Pros\\Hypergrid\\SF_UA_2014.gdb\\sf_ua_121014_dissolve_buff100m_grid" slf = "C:\\Users\\Public\\2015Pros\\DEC_Lands_2015_July\\SF_UA_GIS.gdb\\sf_ua_dissolve_grid_buff100m" # set the workspace for the input grids env.workspace = wrk #set the snapraster #env.snapRaster = "C:/_Howard/SnapRasters/snapras30met" env.snapRaster = "C:\\Users\\Public\\Rapunzel\\SnapRasters\\snapras30met" env.cellSize = env.snapRaster env.overwriteOutput = True outCoord = env.snapRaster # what's the name of the hypergrid hyp = arcpy.ListRasters("hyp*", "All")[0] # clip the hypergrid print('clipping') hyp_cl = Con(Raster(slf) >= 0, hyp, Raster(slf)) man.JoinField(hyp_cl, "VALUE", hyp, "VALUE", ["Richness"]) print('reclassifying') # reclassify to four groups based on richness # 0 = 0 (none) # 1 = 1 (low) # 2-4 = 2 (medium) # 5 and up = 3 (high) #get the max of the raster rows = arcpy.SearchCursor(hyp_cl, "", "", "Richness", "Richness A")
    drive, root_folder,
    'Projects/VegetationEcology/AKVEG_QuantitativeMap/Data/Data_Input/northAmericanBeringia_ModelArea.tif')

# Loop through each grid in the list and extract all predictors to the study area
for grid in grid_list:
    print(f'Extracting rasters for Grid {grid}...')
    # Define the grid raster
    grid_raster = os.path.join(input_grids, "Grid_" + grid + ".tif")

    # Generate a list of rasters
    raster_list = []

    # Add topographic rasters
    arcpy.env.workspace = os.path.join(input_topography, 'Grid_' + grid)
    topo_rasters = arcpy.ListRasters('', 'TIF')
    for raster in topo_rasters:
        raster_list.append(os.path.join(arcpy.env.workspace, raster))

    # Set the workspace back to the working geodatabase
    arcpy.env.workspace = work_geodatabase

    # Define the raster list count
    total = len(raster_list)
    for input_raster in raster_list:
        # Identify the raster index
        count = raster_list.index(input_raster) + 1
        # Define the output raster and path
        output_raster = input_raster.replace('gridded_full', 'gridded_select')
        output_path = os.path.split(output_raster)[0]
seEl_clip = toLoc + 'seElclip'
outExtractByMask.save(seEl_clip)
print("seElev clipped successfully")

# Mask neLC
outExtractByMask = arcpy.sa.ExtractByMask(neLC, neMask)
neLC_clip = toLoc + 'neLCclip'
outExtractByMask.save(neLC_clip)
print("neLC clipped successfully")

# Mask seLC
outExtractByMask = arcpy.sa.ExtractByMask(seLC, seMask)
seLC_clip = toLoc + 'seLCclip'
outExtractByMask.save(seLC_clip)
print("seLC clipped successfully")

# Mosaic together NE and SE regions
ElRast = arcpy.ListRasters("*" + "Elclip")
eElev = toLoc + "elev"
arcpy.MosaicToNewRaster_management(ElRast, toLoc, "elev", "", "", "", 1)
print("Elevation mosaic successful")

LCRast = arcpy.ListRasters("*" + "LCclip")
eLC = fromLoc + "lcgap"
arcpy.MosaicToNewRaster_management(
def _constrain_from_raster(constrain_area, rasters):
    """
    _constrain_from_raster
        Constrains an area to the zones where all bands of the raster contain information

    :param constrain_area: Original area
    :param rasters: Multiband raster to be used as the information source
    :return: Constrained area
    """
    global MESSAGES
    MESSAGES.AddMessage("Constraining Area from rasters...")
    _verbose_print("Constrain Area: {}".format(constrain_area))
    _verbose_print("rasters: {}".format(rasters))

    scratch_files = []

    # Obtain the name of the bands
    oldws = arcpy.env.workspace  # Save previous workspace
    raster_path = arcpy.Describe(rasters.strip("'")).catalogPath
    arcpy.env.workspace = raster_path
    rasters = [os.path.join(raster_path, b) for b in arcpy.ListRasters()]
    arcpy.env.workspace = oldws  # Restore previous workspace
    _verbose_print("Rasters list: {}".format(str(rasters)))

    # Start a progressor bar as feedback for the user
    arcpy.SetProgressor("step", "Restricting area from missings", min_range=0,
                        max_range=len(rasters), step_value=1)
    try:
        # TODO: Maybe this is faster if it is transformed to numpy arrays, the calculations are made, and then back to raster
        # Initialize a raster with all the null points
        final_raster = arcpy.sa.IsNull(arcpy.sa.Raster(rasters[0]))
        arcpy.SetProgressorPosition()
        # Loop through all the remaining bands, adding the points where other bands have missings
        if len(rasters) > 1:
            for raster in rasters[1:]:
                final_raster = arcpy.sa.BooleanOr(final_raster, arcpy.sa.IsNull(arcpy.sa.Raster(raster)))
                arcpy.SetProgressorPosition()
                _verbose_print("Area reduced with nulls from {}".format(raster))
        # Set null the positions where at least one null was found
        final_raster = arcpy.sa.SetNull(final_raster, final_raster)
        # Reset the progressor to its previous state
        arcpy.SetProgressorLabel("Executing Select Random Points")
        arcpy.ResetProgressor()
        # Transform the raster to polygon
        domain_scratch = arcpy.CreateScratchName("temp", workspace=arcpy.env.scratchWorkspace)
        arcpy.RasterToPolygon_conversion(final_raster, domain_scratch, "SIMPLIFY")
        scratch_files.append(domain_scratch)
        _verbose_print("Scratch file created (domain): {}".format(domain_scratch))
        # Intersect the polygon created with the original area
        intersect_scratch = arcpy.CreateScratchName("temp", workspace=arcpy.env.scratchWorkspace)
        arcpy.Intersect_analysis([domain_scratch, constrain_area], intersect_scratch)
        _verbose_print("Scratch file created (intersect): {}".format(intersect_scratch))
    except:
        raise
    finally:
        # Clean up intermediate files
        for s_file in scratch_files:
            arcpy.Delete_management(s_file)
            _verbose_print("Scratch file deleted: {}".format(s_file))

    _verbose_print("Constrain from rasters finished")
    return intersect_scratch
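# Usage sketch for _constrain_from_raster (an assumption, not part of the original script):
# the tool framework normally supplies MESSAGES and _verbose_print, so both are stubbed
# below, and every path is hypothetical.
import os
import arcpy


class _Messages(object):
    def AddMessage(self, text):
        print(text)


MESSAGES = _Messages()


def _verbose_print(text):
    print(text)


arcpy.CheckOutExtension("Spatial")
arcpy.env.scratchWorkspace = r"C:\temp\scratch.gdb"  # assumed scratch workspace

# Constrain a study-area polygon to the cells where every band of a multiband raster has data
constrained = _constrain_from_raster(r"C:\temp\study_area.shp",
                                     r"C:\temp\evidence_multiband.tif")
print("Constrained area feature class: {}".format(constrained))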
gpsPoints = "C:/workspace/chirps/gps/" ##paths to processing data clip_rasters_1 = "C:/workspace/chirps/processing/clip_1/" resample_raster = "C:/workspace/chirps/processing/resample_raster/" clip_rasters_2 = "C:/workspace/chirps/processing/clip_2/" fs_rasters = "C:/workspace/chirps/processing/fs_rasters/" ##path to output data out_extract = "C:/workspace/chirps/processing/gps_extract/" dataGDB = "C:/workspace/chirps/processing/data.gdb/" out_csv = "C:/workspace/chirps/csv/" #focal_extract = "C:/Workspace/gates/gps/focal_extract/merge_157.shp" arcpy.env.workspace = rasters tiffs = arcpy.ListRasters("*") #Resample all Raster i = 1 for tiff in tiffs: tiffSplit = tiff.split(".") year = tiffSplit[2] month = tiffSplit[3] # Clip Raster arcpy.Clip_management( in_raster=rasters + tiff, rectangle= "-92.9054565429688 -42.5033569335938 129.547058105469 50.6832275390625", out_raster=clip_rasters_1 + tiff, in_template_dataset="C:/Workspace/gates/gps/clips/clip_5.shp", nodata_value="-3.402823e+038",
inTable2 = arcpy.SelectLayerByAttribute_management(lyr1, "NEW_SELECTION", qry1)
lyrz2 = arcpy.MakeFeatureLayer_management(inTablez, "lyrz")
inTablez2 = arcpy.SelectLayerByAttribute_management(lyrz2, "NEW_SELECTION", qry1)
cnt = arcpy.GetCount_management(inTablez2)
#print "number of selected records is: " + str(cnt)

#=========================================. Hillshade reclassification .==========================================================>
print
print "*********************: Hillshade reclassification :*********************"
print

env.workspace = str(dPath) + str(cityq) + "/HillShade/"
datapath1 = str(dPath) + str(cityq) + "/HillShade/"
inRasterList1 = arcpy.ListRasters('mar*', '')
reclassField = "VALUE"

for inRaster1 in inRasterList1:
    print inRaster1
    # Reclassify the March hillshades: 0-153 -> 0, 153-254 -> 1
    remap1 = RemapRange([[0, 153, 0], [153, 254, 1]])
    outReclassify = Reclassify(inRaster1, reclassField, remap1, "NODATA")
    outReclassify.save(str(dPath) + str(cityq) + "/Reclass/" + str(inRaster1) + str("rec"))
    del outReclassify

datapath2 = str(dPath) + str(cityq) + "/HillShade/"
inRasterList2 = arcpy.ListRasters('jun*', '')
modellist = sorted(os.listdir(dirbase + "\\" + sres + "\\downscaled\\Global_" + str(resolution)))
for model in modellist:
    print model
    if not os.path.exists(dirout + "\\" + sres + "_" + model + "_tiles_world_done.txt"):
        arcpy.env.workspace = dirbase + "\\" + sres + "\\downscaled\\Global_" + str(resolution) + "\\" + model + "\\" + str(period)
        print arcpy.env.workspace
        diroutGrids = dirout + "\\" + sres + "\\Global_" + str(resolution) + "\\" + model + "\\" + str(period)
        print "\nProcessing", sres, model, period, "\n"

        rasterList = arcpy.ListRasters("bio*", "GRID")
        for raster in rasterList:
            print raster
            # or "prec", "tmean", "dtr" as the variable prefix
            if os.path.basename(raster).split("_")[0] == "bio":
                diroutGridsVar = diroutGrids + "\\" + os.path.basename(raster).split("_")[0]
                if not os.path.exists(diroutGridsVar):
                    os.system('mkdir ' + diroutGridsVar)
                trashList = sorted(glob.glob(diroutGridsVar + "\\" + raster + "*.*"))
                for trashfile in trashList:
startTime_task = datetime.now()

#shipTypeFolder = out_folder + "\\" + shipType
shipTypeFolder = out_folder + "\\DensityMaps1Km_" + year + "_IMO_" + shipType
if not arcpy.Exists(shipTypeFolder):
    #arcpy.CreateFolder_management(out_folder, shipType)
    arcpy.CreateFolder_management(out_folder, "DensityMaps1Km_" + year + "_IMO_" + shipType)

# Create the monthly rasters in parallel
pool = multiprocessing.Pool(6)
pool.map(multiProcessing_function, linesfiles)
pool.close()
pool.join()
print "Ended create months rasters at: " + str(datetime.now())

# Join all month rasters into one year raster
print "Started create year " + year + " " + shipType + " raster at: " + str(datetime.now()) + "\n"
if arcpy.CheckExtension("Spatial") == "Available":
    arcpy.CheckOutExtension("Spatial")
    #arcpy.env.workspace = out_folder + "\\" + shipType
    arcpy.env.workspace = out_folder + "\\DensityMaps1Km_" + year + "_IMO_" + shipType
    rasterList = arcpy.ListRasters("*_Raster.tif")
    sumRasters = arcpy.sa.CellStatistics(rasterList, "SUM", "NODATA")
    #sumRasters.save(out_folder + "\\" + shipType + "\\" + shipType + "_" + year + "_Year_Raster" + ".tif")
    sumRasters.save(out_folder + "\\DensityMaps1Km_" + year + "_IMO_" + shipType + "\\" + shipType + "_" + year + "_Year_Raster" + ".tif")
    arcpy.CheckInExtension("Spatial")
else:
    print("Spatial Analyst license is unavailable")

print "Ended create year " + year + " " + shipType + " raster at: " + str(datetime.now())
print "Ended script for year " + year + " " + shipType + " at: " + str(datetime.now())
print "Duration: " + str(datetime.now() - startTime_script) + "\n"
import sys, string, os, time, datetime, shutil, traceback, arcpy
from arcpy import env

env.workspace = r"S:\muddy_virgin\Imagery_20161219\DMI16101-FLIGHT_DATE-1219-2016\6-INCH-4BAND-16BIT-ORTHOPHOTOS"
ws = env.workspace
arcpy.env.overwriteOutput = True
arcpy.env.pyramid = "None"
arcpy.env.rasterStatistics = "None"

fcs = arcpy.ListRasters("", "")
for fc in fcs:
    OrigName = fc[:-4]
    NameLength = len(fc)
    name = ""
    # Zero-pad the tile number so all names are the same length
    if NameLength == 9:
        name = fc[0] + "0" + fc[1:]
    elif NameLength == 8:
        name = fc[0] + "00" + fc[1:]
    else:
        name = fc
    #PyrFile = ws + '\\' + OrigName + '.rrd'
    #StatFile = ws + '\\' + OrigName + '.aux'
    TfwFile = ws + '\\' + OrigName + '.tfw'
    tifFile = ws + '\\' + OrigName + '.tif'
    tilename = name[:-4]
import re
import arcpy
from arcpy import env
from arcpy.sa import *

# Interval (in years) used when computing growth multiples
year_gap = 5

# Set the arcpy temporary workspace
arcpy.env.workspace = 'E:\\workplace\\CarbonProject\\geodatabase\\carbon_temp.gdb'

# Check out the ArcGIS Spatial Analyst extension license
arcpy.CheckOutExtension("Spatial")

# List all rasters in the geodatabase that have already been LOG-transformed
rasters = arcpy.ListRasters("*log*")
raster_1990 = arcpy.env.workspace + '\\' + arcpy.ListRasters("*1990_log*")[0]

# This step checks whether the last year of data ends in 5 or 10,
# to decide whether to keep it for further processing
rasters.sort()

# Keep the first-year and last-year rasters
first_year = rasters[0]
last_year = []
# The slice below is a bit convoluted, sorry - I didn't want to write it this way either.
# The rasters in the geodatabase are all saved in this format:
#   edgar_xxxx_log (xxxx is a four-digit year)
# So the slice takes the last digit of the year of the last raster in the list.
# FolderPath = r"\\dcnsbiona01a\BIODataSVC\IN\MSP\Projects\Rockweed\Imagery\Satellite" # Using network paths # https://gis.stackexchange.com/questions/85339/assigning-unc-path-to-arcpy-env-workspace GDBname = "Processing.gdb" # CreateFileGDB arcpy.CreateFileGDB_management(FolderPath, GDBname) gdbWorkspace = os.path.join(FolderPath, GDBname) FolderPath = "N:/MSP/Projects/Rockweed/Imagery/Satellite/NDVI_Tiles" arcpy.env.workspace = FolderPath # create a list of all .TIF files in the workspace rasters = arcpy.ListRasters("*", "TIF") ###################################################################################################################### # ######## Mosaic all tiles into a single raster in the geodatabase ######## # newRast = "NDVI_Mosaic" arcpy.env.workspace = FolderPath print("Mosaicing rasters") arcpy.MosaicToNewRaster_management(rasters, gdbWorkspace, newRast, "#", "32_BIT_FLOAT", "#", "1", "FIRST", "FIRST") print(str(time.ctime(int(time.time())))) # Set workspace to the Geodatabase arcpy.env.workspace = gdbWorkspace # Set processing extent environments
print("Starting script at: {}".format(start_t)) today = datetime.datetime.today() date = today.strftime('%Y%m%d') # Create a list of the rasters in the in-workspace print("Listing Geodatabases") gdbs = arcpy.ListWorkspaces("*", "FileGDB") # Loop through GeoDatabases, listing rasters within each to input in cell stats to create action area for gdb in gdbs: t2 = datetime.datetime.now() arcpy.env.workspace = gdb arcpy.env.scratchWorkspace = gdb desc = arcpy.Describe(gdb) # Nomenclature for Action Areas (AA). Adding the '_dev' indicates that the AA included developed and right-of-way. # '_Ag' for only agricultural footprints, etc. aa = "{}_{}".format(desc.basename, date) uses = arcpy.ListRasters("*") print("Running Cell Statistics for {} at {}".format( gdb, datetime.datetime.now())) out = CellStatistics(uses, "MINIMUM") out.save(aa) print("Building pyramids") arcpy.BuildPyramidsandStatistics_management(aa) print("Completed Action Area for {} in: {}".format( desc.basename, datetime.datetime.now() - t2)) print("Completed Processing of all Action Areas in: {}".format( datetime.datetime.now() - start_t))
def LoadAccumulationRasters(temp_workspace):
    """
    This function accepts a temp workspace (folder) and:
     1 - Grabs each raster file (1day, 3day, and 7day) in the workspace folder and saves info from
         each one - storing the info into a list of class objects.
     2 - Processes through the list of stored raster objects and extracts each raster from the temp
         folder into the final mosaic dataset folder (as its respective file name), and then loads
         the raster to the mosaic dataset - overwriting any previous entries.
     3 - Uses info from the original source file to populate the start time and end time on each
         raster after it is loaded to the mosaic dataset.
     4 - Deletes the temp_workspace original raster file after it is successfully added/moved to
         the mosaic dataset.
    """
    try:
        arcpy.CheckOutExtension("Spatial")

        # We do not want the zero values and we also do not want the "NoData" value of 29999.
        # So let's extract only the values above 0 and less than 29999.
        inSQLClause = "VALUE > 0 AND VALUE < 29999"

        arcpy.env.workspace = temp_workspace
        arcpy.env.overwriteOutput = True

        # Grab some config settings that will be needed...
        final_RasterSourceFolder = GetConfigString('final_Folder')
        target_mosaic1Day = os.path.join(GetConfigString("GDBPath"), GetConfigString("1DayDSName"))
        target_mosaic3Day = os.path.join(GetConfigString("GDBPath"), GetConfigString("3DayDSName"))
        target_mosaic7Day = os.path.join(GetConfigString("GDBPath"), GetConfigString("7DayDSName"))
        RegEx_StartDatePattern = GetConfigString("RegEx_StartDateFilterString")
        Filename_StartDateFormat = GetConfigString("Filename_StartDateFormat")

        # Build the attribute name list for updates
        attrNameList = [GetConfigString('rasterStartTimeProperty'),
                        GetConfigString('rasterEndTimeProperty')]

        rasObjList = []

        # List all rasters in the temp_workspace
        rasters = arcpy.ListRasters()
        for raster in rasters:
            # Check to see if this is a valid 1, 3, or 7 day raster file... just in case there are other files
            if ValidAccumulationRaster(raster):
                keyDate = Get_StartDateTime_FromString(raster, RegEx_StartDatePattern, Filename_StartDateFormat)
                if keyDate is not None:
                    # Start deriving info (start_datetime, end_datetime, and target dataset) from the raster
                    # being processed. Build a 'raster load object' to hold the information about each raster
                    # that we need to keep track of.
                    rasLoadObj = RasterLoadObject()
                    # 1.) Save the original file name
                    rasLoadObj.origFile = raster
                    # 2.) The date and start time portion (20180801-S083000) of the raster filename is used to set
                    #     the end_datetime attribute value on the loaded raster. Save that here...
                    rasLoadObj.endDate = keyDate
                    # From the filename: ex. 3B-HHR-L.MS.MRG.3IMERG.20150802-S083000-E085959.0510.V05B.1day.tif
                    # 3.) The target dataset can be identified based on the occurrence of a string sequence:
                    #     ".1day.tif" present means the target is the 1DayDSName
                    #     ".3day.tif" present means the target is the 3DayDSName
                    #     ".7day.tif" present means the target is the 7DayDSName
                    # 4.) The raster load file can also be set based on the occurrence of a string sequence.
                    # 5.) The start_datetime attribute value is calculated based on the end_datetime: depending
                    #     on whether we are processing a 1, 3, or 7 day file, the start_datetime is found by
                    #     subtracting the proper number of days from the end_datetime.
                    if ".1day.tif" in raster:
                        rasLoadObj.targetDataset = target_mosaic1Day
                        rasLoadObj.loadFile = "IMERG1Day.tif"
                        rasLoadObj.startDate = keyDate - datetime.timedelta(days=1)
                    elif ".3day.tif" in raster:
                        rasLoadObj.targetDataset = target_mosaic3Day
                        rasLoadObj.loadFile = "IMERG3Day.tif"
                        rasLoadObj.startDate = keyDate - datetime.timedelta(days=3)
                    elif ".7day.tif" in raster:
                        rasLoadObj.targetDataset = target_mosaic7Day
                        rasLoadObj.loadFile = "IMERG7Day.tif"
                        rasLoadObj.startDate = keyDate - datetime.timedelta(days=7)
                    # At this point, we have built a raster load object that we can use later; add it to
                    # a list and continue looping through the rasters.
                    rasObjList.append(rasLoadObj)
        del rasters

        logging.info('Loading {0} raster files to folder {1}'.format(str(len(rasObjList)), final_RasterSourceFolder))
        for rasterToLoad in rasObjList:
            try:  # valid raster
                logging.debug('\t\tLoading raster: {0} as {1}'.format(rasterToLoad.origFile, rasterToLoad.loadFile))
                # At this point, we have built a raster load object that we can use. We still need to:
                #  I.)   Extract the orig raster and save it to the desired final mosaic folder as the load raster name.
                #        (This will be overwriting an existing file in the folder.)
                #  II.)  Load the 'load' raster to the proper mosaic dataset (again overwriting previously named rasters)
                #  III.) Delete the original raster from the temp extract folder
                #  IV.)  Populate the loaded raster's attributes

                # Save the file to the final source folder and load it into the mosaic dataset
                extract = arcpy.sa.ExtractByAttributes(rasterToLoad.origFile, inSQLClause)
                loadRaster = os.path.join(final_RasterSourceFolder, rasterToLoad.loadFile)
                extract.save(loadRaster)

                # ----------
                # For some reason, the extract is causing the raster attribute table (.tif.vat.dbf file) to be created,
                # which is being locked (with a ...tif.vat.dbf.lock file) as users access the WMS service. The problem
                # is that the lock file is never released and future updates to the raster are failing. Therefore, here
                # we will just try to delete the raster attribute table right after it is created.
                arcpy.DeleteRasterAttributeTable_management(loadRaster)
                # ----------

                arcpy.AddRastersToMosaicDataset_management(
                    rasterToLoad.targetDataset, "Raster Dataset", loadRaster,
                    "UPDATE_CELL_SIZES", "NO_BOUNDARY", "NO_OVERVIEWS", "2", "#", "#", "#", "#",
                    "NO_SUBFOLDERS", "OVERWRITE_DUPLICATES", "BUILD_PYRAMIDS", "CALCULATE_STATISTICS",
                    "NO_THUMBNAILS", "Add Raster Datasets", "#")
                # arcpy.AddRastersToMosaicDataset_management(in_mosaic_dataset=rasterToLoad.targetDataset,
                #                                            raster_type="Raster Dataset",
                #                                            input_path=loadRaster,
                #                                            update_cellsize_ranges="UPDATE_CELL_SIZES",
                #                                            update_boundary="UPDATE_BOUNDARY",
                #                                            update_overviews="NO_OVERVIEWS",
                #                                            maximum_pyramid_levels="",
                #                                            maximum_cell_size="0",
                #                                            minimum_dimension="1500",
                #                                            spatial_reference="",
                #                                            filter="#",
                #                                            sub_folder="SUBFOLDERS",
                #                                            duplicate_items_action="ALLOW_DUPLICATES",
                #                                            build_pyramids="NO_PYRAMIDS",
                #                                            calculate_statistics="NO_STATISTICS",
                #                                            build_thumbnails="NO_THUMBNAILS",
                #                                            operation_description="#",
                #                                            force_spatial_reference="NO_FORCE_SPATIAL_REFERENCE",
                #                                            estimate_statistics="NO_STATISTICS",
                #                                            aux_inputs="")

                # If we get here, we have successfully added the raster to the mosaic and saved it to its final
                # source location, so let's go ahead and remove it from the temp extract folder now...
                arcpy.Delete_management(rasterToLoad.origFile)

                try:  # Set attributes
                    # Update the attributes on the raster that was just added to the mosaic dataset
                    # Initialize and build the attribute expression list
                    attrExprList = [rasterToLoad.startDate, rasterToLoad.endDate]
                    # Get the raster name minus the .tif extension
                    rasterName_minusExt = os.path.splitext(rasterToLoad.loadFile)[0]
                    wClause = "Name = '" + rasterName_minusExt + "'"
                    with arcpy.da.UpdateCursor(rasterToLoad.targetDataset, attrNameList, wClause) as cursor:
                        for row in cursor:
                            for idx in range(len(attrNameList)):
                                row[idx] = attrExprList[idx]
                            cursor.updateRow(row)
                    del cursor
                except:  # Set attributes
                    err = capture_exception()
                    logging.warning("\t...Raster attributes not set for raster {0}. Error = {1}".format(rasterToLoad.origFile, err))
            except:  # valid raster
                err = capture_exception()
                logging.warning('\t...Raster {0} not loaded into mosaic! Error = {1}'.format(rasterToLoad.origFile, err))

        del rasObjList[:]
    except:
        err = capture_exception()
        logging.error(err)
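# Usage sketch for LoadAccumulationRasters (an assumption, not part of the original script):
# the helpers it calls (GetConfigString, ValidAccumulationRaster, Get_StartDateTime_FromString,
# RasterLoadObject, capture_exception) and the config entries are defined elsewhere in the
# full script; the temp folder path below is hypothetical.
import logging

if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    temp_extract_folder = r"C:\IMERG\temp_extract"  # assumed folder of freshly extracted .tif files
    LoadAccumulationRasters(temp_extract_folder)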
import arcpy, os, subprocess
from arcpy import env
from arcpy.sa import *

arcpy.CheckOutExtension("Spatial")

Zones = "//166.2.126.25/R4_VegMaps/Uinta_Wasatch_Cache/Mapping/01_Veg_Type/GA3_Uintas/Segments/UWC_GA3_Uintas_Segments.shp"
ZoneField = "FID_Model"
work = "//166.2.126.25/R4_VegMaps/Uinta_Wasatch_Cache/Mapping/ZonalStats/DataLayers/L8_10mResample"
outTableName = work + "/UWC_Zonal_Stats.txt"
TemplateScript = "//166.2.126.25/R4_VegMaps/VegMapping_Tools/Derek_VegMap/Template_ZonalStats.R"

arcpy.env.workspace = work
arcpy.env.overwriteOutput = True

rasts = arcpy.ListRasters("*.img")
print rasts
for rast in rasts:
    rastFileName = os.path.splitext(rast)[0]
    # Each .img is multiband; list its band sub-rasters
    arcpy.env.workspace = work + "/" + rast
    subRasts = arcpy.ListRasters()
    i = 1
    for subRast in subRasts:
        rastName = work + '/' + rastFileName + '.img/Layer_' + str(i)
        outName = work + '/' + rastFileName + '_' + str(i) + '.dbf'
        ZonalStats = ZonalStatisticsAsTable(Zones, ZoneField, rastName, outName, "NODATA", "MEAN_STD")
        i = i + 1

#########################################################
# Find R executable
#########################################################
RnameArray = []
path = r"C:\Program Files\R"
import arcpy
from arcpy import env
from arcpy.sa import *

# Set the current workspace
arcpy.env.workspace = "C:/workspace/modis/ndvi_2017_warp_32647_npp"
outfolder = "C:/workspace/modis/ndvi_2017_warp_32647_bm/"
arcpy.CheckOutExtension("Spatial")

# Get and process a list of TIFFs from the workspace
rasters = arcpy.ListRasters("*", "TIF")
for r in rasters:
    rname = r[:10] + ".tif"
    print(rname)
    # Execute Divide
    outRas = Divide(r, 0.5)
    # Save the output
    outRas.save(outfolder + rname)
    print("ok ")
#arcpy.env.cellSize = 30
###############################################################################################################################################
# Load segments
shp = "//166.2.126.25/teui1/4_Derek/GEE_Development/Dixie_NF/Dixie_segments_refData_2.shp"

# Set the location of your imagery
arcpy.env.workspace = "//166.2.126.25/teui1/4_Derek/AWS_POC/Zonal_Statistics_Random_Forest_Classification_Python_Test/Data/Imagery"

# Set the output directory location
outRoot = "//166.2.126.25/teui1/4_Derek/AWS_POC/Zonal_Statistics_Random_Forest_Classification_Python_Test/Data/OutTables/"

# Set the segments unique ID field
uid = 'FID_Model'

# Run zonal stats
rastList = arcpy.ListRasters("*", "IMG")
for rast in rastList:
    print(rast)
    d = arcpy.Describe(rast)
    nBands = d.bandCount
    outRasName = rast.split(".")[0]
    print(outRasName)
    if nBands > 1:
        for band in range(1, nBands + 1):
            print(band)
            outTableName = outRoot + outRasName + "_band" + str(band) + ".dbf"
            print(outTableName)
            # Check to ensure that the band names are "Layer_n" and not "Band_n"
            bandRas = arcpy.Raster("{}\\Layer_{}".format(rast, band))
            print(bandRas)
            ZonalStatisticsAsTable(shp, uid, bandRas, outTableName, 'DATA',
import arcpy
from arcpy import env
from arcpy.sa import *


def main():
    # Set environment settings
    env.workspace = "K:/GIS/r/elevation/"
    env.compression = "NONE"

    for raster in arcpy.ListRasters():
        try:
            out_raster = env.workspace + "/exported/" + arcpy.Describe(raster).baseName + ".tif"
            arcpy.management.CopyRaster(raster, out_raster)
        except:
            print "Failed to copy " + arcpy.Describe(raster).baseName
            print arcpy.GetMessages()

    print("finished")


if __name__ == '__main__':
    main()
    SWIR1 = Raster(d.children[5].name)
    SWIR2 = Raster(d.children[6].name)

if Sensor == "Worldview 02":
    Coastal = Raster(d.children[0].name)
    Blue = Raster(d.children[1].name)
    Green = Raster(d.children[2].name)
    Yellow = Raster(d.children[3].name)
    Red = Raster(d.children[4].name)
    RedEdge = Raster(d.children[5].name)
    NIR1 = Raster(d.children[6].name)
    NIR2 = Raster(d.children[7].name)

# Export individual bands if selected
if exVar.get():
    bands = arcpy.ListRasters()
    print "Exporting Bands"
    for bandNo, bandName in enumerate(bands):
        outBand = Raster(bandName) * 1.0
        outBand.save(outPath + "/" + inRaster[:-4] + "_B" + str(bandNo + 1) + ".tif")

# Calculate and save indices
for key, value in indicesSensor.iteritems():
    if Sensor in value:
        if cbVar[indices.index(key)].get():  # Determine if the index was selected from the GUI
            if key in ('Brightness', 'Greenness', 'Wetness', 'Yellowness'):  # Tasseled cap indices
                formula = Config.get(Sensor, key)  # Get sensor-specific tasseled cap coefficients
            else:
                formula = Config.get("Formulas", key)
            print key
            eval(formula).save(outPath + "/" + inRaster[:-4] + "_" + key + ".tif")  # Save index raster
import arcpy
from arcpy import env
from ftplib import FTP
import sys, os, traceback, datetime, time
from datetime import date
from time import localtime, strftime
import string, glob
from shutil import copyfile

arcpy.CheckOutExtension("Spatial")

#year = str(sys.argv[1])
#arcpy.env.workspace = r'D:\Stornext\fewspsnfs2\WaterSmart\Data\NDVI\ASP_NDVI\V006_Terra' + os.sep + str(year)
#arcpy.env.workspace = r'W:\Data\NDVI\USA\V006_250m' + os.sep + str(year)
#arcpy.env.workspace = r'W:\Data\NDVI\USA\V006_1km_update' + os.sep + str(year)
arcpy.env.workspace = r'W:\Projects\Veg_ET\USA_data\NDVI_daily_1_km\MEDIAN\Med_0119_filled'

rasterList = arcpy.ListRasters('*', 'TIF')
#rasterList.sort()

jd = str(sys.argv[1])
#file_in1 = arcpy.env.workspace + os.sep + year + jd + '.250_m_16_days_NDVI.tif'
#file_in1 = arcpy.env.workspace + os.sep + year + jd + '.1_km_16_days_NDVI.tif'
file_in1 = arcpy.env.workspace + os.sep + 'med' + jd + '.1_km_16_days_NDVI.tif'

je = str(sys.argv[2])
#file_in2 = arcpy.env.workspace + os.sep + year + je + '.250_m_16_days_NDVI.tif'
#file_in2 = arcpy.env.workspace + os.sep + year + je + '.1_km_16_days_NDVI.tif'
file_in2 = arcpy.env.workspace + os.sep + 'med' + je + '.1_km_16_days_NDVI.tif'

jf = jd
f = int(sys.argv[3])  # 16
a = range(0, f)
import os
import tempfile

import arcpy
import netCDF4
import pandas as pd


def create_netcdf(rasters_path, name_format, start_date, end_date,
                  latlim, lonlim, cellsize, nc_path,
                  epsg=4326, fill_val=-9999.0):
    '''
    This function creates a netcdf file from a folder with geotiff rasters
    to be used to run HANTS.
    '''
    # Latitude and longitude
    lat_ls = pd.np.arange(latlim[0] + 0.5 * cellsize, latlim[1] + 0.5 * cellsize, cellsize)
    lat_ls = lat_ls[::-1]  # ArcGIS numpy
    lon_ls = pd.np.arange(lonlim[0] + 0.5 * cellsize, lonlim[1] + 0.5 * cellsize, cellsize)
    lat_n = len(lat_ls)
    lon_n = len(lon_ls)
    spa_ref = arcpy.SpatialReference(epsg).exportToString()
    ll_corner = arcpy.Point(lonlim[0], latlim[0] + cellsize / 2.0)
    # Note:
    # The '+ cellsize/2.0' fixes an arcpy offset on the y-axis on arcgis 10.4.1

    # Rasters
    dates_dt = pd.date_range(start_date, end_date, freq='D')
    dates_ls = [d.strftime('%Y%m%d') for d in dates_dt]
    arcpy.env.workspace = rasters_path
    ras_ls = arcpy.ListRasters()

    # Cell code
    temp_ll_ls = [pd.np.arange(x, x + lon_n) for x in range(1, lat_n * lon_n, lon_n)]
    code_ls = pd.np.array(temp_ll_ls)

    empty_vec = pd.np.empty((lat_n, lon_n))
    empty_vec[:] = fill_val

    # Create netcdf file
    print 'Creating netCDF file...'
    nc_file = netCDF4.Dataset(nc_path, 'w', format="NETCDF4")

    # Create dimensions
    lat_dim = nc_file.createDimension('latitude', lat_n)
    lon_dim = nc_file.createDimension('longitude', lon_n)
    time_dim = nc_file.createDimension('time', len(dates_ls))

    # Create variables
    crs_var = nc_file.createVariable('crs', 'i4')
    crs_var.grid_mapping_name = 'latitude_longitude'
    crs_var.crs_wkt = spa_ref

    lat_var = nc_file.createVariable('latitude', 'f8', ('latitude'), fill_value=fill_val)
    lat_var.units = 'degrees_north'
    lat_var.standard_name = 'latitude'

    lon_var = nc_file.createVariable('longitude', 'f8', ('longitude'), fill_value=fill_val)
    lon_var.units = 'degrees_east'
    lon_var.standard_name = 'longitude'

    time_var = nc_file.createVariable('time', 'l', ('time'), fill_value=fill_val)
    time_var.standard_name = 'time'
    time_var.calendar = 'gregorian'

    code_var = nc_file.createVariable('code', 'i4', ('latitude', 'longitude'), fill_value=fill_val)

    outliers_var = nc_file.createVariable('outliers', 'i4', ('latitude', 'longitude', 'time'), fill_value=fill_val)
    outliers_var.long_name = 'outliers'

    original_var = nc_file.createVariable('original_values', 'f8', ('latitude', 'longitude', 'time'), fill_value=fill_val)
    original_var.long_name = 'original values'

    hants_var = nc_file.createVariable('hants_values', 'f8', ('latitude', 'longitude', 'time'), fill_value=fill_val)
    hants_var.long_name = 'hants values'

    combined_var = nc_file.createVariable('combined_values', 'f8', ('latitude', 'longitude', 'time'), fill_value=fill_val)
    combined_var.long_name = 'combined values'

    print '\tVariables created'

    # Load data
    lat_var[:] = lat_ls
    lon_var[:] = lon_ls
    time_var[:] = dates_ls
    code_var[:] = code_ls

    # Temp folder
    temp_dir = tempfile.mkdtemp()
    bbox = "{0} {1} {2} {3}".format(lonlim[0], latlim[0], lonlim[1], latlim[1])

    # Raster loop
    print '\tExtracting data from rasters...'
    for tt in range(len(dates_ls)):
        # Raster
        ras = name_format.format(dates_ls[tt])
        if ras in ras_ls:
            # Resample
            ras_resampled = os.path.join(temp_dir, 'r_' + ras)
            arcpy.management.Resample(os.path.join(rasters_path, ras), ras_resampled, cellsize)
            # Clip
            ras_clipped = os.path.join(temp_dir, 'c_' + ras)
            arcpy.management.Clip(ras_resampled, bbox, ras_clipped)
            # Raster to array
            array = arcpy.RasterToNumPyArray(ras_resampled, ll_corner, lon_n, lat_n)
            # Store values
            original_var[:, :, tt] = array
        else:
            # Store values
            original_var[:, :, tt] = empty_vec

    # Close file
    nc_file.close()
    print 'NetCDF file created'

    # Return
    return nc_path
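# Invocation sketch for create_netcdf (an assumption, not part of the original script):
# the folder, filename pattern, and bounding box are hypothetical; name_format is formatted
# with the 'YYYYMMDD' date string built inside the function.
nc_out = create_netcdf(rasters_path=r'C:\HANTS\ndvi_tifs',  # assumed input folder of daily GeoTIFFs
                       name_format='ndvi_{0}.tif',          # e.g. ndvi_20180101.tif
                       start_date='2018-01-01',
                       end_date='2018-12-31',
                       latlim=[30.0, 40.0],                 # [lat_min, lat_max]
                       lonlim=[-10.0, 5.0],                 # [lon_min, lon_max]
                       cellsize=0.01,
                       nc_path=r'C:\HANTS\ndvi_2018.nc')
print 'Created: ' + nc_out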