def mosaic(workspace, in_rasters, out_location, out_raster):
    """Mosaic the input rasters into a new dataset, ignoring background cells.

    Creates an empty Albers-projected raster dataset named ``out_raster``
    under ``out_location`` and mosaics ``in_rasters`` into it; on overlap the
    first raster's value wins, and 0 is treated as background/nodata.
    """
    env.workspace = workspace

    # Target-dataset properties.
    sr = arcpy.SpatialReference("NAD 1983 Contiguous USA Albers")
    pixel_type = '32_BIT_SIGNED'
    cell_size = '10'
    band_count = '1'
    pyramid_opts = 'PYRAMIDS -1 NEAREST JPEG'
    overlap_rule = 'FIRST'
    colormap_rule = 'FIRST'  # kept for reference; Mosaic below passes ''
    background_value = 0

    # Create the (empty) target raster dataset.
    # CreateRasterDataset_management(out_path, out_name, {cellsize},
    #   pixel_type, {raster_spatial_reference}, number_of_bands,
    #   {config_keyword}, {pyramids}, {tile_size}, {compression},
    #   {pyramid_origin})
    arcpy.CreateRasterDataset_management(
        out_location, out_raster, cell_size, pixel_type, sr,
        band_count, '', pyramid_opts, '', '', '')

    # Mosaic the inputs into the target, treating 0 as background and nodata.
    # Mosaic_management(inputs, target, {mosaic_type}, {colormap},
    #   {background_value}, {nodata_value}, {onebit_to_eightbit},
    #   {mosaicking_tolerance}, {MatchingMethod})
    arcpy.Mosaic_management(
        in_rasters, out_location + out_raster, overlap_rule, '',
        background_value, background_value, '', '', '')
def mosiacRasters(): ######Description: mosiac tiles together into a new raster tilelist = glob.glob("C:/Users/Bougie/Desktop/Gibbs/tiles/*.tif") print tilelist arcpy.env.workspace = defineGDBpath(['refine', 'masks']) arcpy.env.extent = nibble.inYTC.extent arcpy.env.snapRaster = nibble.inYTC arcpy.env.cellsize = nibble.inYTC arcpy.env.outputCoordinateSystem = nibble.inYTC # mosaic = 'traj_ytc30_2008to2015_mask' masks_gdb = defineGDBpath(['refine', 'masks']) out_name = nibble.inTraj_name + '_msk36and61_temp' outpath = masks_gdb + out_name ##### CreateRasterDataset_management (out_path, out_name, cellsize=30, pixel_type, raster_spatial_reference, number_of_bands) arcpy.CreateRasterDataset_management(masks_gdb, out_name, 30, "8_BIT_UNSIGNED", nibble.inTraj.spatialReference, 1, "", "", "", "", "") ##### Mosaic_management (inputs, target, {mosaic_type}, {colormap}, {background_value}, {nodata_value}, {onebit_to_eightbit}, {mosaicking_tolerance}, {MatchingMethod}) arcpy.Mosaic_management(tilelist, outpath, "", "", "", 0, "", "", "") ##### copy raster so it "snaps" to the other datasets -------suboptimal ##### CopyRaster_management (in_raster, out_rasterdataset, {config_keyword}, {background_value}, {nodata_value}, {onebit_to_eightbit}, {colormap_to_RGB}, {pixel_type}, {scale_pixel_value}, {RGB_to_Colormap}, {format}, {transform}) arcpy.CopyRaster_management(outpath, nibble.inTraj_name + '_msk36and61') ##### delete the initial raster arcpy.Delete_management(outpath)
def CreateEmptyRaster():
    """Create an empty 1-band 8-bit TIFF placeholder raster and return its path.

    Used primarily to provide placeholders in multi-band rasters where the
    data for a band is missing.  Relies on the module-level ``scratchFolder``.

    FIX: the original had a bare ``except:`` whose handler returned the exact
    same path as the success branch -- the duplicated return is collapsed to a
    single one, and the (deliberate) best-effort swallow is documented.
    """
    out_path = scratchFolder + "\\EmptyRaster.tif"
    try:
        arcpy.CreateRasterDataset_management(
            scratchFolder, "EmptyRaster.tif", "#", "8_BIT_UNSIGNED", "#", "1",
            "#", "NONE", "128 128", "LZ77", "#")
    except Exception:
        # Best effort: creation typically fails because the raster already
        # exists; either way the caller gets the expected path.
        pass
    return out_path
def createRD(path, fd):
    """Create raster dataset ``fd`` under workspace ``path`` unless it exists.

    Returns either the original ``fd`` (when it already exists) or the arcpy
    Result object from CreateRasterDataset_management.
    """
    import arcpy
    import os
    from arcpy import env

    env.overwriteOutput = True
    env.workspace = path

    if not arcpy.Exists(fd):
        # Rebind fd to the geoprocessing result so the caller gets the
        # newly created dataset.
        fd = arcpy.CreateRasterDataset_management(path, fd)
    else:
        print(fd + ' dataset already exists!')
    return fd
def evt_merge(x):
    # Merge LANDFIRE 200 EVT with 140 EVT coverage for watershed ``x`` and
    # attach a joined attribute table to the mosaicked output.
    # NOTE(review): relies on module-level names pf_path, evt_out_name_with_ext,
    # outCS, proj_evt140, proj_evt200 and lu_code defined elsewhere in the file.
    print 'Starting EVT merge for ' + x
    # Input and intermediate dataset paths inside the watershed folder.
    hu8 = os.path.join(pf_path, x, 'NHD/WBDHU8.shp')
    evt_new = os.path.join(pf_path, x, 'LANDFIRE/LANDFIRE_200EVT.tif')
    evt_old = os.path.join(pf_path, x, 'LANDFIRE/LANDFIRE_140EVT.tif')
    evt_erase = os.path.join(pf_path, x, 'LANDFIRE/EVT_2016_Erase.shp')
    evt_clip_name = os.path.join(pf_path, x, 'LANDFIRE/EVT_2014_clip.tif')
    evt_new_shp = os.path.join(pf_path, x, 'LANDFIRE/LANDFIRE_200EVT.shp')
    print ' Converting LANDFIRE 200EVT to shapefile...'
    arcpy.RasterToPolygon_conversion(evt_new, evt_new_shp)
    print ' Erasing LANDFIRE 200EVT coverage from watershed boundary & buffering...'
    # Area of the watershed NOT covered by the newer 200 EVT data.
    arcpy.Erase_analysis(hu8, evt_new_shp, evt_erase)
    evt_erase_buffer = os.path.join(pf_path, x, 'LANDFIRE/EVT_2016_Erase_30m.shp')
    # 30 m buffer (one cell) so the older data slightly overlaps the newer.
    arcpy.Buffer_analysis(evt_erase, evt_erase_buffer, "30 Meters")
    print ' Clipping LANDFIRE 140EVT to watershed area not covered by 200 EVT...'
    clip = arcpy.Clip_management(in_raster=evt_old, rectangle=None, out_raster=evt_clip_name,
                                 in_template_dataset=evt_erase_buffer, nodata_value='',
                                 clipping_geometry='ClippingGeometry')
    print ' Creating dataset to store output...'
    evt_output = arcpy.CreateRasterDataset_management(os.path.join(pf_path, x, 'LANDFIRE'),
                                                      evt_out_name_with_ext, None,
                                                      '16_BIT_SIGNED', outCS, 1)
    print ' Merging LANDFIRE EVT rasters...'
    # "LAST" means the newer 200 EVT wins wherever the inputs overlap.
    arcpy.Mosaic_management(inputs=[clip, evt_new], target=evt_output, mosaic_type="LAST")
    print ' Creating attribute table for merged output...'
    arcpy.BuildRasterAttributeTable_management(evt_output)
    # Join the 140 and 200 EVT lookup tables; the second join suffixes
    # duplicate field names with "_1".
    arcpy.JoinField_management(evt_output, 'Value', proj_evt140, 'VALUE')
    arcpy.JoinField_management(evt_output, 'Value', proj_evt200, 'VALUE')
    # Backfill blank (single-space) attribute values from the sibling column
    # that came from the other vintage's lookup table.
    with arcpy.da.UpdateCursor(evt_output, field_names = ['EVT_NAME', 'CLASSNAME', 'EVT_CLASS', 'EVT_CLAS_1', 'EVT_PHYS', 'EVT_PHYS_1']) as cursor:
        for row in cursor:
            if row[0] == ' ':
                row[0] = row[1]
            else:
                pass
            if row[1] == ' ':
                row[1] = row[0]
            else:
                pass
            if row[2] == ' ':
                row[2] = row[3]
            else:
                pass
            if row[4] == ' ':
                row[4] = row[5]
            else:
                pass
            cursor.updateRow(row)
    # Post-process land-use codes on the merged raster (defined elsewhere).
    lu_code.main(evt_output)
def mosiacRasters(nibble): ######Description: mosiac tiles together into a new raster tilelist = glob.glob("C:/Users/Bougie/Desktop/Gibbs/tiles/*.tif") print tilelist #### Note: Need to set the environment for the CopyRaster_management() function or will have misallignemnt!! # arcpy.env.workspace = data['refine']['gdb'] #### need to wrap these paths with Raster() fct or complains about the paths being a string inYTC = Raster(nibble.inYTC) inTraj = Raster(nibble.inTraj) arcpy.env.extent = inYTC.extent arcpy.env.snapRaster = inYTC arcpy.env.cellsize = inYTC arcpy.env.outputCoordinateSystem = inYTC # mosaic = 'traj_ytc30_2008to2015_mask' masks_gdb = defineGDBpath(data['refine']['gdb']) ##sub-optimal need to create this temp dataset or and then copy are remove the dataset otherwise are not alligned out_name = nibble.inTraj_name + '_msk36and61_temp' outpath = masks_gdb + out_name ##### CreateRasterDataset_management (out_path, out_name, cellsize=30, pixel_type, raster_spatial_reference, number_of_bands) arcpy.CreateRasterDataset_management(masks_gdb, out_name, 30, "8_BIT_UNSIGNED", inTraj.spatialReference, 1, "", "", "", "", "") ##### Mosaic_management (inputs, target, {mosaic_type}, {colormap}, {background_value}, {nodata_value}, {onebit_to_eightbit}, {mosaicking_tolerance}, {MatchingMethod}) # arcpy.Mosaic_management(tilelist, outpath, "", "", "", 0, "", "", "") ##### copy raster so it "snaps" to the other datasets -------suboptimal ##### CopyRaster_management (in_raster, out_rasterdataset, {config_keyword}, {background_value}, {nodata_value}, {onebit_to_eightbit}, {colormap_to_RGB}, {pixel_type}, {scale_pixel_value}, {RGB_to_Colormap}, {format}, {transform}) # arcpy.CopyRaster_management(Raster(outpath), nibble.inTraj_name+'_msk36and61') arcpy.CopyRaster_management(outpath, nibble.inTraj_name + '_msk_new', "", "", "256", "NONE", "NONE", "", "NONE", "NONE", "", "NONE")
def bps_merge(x):
    # Merge LANDFIRE 200 BPS with 140 BPS coverage for watershed ``x`` and
    # attach a joined attribute table to the mosaicked output.  Mirrors
    # evt_merge() but for the Biophysical Settings (BPS) products.
    # NOTE(review): relies on module-level names pf_path, bps_out_name_with_ext,
    # outCS, proj_bps140 and proj_bps200 defined elsewhere in the file.
    print 'Starting BPS merge for ' + x
    # Input and intermediate dataset paths inside the watershed folder.
    hu8 = os.path.join(pf_path, x, 'NHD/WBDHU8.shp')
    bps_new = os.path.join(pf_path, x, 'LANDFIRE/LANDFIRE_200BPS.tif')
    bps_old = os.path.join(pf_path, x, 'LANDFIRE/LANDFIRE_140BPS.tif')
    bps_erase = os.path.join(pf_path, x, 'LANDFIRE/BPS_2016_Erase.shp')
    bps_clip_name = os.path.join(pf_path, x, 'LANDFIRE/BPS_2014_clip.tif')
    bps_new_shp = os.path.join(pf_path, x, 'LANDFIRE/LANDFIRE_200BPS.shp')
    print ' Converting LANDFIRE 200BPS to shapefile...'
    arcpy.RasterToPolygon_conversion(bps_new, bps_new_shp)
    print ' Erasing LANDFIRE 200BPS coverage from watershed boundary & buffering...'
    # Area of the watershed NOT covered by the newer 200 BPS data.
    arcpy.Erase_analysis(hu8, bps_new_shp, bps_erase)
    bps_erase_buffer = os.path.join(pf_path, x, 'LANDFIRE/BPS_2016_Erase_30m.shp')
    # 30 m buffer (one cell) so the older data slightly overlaps the newer.
    arcpy.Buffer_analysis(bps_erase, bps_erase_buffer, "30 Meters")
    print ' Clipping LANDFIRE 140BPS to watershed area not covered by 200BPS...'
    clip = arcpy.Clip_management(in_raster=bps_old, rectangle=None, out_raster=bps_clip_name,
                                 in_template_dataset=bps_erase_buffer, nodata_value='',
                                 clipping_geometry='ClippingGeometry')
    print ' Creating dataset to store output...'
    bps_output = arcpy.CreateRasterDataset_management(os.path.join(pf_path, x, 'LANDFIRE'),
                                                      bps_out_name_with_ext, None,
                                                      '16_BIT_SIGNED', outCS, 1)
    print ' Merging LANDFIRE BPS rasters...'
    # "LAST" means the newer 200 BPS wins wherever the inputs overlap.
    arcpy.Mosaic_management(inputs=[clip, bps_new], target=bps_output, mosaic_type="LAST")
    print ' Creating attribute table for merged output...'
    arcpy.BuildRasterAttributeTable_management(bps_output)
    # Join the 140 and 200 BPS lookup tables; the second join suffixes
    # duplicate field names with "_1".
    arcpy.JoinField_management(bps_output, 'Value', proj_bps140, 'VALUE')
    arcpy.JoinField_management(bps_output, 'Value', proj_bps200, 'VALUE')
    # Backfill blank (single-space) attribute values from the sibling column
    # that came from the other vintage's lookup table.
    with arcpy.da.UpdateCursor(bps_output, field_names = ['BPS_NAME', 'BPS_NAME_1', 'GROUPVEG', 'GROUPVEG_1']) as cursor:
        for row in cursor:
            if row[0] == ' ':
                row[0] = row[1]
            else:
                pass
            if row[1] == ' ':
                row[1] = row[0]
            else:
                pass
            if row[2] == ' ':
                row[2] = row[3]
            else:
                pass
            cursor.updateRow(row)
#coding:UTF-8
# Create an empty raster dataset named "qqq" inside the chen.gdb file
# geodatabase.
import sys
reload(sys)  # Py2 idiom (usually followed by sys.setdefaultencoding)
import arcpy
from arcpy import env

out_dataset_path = "D:\\bysj\\file\\chen.gdb"
out_name = "qqq"

# BUG FIX: CreateRasterDataset_management requires pixel_type and
# number_of_bands; the original two-argument call fails at runtime.  The
# stray geometry_type="POLYGON" variable belonged to a CreateFeatureclass
# call, was never used here, and has been removed.
arcpy.CreateRasterDataset_management(out_dataset_path, out_name, "",
                                     "8_BIT_UNSIGNED", "", "1")
def main(version='10p4'):
    """Generate Layer Files for each Landsat image from templates

    Args:
        version (str): ArcGIS major version number. Choices: 10p3 or 10p4
    """
    # Products for which a template .lyr file is expected in template_ws.
    product_list = [
        'albedo_sur',
        'cloud_score',
        'evi_sur',
        'fmask',
        'ndvi_sur',
        'ndvi_toa',
        'ndwi_green_nir_sur',
        'ndwi_green_swir1_sur',
        'ndwi_nir_swir1_sur',
        'refl_sur',
        # 'tasseled_cap',
        'ts'
    ]

    # Search for Landsat images to build templates for in the input workspace
    # Code currently is assuming images are in separate folders for each year
    input_ws = r'..\example\images\example\landsat'
    # Set a different raster workspace in the layers
    layer_ws = r'..\example\images\example\landsat'
    # Save layer files to the output workspace
    # (separate folders for each year)
    output_ws = r'..\example\images\example\layers'

    # Folder where the template layers are stored
    template_ws = r'..\layers\{}'.format(version)
    template_lyr_fmt = 'template.{}.tif.lyr'

    # Need a temporary folder because of a bug in replaceDataSource
    temp_ws = tempfile.mkdtemp()
    logging.debug('\nTemp folder: {}'.format(temp_ws))
    if not os.path.isdir(temp_ws):
        os.makedirs(temp_ws)

    # Check that templates exists
    # NOTE(review): missing templates are only logged here -- execution
    # continues and the per-image loop below will fail later for them.
    for product in product_list:
        template_path = os.path.join(template_ws, template_lyr_fmt.format(product))
        if not os.path.isfile(template_path):
            logging.error(
                '\nERROR: The {} template layer does not exist\n {}'.format(
                    product, template_path))

    # Process each year separately
    for year in os.listdir(input_ws):
        logging.info('\n{}'.format(year))
        input_year_ws = os.path.join(input_ws, str(year))
        layer_year_ws = os.path.join(layer_ws, str(year))
        output_year_ws = os.path.join(output_ws, str(year))
        if not os.path.isdir(input_year_ws):
            continue
        if not os.path.isdir(output_year_ws):
            os.makedirs(output_year_ws)
        for item in os.listdir(input_year_ws):
            if not item.endswith('.tif'):
                continue
            # Match the image to its product by filename suffix.
            for product in product_list:
                # logging.debug('{}'.format(product))
                if not item.endswith(product + '.tif'):
                    continue
                logging.info('{}'.format(item))
                template_path = os.path.join(template_ws, template_lyr_fmt.format(product))
                layer_path = os.path.join(output_year_ws, item.replace('.tif', '.lyr'))
                logging.debug(' Template: {}'.format(template_path))
                logging.debug(' Layer: {}'.format(layer_path))
                # There is a bug in replaceDataSource (ArcGIS 10.3)
                # There is a problem with file names that have extra dots (".")
                # that causes replaceDataSource to defaults to the 1st raster
                # in the workspace.
                # To get around this, I am creating one temp raster with the
                # same name as the target raster
                temp_path = os.path.join(temp_ws, item)
                if arcpy.Exists(temp_path):
                    arcpy.Delete_management(temp_path)
                # Minimal 1x1-cell 8-bit placeholder raster.
                arcpy.CreateRasterDataset_management(temp_ws, item, "1", "8_BIT_UNSIGNED", "", "1")
                # Open the template layer
                lyr = arcpy.mapping.Layer(template_path)
                # First set the DataSource to the temp folder raster
                lyr.replaceDataSource(temp_ws, 'RASTER_WORKSPACE', item, False)
                # Then change the workspace to the correct workspace
                lyr.findAndReplaceWorkspacePath(
                    os.path.dirname(lyr.datasetName), layer_year_ws, False)
                lyr.name = item
                lyr.saveACopy(layer_path)
                # Delete the temp raster
                arcpy.Delete_management(temp_path)
                del lyr

    # Try to remove the temp folder
    try:
        shutil.rmtree(temp_ws)
    except:
        pass
def main(maxlf_dir=str(), min_lf=float(), prj_name=str(), unit=str(), version=str()):
    """ delineate optimum plantings
    required input variables:
    min_lf = minimum plant lifespan where plantings are considered
    prj_name = "TBR" # (corresponding to folder name)
    prj_name = "BartonsBar" (for example)
    unit = "us" or "si"
    version = "v10" # type() = 3-char str: vII

    Returns the raster output directory on success, -1 on failure.
    NOTE(review): relies on module-level config, cDef, fGl, arcpy and the
    spatial-analyst functions Con/IsNull/Float imported elsewhere.
    """
    logger = logging.getLogger("logfile")
    logger.info("PLACE OPTIMUM PLANT SPECIES ----- ----- ----- -----")
    features = cDef.FeatureDefinitions(False)  # read feature IDs (required to identify plants)
    # Unit handling: "us" reports acres from square feet; otherwise square
    # meters pass through unconverted.
    if unit == "us":
        area_units = "SQUARE_FEET_US"
        ft2_to_acres = config.ft2ac
    else:
        area_units = "SQUARE_METERS"
        ft2_to_acres = 1.0

    arcpy.CheckOutExtension('Spatial')
    arcpy.gp.overwriteOutput = True
    path2pp = config.dir2pm + prj_name + "_" + version + "\\"
    # folder settings
    ras_dir = path2pp + "Geodata\\Rasters\\"
    shp_dir = path2pp + "Geodata\\Shapefiles\\"
    quant_dir = path2pp + "Quantities\\"
    fGl.del_ovr_files(path2pp)  # Delete temporary raster calculator files
    # file settings
    xlsx_target = path2pp + prj_name + "_assessment_" + version + ".xlsx"

    # Collect per-species MaxLifespan rasters; also pick up the combined
    # "max ... plant" raster used as the lifespan threshold surface.
    action_ras = {}
    try:
        logger.info("Looking up MaxLifespan Rasters ...")
        arcpy.env.workspace = maxlf_dir
        action_ras_all = arcpy.ListRasters()
        logger.info(" >> Source directory: " + maxlf_dir)
        arcpy.env.workspace = path2pp + "Geodata\\"
        for aras in action_ras_all:
            for plant in features.id_list_plants:
                if plant in str(aras):
                    logger.info(" -- found: " + maxlf_dir + str(aras))
                    action_ras.update({aras: arcpy.Raster(maxlf_dir + aras)})
            if ("max" in str(aras)) and ("plant" in str(aras)):
                max_lf_plants = arcpy.Raster(maxlf_dir + aras)
        logger.info(" -- OK (read Rasters)\n")
    except:
        logger.info("ERROR: Could not find action Rasters.")
        return -1

    # CONVERT PROJECT SHAPEFILE TO RASTER
    try:
        logger.info("Converting Project Shapefile to Raster ...")
        arcpy.env.workspace = shp_dir
        arcpy.PolygonToRaster_conversion("ProjectArea.shp", "AreaCode", ras_dir + "ProjectArea.tif",
                                         cell_assignment="CELL_CENTER", priority_field="NONE", cellsize=1)
        logger.info(" -- OK. Loading project raster ...")
        arcpy.env.workspace = path2pp + "Geodata\\"
        prj_area = arcpy.Raster(ras_dir + "ProjectArea.tif")
        logger.info(" -- OK (Shapefile2Raster conversion)\n")
    except arcpy.ExecuteError:
        logger.info("ExecuteERROR: (arcpy).")
        logger.info(arcpy.GetMessages(2))
        arcpy.AddError(arcpy.GetMessages(2))
        return -1
    except Exception as e:
        logger.info("ExceptionERROR: (arcpy).")
        logger.info(e.args[0])
        arcpy.AddError(e.args[0])
        return -1
    except:
        logger.info("ExceptionERROR: (arcpy) Conversion failed.")
        return -1

    # CONVERT EXISTING PLANTS SHAPEFILE TO RASTER
    try:
        logger.info("Converting PlantExisting.shp Shapefile to Raster ...")
        arcpy.env.workspace = shp_dir
        arcpy.PolygonToRaster_conversion(shp_dir + "PlantExisting.shp", "gridcode",
                                         ras_dir + "PlantExisting.tif",
                                         cell_assignment="CELL_CENTER", priority_field="NONE", cellsize=1)
        arcpy.env.workspace = path2pp + "Geodata\\"
        logger.info(" -- OK (Shapefile2Raster conversion)\n")
    except arcpy.ExecuteError:
        logger.info("ExecuteERROR: (arcpy).")
        logger.info(arcpy.GetMessages(2))
        arcpy.AddError(arcpy.GetMessages(2))
        # Fallback: create an empty placeholder raster so the load below works.
        arcpy.CreateRasterDataset_management(ras_dir, "PlantExisting.tif", "1", "8_BIT_UNSIGNED",
                                             "World_Mercator.prj", "3", "", "PYRAMIDS -1 NEAREST JPEG",
                                             "128 128", "NONE", "")
    except Exception as e:
        logger.info("ExceptionERROR: (arcpy).")
        logger.info(e.args[0])
        arcpy.AddError(e.args[0])
    except:
        logger.info("WARNING: PlantExisting.shp is corrupted or non-existent.")
    logger.info(" >> Loading existing plant raster ...")
    existing_plants = arcpy.Raster(ras_dir + "PlantExisting.tif")

    # RETAIN RELEVANT PLANTINGS ONLY
    shp_4_stats = {}
    try:
        logger.info("Analyzing optimum plant types in project area ...")
        logger.info(" >> Cropping maximum lifespan Raster ... ")
        arcpy.env.extent = prj_area.extent
        # Lifespan surface limited to the project area, excluding cells that
        # already carry existing plants.
        max_lf_crop = Con((~IsNull(prj_area) & ~IsNull(max_lf_plants)),
                          Con(IsNull(existing_plants), Float(max_lf_plants)))
        logger.info(" >> Saving crop ... ")
        max_lf_crop.save(ras_dir + "max_lf_pl_c.tif")
        logger.info(" -- OK ")
        # Tracks pixels already claimed by an earlier species so later species
        # only fill the remainder.
        occupied_px_ras = ""
        for aras in action_ras.keys():
            plant_ras = action_ras[aras]
            # Normalize names with/without the .tif extension.
            if not ('.tif' in str(aras)):
                aras_tif = str(aras) + '.tif'
                aras_no_end = aras
            else:
                aras_tif = aras
                aras_no_end = aras.split('.tif')[0]
            logger.info(" >> Applying MaxLifespan Raster({}) where lifespan > {} years.".format(str(plant_ras), str(min_lf)))
            # Keep this species only where it lies in the project area and the
            # maximum lifespan meets the min_lf threshold.
            __temp_ras__ = Con((~IsNull(prj_area) & ~IsNull(plant_ras)),
                               Con((Float(max_lf_plants) >= min_lf), (max_lf_plants * plant_ras)))
            if arcpy.Exists(occupied_px_ras):
                logger.info(" >> Reducing to relevant pixels only ... ")
                __temp_ras__ = Con((IsNull(occupied_px_ras) & IsNull(existing_plants)), __temp_ras__)
                occupied_px_ras = Con(~IsNull(occupied_px_ras), occupied_px_ras, __temp_ras__)
            else:
                occupied_px_ras = __temp_ras__
                __temp_ras__ = Con(IsNull(existing_plants), __temp_ras__)
            logger.info(" >> Saving raster ... ")
            __temp_ras__.save(ras_dir + aras_tif)
            logger.info(" >> Converting to shapefile (polygon for area statistics) ... ")
            try:
                shp_ras = Con(~IsNull(__temp_ras__), 1, 0)
                arcpy.RasterToPolygon_conversion(shp_ras, shp_dir + aras_no_end + ".shp", "NO_SIMPLIFY")
            except:
                logger.info(" !! " + aras_tif + " is not suitable for this project.")
                arcpy.env.workspace = maxlf_dir
            logger.info(" >> Calculating area statistics ... ")
            try:
                arcpy.AddField_management(shp_dir + aras_no_end + ".shp", "F_AREA", "FLOAT", 9)
            except:
                logger.info(" * field F_AREA already exists ")
            try:
                arcpy.CalculateGeometryAttributes_management(shp_dir + aras_no_end + ".shp",
                                                             geometry_property=[["F_AREA", "AREA"]],
                                                             area_unit=area_units)
                shp_4_stats.update({aras: shp_dir + aras_no_end + ".shp"})
            except:
                # Species not applicable here; point stats at an empty dummy.
                shp_4_stats.update({aras: config.dir2pm + ".templates\\area_dummy.shp"})
                logger.info(" !! Omitting (not applicable) ...")
        arcpy.env.workspace = path2pp + "Geodata\\"
        logger.info(" -- OK (Shapefile and raster analyses)\n")
        logger.info("Calculating area statistics of plants to be cleared for construction ...")
        try:
            arcpy.AddField_management(shp_dir + "PlantClearing.shp", "F_AREA", "FLOAT", 9)
        except:
            logger.info(" * cannot add field F_AREA to %s (already exists?)" % str(shp_dir + "PlantClearing.shp"))
        try:
            arcpy.CalculateGeometryAttributes_management(shp_dir + "PlantClearing.shp",
                                                         geometry_property=[["F_AREA", "AREA"]],
                                                         area_unit=area_units)
            shp_4_stats.update({"clearing": shp_dir + "PlantClearing.shp"})
        except:
            shp_4_stats.update({"clearing": config.dir2pm + ".templates\\area_dummy.shp"})
            logger.info(" * no clearing applicable ")
        logger.info(" -- OK (Statistic calculation)\n")
    except arcpy.ExecuteError:
        logger.info("ExecuteERROR: (arcpy).")
        logger.info(arcpy.GetMessages(2))
        arcpy.AddError(arcpy.GetMessages(2))
        return -1
    except Exception as e:
        logger.info("ExceptionERROR: (arcpy).")
        logger.info(e.args[0])
        arcpy.AddError(e.args[0])
        return -1
    except:
        logger.info("ExceptionERROR: (arcpy) Conversion failed.")
        return -1

    # CLEAN UP useless shapefiles
    logger.info("Cleaning up redundant shapefiles ...")
    arcpy.env.workspace = shp_dir
    all_shps = arcpy.ListFeatureClasses()
    for shp in all_shps:
        if "_del" in str(shp):
            try:
                arcpy.Delete_management(shp)
            except:
                logger.info(str(shp) + " is locked. Remove manually to avoid confusion.")
    arcpy.env.workspace = path2pp + "Geodata\\"
    logger.info(" -- OK (Clean up)\n")

    # EXPORT STATISTIC TABLES
    logger.info("Exporting table statistics ...")
    stat_files = {}
    for ts in shp_4_stats.keys():
        try:
            logger.info(" >> Exporting " + str(shp_4_stats[ts]) + " area ...")
            arcpy.TableToTable_conversion(shp_4_stats[ts], quant_dir, "plant_" + ts + ".txt")
            stat_files.update({ts: quant_dir + "plant_" + ts + ".txt"})
        except:
            logger.info(" !! EXPORT FAILED (empty %s ?)" % str(ts))
    logger.info(" -- OK (Table export)\n")
    arcpy.CheckInExtension('Spatial')

    # PREPARE AREA DATA (QUANTITIES)
    logger.info("Processing table statistics ...")
    write_dict = {}
    for sf in stat_files.keys():
        stat_data = fGl.read_txt(stat_files[sf])
        logger.info(" --> Extracting relevant area ...")
        polygon_count = 0
        total_area_ft2 = 0.0
        # Only rows with gridcode 1 carry planting polygons; sum their areas.
        for row in stat_data:
            if row[0] == 1:
                total_area_ft2 += row[1]
                polygon_count += 1
        write_dict.update({sf: total_area_ft2 * float(ft2_to_acres)})
        logger.info(" --> OK")
    logger.info(" -- OK (Area extraction finished).")

    # WRITE AREA DATA TO EXCEL FILE
    logger.info("Writing results ...")
    fGl.write_dict2xlsx(write_dict, xlsx_target, "B", "C", 4)
    logger.info(" -- OK (PLANT PLACEMENT FINISHED)\n")
    return ras_dir
def bathymetricGradient(workspace, snapGrid, hucPoly, hydrographyArea,
                        hydrographyFlowline, hydrographyWaterbody, cellsize, version=None):
    '''Generates the input datasets from hydrography features for enforcing a bathymetic gradient in hydroDEM (bowling).

    Parameters
    ----------
    workspace : str
        Path to the geodatabase workspace.
    snapGrid : str
        Path to the raster snap grid used for the project.
    hucPoly : str
        Path to the bounding polygon for the local folder for which inputs are generated.
    hydrographyArea : str
        Path to the double line stream features.
    hydrographyFlowline : str
        Path to the flowline features.
    hydrographyWaterbody : str
        Path to the waterbody features.
    cellsize : str
        Output cell size to use for rasterization.
    version : str (optional)
        Package version number.

    Returns
    -------
    hydro_flowlines : raster
        Grid representation of flowlines.
    hydro_areas : raster
        Grid representation of double line streams and flowlines.

    Notes
    -----
    Outputs are written to the workspace.
    '''
    if version:
        arcpy.AddMessage('StreamStats Data Preparation Tools version: %s'%(version))

    arcpy.env.overwriteOutput = True  # Set script to overwrite if files exist
    arcpy.AddMessage("Starting Bathymetric Gradient Preparations....")

    # Set the Geoprocessing environment...
    arcpy.env.scratchWorkspace = workspace
    arcpy.env.workspace = workspace

    # test if input files are present
    inputFiles = [snapGrid, hucPoly, hydrographyArea, hydrographyFlowline, hydrographyWaterbody]
    for fl in inputFiles:
        if arcpy.Exists(fl) == False:
            arcpy.AddMessage('%s missing.'%fl)
            arcpy.AddMessage('Please supply required input. Stopping program.')
            sys.exit(0)

    # Setup local variables and temporary layer files
    arcpy.AddMessage("Setting up variables...")
    #temporary features
    nhd_flow_feat = "nhd_flow"
    nhd_flow_Layer = "nhd_flow_Layer"
    nhd_area_feat = "nhd_area"
    nhd_area_Layer = "nhd_area_Layer"
    nhd_wb_feat = "nhd_wb"
    nhd_wb_Layer = "nhd_wb_Layer"

    #Output rastsers
    wbtempraster = os.path.join(arcpy.env.workspace,"nhdwb_tmp")
    areatempraster = os.path.join(arcpy.env.workspace,"nhdarea_tmp")
    mosaiclist = wbtempraster + ";" + areatempraster
    outraster1 = "hydro_flowlines"
    outraster2 = "hydro_areas"

    #convert to temporary shapefiles
    # Each copy gets a constant "dummy" field = 1 used as the raster value.
    arcpy.FeatureClassToFeatureClass_conversion(hydrographyArea, arcpy.env.workspace, nhd_area_feat)
    arcpy.AddField_management(nhd_area_feat,"dummy","SHORT",None,None,None,None,"NULLABLE","NON_REQUIRED",None)
    arcpy.CalculateField_management(nhd_area_feat,"dummy","1", "PYTHON")
    arcpy.FeatureClassToFeatureClass_conversion(hydrographyWaterbody, arcpy.env.workspace, nhd_wb_feat)
    arcpy.AddField_management(nhd_wb_feat,"dummy","SHORT",None,None,None,None,"NULLABLE","NON_REQUIRED",None)
    arcpy.CalculateField_management(nhd_wb_feat,"dummy","1", "PYTHON")
    arcpy.FeatureClassToFeatureClass_conversion(hydrographyFlowline, arcpy.env.workspace, nhd_flow_feat)
    arcpy.AddField_management(nhd_flow_feat,"dummy","SHORT",None,None,None,None,"NULLABLE","NON_REQUIRED",None)
    arcpy.CalculateField_management(nhd_flow_feat,"dummy","1", "PYTHON")

    try:
        #hydrographyArea Processing
        arcpy.AddMessage("Creating temporary selection layers...")
        arcpy.MakeFeatureLayer_management(nhd_area_feat, nhd_area_Layer, "FType = 460", "", "")

        #hydrographyWaterbody Processing
        arcpy.MakeFeatureLayer_management(nhd_wb_feat, nhd_wb_Layer, "FType = 390 OR FType = 361", "", "")

        #hydrographyFlowline Processing
        # Select flowlines inside waterbodies or double-line stream areas.
        arcpy.MakeFeatureLayer_management(nhd_flow_feat, nhd_flow_Layer, "", "", "")
        arcpy.SelectLayerByLocation_management(nhd_flow_Layer, "WITHIN", nhd_wb_Layer, "", "NEW_SELECTION")
        arcpy.SelectLayerByLocation_management(nhd_flow_Layer, "WITHIN", nhd_area_Layer, "", "ADD_TO_SELECTION")
    except:
        arcpy.AddMessage(arcpy.GetMessages())

    #get snap grid cell size
    dsc_snap = arcpy.Describe(snapGrid)
    snap_cellsize = dsc_snap.MeanCellWidth

    # Set raster processing parameters
    arcpy.AddMessage("Processing rasters...")
    dsc = arcpy.Describe(hucPoly)
    extent = str(dsc.extent)
    arcpy.env.cellSize = snap_cellsize
    arcpy.env.mask = snapGrid
    arcpy.env.extent = SnapExtent(extent, snapGrid)

    # Process: Feature to Raster1 - NHD Area...
    # On failure, create an empty placeholder so the mosaic step still runs.
    try:
        arcpy.SelectLayerByLocation_management(nhd_area_Layer, "INTERSECT", nhd_flow_Layer, "0", "NEW_SELECTION")
        arcpy.FeatureToRaster_conversion(nhd_area_Layer, "dummy", areatempraster, cellsize)
    except:
        arcpy.CreateRasterDataset_management(arcpy.env.workspace,"nhdarea_tmp","10","8_BIT_UNSIGNED",snapGrid)
        arcpy.AddMessage(arcpy.GetMessages())

    # Process: Feature to Raster2 - NHD Waterbody...
    try:
        arcpy.SelectLayerByLocation_management(nhd_wb_Layer, "INTERSECT", nhd_flow_Layer, "0", "NEW_SELECTION")
        arcpy.FeatureToRaster_conversion(nhd_wb_Layer, "dummy", wbtempraster, cellsize)
    except:
        arcpy.CreateRasterDataset_management(arcpy.env.workspace,"nhdwb_tmp","10","8_BIT_UNSIGNED",snapGrid)
        arcpy.AddMessage(arcpy.GetMessages())

    # Process: Feature to Raster3 - NHD Flowline. This is the first output
    try:
        arcpy.FeatureToRaster_conversion(nhd_flow_Layer, "dummy", outraster1, cellsize)
    except:
        arcpy.AddMessage(arcpy.GetMessages())

    # Process: Mosaic NHD Area and NHD Waterbody rasters To New Raster. This is the second output
    try:
        arcpy.MosaicToNewRaster_management(mosaiclist, workspace, outraster2, "", "8_BIT_UNSIGNED", "", "1", "BLEND", "FIRST")
    except:
        arcpy.AddMessage(arcpy.GetMessages())

    #Delete temp files and rasters
    arcpy.AddMessage("Cleaning up...")
    for fl in [areatempraster,wbtempraster,nhd_wb_feat,nhd_flow_feat,nhd_area_feat]:
        if arcpy.Exists(fl):
            arcpy.Delete_management(fl)
    arcpy.AddMessage("Done!")
## {NONE | STATISTIC_MATCHING | HISTOGRAM_MATCHING ## | LINEARCORRELATION_MATCHING} try: import arcpy arcpy.env.workspace = r"D:\77211356\CropClass\results" ##Create a empty TIFF format Raster Dataset with the following parameters ##Cellsize: 2 ##Pixel type: 8 Bit Unsigned Integer ##Number of Bands: 3 ##Pyramid: Build full pyramids with NEAREST interpolation and JPEG compression ##Compression: NONE ##Projection: World_Mercator ##Tile size: 128 128 arcpy.CreateRasterDataset_management("CreateRD","EmptyTIFF.tif","10","8_BIT_UNSIGNED",\ "World_Mercator.prj", "1", "", "PYRAMIDS -1 NEAREST JPEG",\ "128 128", "NONE", "") ##Mosaic two TIFF images to a single TIFF image ##Background value: 0 ##Nodata value: 9 arcpy.Mosaic_management("EXGBoundry_Clip.tif;rc.tif","EmptyTIFF.tif","LAST","FIRST","0", "0", "", "", "") ##Mosaic several 3-band TIFF images to FGDB Raster Dataset with Color Correction ##Set Mosaic Tolerance to 0.3. Mismatch larget than 0.3 will be resampled #arcpy.Mosaic_management("rgb1.tif;rgb2.tif;rgb3.tif", "Mosaic.gdb\\rgb","LAST","FIRST","", "", "", "0.3", "HISTOGRAM_MATCHING") except: print "Mosaic example failed." print arcpy.GetMessages()
def createEmptyRaster():
    """Create the empty 'ytc_fc_mosaic' raster dataset in the current workspace.

    The new 30 m, single-band, 8-bit unsigned dataset inherits its spatial
    reference from the 2013 CDL source image.
    """
    cdl_img = "D:/gibbs/production/pre/cdl/2013_30m_cdls.img"
    sr = arcpy.Describe(cdl_img).spatialReference
    # arcpy.CreateRasterDataset_management(arcpy.env.workspace, "ytc_fc_mosaic", "8_BIT_UNSIGNED", "1")
    arcpy.CreateRasterDataset_management(arcpy.env.workspace, "ytc_fc_mosaic",
                                         "30", "8_BIT_UNSIGNED", sr, "1")
def SnowKriging(SClist, LstDt, Fld, spr):
    """Krige daily GHCN snowfall point data into rasters, one per date.

    For each date in ``LstDt``, loads the matching CSV from ``SClist`` as an
    XY event layer, copies it into a per-year file GDB, and kriges the
    SNOWFALL field into an .img raster.  Dates whose points are all zero (or
    where kriging fails) get a copy of a constant placeholder raster instead.

    Args:
        SClist: list of CSV paths, parallel to LstDt.
        LstDt: list of ISO date strings ("YYYY-MM-DD").
        Fld: working folder (becomes the arcpy workspace).
        spr: spatial reference for the point layers and placeholder raster.

    Returns:
        List of output raster paths, one per date.
    """
    # Field names in the input CSVs.
    X_pts = "LONLINES"
    Y_pts = "LATLINES"
    ValueField = "SNOWFALL"
    arcpy.env.workspace = Fld
    env.overwriteOutput = True
    RasterLst = []
    for i in range(0, len(LstDt)):
        Yr = str(LstDt[i][0:4])
        Mo = str(LstDt[i][5:7])
        Dy = str(LstDt[i][8:10])
        GHCNName = "GHCNData" + Yr + ".gdb"
        GHCNgdb = os.path.join(Fld, GHCNName)
        Ingdb = "SNOW" + Yr + Mo + Dy
        IngdbPath = os.path.join(GHCNgdb, Ingdb)
        print(Ingdb)
        print(IngdbPath)
        OutLyr = "SNOW" + LstDt[i]
        SaveLyr = "SNOW" + LstDt[i] + ".lyr"
        filecsv = SClist[i]
        print(SClist[i])
        # One file GDB per year holds all of that year's point datasets.
        if not arcpy.Exists(GHCNgdb):
            arcpy.CreateFileGDB_management(Fld, GHCNName)
        GHCNpts = arcpy.MakeXYEventLayer_management(filecsv, X_pts, Y_pts, OutLyr, spr)
        GHCNSNOWPts = arcpy.SaveToLayerFile_management(GHCNpts, SaveLyr)
        SnowPtsGdb = arcpy.CopyFeatures_management(GHCNSNOWPts, IngdbPath)
        print(SnowPtsGdb)
        arcpy.CheckOutExtension("Spatial")
        print("Geo Extension")
        RName = ValueField + LstDt[i] + ".img"
        RPath = os.path.join(Fld, RName)
        NoName = "MIN_FLOAT.img"
        NoRaster = os.path.join(Fld, NoName)
        # Kriging needs at least two distinct snowfall values to fit a model.
        hasValues = False
        with arcpy.da.SearchCursor(SnowPtsGdb, [ValueField]) as cursor:
            LstValue = []
            for row in cursor:
                # BUG FIX: compare and store the same (float) representation.
                # The original tested float(row[0]) against a list of raw
                # values, so string-typed fields never matched and produced
                # duplicate entries / a false hasValues=True.
                if float(row[0]) not in LstValue:
                    LstValue.append(float(row[0]))
                if len(LstValue) > 1:
                    hasValues = True
            # BUG FIX: removed `del row` / `del cursor`, which raised
            # NameError whenever the cursor was empty; the with-block
            # releases the cursor regardless.
        print(LstValue)
        # Placeholder raster used for all-zero or failed dates.
        if not arcpy.Exists(NoRaster):
            arcpy.CreateRasterDataset_management(Fld, NoName, "", "16_BIT_UNSIGNED", spr)
        if hasValues == True:
            print("Attempt Kriging")
            try:
                arcpy.gp.Kriging_sa(SnowPtsGdb, ValueField, RPath,
                                    "Spherical 0.099048", "0.099048", "VARIABLE 12", "")
                RasterLst.append(RPath)
            except:
                # Kriging can fail on degenerate point sets; fall back to the
                # placeholder so this date still produces an output raster.
                arcpy.CopyRaster_management(NoRaster, RPath)
                RasterLst.append(RPath)
                print("Something is Wrong")
        else:
            arcpy.CopyRaster_management(NoRaster, RPath)
            RasterLst.append(RPath)
            print("All Values Were 0")
    print("Kriging Completed")
    return RasterLst
def createEmptyRaster(filename):
    """Create an empty single-band 8-bit raster dataset to hold a mosaic.

    Args:
        filename: name of the new raster dataset inside attributes.gdb.
    """
    target_gdb = "D:/projects/ksu/attributes.gdb"
    arcpy.CreateRasterDataset_management(
        out_path=target_gdb,
        out_name=filename,
        pixel_type="8_BIT_UNSIGNED",
        number_of_bands=1)
# Project this building's DSM raster and, when its cell size is finer than
# 0.06, mosaic it into a coarser "_Low" copy.
# NOTE(review): CityName, BuildingName, inRaster, spatial_reference and env
# are defined earlier in the enclosing scope -- not visible here.
out_Raster = CityName + "_" + BuildingName + "_DSM"
out_Raster_Low = CityName + "_" + BuildingName + "_DSM" + "_Low"
outPath = env.workspace

# Project Raster by coordinate system 3857
arcpy.ProjectRaster_management(inRaster, out_Raster, spatial_reference)
arcpy.AddMessage(out_Raster + " Completed")

Raster_BandCounts = arcpy.GetRasterProperties_management(
    inRaster, "BANDCOUNT")  # Raster Input BandCount
# GetRasterProperties returns a Result object; str() yields the value text.
inRaster_CellSize = str(
    arcpy.GetRasterProperties_management(inRaster, "CELLSIZEX"))
arcpy.AddMessage(inRaster_CellSize)
arcpy.AddMessage(type(inRaster_CellSize))
arcpy.AddMessage(float(inRaster_CellSize))

# Export Raster by new CellSize Compare
if float(inRaster_CellSize) < 0.06:
    # Build an empty 0.06-cell dataset with the same band count, then mosaic
    # the projected raster into it (background and nodata both 0).
    arcpy.CreateRasterDataset_management(outPath, out_Raster_Low, 0.06,
                                         "8_BIT_UNSIGNED", spatial_reference,
                                         Raster_BandCounts)
    arcpy.AddMessage(out_Raster_Low + " Created")
    arcpy.AddMessage(out_Raster_Low + " Mosaic_Start")
    arcpy.Mosaic_management(out_Raster, out_Raster_Low, background_value=0, nodata_value=0)
    arcpy.AddMessage(out_Raster_Low + " Mosaic_End")
    out_Raster_Low_CellSize = arcpy.GetRasterProperties_management(
        out_Raster_Low, "CELLSIZEX")
    arcpy.AddMessage(out_Raster_Low_CellSize)
def SolarMain(workspace, search_query, building_footprints, dsm_surface,
              time_configuration, output_location_parameter, output_root):
    """This function automates the SEEP Solar Model - which yields solar
    installation estimates for buildings in a feature class.

    Per building selected by search_query, it rasterizes the footprint,
    runs PointsSolarRadiation against dsm_surface, reclassifies the solar
    map to find ideal panel area, computes system-size/cost estimates, and
    writes them back to a per-building output feature class. Finally all
    per-building outputs are merged and the scratch workspace is reset.

    workspace                 -- file GDB used for intermediates (deleted and
                                 recreated at the end).
    search_query              -- SQL where-clause selecting buildings.
    building_footprints       -- input polygon feature class; must carry
                                 'Address', 'Area', 'OBJECTID' fields.
    dsm_surface               -- digital surface model raster for radiation.
    time_configuration        -- arcpy.sa time config for PointsSolarRadiation.
    output_location_parameter -- folder/GDB receiving the merged outputs.
    output_root               -- suffix for the merged output names.
    """
    try:
        workspace_location = os.path.dirname(workspace)
        #arcpy.AddMessage(workspace_location)
        fc = building_footprints
        fields = ['Address', 'Area', 'OBJECTID', 'SHAPE@']
        sql_where_clause = search_query
        cell_size = 0.5
        desc = arcpy.Describe(fc)
        sr = desc.spatialReference
        output_location = output_location_parameter + os.sep
        # SCRIPT OUTPUT LIST (for Merge function)
        seep_sol_map_list = []
        # ENVIRONMENT SETTINGS
        arcpy.env.workspace = workspace
        arcpy.env.scratchworkspace = workspace
        arcpy.env.cellSize = cell_size
        arcpy.env.overwriteOutput = True
        arcpy.env.outputCoordinateSystem = sr
        # CHECKOUT EXTENSIONS
        arcpy.CheckOutExtension("Spatial")
        # ===== Main Loop - For each row print address and area of building,
        # based on where clause =====
        cursor = arcpy.da.SearchCursor(fc, fields,
                                       where_clause=(sql_where_clause))
        for row in cursor:
            # -- Initialize function variables
            object_id = str(row[2])
            fc_out = workspace + os.sep + 'SInt_' + object_id
            select_clause = "Address = " + "'" + row[0] + "'"
            out_raster = workspace + os.sep + 'SInt_r' + object_id
            field = fields[2]  # for building height
            # -- SetExtent - around Study area
            extent = desc.extent
            arcpy.env.extent = extent
            # -- Create individual feature in_feature, using Select_analysis()
            arcpy.Select_analysis(fc, fc_out, select_clause)
            # -- Create in_feature
            in_feature = arcpy.Describe(fc_out)
            # -- SetExtent - around building
            extent = in_feature.extent
            arcpy.env.extent = extent
            # -- Get points to run solar radiation functions on - Feature to Raster
            arcpy.FeatureToRaster_conversion(fc_out, field, out_raster,
                                             cell_size)
            # -- Raster to Point around building
            # Initialize function variables
            in_raster = out_raster
            out_point_feature = workspace + os.sep + 'SInt_p' + object_id
            arcpy.RasterToPoint_conversion(in_raster, out_point_feature)
            # -- Run Solar Points - on building rooftop
            # Init solar variables
            in_point_feature = out_point_feature
            out_sol_feature = workspace + os.sep + 'SInt_SolRaw_' + object_id
            diffuse_model_type = ""
            diffuse_proportion = 0.3
            transmittivity = 0.5
            # Extend Extent for Solar Radiation calculations (250 m)
            in_buffer = fc_out
            out_buffer = workspace + os.sep + 'SInt_BExtent_' + object_id
            distance = '250 Meters'
            arcpy.Buffer_analysis(in_buffer, out_buffer, distance)
            # Set new Extent to environment parameters
            buffer_obj = arcpy.Describe(out_buffer)
            arcpy.env.extent = buffer_obj.extent
            arcpy.sa.PointsSolarRadiation(dsm_surface, in_point_feature,
                                          out_sol_feature, "", "", "",
                                          time_configuration, "", "", "", "",
                                          "", "", "", "", diffuse_model_type,
                                          diffuse_proportion, transmittivity,
                                          "", "", "")
            # -- Create Solar Map - Feature to Raster
            # Initialize
            in_sol_map = out_sol_feature
            sol_field = 'T0'  # presumably the total-radiation field — confirm
            out_sol_map = workspace + os.sep + 'SO_SM' + object_id
            # Set Extents around building again
            extent = in_feature.extent
            arcpy.env.extent = extent
            # Execute Function
            arcpy.FeatureToRaster_conversion(in_sol_map, sol_field,
                                             out_sol_map, cell_size)
            # -- Generate suitable solar panel area total (total potential area)
            # See Esri Blog - Solar Siting
            # Initialization
            in_reclass_raster = out_sol_map
            reclass_field = "Value"
            # Reclassify - ideal in class 3
            out_reclass = arcpy.sa.Reclassify(
                in_reclass_raster, reclass_field,
                arcpy.sa.RemapRange([[0.0, 900000.0, 1],
                                     [900000.01, 1000000.0, 2],
                                     [1000000.01, 1500000.0, 3]]))
            # Raster to Polygon (simplify) - using out_reclass as an input
            out_rc_feature = workspace + os.sep + 'SInt_RC_' + object_id
            arcpy.RasterToPolygon_conversion(out_reclass, out_rc_feature)
            # Select from Reclassified polygon - only class 3 for solar panel area
            rc_where_clause = "gridcode = 3"
            out_ideal_sol = workspace + os.sep + 'SOut_Ideal_' + object_id
            arcpy.Select_analysis(out_rc_feature, out_ideal_sol,
                                  rc_where_clause)
            # -- Determine mean solar rad on ideal rooftop location
            # Initialize
            # Check if out_ideal_sol has a feature
            in_zone_data = out_ideal_sol
            # Continue Initialization
            zone_field = "gridcode"
            in_value_raster = out_sol_map
            out_table = workspace + os.sep + 'SInt_IRad_' + object_id
            # Execute: fall back to the full reclass polygon when the
            # class-3 selection is empty / fails.
            try:
                arcpy.sa.ZonalStatisticsAsTable(in_zone_data, zone_field,
                                                in_value_raster, out_table)
            except:
                arcpy.sa.ZonalStatisticsAsTable(out_rc_feature, zone_field,
                                                in_value_raster, out_table)
            # Last row wins if the table has several zones.
            actual_rad_cursor = arcpy.da.SearchCursor(out_table, ['MEAN'])
            actual_rad = 0.0
            for out_table_row in actual_rad_cursor:
                actual_rad = float(out_table_row[0])
            # -- Determine Ideal Rooftop Area - limited to 85% of ideal area
            # (for irregular shapes) - uses Statistics_analysis
            # Initialize
            in_stats = out_ideal_sol
            out_stats = workspace + os.sep + 'SInt_StatA_' + object_id
            statistics_field = [["Shape_Area", "SUM"]]
            # Execute
            arcpy.Statistics_analysis(in_stats, out_stats, statistics_field)
            ideal_rooftop_area = arcpy.da.SearchCursor(out_stats,
                                                       ['Sum_Shape_Area'])
            rooftop_area = 0.0
            for rooftop_row in ideal_rooftop_area:
                rooftop_area = float(rooftop_row[0]) * 0.85
            # -- Calculate System Estimates using SEEP Estimation Model
            # Calculation Constants:
            lifetime = 33.0           # system lifetime, years
            average_sun_hr = 6.7      # average sun hours per day
            cdn_rate = 0.76           # presumably CAD exchange rate — confirm
            dc_ac_ratio = 1.1
            reference_rad = 1000.0    # reference irradiance (W/m^2)
            temp_co = -0.0047         # temperature coefficient per deg C
            temp_ref = 25
            temp_cell = 17
            cost_initial = 0.0
            cost_maint = 0.0
            system_loss = 0.86
            inverter_loss = 0.96
            area_rating_ratio = 168.3  # W of rating per unit rooftop area
            # Variable Calculations
            actual_rad_hr = actual_rad / 365.0 / average_sun_hr
            np_rating = rooftop_area * area_rating_ratio
            #arcpy.AddMessage('System Rating: ' + str(np_rating) + ' W')
            dc_power = (actual_rad_hr / reference_rad) * np_rating * (
                1 + (temp_co * (temp_cell - temp_ref)))
            ac_power = (dc_power / dc_ac_ratio) * (system_loss * inverter_loss)
            # Defining Costs (tiered $/kW rates by system size; ratings of
            # 100 kW or more keep the default 0.0 costs)
            if np_rating < 10000:
                cost_initial = 3000.0 * (np_rating / 1000.0)
            if (np_rating >= 10000) and (np_rating < 100000):
                cost_initial = 2900.0 * (np_rating / 1000.0)
            if np_rating < 10000.0:
                cost_maint = 21.0 * (np_rating / 1000.0)
            if (np_rating >= 10000.0) and (np_rating < 100000.0):
                cost_maint = 19.0 * (np_rating / 1000.0)
            total_system_cost = (cost_initial + cost_maint) / cdn_rate
            power_cost = 0.0
            if ac_power > 0:
                # Prevents divide by zero errors when no AC power is projected
                power_cost = (
                    total_system_cost /
                    (ac_power / 1000)) / lifetime / 365 / average_sun_hr
            #arcpy.AddMessage('AC output: ' + str(ac_power) + ' W')
            #arcpy.AddMessage('System cost: $' + str(total_system_cost))
            #arcpy.AddMessage('Resulting amortized power cost: $' + str(power_cost))
            # -- Return Useful Area & Calculations to Feature Class (fc_out)
            # Initialize
            seep_output = fc_out
            output_fields = [
                'System_Rating', 'AC_Power', 'System_Cost', 'Power_Cost'
            ]
            # Add fields (System rating, AC Power, System Cost, Power Cost)
            # to Output Feature
            arcpy.AddField_management(seep_output, output_fields[0], "FLOAT")
            arcpy.AddField_management(seep_output, output_fields[1], "FLOAT")
            arcpy.AddField_management(seep_output, output_fields[2], "FLOAT")
            arcpy.AddField_management(seep_output, output_fields[3], "FLOAT")
            # Update values in new fields
            # NOTE(review): this `with` rebinds the name `cursor` (the outer
            # SearchCursor) and the later `del` removes it; the for-loop keeps
            # its own iterator so iteration continues, but the reuse is fragile.
            with arcpy.da.UpdateCursor(seep_output, output_fields) as cursor:
                for update_row in cursor:
                    update_row[0] = np_rating
                    update_row[1] = ac_power
                    update_row[2] = total_system_cost
                    update_row[3] = power_cost
                    cursor.updateRow(update_row)
            # END UpdateCursor Loop
            # Save feature class as an output
            output_path = workspace + os.sep
            output_name = 'SOut_Data_' + object_id
            seep_output_fc = output_path + output_name
            arcpy.FeatureClassToFeatureClass_conversion(
                seep_output, output_path, output_name)
            # -- Append Feature Class & Raster List
            #seep_data_list.append(r"" + seep_output_fc)
            seep_sol_map_list.append(out_sol_map)
            #Delete Intermediates
            del extent, in_feature, buffer_obj, out_reclass, actual_rad_cursor, ideal_rooftop_area, cursor
            #arcpy.AddMessage(('Completed: {0}, {1} in {2}'.format(row[0], row[1], sql_where_clause)))
            arcpy.AddMessage('Building analysis completed: ' + object_id)
        #=========================== END MAIN LOOP ==========================
        arcpy.AddMessage('Buildings processed, starting merge...')
        # -- The Merge (of all calculations done during this script)
        # Initialize
        seep_out_data = output_location + 'SO_' + output_root
        seep_out_raster = 'SM' + output_root
        pixel_type = "64_BIT"
        #arcpy.AddMessage('Initialized...')
        # Retrieve List of Feature Outputs
        seep_data_list_raw = arcpy.ListFeatureClasses("SOut_Data_*")
        seep_data_list = []
        for s in seep_data_list_raw:
            ds = arcpy.Describe(s)
            seep_data_list.append(ds.catalogPath)
        # Merge Raster Solar Maps (create raster dataset, workspace to
        # raster dataset)
        try:
            arcpy.CreateRasterDataset_management(output_location,
                                                 seep_out_raster, cell_size,
                                                 pixel_type)
        except:
            print 'Raster dataset exists already, proceeding...'
        try:
            arcpy.Mosaic_management(seep_sol_map_list,
                                    output_location + seep_out_raster)
        except:
            print 'No data for Mosaic - proceeding...'
        # -- Reset environment to proper extent
        extent = desc.extent
        arcpy.env.extent = extent
        # Merge Feature Classes
        arcpy.Merge_management(seep_data_list, seep_out_data)
        # -- Clean-Up: drop the whole scratch GDB and recreate it empty
        try:
            arcpy.Delete_management(workspace)
            arcpy.CreateFileGDB_management(workspace_location,
                                           os.path.basename(workspace))
            arcpy.AddMessage('Workspace reset...')
        except:
            arcpy.AddMessage('Workspace was not reset...')
        del extent
        pass
    except arcpy.ExecuteError:
        print arcpy.GetMessages(2)
    except Exception as e:
        print e.args[0]
def publishdayRasters():
    """Interpolate daily meteo rasters (one per field in B1..M1) by kriging
    the MeteoPoints feature class, clip each to the country boundary, and
    publish the clipped raster (with pyramids) into the canyons SDE GDB.

    On a per-field failure the handler creates a fallback empty raster
    dataset in the SDE and logs the error before continuing.
    """
    myname = 'publishdayRasters'
    defaultworkspace = r'C:\TrabajoFinalMasterESRI\Data\DF_Worskpace'
    rasterDefaultfolder = r'C:\TrabajoFinalMasterESRI\Data\Rasters'
    # Create today's raster folder (named DDMMYYYY)
    now = datetime.now()
    dayfolder = now.strftime("%d%m%Y")
    rasterDefaultfolder = os.path.join(rasterDefaultfolder, dayfolder)
    if not os.path.isdir(rasterDefaultfolder):
        os.mkdir(rasterDefaultfolder)
    # Set environment settings
    env.workspace = defaultworkspace
    env.overwriteOutput = True
    # Check out the ArcGIS Spatial Analyst extension license
    arcpy.CheckOutExtension("Spatial")
    arcpy.CheckOutExtension("3D")
    # One raster per field name; presumably B=temperature bands, W=wind,
    # M=misc — TODO confirm the field semantics with the data owner.
    listfields = ["B1", "B2", "B3", "W1", "W2", "M1"]
    for fieldname in listfields:
        try:
            logging.debug('----->' + myname)
            # Set local variables
            inFeatures = "Database Connections\[email protected]\canyons.owner.MeteoPoints"
            cellSize = 15000
            power = 2  # NOTE(review): unused — leftover from an IDW setup?
            searchRadius = RadiusVariable(12)  # NOTE(review): also unused
            # Output locations for the rasters.
            outVarRaster = os.path.join(rasterDefaultfolder,
                                        fieldname + '_' + dayfolder)
            dbraster = "Database Connections\[email protected]\canyons.owner." + fieldname
            kModel = "CIRCULAR"
            cellSize = 15000  # re-assigned; same value as above
            kRadius = 20000
            # Execute Kriging
            arcpy.Kriging_3d(inFeatures, fieldname, outVarRaster, kModel,
                             cellSize, kRadius)
            # Pending validation
            clip_features = "Database Connections\[email protected]\canyons.owner.Admin\canyons.owner.pais"
            # Here we perform the clip (extents() is a helper defined
            # elsewhere in this file — returns the clip rectangle)
            rectangle = extents(clip_features)
            arcpy.Clip_management(
                in_raster=outVarRaster,
                rectangle=rectangle,
                out_raster=dbraster,
                in_template_dataset=clip_features,
                clipping_geometry="ClippingGeometry",
                maintain_clipping_extent="NO_MAINTAIN_EXTENT")
            # Build the raster pyramids
            arcpy.BatchBuildPyramids_management(dbraster)
            logging.debug('<-----' + myname)
        except Exception as e:
            logging.error('Error ejecutando ' + myname)
            logging.error('El raster que ha dado problemas es el ' + fieldname)
            # Fallback: create an empty ETRS89/UTM-30N raster dataset in the
            # SDE so downstream consumers find something.
            # NOTE(review): out_name is hard-coded to "B1" even when another
            # field failed — looks like a bug; confirm intent.
            arcpy.CreateRasterDataset_management(
                out_path="Database Connections/[email protected]",
                out_name="B1",
                cellsize="",
                pixel_type="8_BIT_UNSIGNED",
                raster_spatial_reference=
                "PROJCS['ETRS_1989_UTM_Zone_30N',GEOGCS['GCS_ETRS_1989',DATUM['D_ETRS_1989',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Transverse_Mercator'],PARAMETER['False_Easting',500000.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',-3.0],PARAMETER['Scale_Factor',0.9996],PARAMETER['Latitude_Of_Origin',0.0],UNIT['Meter',1.0]]",
                number_of_bands="1",
                config_keyword="",
                pyramids="PYRAMIDS -1 NEAREST DEFAULT 75 NO_SKIP",
                tile_size="128 128",
                compression="LZ77",
                pyramid_origin="-5120763,26772284 9997963,94293634")
            logging.error(e.message)
    logging.debug(
        'Todos los rasters de los campos se han creado correctamente')
from arcpy import env
from arcpy.sa import *

# Script-tool parameters: output workspace, cell size, the five band
# rasters (blue/green/red/NIR/red-edge) and the output name.
outWorkspace = arcpy.GetParameterAsText(0)
cell = arcpy.GetParameterAsText(1)
blue = arcpy.GetParameterAsText(2)
green = arcpy.GetParameterAsText(3)
red = arcpy.GetParameterAsText(4)
nir = arcpy.GetParameterAsText(5)
rededge = arcpy.GetParameterAsText(6)
outName = arcpy.GetParameterAsText(7)


def _create_band_dataset(name):
    # One empty single-band 32-bit float raster dataset in the output
    # workspace — identical settings for every band, so factored out of
    # the five previously copy-pasted calls.
    return arcpy.CreateRasterDataset_management(outWorkspace, name, cell,
                                                '32_BIT_FLOAT', "", 1,
                                                "", "", "", "", "")


###create empty raster datasets
arcpy.AddMessage("making raster datasets")
blue_T = _create_band_dataset("blue.tif")
green_T = _create_band_dataset("green.tif")
red_T = _create_band_dataset("red.tif")
nir_T = _create_band_dataset("nir.tif")
rededge_T = _create_band_dataset("rededge.tif")
#####################################################################################