def main(ini_path): """Check new daily static output to historical outputs Args: ini_path (str): file path of the project INI file Returns: None """ logging.info('\nChecking daily_stats output against historical ouput files. Only crop 03 for now.') config = util.read_ini(ini_path, section='CROP_ET') try: project_ws = config.get('CROP_ET', 'project_folder') except: logging.error( 'project_folder parameter must be set in the INI file, exiting') return False project_ws = 'D:\et-demands\example' daily_stats_path =os.path.join(project_ws, 'daily_stats') validation_stats_path =os.path.join(project_ws, 'validation_files') ex_crop_list = ['03', '07', '11', '13','38','58','60','67','78','79'] for crop in ex_crop_list: print(crop)
def main(ini_path, overwrite_flag=False): """Download soil available water capacity (AWC), clay and sand shapefiles Parameters ---------- ini_path : str File path of the parameter INI file. overwrite_flag : bool, optional If True, overwrite existing files (the default is False). Returns ------- None Notes ----- Only the STATSGO 0to152cm shapefiles are currently available in the bucket. Other products, depths, or SSURGO shapefiles may eventually be available. """ logging.info('\nDownload soil shapefiles') logging.debug('INI: {}'.format(ini_path)) config = util.read_ini(ini_path, section='CROP_ET') base_url = 'https://storage.googleapis.com/openet/statsgo/shapefiles' ext_list = ['.shp', '.dbf', '.prj', '.shx', '.sbn', '.sbx'] awc_name = 'AWC_WTA_0to152cm_statsgo.shp' clay_name = 'Clay_WTA_0to152cm_statsgo.shp' sand_name = 'Sand_WTA_0to152cm_statsgo.shp' awc_url = '{}/{}'.format(base_url, awc_name) clay_url = '{}/{}'.format(base_url, clay_name) sand_url = '{}/{}'.format(base_url, sand_name) awc_path = config.get('CROP_ET', 'awc_path') clay_path = config.get('CROP_ET', 'clay_path') sand_path = config.get('CROP_ET', 'sand_path') if not os.path.isdir(os.path.dirname(awc_path)): os.makedirs(os.path.dirname(awc_path)) if not os.path.isdir(os.path.dirname(clay_path)): os.makedirs(os.path.dirname(clay_path)) if not os.path.isdir(os.path.dirname(sand_path)): os.makedirs(os.path.dirname(sand_path)) if not os.path.isfile(awc_path) or overwrite_flag: logging.info('\nDownloading AWC shapefile') for ext in ext_list: logging.debug(' {}'.format(awc_url.replace('.shp', ext))) logging.debug(' {}'.format(awc_path.replace('.shp', ext))) util.url_download(awc_url.replace('.shp', ext), awc_path.replace('.shp', ext)) else: logging.info('\nAWC shapefile already downloaded') if not os.path.isfile(clay_path) or overwrite_flag: logging.info('\nDownloading Clay shapefile') for ext in ext_list: logging.debug(' {}'.format(clay_url.replace('.shp', ext))) logging.debug(' {}'.format(clay_path.replace('.shp', ext))) util.url_download(clay_url.replace('.shp', ext), clay_path.replace('.shp', ext)) else: logging.info('\nClay shapefile already downloaded') if not os.path.isfile(sand_path) or overwrite_flag: logging.info('\nDownloading Sand shapefile') for ext in ext_list: logging.debug(' {}'.format(sand_url.replace('.shp', ext))) logging.debug(' {}'.format(sand_path.replace('.shp', ext))) util.url_download(sand_url.replace('.shp', ext), sand_path.replace('.shp', ext)) else: logging.info('\nSand shapefile already downloaded')
def main(ini_path, overwrite_flag=False): """Clip CDL rasters to a target extent and rebuild color table Parameters ---------- ini_path : str File path of the parameter INI file. overwrite_flag (bool): If True, overwrite the output raster (the default is False). Returns ------- None Notes ----- The script will attempt to clip into the project/gis/cdl folder. If the CDL raster is already in this folder, it will be overwritten. """ logging.debug('INI: {}'.format(ini_path)) config = util.read_ini(ini_path, section='CROP_ET') zones_path = config.get('CROP_ET', 'cells_path') gis_ws = config.get('CROP_ET', 'gis_folder') cdl_input_ws = config.get('CROP_ET', 'cdl_folder') cdl_year = int(config.get('CROP_ET', 'cdl_year')) cdl_format = config.get('CROP_ET', 'cdl_format') cdl_output_ws = os.path.join(gis_ws, 'cdl') cdl_input_path = os.path.join(cdl_input_ws, cdl_format.format(cdl_year, 'img')) cdl_output_path = os.path.join(cdl_output_ws, cdl_format.format(cdl_year, 'img')) # Keep the CDL raster in the default IMG format output_format = 'HFA' pyramids_flag = True stats_flag = True # if pyramids_flag: levels = '2 4 8 16 32 64 128' # gdal.SetConfigOption('USE_RRD', 'YES') # gdal.SetConfigOption('HFA_USE_RRD', 'YES') # gdal.SetConfigOption('HFA_COMPRESS_OVR', 'YES') if os.name == 'posix': shell_flag = False else: shell_flag = True # Check input folders if not os.path.isfile(zones_path): logging.error( '\nERROR: The ET zone shapefile doesn\'t exist, exiting\n' ' {}'.format(zones_path)) sys.exit() elif not os.path.isfile(cdl_input_path): logging.error('\nERROR: The input CDL raster doesn\'t exist, exiting\n' ' {}'.format(cdl_input_path)) sys.exit() if not os.path.isdir(cdl_output_ws): os.makedirs(cdl_output_ws) logging.info('\nGIS Workspace: {}'.format(gis_ws)) logging.info('CDL Input Path: {}'.format(cdl_input_path)) logging.info('CDL Output Path: {}'.format(cdl_output_path)) # TODO: Add logic to handle doing the clip inplace if cdl_input_path == cdl_output_path: logging.error('\nThe script does not currently handle clipping the ' 'CDL raster in place, exiting') sys.exit() # CDL Raster Properties cdl_ds = gdal.Open(cdl_input_path) cdl_proj = cdl_ds.GetProjection() cdl_osr = gdc.proj_osr(cdl_proj) cdl_geo = cdl_ds.GetGeoTransform() cdl_x, cdl_y = gdc.geo_origin(cdl_geo) cdl_cs = gdc.geo_cellsize(cdl_geo, x_only=True) logging.debug('\nCDL Input Raster Properties') logging.debug(' Geo: {}'.format(cdl_geo)) logging.debug(' Snap: {} {}'.format(cdl_x, cdl_y)) logging.debug(' Cellsize: {}'.format(cdl_cs)) logging.debug(' Projection: {}'.format(cdl_osr.ExportToWkt())) # logging.debug(' OSR: {}'.format(cdl_osr)) # logging.debug(' Extent: {}'.format(zones_extent)) # Reference all output rasters zone raster zones_ds = ogr.Open(zones_path, 0) zones_lyr = zones_ds.GetLayer() zones_osr = zones_lyr.GetSpatialRef() # zones_wkt = gdc.osr_proj(zones_ds) zones_extent = gdc.feature_lyr_extent(zones_lyr) zones_ds = None logging.debug('\nET Zones Shapefile Properties') logging.debug(' Extent: {}'.format(zones_extent)) logging.debug(' Projection: {}'.format(zones_osr.ExportToWkt())) # logging.debug(' OSR: {}'.format(zones_osr)) # Subset/clip properties # Project the extent to the CDL spatial reference logging.debug('\nCDL Output Raster Properties') clip_extent = zones_extent.project(zones_osr, cdl_osr) logging.debug(' Projected: {}'.format(clip_extent)) # Adjust the clip extent to the CDL snap point and cell size clip_extent.buffer(10 * cdl_cs) clip_extent.adjust_to_snap(snap_x=cdl_x, snap_y=cdl_y, cs=cdl_cs, method='EXPAND') 
logging.debug(' Snapped: {}'.format(clip_extent)) # Limit the subset extent to CDL extent clip_extent.clip(clip_extent) logging.debug(' Clipped: {}'.format(clip_extent)) clip_ullr = clip_extent.ul_lr_swap() logging.debug(' Clipped UL/LR: {}'.format(clip_ullr)) # Overwrite if os.path.isfile(cdl_output_path) or overwrite_flag: logging.info('\nDeleting existing raster') logging.debug(' {}'.format(cdl_output_path)) # subprocess.run( subprocess.check_output( ['gdalmanage', 'delete', '-f', output_format, cdl_output_path], shell=shell_flag) # remove_file(cdl_output_path) # Clip if not os.path.isfile(cdl_output_path): logging.info('\nClipping CDL raster') logging.debug(' {}\n {}'.format(cdl_input_path, cdl_output_path)) # subprocess.run( subprocess.check_output( ['gdal_translate', '-of', output_format, '-co', 'COMPRESSED=YES'] + ['-projwin'] + str(clip_ullr).split() + ['-a_ullr'] + str(clip_ullr).split() + [cdl_input_path, cdl_output_path], shell=shell_flag) if os.path.isfile(cdl_input_path.replace('.img', '.img.vat.dbf')): shutil.copyfile(cdl_input_path.replace('.img', '.img.vat.dbf'), cdl_output_path.replace('.img', '.img.vat.dbf')) # Statistics if stats_flag and os.path.isfile(cdl_output_path): logging.info('\nComputing statistics') logging.debug(' {}'.format(cdl_output_path)) # subprocess.run( subprocess.check_output([ 'gdalinfo', '-stats', '-nomd', '-noct', '-norat', cdl_output_path ], shell=shell_flag) # Pyramids if pyramids_flag and os.path.isfile(cdl_output_path): logging.info('\nBuilding pyramids') logging.debug(' {}'.format(cdl_output_path)) # subprocess.run( subprocess.check_output(['gdaladdo', '-ro', cdl_output_path] + levels.split(), shell=shell_flag)
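
# Illustrative sketch: the 'adjust_to_snap' step above aligns the projected
# zone extent to the CDL grid so the gdal_translate -projwin window falls on
# whole cells.  This standalone version shows EXPAND-style snap arithmetic;
# the gdc.Extent class used by the scripts wraps the same idea.
import math


def snap_extent(xmin, ymin, xmax, ymax, snap_x, snap_y, cs):
    """Expand an extent outward so all edges land on the raster grid."""
    xmin = math.floor((xmin - snap_x) / cs) * cs + snap_x
    ymin = math.floor((ymin - snap_y) / cs) * cs + snap_y
    xmax = math.ceil((xmax - snap_x) / cs) * cs + snap_x
    ymax = math.ceil((ymax - snap_y) / cs) * cs + snap_y
    return xmin, ymin, xmax, ymax


# Example with a 30 m grid anchored at (15, -2265585) (values are only
# for illustration, not the CDL snap point)
print(snap_extent(102340.2, 1748221.7, 204515.9, 1821004.1,
                  snap_x=15, snap_y=-2265585, cs=30))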
def main(ini_path): """Interpolate Preliminary Calibration Zones to All Zones Args: ini_path (str): file path of the project INI file Returns: None """ logging.info('\nInterpolating Calibration Data from Subset Point Data') # INI path crop_et_sec = 'CROP_ET' config = util.read_ini(ini_path, section=crop_et_sec) try: project_ws = config.get(crop_et_sec, 'project_folder') except: logging.error('project_folder parameter must be set in the INI file, ' 'exiting') return False try: gis_ws = config.get(crop_et_sec, 'gis_folder') except: logging.error('gis_folder parameter must be set in the INI file, ' 'exiting') return False try: et_cells_path = config.get(crop_et_sec, 'cells_path') except: logging.error('et_cells_path parameter must be set in the INI file, ' 'exiting') return False try: calibration_ws = config.get(crop_et_sec, 'spatial_cal_folder') except: calibration_ws = os.path.join(project_ws, 'calibration') try: crop_params_name = config.get(crop_et_sec, 'crop_params_name') except: logging.error('crop_params_name parameter must be set in the INI file, ' 'exiting') return False # Sub folder names static_ws = os.path.join(project_ws, 'static') crop_params_path = os.path.join(static_ws, crop_params_name) crop_et_ws = config.get(crop_et_sec, 'crop_et_folder') bin_ws = os.path.join(crop_et_ws, 'bin') # Check input folders if not os.path.exists(calibration_ws): logging.critical('\nERROR: The calibration folder does not exist. ' '\n Run build_spatial_crop_params.py') sys.exit() # Check input folders if not os.path.isdir(project_ws): logging.critical('\nERROR: The project folder does not exist' '\n {}'.format(project_ws)) sys.exit() elif not os.path.isdir(gis_ws): logging.critical('\nERROR: The GIS folder does not exist' '\n {}'.format(gis_ws)) sys.exit() logging.info('\nGIS Workspace: {}'.format(gis_ws)) # ET cells field names cell_id_field = 'CELL_ID' cell_station_id_field = 'STATION_ID' # cell_name_field = 'CELL_NAME' # crop_acres_field = 'CROP_ACRES' # Do distance calculations in decimal degrees to match Arc script gcs_osr = osr.SpatialReference() gcs_osr.ImportFromEPSG(4326) # Read in the cell locations and values et_cells_data = defaultdict(dict) input_driver = _arcpy.get_ogr_driver(et_cells_path) input_ds = input_driver.Open(et_cells_path, 0) input_lyr = input_ds.GetLayer() input_osr = input_lyr.GetSpatialRef() # gcs_osr = input_osr.CloneGeogCS() for input_ftr in input_lyr: input_fid = input_ftr.GetFID() logging.debug(' FID: {}'.format(input_fid)) input_id = input_ftr.GetField(input_ftr.GetFieldIndex(cell_id_field)) input_geom = input_ftr.GetGeometryRef() centroid_geom = input_geom.Clone() # Do distance calculations in decimal degrees to match Arc script centroid_geom.Transform( osr.CoordinateTransformation(input_osr, gcs_osr)) centroid_geom = centroid_geom.Centroid() et_cells_data[input_id]['X'] = centroid_geom.GetX() et_cells_data[input_id]['Y'] = centroid_geom.GetY() input_ds = None # Read crop parameters using ET Demands functions/methods logging.info('\nReading default crop parameters') sys.path.append(bin_ws) import crop_parameters crop_param_dict = crop_parameters.read_crop_parameters(crop_params_path) # Get list of crops specified in ET cells crop_field_list = [ field for field in _arcpy.list_fields(et_cells_path) if re.match('CROP_\d{2}', field)] crop_number_list = [int(f.split('_')[1]) for f in crop_field_list] logging.info('Cell crop numbers: {}'.format( ', '.join(list(util.ranges(crop_number_list))))) logging.debug('Cell crop fields: {}'.format(', '.join(crop_field_list))) # Get 
Crop Names for each Crop in crop_number_list crop_name_list = [] logging.debug('\nBuilding crop name list') for crop_num in crop_number_list: try: crop_param = crop_param_dict[crop_num] except: continue # logging.info('{:>2d} {}'.format(crop_num, crop_param.name)) logging.debug('{}'.format(crop_param)) # Replace other characters with spaces, then remove multiple spaces crop_name = re.sub('[-"().,/~]', ' ', str(crop_param.name).lower()) crop_name = ' '.join(crop_name.strip().split()).replace(' ', '_') crop_name_list.append(crop_name) # Location of preliminary calibration .shp files (ADD AS INPUT ARG?) prelim_calibration_ws = os.path.join(calibration_ws, 'preliminary_calibration') logging.info('\nInterpolating calibration parameters') for crop_num, crop_name in zip(crop_number_list, crop_name_list): # Preliminary calibration .shp subset_cal_file = os.path.join( prelim_calibration_ws, 'crop_{0:02d}_{1}{2}').format(crop_num, crop_name, '.shp') final_cal_file = os.path.join( calibration_ws, 'crop_{0:02d}_{1}{2}').format(crop_num, crop_name, '.shp') if not _arcpy.exists(subset_cal_file): logging.info( '\nCrop No: {} Preliminary calibration file not found. ' 'skipping.'.format(crop_num)) continue logging.info('\nInterpolating Crop: {:02d}'.format(crop_num)) # Params to Interpolate # param_list = ['T30_CGDD', 'CGDD_EFC', 'CGDD_TERM', 'KillFrostC'] param_list = ['MAD_Init', 'MAD_Mid', 'T30_CGDD', 'PL_GU_Date', 'CGDD_Tbase', 'CGDD_EFC', 'CGDD_Term', 'Time_EFC', 'Time_Harv', 'KillFrostC'] # Read in the calibration locations and values subset_cal_data = defaultdict(dict) input_driver = _arcpy.get_ogr_driver(subset_cal_file) input_ds = input_driver.Open(subset_cal_file, 0) input_lyr = input_ds.GetLayer() input_osr = input_lyr.GetSpatialRef() # gcs_osr = input_osr.CloneGeogCS() for input_ftr in input_lyr: input_fid = input_ftr.GetFID() logging.debug(' FID: {}'.format(input_fid)) input_id = input_ftr.GetField(input_ftr.GetFieldIndex( cell_id_field)) input_geom = input_ftr.GetGeometryRef() centroid_geom = input_geom.Clone() # Do distance calculations in decimal degrees to match Arc script centroid_geom.Transform( osr.CoordinateTransformation(input_osr, gcs_osr)) centroid_geom = centroid_geom.Centroid() subset_cal_data[input_id]['X'] = centroid_geom.GetX() subset_cal_data[input_id]['Y'] = centroid_geom.GetY() for f in param_list: subset_cal_data[input_id][f] = input_ftr.GetField( input_ftr.GetFieldIndex(f)) input_ds = None # Compute interpolated calibration parameters final_cal_data = defaultdict(dict) for cell_id, cell_dict in et_cells_data.items(): final_cal_data[cell_id] = {} logging.debug(' {}'.format(cell_id)) # Precompute distances to all subset cells weight = {} for subset_id, subset_dict in subset_cal_data.items(): distance = math.sqrt( (subset_dict['X'] - cell_dict['X']) ** 2 + (subset_dict['Y'] - cell_dict['Y']) ** 2) try: weight[subset_id] = distance ** -2.0 except: weight[subset_id] = 0 weight_total = sum(weight.values()) # Brute force IDW using all subset cell for param in param_list: # If any weight is zero, use the values directly # There is probably a better way of flagging these d0 = [id for id, w in weight.items() if w == 0] if d0: final_cal_data[cell_id][param] = subset_cal_data[ d0[0]][param] else: final_cal_data[cell_id][param] = sum([ data[param] * weight[id] for id, data in subset_cal_data.items()]) final_cal_data[cell_id][param] /= weight_total # Overwrite values in calibration .shp with interpolated values output_ds = input_driver.Open(final_cal_file, 1) output_lyr = 
output_ds.GetLayer()
        for output_ftr in output_lyr:
            output_id = output_ftr.GetField(
                output_ftr.GetFieldIndex(cell_id_field))
            # Write the interpolated values back to the output feature
            for param in param_list:
                output_ftr.SetField(
                    output_ftr.GetFieldIndex(param),
                    round(final_cal_data[output_id][param], 1))
            output_lyr.SetFeature(output_ftr)
        output_ds = None
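
# Illustrative sketch: the inverse-distance-weighted (IDW) interpolation used
# above, reduced to a single standalone function.  Distances are computed in
# the same units as the point coordinates (decimal degrees in the script).
def idw(cell_xy, points, power=2.0):
    """Interpolate a value at cell_xy from [(x, y, value), ...] points.

    If the target coincides with a sample point, that point's value is
    returned directly (its weight would otherwise be infinite).
    """
    weights = []
    for x, y, value in points:
        dist_sq = (x - cell_xy[0]) ** 2 + (y - cell_xy[1]) ** 2
        if dist_sq == 0:
            return value
        weights.append((dist_sq ** (-power / 2.0), value))
    total = sum(w for w, _ in weights)
    return sum(w * v for w, v in weights) / total


# Example with three hypothetical calibration points
print(idw((0.0, 0.0), [(1.0, 0.0, 10.0), (0.0, 2.0, 20.0), (-3.0, 0.0, 30.0)]))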
def main(ini_path, zone_type='huc8', area_threshold=10, beef_cuttings=4, dairy_cuttings=5, overwrite_flag=False, cleanup_flag=False): """Build static text files needed to run ET-Demands model Args: ini_path (str): file path of the project INI file zone_type (str): Zone type (huc8, huc10, county) area_threshold (float): CDL area threshold [acres] beef_cuttings (int): Initial number of beef hay cuttings dairy_cuttings (int): Initial number of dairy hay cuttings overwrite_flag (bool): If True, overwrite existing files cleanup_flag (bool): If True, remove temporary files Returns: None """ logging.info('\nBuilding ET-Demands Static Files') # Input units station_elev_units = 'FEET' # Default values permeability = -999 soil_depth = 60 # inches aridity = 50 irrigation = 1 crops = 86 # Input paths # DEADBEEF - For now, get cropET folder from INI file # This function may eventually be moved into the main cropET code crop_et_sec = 'CROP_ET' config = util.read_ini(ini_path, section=crop_et_sec) try: project_ws = config.get(crop_et_sec, 'project_folder') except: logging.error('project_folder parameter must be set in the INI file, ' 'exiting') return False try: gis_ws = config.get(crop_et_sec, 'gis_folder') except: logging.error('gis_folder parameter must be set in the INI file, ' 'exiting') return False try: et_cells_path = config.get(crop_et_sec, 'cells_path') except: logging.error('cells_path parameter must be set in the INI file, ' 'exiting') return False try: stations_path = config.get(crop_et_sec, 'stations_path') except: logging.error('stations_path parameter must be set in the INI file, ' 'exiting') return False try: crop_et_ws = config.get(crop_et_sec, 'crop_et_folder') except: logging.error('crop_et_ws parameter must be set in the INI file, ' 'exiting') return False try: template_ws = config.get(crop_et_sec, 'template_folder') except: template_ws = os.path.join(os.path.dirname(crop_et_ws), 'static') logging.info( '\nStatic text file "template_folder" parameter was not set ' 'in the INI\n Defaulting to: {}'.format(template_ws)) # Read data from geodatabase or shapefile # if '.gdb' in et_cells_path and not et_cells_path.endswith('.shp'): # _flag = False # _path = os.path.dirname(et_cells_path) # gdb_path = r'D:\Projects\CAT_Basins\AltusOK\et-demands_py\et_demands.gdb' # _cells_path = os.path.join(gdb_path, 'et_cells') # Output sub-folder names static_ws = os.path.join(project_ws, 'static') # Weather station shapefile field names station_id_field = 'STATION_ID' if zone_type == 'huc8': station_zone_field = 'HUC8' elif zone_type == 'huc10': station_zone_field = 'HUC10' elif zone_type == 'county': station_zone_field = 'COUNTYNAME' elif zone_type == 'gridmet': station_zone_field = 'GRIDMET_ID' station_id_field = 'GRIDMET_ID' station_lat_field = 'LAT' station_lon_field = 'LON' if station_elev_units.upper() in ['FT', 'FEET']: station_elev_field = 'ELEV_FT' elif station_elev_units.upper() in ['M', 'METERS']: station_elev_field = 'ELEV_M' # station_elev_field = 'ELEV_FT' # ET Cell field names cell_lat_field = 'LAT' cell_lon_field = 'LON' cell_id_field = 'CELL_ID' cell_name_field = 'CELL_NAME' cell_station_id_field = 'STATION_ID' # awc_field = 'AWC' clay_field = 'CLAY' sand_field = 'SAND' awc_in_ft_field = 'AWC_IN_FT' hydgrp_num_field = 'HYDGRP_NUM' hydgrp_field = 'HYDGRP' # huc_field = 'HUC{}'.format(huc) # permeability_field = 'PERMEABILITY' # soil_depth_field = 'SOIL_DEPTH' # aridity_field = 'ARIDITY' # dairy_cutting_field = 'DAIRY_CUTTINGS' # beef_cutting_field = 'BEEF_CUTTINGS' # Static file names 
cell_props_name = 'ETCellsProperties.txt' cell_crops_name = 'ETCellsCrops.txt' cell_cuttings_name = 'MeanCuttings.txt' crop_params_name = 'CropParams.txt' crop_coefs_name = 'CropCoefs.txt' crop_coefs_eto = 'CropCoefs_eto.txt' crop_coefs_etr = 'CropCoefs_etr.txt' eto_ratio_name = 'EToRatiosMon.txt' static_list = [ crop_params_name, crop_coefs_name, crop_coefs_eto, crop_coefs_etr, cell_props_name, cell_crops_name, cell_cuttings_name, eto_ratio_name ] # Check input folders if not os.path.isdir(crop_et_ws): logging.critical('\nERROR: The INI cropET folder does not exist' '\n {}'.format(crop_et_ws)) sys.exit() elif not os.path.isdir(project_ws): logging.critical('\nERROR: The project folder does not exist' '\n {}'.format(project_ws)) sys.exit() elif not os.path.isdir(gis_ws): logging.critical('\nERROR: The GIS folder does not exist' '\n {}'.format(gis_ws)) sys.exit() logging.info('\nGIS Workspace: {}'.format(gis_ws)) logging.info('Project Workspace: {}'.format(project_ws)) logging.info('CropET Workspace: {}'.format(crop_et_ws)) logging.info('Template Workspace: {}'.format(template_ws)) # Check input files if not arcpy.Exists(et_cells_path): logging.critical('\nERROR: The ET Cell shapefile does not exist' '\n {}'.format(et_cells_path)) sys.exit() elif not arcpy.Exists(stations_path): logging.critical( '\nERROR: The weather station shapefile does not exist' '\n {}'.format(stations_path)) sys.exit() for static_name in static_list: if not os.path.isfile(os.path.join(template_ws, static_name)): logging.error('\nERROR: The static template does not exist' '\n {}'.format( os.path.join(template_ws, static_name))) sys.exit() logging.debug('ET Cells Path: {}'.format(et_cells_path)) logging.debug('Stations Path: {}'.format(stations_path)) # Check units if station_elev_units.upper() not in ['FEET', 'FT', 'METERS', 'M']: logging.error( '\nERROR: Station elevation units {} are invalid' '\n Units must be METERS or FEET'.format(station_elev_units)) sys.exit() # Build output table folder if necessary if not os.path.isdir(static_ws): os.makedirs(static_ws) # Read Weather station/cell data logging.info('\nReading station shapefile') logging.debug(' {}'.format(stations_path)) fields = [ station_zone_field, station_id_field, station_elev_field, station_lat_field, station_lon_field ] logging.debug(' Fields: {}'.format(fields)) station_data_dict = defaultdict(dict) with arcpy.da.SearchCursor(stations_path, fields) as s_cursor: for row in s_cursor: for field in fields[1:]: # Key/match on strings even if ID is an integer station_data_dict[str( row[0])][field] = row[fields.index(field)] for k, v in station_data_dict.items(): logging.debug(' {}: {}'.format(k, v)) # Read ET Cell zonal stats logging.info('\nReading ET Cell Zonal Stats') logging.debug(' {}'.format(et_cells_path)) crop_field_list = sorted([ f.name for f in arcpy.ListFields(et_cells_path) if re.match('CROP_\d{2}', f.name) ]) fields = [ cell_id_field, cell_name_field, cell_lat_field, awc_in_ft_field, clay_field, sand_field, hydgrp_num_field, hydgrp_field ] fields = fields + crop_field_list logging.debug(' Fields: {}'.format(fields)) cell_data_dict = defaultdict(dict) with arcpy.da.SearchCursor(et_cells_path, fields) as s_cursor: for row in s_cursor: for field in fields[1:]: # Key/match on strings even if ID is an integer cell_data_dict[str(row[0])][field] = row[fields.index(field)] # Update ET Cell STATION_ID value fields = [cell_id_field, cell_station_id_field] with arcpy.da.UpdateCursor(et_cells_path, fields) as u_cursor: for row in u_cursor: try: row[1] = 
station_data_dict[row[0]][station_id_field] u_cursor.updateRow(row) except KeyError: pass # Convert elevation units if necessary if station_elev_units.upper() in ['METERS', 'M']: logging.debug(' Convert station elevation from meters to feet') for k in station_data_dict.keys(): station_data_dict[k][station_elev_field] /= 0.3048 logging.info('\nCopying template static files') for static_name in static_list: # if (overwrite_flag or # os.path.isfile(os.path.join(static_ws, static_name))): logging.debug(' {}'.format(static_name)) shutil.copy(os.path.join(template_ws, static_name), static_ws) # shutil.copyfile( # .path.join(template_ws, static_name), # .path.join(static_ws, crop_params_name)) logging.info('\nWriting static text files') cell_props_path = os.path.join(static_ws, cell_props_name) cell_crops_path = os.path.join(static_ws, cell_crops_name) cell_cuttings_path = os.path.join(static_ws, cell_cuttings_name) # crop_params_path = os.path.join(static_ws, crop_params_name) # crop_coefs_path = os.path.join(static_ws, crop_coefs_name) eto_ratio_path = os.path.join(static_ws, eto_ratio_name) # Write cell properties logging.debug(' {}'.format(cell_props_path)) with open(cell_props_path, 'a') as output_f: for cell_id, cell_data in sorted(cell_data_dict.items()): if cell_id in station_data_dict.keys(): station_data = station_data_dict[cell_id] station_id = station_data[station_id_field] station_lat = '{:>9.4f}'.format( station_data[station_lat_field]) station_lon = '{:>9.4f}'.format( station_data[station_lon_field]) station_elev = '{:.2f}'.format( station_data[station_elev_field]) else: logging.debug(' Cell_ID {} was not found in the ' 'station data'.format(cell_id)) station_id, station_lat, station_lon, station_elev = '', '', '', '' # There is an extra/unused column in the template and excel files output_list = [ cell_id, cell_data[cell_name_field], station_id, station_lat, station_lon, station_elev, permeability, '{:.4f}'.format(cell_data[awc_in_ft_field]), soil_depth, cell_data[hydgrp_field], cell_data[hydgrp_num_field], aridity, '' ] output_f.write('\t'.join(map(str, output_list)) + '\n') del output_list del station_id, station_lat, station_lon, station_elev # Write cell crops logging.debug(' {}'.format(cell_crops_path)) with open(cell_crops_path, 'a') as output_f: for cell_id, cell_data in sorted(cell_data_dict.items()): if cell_id in station_data_dict.keys(): station_id = station_data_dict[cell_id][station_id_field] else: logging.debug(' Cell_ID {} was not found in the ' 'station data'.format(cell_id)) station_id = '' output_list = [ cell_id, cell_data[cell_name_field], station_id, irrigation ] crop_list = ['CROP_{:02d}'.format(i) for i in range(1, crops + 1)] crop_area_list = [ cell_data[crop] if crop in cell_data.keys() else 0 for crop in crop_list ] crop_flag_list = [ 1 if area > area_threshold else 0 for area in crop_area_list ] output_list = output_list + crop_flag_list output_f.write('\t'.join(map(str, output_list)) + '\n') del crop_list, crop_area_list, crop_flag_list, output_list # Write cell cuttings logging.debug(' {}'.format(cell_cuttings_path)) with open(cell_cuttings_path, 'a') as output_f: for cell_id, cell_data in sorted(cell_data_dict.items()): output_list = [ cell_id, cell_data[cell_name_field], '{:>9.4f}'.format(cell_data[cell_lat_field]), dairy_cuttings, beef_cuttings ] output_f.write('\t'.join(map(str, output_list)) + '\n') del output_list # Write monthly ETo ratios logging.debug(' {}'.format(eto_ratio_path)) with open(eto_ratio_path, 'a') as output_f: for cell_id, 
cell_data in sorted(cell_data_dict.items()): if cell_id in station_data_dict.keys(): station_data = station_data_dict[cell_id] station_id = station_data[station_id_field] # station_lat = '{:>9.4f}'.format(station_data[station_lat_field]) # station_lon = '{:>9.4f}'.format(station_data[station_lon_field]) # station_elev = '{:.2f}'.format(station_data[station_elev_field]) else: logging.debug(' Cell_ID {} was not found in the ' 'station data'.format(cell_id)) # station_id, station_lat, station_lon, station_elev = '', '', '', '' continue output_list = [station_id, ''] + [1.0] * 12 output_f.write('\t'.join(map(str, output_list)) + '\n') del output_list
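
# Illustrative sketch: how one ETCellsCrops.txt row is assembled above.  A
# crop gets a flag of 1 only when its CDL acreage in the cell exceeds the
# area threshold; crops missing from the cell data default to 0 acres.
def cell_crops_row(cell_id, cell_name, station_id, irrigation,
                   cell_data, crops=86, area_threshold=10):
    """Return one tab-delimited ETCellsCrops row as a string."""
    crop_fields = ['CROP_{:02d}'.format(i) for i in range(1, crops + 1)]
    crop_areas = [cell_data.get(field, 0) or 0 for field in crop_fields]
    crop_flags = [1 if area > area_threshold else 0 for area in crop_areas]
    output = [cell_id, cell_name, station_id, irrigation] + crop_flags
    return '\t'.join(map(str, output))


# Example with hypothetical cell data (only crops 03 and 07 present)
print(cell_crops_row('HUC_1234', 'Example Cell', 'GM_5678', 1,
                     {'CROP_03': 125.4, 'CROP_07': 2.1}))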
def main(ini_path, area_threshold=10, beef_cuttings=4, dairy_cuttings=5, overwrite_flag=False): """Build static text files needed to run ET-Demands model Parameters ---------- ini_path : str File path of the parameter INI file. area_threshold : float CDL area threshold [acres] (the default is 10 acres). beef_cuttings : int Initial number of beef hay cuttings (the default is 4). dairy_cuttings : int Initial number of dairy hay cuttings (the default is 5). overwrite_flag : bool If True, overwrite existing files (the default is False). Returns ------- None """ logging.info('\nBuilding ET-Demands Static Files') # Input units # Why isn't this read from .ini CROP_ET section? # station_elev_units = 'FEET' # Default values permeability = -999 soil_depth = 60 # inches aridity = 50 irrigation = 1 # DEADBEEF - The number of crops should not be hardcoded here crops = 89 # Input paths # DEADBEEF - For now, get cropET folder from INI file # This function may eventually be moved into the main cropET code crop_et_sec = 'CROP_ET' config = util.read_ini(ini_path, section=crop_et_sec) try: project_ws = config.get(crop_et_sec, 'project_folder') except: logging.error('project_folder parameter must be set in the INI file, ' 'exiting') return False try: gis_ws = config.get(crop_et_sec, 'gis_folder') except: logging.error('gis_folder parameter must be set in the INI file, ' 'exiting') return False try: et_cells_path = config.get(crop_et_sec, 'cells_path') except: logging.error('cells_path parameter must be set in the INI file, ' 'exiting') return False try: stations_path = config.get(crop_et_sec, 'stations_path') except: logging.error('stations_path parameter must be set in the INI file, ' 'exiting') return False try: crop_et_ws = config.get(crop_et_sec, 'crop_et_folder') except: logging.error('crop_et_folder parameter must be set in the INI file, ' 'exiting') return False try: template_ws = config.get(crop_et_sec, 'template_folder') except: template_ws = os.path.join(os.path.dirname(crop_et_ws), 'static') logging.info( '\nStatic text file "template_folder" parameter was not set ' 'in the INI\n Defaulting to: {}'.format(template_ws)) # elevation units try: station_elev_units = config.get(crop_et_sec, 'elev_units') except: logging.error('elev_units must be set in crop_et section of INI file, ' 'exiting') return False # Read data from geodatabase or shapefile # if '.gdb' in et_cells_path and not et_cells_path.endswith('.shp'): # _flag = False # _path = os.path.dirname(et_cells_path) # gdb_path = r'D:\Projects\CAT_Basins\AltusOK\et-demands_py\et_demands.gdb' # _cells_path = os.path.join(gdb_path, 'et_cells') # Output sub-folder names static_ws = os.path.join(project_ws, 'static') # Weather station shapefile fields station_id_field = 'STATION_ID' station_lat_field = 'LAT' station_lon_field = 'LON' if station_elev_units.upper() in ['FT', 'FEET']: station_elev_field = 'ELEV_FT' elif station_elev_units.upper() in ['M', 'METERS']: station_elev_field = 'ELEV_M' # station_elev_field = 'ELEV_FT' # ET Cell field names cell_lat_field = 'LAT' # cell_lon_field = 'LON' cell_id_field = 'CELL_ID' cell_name_field = 'CELL_NAME' # cell_station_id_field = 'STATION_ID' # awc_field = 'AWC' clay_field = 'CLAY' sand_field = 'SAND' awc_in_ft_field = 'AWC_IN_FT' hydgrp_num_field = 'HYDGRP_NUM' hydgrp_field = 'HYDGRP' # huc_field = 'HUC{}'.format(huc) # permeability_field = 'PERMEABILITY' # soil_depth_field = 'SOIL_DEPTH' # aridity_field = 'ARIDITY' # dairy_cutting_field = 'DAIRY_CUTTINGS' # beef_cutting_field = 'BEEF_CUTTINGS' # Static file 
names cell_props_name = 'ETCellsProperties.txt' cell_crops_name = 'ETCellsCrops.txt' cell_cuttings_name = 'MeanCuttings.txt' crop_params_name = 'CropParams.txt' crop_coefs_name = 'CropCoefs.txt' crop_coefs_eto = 'CropCoefs_eto.txt' crop_coefs_etr = 'CropCoefs_etr.txt' eto_ratio_name = 'EToRatiosMon.txt' etr_ratio_name = 'ETrRatiosMon.txt' static_list = [ crop_params_name, crop_coefs_name, crop_coefs_eto, crop_coefs_etr, cell_props_name, cell_crops_name, cell_cuttings_name, eto_ratio_name, etr_ratio_name ] # Check input folders if not os.path.isdir(crop_et_ws): logging.critical('\nERROR: The INI cropET folder does not exist' '\n {}'.format(crop_et_ws)) sys.exit() elif not os.path.isdir(project_ws): logging.critical('\nERROR: The project folder does not exist' '\n {}'.format(project_ws)) sys.exit() elif not os.path.isdir(gis_ws): logging.critical('\nERROR: The GIS folder does not exist' '\n {}'.format(gis_ws)) sys.exit() logging.info('\nGIS Workspace: {}'.format(gis_ws)) logging.info('Project Workspace: {}'.format(project_ws)) logging.info('CropET Workspace: {}'.format(crop_et_ws)) logging.info('Template Workspace: {}'.format(template_ws)) # Check input files if not _arcpy.exists(et_cells_path): logging.critical('\nERROR: The ET Cell shapefile does not exist' '\n {}'.format(et_cells_path)) sys.exit() elif not _arcpy.exists(stations_path): logging.critical( '\nERROR: The weather station shapefile does not exist' '\n {}'.format(stations_path)) sys.exit() for static_name in static_list: if not os.path.isfile(os.path.join(template_ws, static_name)): logging.error('\nERROR: The static template does not exist' '\n {}'.format( os.path.join(template_ws, static_name))) sys.exit() logging.debug('ET Cells Path: {}'.format(et_cells_path)) logging.debug('Stations Path: {}'.format(stations_path)) # Check units if station_elev_units.upper() not in ['FEET', 'FT', 'METERS', 'M']: logging.error( '\nERROR: Station elevation units {} are invalid' '\n Units must be METERS or FEET'.format(station_elev_units)) sys.exit() # Build output table folder if necessary if not os.path.isdir(static_ws): os.makedirs(static_ws) # Read weather station/cell data logging.info('\nReading station shapefile') logging.debug(' {}'.format(stations_path)) fields = [ station_id_field, station_elev_field, station_lat_field, station_lon_field ] logging.debug(' Fields: {}'.format(fields)) station_data_dict = defaultdict(dict) try: for fid, row in _arcpy.search_cursor(stations_path, fields).items(): # print(fid) # print(row) # Switch to station_id_field as index (instead of FID) for f in fields[1:]: station_data_dict[str(row[station_id_field])][f] = row[f] for k, v in station_data_dict.items(): logging.debug(' {}: {}'.format(k, v)) except: logging.error( 'Expected Field Not Found. 
Check input shapefile : {}'.format( stations_path)) return False # Read ET Cell zonal stats logging.info('\nReading ET Cell Zonal Stats') logging.debug(' {}'.format(et_cells_path)) crop_field_list = sorted([ f for f in _arcpy.list_fields(et_cells_path) if re.match('CROP_\d{2}', f) ]) fields = [ cell_id_field, cell_name_field, cell_lat_field, station_id_field, awc_in_ft_field, clay_field, sand_field, hydgrp_num_field, hydgrp_field ] fields = fields + crop_field_list logging.debug(' Fields: {}'.format(fields)) cell_data_dict = defaultdict(dict) for fid, row in _arcpy.search_cursor(et_cells_path, fields).items(): # Switch to cell_id_field as index (instead of FID) for f in fields[1:]: cell_data_dict[str(row[cell_id_field])][f] = row[f] # Convert elevation units if necessary if station_elev_units.upper() in ['METERS', 'M']: logging.debug(' Convert station elevation from meters to feet') for k in station_data_dict.keys(): station_data_dict[k][station_elev_field] /= 0.3048 logging.info('\nCopying template static files') for static_name in static_list: # if (overwrite_flag or # os.path.isfile(os.path.join(static_ws, static_name))): logging.debug(' {}'.format(static_name)) shutil.copy(os.path.join(template_ws, static_name), static_ws) # shutil.copyfile( # .path.join(template_ws, static_name), # .path.join(static_ws, crop_params_name)) logging.info('\nWriting static text files') cell_props_path = os.path.join(static_ws, cell_props_name) cell_crops_path = os.path.join(static_ws, cell_crops_name) cell_cuttings_path = os.path.join(static_ws, cell_cuttings_name) # crop_params_path = os.path.join(static_ws, crop_params_name) # crop_coefs_path = os.path.join(static_ws, crop_coefs_name) eto_ratio_path = os.path.join(static_ws, eto_ratio_name) etr_ratio_path = os.path.join(static_ws, etr_ratio_name) # Write cell properties logging.debug(' {}'.format(cell_props_path)) with open(cell_props_path, 'a') as output_f: for cell_id, cell_data in sorted(cell_data_dict.items()): try: station_id = cell_data[station_id_field] except KeyError: logging.info( ' {} field was not found in the cell data'.format( station_id_field)) if station_id: #STATION_ID can be either a str or int in the cells .shp station_data = station_data_dict[str(station_id)] station_lat = '{:>9.4f}'.format( station_data[station_lat_field]) station_lon = '{:>9.4f}'.format( station_data[station_lon_field]) station_elev = '{:.2f}'.format( station_data[station_elev_field]) else: station_lat, station_lon, station_elev = '', '', '' # There is an extra/unused column in the template and excel files output_list = [ cell_id, cell_data[cell_name_field], station_id, station_lat, station_lon, station_elev, permeability, '{:.4f}'.format(cell_data[awc_in_ft_field]), soil_depth, cell_data[hydgrp_field], cell_data[hydgrp_num_field], aridity, '' ] output_f.write('\t'.join(map(str, output_list)) + '\n') del output_list del station_id, station_lat, station_lon, station_elev # Write cell crops logging.debug(' {}'.format(cell_crops_path)) with open(cell_crops_path, 'a') as output_f: for cell_id, cell_data in sorted(cell_data_dict.items()): try: station_id = cell_data[station_id_field] except KeyError: logging.info( ' {} field was not found in the cell data'.format( station_id_field)) station_id = '' output_list = [ cell_id, cell_data[cell_name_field], station_id, irrigation ] crop_list = ['CROP_{:02d}'.format(i) for i in range(1, crops + 1)] crop_area_list = [] for crop in crop_list: if crop in cell_data.keys() and cell_data[crop] is not None: 
crop_area_list.append(cell_data[crop]) else: crop_area_list.append(0) crop_flag_list = [ 1 if area > area_threshold else 0 for area in crop_area_list ] output_list = output_list + crop_flag_list output_f.write('\t'.join(map(str, output_list)) + '\n') del crop_list, crop_area_list, crop_flag_list, output_list # Write cell cuttings logging.debug(' {}'.format(cell_cuttings_path)) with open(cell_cuttings_path, 'a') as output_f: for cell_id, cell_data in sorted(cell_data_dict.items()): output_list = [ cell_id, cell_data[cell_name_field], '{:>9.4f}'.format(cell_data[cell_lat_field]), dairy_cuttings, beef_cuttings ] output_f.write('\t'.join(map(str, output_list)) + '\n') # Write monthly ETo ratios logging.debug(' {}'.format(eto_ratio_path)) with open(eto_ratio_path, 'a') as output_f: for cell_id, cell_data in sorted(cell_data_dict.items()): try: station_id = cell_data[station_id_field] except KeyError: logging.info(' {} field was not found in the cell data, ' 'skipping'.format(station_id_field)) # station_id = '' continue output_f.write('\t'.join(map(str, [station_id, ''] + [1.0] * 12)) + '\n') # Write monthly ETr ratios logging.debug(' {}'.format(etr_ratio_path)) with open(etr_ratio_path, 'a') as output_f: for cell_id, cell_data in sorted(cell_data_dict.items()): try: station_id = cell_data[station_id_field] except KeyError: logging.info(' {} field was not found in the cell data, ' 'skipping'.format(station_id_field)) # station_id = '' continue output_f.write('\t'.join(map(str, [station_id, ''] + [1.0] * 12)) + '\n')
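
# Illustrative sketch: the repeated try/except blocks around config.get() in
# the function above could be collapsed into a small helper.  This is a
# refactoring idea, not project code, and it assumes util.read_ini returns a
# standard configparser.ConfigParser; 'default' and 'required' are
# hypothetical names.
import configparser


def get_param(config, section, option, default=None, required=True):
    """Read an INI option, exiting with a clear message if it is required."""
    try:
        return config.get(section, option)
    except (configparser.NoOptionError, configparser.NoSectionError):
        if required:
            logging.error(
                '{} parameter must be set in the INI file, exiting'.format(option))
            sys.exit()
        logging.info('{} not set in the INI, defaulting to {}'.format(
            option, default))
        return default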
def main(ini_path, overwrite_flag=False): """Download CONUS CDL zips Parameters ---------- ini_path : str File path of the parameter INI file. overwrite_flag : bool If True, overwrite existing files (the default is False). Returns ------- None """ logging.info('\nDownload and extract CONUS CDL rasters') logging.debug('INI: {}'.format(ini_path)) config = util.read_ini(ini_path, section='CROP_ET') site_url = 'ftp.nass.usda.gov' site_folder = 'download/res' cdl_ws = config.get('CROP_ET', 'cdl_folder') cdl_year = int(config.get('CROP_ET', 'cdl_year')) cdl_format = config.get('CROP_ET', 'cdl_format') logging.info('Year: {}'.format(cdl_year)) zip_name = cdl_format.format(cdl_year, 'zip') zip_url = site_url + '/' + zip_name zip_path = os.path.join(cdl_ws, zip_name) cdl_path = os.path.join(cdl_ws, cdl_format.format(cdl_year, 'img')) # zip_url_size = remote_size(zip_url) # if os.path.isfile(zip_path): # zip_path_size = local_size(zip_path) # if not os.path.isfile(zip_path): # zip_path_size = 0 # if zip_url_size == zip_path_size: # size_flag = False # if zip_url_size != zip_path_size: # size_flag = True size_flag = False if not os.path.isdir(cdl_ws): os.makedirs(cdl_ws) if os.path.isfile(zip_path) and (overwrite_flag or size_flag): os.remove(zip_path) if not os.path.isfile(zip_path): logging.info(' Download CDL files') logging.debug(' {}'.format(zip_url)) logging.debug(' {}'.format(zip_path)) util.ftp_download(site_url, site_folder, zip_name, zip_path) # util.url_download(zip_url, zip_path) if os.path.isfile(cdl_path) and overwrite_flag: util.remove_file(cdl_path) if os.path.isfile(zip_path) and not os.path.isfile(cdl_path): logging.info(' Extracting CDL files') with zipfile.ZipFile(zip_path) as zf: zf.extractall(cdl_ws)
def main(ini_path, overwrite_flag=False): """Build CDL shapefiles for agricultural pixels Parameters ---------- ini_path : str File path of the parameter INI file. overwrite_flag : bool If True, overwrite existing shapefile (the default is False). Returns ------- None """ logging.info('\nBuilding Agricultural CDL Shapefile') logging.debug('INI: {}'.format(ini_path)) config = util.read_ini(ini_path, section='CROP_ET') zone_path = config.get('CROP_ET', 'cells_path') crop_path = config.get('CROP_ET', 'crop_path') temp_path = crop_path.replace('.shp', '_temp.shp') cdl_ws = config.get('CROP_ET', 'cdl_folder') cdl_year = int(config.get('CROP_ET', 'cdl_year')) cdl_format = config.get('CROP_ET', 'cdl_format') # It might make more sense to pass the non-ag CDL values instead cdl_crops = util.parse_int_set(config.get('CROP_ET', 'cdl_crops')) # cdl_nonag = util.parse_int_set(config.get('CROP_ET', 'cdl_nonag')) cdl_path = os.path.join(cdl_ws, cdl_format.format(cdl_year, 'img')) # Output field name in the crops shapefile crop_field = config.get('CROP_ET', 'crop_field') shp_driver = ogr.GetDriverByName('ESRI Shapefile') if os.path.isfile(crop_path): if overwrite_flag: shp_driver.DeleteDataSource(crop_path) else: return True if not os.path.isfile(zone_path): logging.error( '\nERROR: The ET zone shapefile doesn\'t exist, exiting\n' ' {}'.format(zone_path)) sys.exit() elif not os.path.isfile(cdl_path): logging.error('\nERROR: The CDL raster doesn\'t exist, exiting\n' ' {}'.format(cdl_path)) sys.exit() logging.debug('Zones: {}'.format(zone_path)) # CDL Raster Properties cdl_ds = gdal.Open(cdl_path) cdl_band = cdl_ds.GetRasterBand(1) try: cdl_nodata = int(cdl_band.GetNoDataValue()) except TypeError: cdl_nodata = 0 cdl_gtype = cdl_band.DataType cdl_proj = cdl_ds.GetProjection() cdl_osr = gdc.proj_osr(cdl_proj) cdl_geo = cdl_ds.GetGeoTransform() cdl_x, cdl_y = gdc.geo_origin(cdl_geo) cdl_cs = gdc.geo_cellsize(cdl_geo, x_only=True) cdl_extent = gdc.raster_ds_extent(cdl_ds) logging.debug('\nCDL Raster Properties') logging.debug(' Geo: {}'.format(cdl_geo)) logging.debug(' Snap: {} {}'.format(cdl_x, cdl_y)) logging.debug(' Cellsize: {}'.format(cdl_cs)) logging.debug(' Nodata: {}'.format(cdl_nodata)) logging.debug(' GDAL Type: {}'.format(cdl_gtype)) logging.debug(' Extent: {}'.format(cdl_extent)) logging.debug(' Projection: {}'.format(cdl_osr.ExportToWkt())) # logging.debug(' OSR: {}'.format(cdl_osr)) # ET Zones Properties zone_ds = shp_driver.Open(zone_path, 0) zone_lyr = zone_ds.GetLayer() zone_osr = zone_lyr.GetSpatialRef() zone_wkt = gdc.osr_proj(zone_osr) zone_extent = gdc.feature_lyr_extent(zone_lyr) logging.debug('\nET Zones Shapefile Properties') logging.debug(' Extent: {}'.format(zone_extent)) logging.debug(' Projection: {}'.format(zone_osr.ExportToWkt())) # logging.debug(' OSR: {}'.format(zones_osr)) if zone_osr.IsGeographic(): logging.error('\nERROR: The ET zones shapefile must be in a projected ' 'coordinate system, exiting') sys.exit() # Subset/clip properties # Project the extent to the CDL spatial reference logging.debug('\nClip Subset') clip_extent = zone_extent.project(zone_osr, cdl_osr) logging.debug(' Projected: {}'.format(clip_extent)) # Adjust the clip extent to the CDL snap point and cell size clip_extent.buffer(10 * cdl_cs) clip_extent.adjust_to_snap(snap_x=cdl_x, snap_y=cdl_y, cs=cdl_cs, method='EXPAND') logging.debug(' Snapped: {}'.format(clip_extent)) # Limit the subset extent to the CDL extent clip_extent.clip(cdl_extent) logging.debug(' Clipped: {}'.format(clip_extent)) # Compute the clip 
geotransform and shape clip_geo = clip_extent.geo(cs=cdl_cs) clip_rows, clip_cols = clip_extent.shape(cs=cdl_cs) logging.debug(' Rows/Cols: {} {}'.format(clip_rows, clip_cols)) # Building a raster mask was a little more efficient than selecting # touching features later on. logging.debug('\nBuilding ET Zones mask') zone_count = zone_lyr.GetFeatureCount() if zone_count < 255: zone_mask_gtype = gdal.GDT_Byte zone_mask_nodata = 255 elif zone_count < 65535: zone_mask_gtype = gdal.GDT_UInt16 zone_mask_nodata = 65535 else: zone_mask_gtype = gdal.GDT_UInt32 zone_mask_nodata = 4294967295 memory_driver = gdal.GetDriverByName('GTiff') # zones_mask_ds = memory_driver.Create( # os.path.join(os.path.dirname(zones_path), 'zones_mask.tiff'), # clip_cols, clip_rows, 1, zones_mask_gtype) memory_driver = gdal.GetDriverByName('MEM') zone_mask_ds = memory_driver.Create('', clip_cols, clip_rows, 1, zone_mask_gtype) zone_mask_ds.SetProjection(cdl_proj) zone_mask_ds.SetGeoTransform(clip_geo) zone_mask_band = zone_mask_ds.GetRasterBand(1) zone_mask_band.Fill(zone_mask_nodata) zone_mask_band.SetNoDataValue(zone_mask_nodata) gdal.RasterizeLayer(zone_mask_ds, [1], zone_lyr, burn_values=[1]) # zones_mask_ds = None # zones_mask_band = zones_mask_ds.GetRasterBand(1) zone_mask = zone_mask_band.ReadAsArray(0, 0, clip_cols, clip_rows) zone_mask = (zone_mask != zone_mask_nodata) zone_mask_ds = None logging.debug('\nBuilding initial CDL polygon shapefile') if os.path.isfile(temp_path): shp_driver.DeleteDataSource(temp_path) polygon_ds = shp_driver.CreateDataSource(temp_path) polygon_lyr = polygon_ds.CreateLayer('OUTPUT_POLY', geom_type=ogr.wkbPolygon) field_defn = ogr.FieldDefn(crop_field, ogr.OFTInteger) polygon_lyr.CreateField(field_defn) # TODO: Process CDL by tile # logging.debug('\nProcessing CDL by tile') # tile_list = [[0, 0]] # for tile_i, tile_j in tile_list: # logging.debug(' Tile: {} {}'.format(tile_i, tile_j)) logging.debug('\nConverting CDL raster to polygon') # Read the CDL subset array clip_xi, clip_yi = array_geo_offsets(cdl_geo, clip_geo) logging.debug(' Subset i/j: {} {}'.format(clip_xi, clip_yi)) cdl_array = cdl_band.ReadAsArray(clip_xi, clip_yi, clip_cols, clip_rows) cdl_ds = None # Apply the zones mask if np.any(zone_mask): cdl_array[~zone_mask] = cdl_nodata # Set non-agricultural pixels to nodata logging.debug('\nMasking non-crop pixels') cdl_array_values = np.unique(cdl_array) nodata_mask = np.zeros(cdl_array.shape, dtype=np.bool) for value in range(1, 255): if value in cdl_crops: continue elif value not in cdl_array_values: continue # logging.debug(' Value: {}'.format(value)) nodata_mask |= (cdl_array == value) cdl_array[nodata_mask] = cdl_nodata # # DEADBEEF - This is using the remap ranges # # It is probably more efficient than processing each crop separately # nodata_mask = np.zeros(cdl_array.shape, dtype=np.bool) # for [start, end, value] in cdl_agmask_remap: # if value == 1: # continue # logging.debug([start, end, value]) # nodata_mask |= (cdl_array >= start) & (cdl_array <= end) # cdl_array[nodata_mask] = cdl_nodata # Create an in-memory raster to read the CDL into # Set the mask band separately memory_driver = gdal.GetDriverByName('MEM') memory_ds = memory_driver.Create('', clip_cols, clip_rows, 2, cdl_gtype) memory_ds.SetGeoTransform(clip_geo) memory_ds.SetProjection(cdl_proj) memory_band = memory_ds.GetRasterBand(1) memory_band.SetNoDataValue(cdl_nodata) mask_band = memory_ds.GetRasterBand(2) # Write the CDL subset array to the memory raster logging.debug('\nWriting array') 
memory_band.WriteArray(cdl_array, 0, 0) mask_band.WriteArray(cdl_array != cdl_nodata, 0, 0) # Polygonize the CDL array logging.debug('\nConverting raster to polygon') gdal.Polygonize(memory_band, mask_band, polygon_lyr, 0) # Cleanup mask_band = None memory_band = None memory_ds = None polygon_lyr = None polygon_ds = None del cdl_array, nodata_mask, zone_mask # Write projection/spatial reference prj_osr = gdc.proj_osr(cdl_proj) prj_osr.MorphToESRI() with open(temp_path.replace('.shp', '.prj'), 'w') as prj_f: prj_f.write(prj_osr.ExportToWkt()) # Project crops to zones spatial reference logging.debug('\nProjecting crops to ET zones spatial reference') # ogr2ogr.project(temp_path, crop_path, zones_wkt) arcpy.project(temp_path, crop_path, zone_osr) logging.debug('\nRemoving temporary crops shapefile') arcpy.delete(temp_path)
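
# Illustrative sketch: the per-value loop above that blanks non-agricultural
# CDL codes can be written as one vectorized step with np.isin.  Note that
# np.zeros(..., dtype=np.bool) in the script relies on the np.bool alias,
# which was removed in NumPy 1.24+; plain bool (or np.bool_) works everywhere.
import numpy as np


def mask_noncrop(cdl_array, cdl_crops, cdl_nodata=0):
    """Return a copy of the CDL array with non-crop pixels set to nodata."""
    cdl_array = cdl_array.copy()
    crop_mask = np.isin(cdl_array, sorted(cdl_crops))
    cdl_array[~crop_mask] = cdl_nodata
    return cdl_array


# Example: keep only hypothetical crop codes 1, 36, and 61
test_array = np.array([[1, 36, 121], [61, 176, 0]], dtype=np.uint8)
print(mask_noncrop(test_array, cdl_crops={1, 36, 61}))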
def main(ini_path, overwrite_flag=False): """Calculate zonal statistics needed to run ET-Demands model Parameters ---------- ini_path : str File path of the parameter INI file. overwrite_flag : boolean True : overwrite existing shapefile False : default Returns ------- None """ logging.info('\nComputing ET-Demands Zonal Stats') logging.debug('INI: {}'.format(ini_path)) config = util.read_ini(ini_path, section='CROP_ET') gis_ws = config.get('CROP_ET', 'gis_folder') zone_path = config.get('CROP_ET', 'cells_path') crop_path = config.get('CROP_ET', 'crop_path') awc_path = config.get('CROP_ET', 'awc_path') clay_path = config.get('CROP_ET', 'clay_path') sand_path = config.get('CROP_ET', 'sand_path') crop_field = config.get('CROP_ET', 'crop_field') crosswalk_path = config.get('CROP_ET', 'crosswalk_path') soil_crop_mask_flag = config.getboolean('CROP_ET', 'soil_crop_mask_flag') save_crop_mask_flag = config.getboolean('CROP_ET', 'save_crop_mask_flag') # TODO: Read field names from INI cell_lat_field = 'LAT' cell_lon_field = 'LON' # cell_id_field = 'CELL_ID' # cell_name_field = 'CELL_NAME' # cell_station_id_field = 'STATION_ID' acreage_field = 'AG_ACRES' awc_field = 'AWC' clay_field = 'CLAY' sand_field = 'SAND' awc_in_ft_field = 'AWC_IN_FT' hydgrp_num_field = 'HYDGRP_NUM' hydgrp_field = 'HYDGRP' # +/- buffer distance (in zone units) simplify_threshold = 0.01 sqm_2_acres = 0.000247105381 sqft_2_acres = 0.0000229568 # Check if crosswalk file exists if not os.path.isfile(zone_path): logging.error( '\nERROR: The ET zone shapefile doesn\'t exist, exiting\n' ' {}'.format(zone_path)) sys.exit() elif not os.path.isfile(crop_path): logging.error('\nERROR: The crop shapefile doesn\'t exist, exiting\n' ' {}'.format(crop_path)) sys.exit() elif not os.path.isfile(crosswalk_path): logging.error( '\nERROR: The CDL Crosswalk file does not exist, exiting\n' ' Check the filename: {}'.format(crosswalk_path)) sys.exit() # Scratch files scratch_ws = os.path.join(gis_ws, 'scratch') if not os.path.isdir(scratch_ws): os.makedirs(scratch_ws) zone_crop_path = os.path.join(scratch_ws, 'zone_crop.shp') # if os.name == 'posix': # shell_flag = False # else: # shell_flag = True shp_driver = ogr.GetDriverByName('ESRI Shapefile') # Master zonal stats dictionary crop_stats = defaultdict(dict) zone_stats = defaultdict(dict) # Link zones to crops (and crops to zones, but this isn't being used) zone_crops = defaultdict(list) # crop_zones = defaultdict(list) # # Copy the zone_path # if overwrite_flag and _arcpy.exists(et_cells_path): # _arcpy.delete(et_cells_path) # # Just copy the input shapefile # if not _arcpy.exists(et_cells_path): # _arcpy.copy(zone_path, et_cells_path) # Add lat/lon fields logging.info('\nAdding Fields') zone_field_list = _arcpy.list_fields(zone_path) if cell_lat_field not in zone_field_list: logging.debug(' {}'.format(cell_lat_field)) _arcpy.add_field(zone_path, cell_lat_field, ogr.OFTReal) if cell_lon_field not in zone_field_list: logging.debug(' {}'.format(cell_lon_field)) _arcpy.add_field(zone_path, cell_lon_field, ogr.OFTReal) # # Cell ID/name # if cell_id_field not in zone_field_list: # logging.debug(' {}'.format(cell_id_field)) # _arcpy.add_field(zone_path, cell_id_field, ogr.OFTString, width=24) # if cell_name_field not in zone_field_list: # logging.debug(' {}'.format(cell_name_field)) # _arcpy.add_field(zone_path, cell_name_field, ogr.OFTString, # width=48) # Add soil fields if awc_field not in zone_field_list: logging.debug(' {}'.format(awc_field)) _arcpy.add_field(zone_path, awc_field, ogr.OFTReal) if 
clay_field not in zone_field_list: logging.debug(' {}'.format(clay_field)) _arcpy.add_field(zone_path, clay_field, ogr.OFTReal) if sand_field not in zone_field_list: logging.debug(' {}'.format(sand_field)) _arcpy.add_field(zone_path, sand_field, ogr.OFTReal) if awc_in_ft_field not in zone_field_list: logging.debug(' {}'.format(awc_in_ft_field)) _arcpy.add_field(zone_path, awc_in_ft_field, ogr.OFTReal, width=8, precision=4) if hydgrp_num_field not in zone_field_list: logging.debug(' {}'.format(hydgrp_num_field)) _arcpy.add_field(zone_path, hydgrp_num_field, ogr.OFTInteger) if hydgrp_field not in zone_field_list: logging.debug(' {}'.format(hydgrp_field)) _arcpy.add_field(zone_path, hydgrp_field, ogr.OFTString, width=1) if acreage_field not in zone_field_list: logging.debug(' {}'.format(acreage_field)) _arcpy.add_field(zone_path, acreage_field, ogr.OFTReal) # Crop fields are only added for needed crops below # for crop_num in crop_num_list: # field_name = 'CROP_{0:02d}'.format(crop_num) # if field_name not in zone_field_list: # logging.debug(' {}'.format(field_name)) # _arcpy.add_field(zone_path, field_name, ogr.OFTInteger) # Rebuild the field list zone_field_list = _arcpy.list_fields(zone_path) # Update field width/precision logging.debug('\nUpdating ET zone field width and precision') shp_driver = ogr.GetDriverByName('ESRI Shapefile') input_ds = shp_driver.Open(zone_path, 1) input_lyr = input_ds.GetLayer() input_defn = input_lyr.GetLayerDefn() logging.debug(' {:<10} {:<10} {:<5} {:<9}'.format('Name', 'Type', 'Width', 'Precision')) for i in range(input_defn.GetFieldCount()): fieldDefn = input_defn.GetFieldDefn(i) copyDefn = ogr.FieldDefn(fieldDefn.GetName(), fieldDefn.GetType()) logging.debug(' {:<10s} {:<10s} {:>5d} {:>9d}'.format( fieldDefn.GetName(), fieldDefn.GetFieldTypeName(fieldDefn.GetType()), fieldDefn.GetWidth(), fieldDefn.GetPrecision())) if (fieldDefn.GetFieldTypeName(fieldDefn.GetType()) == 'Real' and fieldDefn.GetWidth() < 24 and fieldDefn.GetPrecision() > 0): copyDefn.SetWidth(24) copyDefn.SetPrecision(15) else: continue input_lyr.AlterFieldDefn(i, copyDefn, (ogr.ALTER_WIDTH_PRECISION_FLAG)) input_ds = None # Calculate lat/lon logging.info('\nCalculating ET zone lat/lon') cell_lat_lon_func(zone_path, cell_lat_field, cell_lon_field) # Load the ET zones shapefile geometries into memory # Build the spatial index in the zone spatial reference logging.debug('\nReading ET zone shapefile features') zone_full_rtree = rtree.index.Index() zone_full_wkt_dict = dict() zone_full_ds = shp_driver.Open(zone_path, 0) zone_full_lyr = zone_full_ds.GetLayer() zone_full_osr = zone_full_lyr.GetSpatialRef() # Check that the ET zones shapefile is in a projected coordinate system if zone_full_osr.IsGeographic(): logging.error('\nERROR: The ET zones shapefile must be in a ' 'projected coordinate system, exiting') sys.exit() zone_full_unit = zone_full_osr.GetLinearUnitsName() if zone_full_unit.upper() not in ['METER', 'METERS', 'METRE']: logging.error( '\nERROR: Unsupported unit type: {}'.format(zone_full_unit)) sys.exit() for zone_ftr in zone_full_lyr: zone_fid = zone_ftr.GetFID() zone_geom = zone_ftr.GetGeometryRef() zone_geom = zone_geom.Buffer(0) zone_extent = gdc.Extent(zone_geom.GetEnvelope()) zone_extent = zone_extent.ogrenv_swap() zone_full_rtree.insert(zone_fid, list(zone_extent)) zone_full_wkt_dict[zone_fid] = zone_geom.ExportToWkt() zone_full_ds = None # DEADBEEF - Commented out for testing # Read the crop shapefile and identify intersecting features logging.debug('\nReading crop shapefile 
features') crop_dict = defaultdict(dict) crop_ds = shp_driver.Open(crop_path, 0) crop_lyr = crop_ds.GetLayer() crop_osr = crop_lyr.GetSpatialRef() crop_tx = osr.CoordinateTransformation(crop_osr, zone_full_osr) # crop_lyr_name = zones_lyr.GetName() for crop_ftr in crop_lyr: crop_fid = crop_ftr.GetFID() if crop_fid % 100000 == 0 and crop_fid != 0: print('test') logging.info('FID: {}'.format(crop_fid)) crop_geom = crop_ftr.GetGeometryRef() proj_geom = crop_geom.Clone() proj_geom.Transform(crop_tx) proj_geom = proj_geom.Buffer(0) proj_extent = gdc.Extent(proj_geom.GetEnvelope()) proj_extent = proj_extent.ogrenv_swap() zone_fid_list = list(zone_full_rtree.intersection(list(proj_extent))) if not zone_fid_list: continue # Link zones to crops and crops to zones for zone_fid in zone_fid_list: zone_crops[zone_fid].append(crop_fid) # crop_zones[crop_fid] = zone_fid_list crop_dict[crop_fid] = { 'fid': crop_fid, 'wkt': proj_geom.ExportToWkt(), 'value': crop_ftr.GetField(crop_field), } crop_ds = None # Read ET demands crop number crosswalk # Link ET demands crop number (1-84) with input crop values (i.e. CDL) # Key is input crop number, value is crop number, ignore comment # note that crosswalk list cannot contain NaN or empty entries # NaN in etd_no forces data to type float and breaks str/int logic below (line 284) logging.info('\nReading Crop Crosswalk File\n {}'.format(crosswalk_path)) cross_df = pd.read_csv(crosswalk_path) cross_dict = dict() for index, row in cross_df.iterrows(): # cross_dict[int(row.cdl_no)] = list(map(int, str(row.etd_no).split(','))) cross_dict[row.cdl_no] = list(map(int, str(row.etd_no).split(','))) # logging.debug(crop_num_dict) # Build the crop list # Because the spatial index is extent based, # this may include crops that don't intersect the zones. input_crops = sorted(list(set(c['value'] for c in crop_dict.values()))) try: etd_crops = sorted( list( set(x for c in crop_dict.values() for x in cross_dict[c['value']]))) except KeyError as e: logging.error('\nError: Input crop not found in crosswalk file. ' 'Missing Crop: {}\n Exiting.'.format(e)) sys.exit() logging.info('\nInput Crops: {}'.format(', '.join(map(str, input_crops)))) logging.info('Demands Crops: {}'.format(', '.join(map(str, etd_crops)))) # Build the crop clipped ET zones shapefile # The shapefile only needs to be saved if the soils are being masked to # the agricultural areas. It would probably be possibly to avoid saving # and keep the geometries in memory instead. 
if save_crop_mask_flag: logging.info('\nBuilding crop clipped zone shapefile') if os.path.exists(zone_crop_path): shp_driver.DeleteDataSource(zone_crop_path) zone_crop_ds = shp_driver.CreateDataSource(zone_crop_path) zone_crop_lyr_name = os.path.splitext( os.path.basename(zone_crop_path))[0] zone_crop_lyr = zone_crop_ds.CreateLayer(zone_crop_lyr_name, geom_type=ogr.wkbPolygon) zone_crop_lyr.CreateField(ogr.FieldDefn('ZONE_FID', ogr.OFTInteger)) if soil_crop_mask_flag: zone_crop_rtree = rtree.index.Index() zone_crop_wkt_dict = dict() # Process crops (by zone) and compute area weighted stats # Write clipped zones (if necessary) logging.info('\nComputing crop area/type zonal stats') for zone_fid, crop_fid_list in sorted(zone_crops.items()): # if zone_fid % 1000 == 0 and zone_fid != 0: # logging.info('ZONE FID: {}'.format(zone_fid)) logging.info('ZONE FID: {}'.format(zone_fid)) logging.debug('CROP FID: {}'.format(crop_fid_list)) if not crop_fid_list: logging.debug(' No crop FIDs, skipping zone') continue zone_poly = loads(zone_full_wkt_dict[zone_fid]) zone_crop_polys = [] zone_crop_area = 0 # Initialize zonal stats crop acreages for etd_crop in etd_crops: field = 'CROP_{:02d}'.format(etd_crop) crop_stats[zone_fid][field] = 0 crop_stats[zone_fid][acreage_field] = 0 # Process all intersecting/neighboring crops for crop_fid in crop_fid_list: input_crop_dict = crop_dict[crop_fid] crop_value = input_crop_dict['value'] crop_poly = loads(input_crop_dict['wkt']) clip_poly = zone_poly.intersection(crop_poly) if not clip_poly or clip_poly.is_empty: continue elif not clip_poly.is_valid: logging.error('\nERROR: Invalid clip geometry') input('ENTER') clip_area = clip_poly.area if not clip_area or clip_area <= 0: continue zone_crop_area += clip_area zone_crop_polys.append(clip_poly) for etd_crop in cross_dict[crop_value]: field = 'CROP_{:02d}'.format(etd_crop) crop_stats[zone_fid][field] += clip_area if soil_crop_mask_flag or save_crop_mask_flag: # Combine all polygons/multipolygons into a single multipolygon zone_crop_poly = unary_union(zone_crop_polys)\ .buffer(simplify_threshold).buffer(-simplify_threshold) # .simplify(simplify_threshold, preserve_topology=False)\ # zone_crop_poly = cascaded_union(zone_crop_polys) if zone_crop_poly.is_empty: logging.debug( ' ZONE FID: {} - empty polygon, skipping'.format( zone_fid)) continue if soil_crop_mask_flag: # Save the crop masked zone to memory zone_crop_rtree.insert(zone_fid, list(zone_crop_poly.bounds)) zone_crop_wkt_dict[zone_fid] = zone_crop_poly.wkt if save_crop_mask_flag: # Write the crop masked zone to shapefile zone_ftr = ogr.Feature(zone_crop_lyr.GetLayerDefn()) zone_ftr.SetField('ZONE_FID', zone_fid) zone_ftr.SetGeometry( ogr.CreateGeometryFromWkt(zone_crop_poly.wkt)) zone_crop_lyr.CreateFeature(zone_ftr) zone_ftr = None if save_crop_mask_flag: zone_crop_ds.ExecuteSQL( "RECOMPUTE EXTENT ON {}".format(zone_crop_lyr_name)) zone_crop_ds = None # Write projection/spatial reference to prj file # Format OSR as ESRI WKT prj_osr = zone_full_osr.Clone() prj_osr.MorphToESRI() with open(zone_crop_path.replace('.shp', '.prj'), 'w') as prj_f: prj_f.write(prj_osr.ExportToWkt()) # Rebuild the crop list from the stats crop_field_list = sorted( list( set([ crop_field for zone_crop_dict in crop_stats.values() for crop_field in zone_crop_dict.keys() ]))) logging.info('\nCrop Fields: {}'.format(', '.join(map(str, etd_crops)))) logging.debug('\nAdding crop fields to zones shapefile') for crop_field in crop_field_list: if crop_field not in zone_field_list: logging.debug(' 
Field: {}'.format(crop_field)) _arcpy.add_field(zone_path, crop_field, ogr.OFTReal) logging.debug('\nConverting crop areas to acres (if needed)') zone_ds = shp_driver.Open(zone_path, 0) zone_lyr = zone_ds.GetLayer() zone_osr = zone_lyr.GetSpatialRef() zone_unit = zone_osr.GetLinearUnitsName() if zone_unit.upper() not in ['METER', 'METERS', 'METRE']: raise ValueError('Unsupported unit type: {}'.format(zone_unit)) for zone_fid, crop_stat_dict in crop_stats.items(): for crop_field, crop_area in crop_stat_dict.items(): if crop_area < 0: continue elif crop_field not in crop_field_list: continue elif zone_unit.upper() in ['METER', 'METERS', 'METRE']: crop_stats[zone_fid][crop_field] = crop_area * sqm_2_acres # elif zone_unit in ['Feet']: # crop_stats[zone_fid][crop_field] = crop_area * sqft_2_acres # Compute total crop acreage per zone crop_stats[zone_fid][acreage_field] = sum( crop_stats[zone_fid].values()) logging.debug('\nWriting crop zonal stats to zones shapefile') _arcpy.update_cursor(zone_path, crop_stats) # NOTE - Defining here to avoid passing zone_stats as an input def zonal_stats(input_path, input_field, zone_wkt_dict, zone_rtree): logging.debug('\nComputing {} zonal stats'.format(input_field)) total_dict = dict() area_dict = dict() # Read the soil shapefile and identify intersecting features input_ds = shp_driver.Open(input_path, 0) input_lyr = input_ds.GetLayer() input_osr = input_lyr.GetSpatialRef() input_tx = osr.CoordinateTransformation(input_osr, zone_osr) for input_ftr in input_lyr: # input_fid = input_ftr.GetFID() input_value = input_ftr.GetField(input_field) # added .Buffer(0) to clean up soil .shp intersecting geom 8/19/20 input_geom = input_ftr.GetGeometryRef().Buffer(0) # input_geom = input_ftr.GetGeometryRef() proj_geom = input_geom.Clone() proj_geom.Transform(input_tx) input_poly = loads(proj_geom.ExportToWkt()) proj_extent = gdc.Extent(proj_geom.GetEnvelope()) proj_extent = proj_extent.ogrenv_swap() zone_fid_list = list(zone_rtree.intersection(list(proj_extent))) if not zone_fid_list: continue # Process all intersecting/neighboring features for zone_fid in zone_fid_list: try: zone_poly = loads(zone_wkt_dict[zone_fid]) except KeyError: continue clip_poly = zone_poly.intersection(input_poly) if not clip_poly or clip_poly.is_empty: continue elif not clip_poly.is_valid: logging.error('\nERROR: Invalid clip geometry') input('ENTER') elif not clip_poly.area or clip_poly.area <= 0: continue if zone_fid not in total_dict.keys(): total_dict[zone_fid] = 0 area_dict[zone_fid] = 0 if clip_poly.area > 0: total_dict[zone_fid] += input_value * clip_poly.area area_dict[zone_fid] += clip_poly.area input_ds = None # Compute area weighted values and save to master zonal stats dict for zone_fid, zone_total in total_dict.items(): zone_stats[zone_fid][ input_field] = zone_total / area_dict[zone_fid] if soil_crop_mask_flag: # # Load the crop masked zone shapefile # logging.debug('\nReading zone crop mask shapefile features into # memory') # zone_crop_rtree = rtree.index.Index() # zone_crop_wkt_dict = dict() # zone_crop_ds = shp_driver.Open(zone_crop_path, 0) # zone_crop_lyr = zone_crop_ds.GetLayer() # for zone_crop_ftr in zone_crop_lyr: # zone_crop_fid = zone_crop_ftr.GetFID() # zone_fid = zone_crop_ftr.GetField('ZONE_FID') # zone_crop_geom = zone_crop_ftr.GetGeometryRef() # if not zone_crop_geom: # continue # zone_crop_geom = zone_crop_geom.Buffer(0) # zone_crop_extent = gdc.Extent(zone_crop_geom.GetEnvelope()) # zone_crop_extent = zone_crop_extent.ogrenv_swap() # 
zone_crop_rtree.insert(zone_crop_fid, list(zone_crop_extent)) # zone_crop_wkt_dict[zone_fid] = zone_crop_geom.ExportToWkt() # zone_crop_ds = None # Compute soil zonal stats for the crop masked ET zones # Process files separately even though geometries are probably the same zonal_stats(awc_path, awc_field, zone_crop_wkt_dict, zone_crop_rtree) zonal_stats(clay_path, clay_field, zone_crop_wkt_dict, zone_crop_rtree) zonal_stats(sand_path, sand_field, zone_crop_wkt_dict, zone_crop_rtree) else: # Compute soil zonal stats for the full ET zones # Process files separately even though geometries are probably the same zonal_stats(awc_path, awc_field, zone_full_wkt_dict, zone_full_rtree) zonal_stats(clay_path, clay_field, zone_full_wkt_dict, zone_full_rtree) zonal_stats(sand_path, sand_field, zone_full_wkt_dict, zone_full_rtree) logging.debug('\nWriting soil zonal stats to zones shapefile') _arcpy.update_cursor(zone_path, zone_stats) # Calculate AWC in in/feet logging.info('Calculating AWC in in/ft') _arcpy.calculate_field(zone_path, awc_in_ft_field, '!{}! * 12'.format(awc_field)) # Calculate hydrologic group logging.info('Calculating hydrologic group') fields = (clay_field, sand_field, hydgrp_num_field, hydgrp_field) values = _arcpy.search_cursor(zone_path, fields) for fid, row in values.items(): if row[sand_field] > 50: values[fid][hydgrp_num_field], values[fid][hydgrp_field] = 1, 'A' elif row[clay_field] > 40: values[fid][hydgrp_num_field], values[fid][hydgrp_field] = 3, 'C' else: values[fid][hydgrp_num_field], values[fid][hydgrp_field] = 2, 'B' _arcpy.update_cursor(zone_path, values)
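# --- Illustrative sketch (not part of the original scripts) ------------------
# zonal_stats() above writes the area weighted mean for each zone, i.e.
# sum(value_i * clip_area_i) / sum(clip_area_i) over the clipped soil polygons,
# and AWC_IN_FT is that weighted AWC (a depth fraction, in/in) scaled by 12 to
# in/ft. The hydrologic group assignment reduces to a simple texture rule on
# the weighted sand/clay percentages; a hedged standalone version:
def hydgrp_from_texture(sand_pct, clay_pct):
    """Return (hydgrp_num, hydgrp) using the same thresholds as above."""
    if sand_pct > 50:
        return 1, 'A'    # sandy soils
    elif clay_pct > 40:
        return 3, 'C'    # clayey soils
    else:
        return 2, 'B'    # everything in between

# e.g. hydgrp_from_texture(62.0, 12.0) -> (1, 'A')
# ------------------------------------------------------------------------------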
def main(ini_path, zone_type='gridmet', overwrite_flag=False): """Interpolate Preliminary Calibration Zones to All Zones Args: ini_path (str): file path of the project INI file zone_type (str): Zone type (huc8, huc10, county, gridmet) overwrite_flag (bool): If True (default), overwrite existing files Returns: None """ logging.info('\nInterpolating Calibration Data from Subset Point Data') # INI path crop_et_sec = 'CROP_ET' config = util.read_ini(ini_path, section=crop_et_sec) try: project_ws = config.get(crop_et_sec, 'project_folder') except: logging.error('project_folder parameter must be set in the INI file, ' 'exiting') return False try: gis_ws = config.get(crop_et_sec, 'gis_folder') except: logging.error('gis_folder parameter must be set in the INI file, ' 'exiting') return False try: et_cells_path = config.get(crop_et_sec, 'cells_path') except: logging.error('et_cells_path parameter must be set in the INI file, ' 'exiting') return False try: calibration_ws = config.get(crop_et_sec, 'spatial_cal_folder') except: calibration_ws = os.path.join(project_ws, 'calibration') # Sub folder names static_ws = os.path.join(project_ws, 'static') crop_params_path = os.path.join(static_ws, 'CropParams.txt') crop_et_ws = config.get(crop_et_sec, 'crop_et_folder') bin_ws = os.path.join(crop_et_ws, 'bin') # Check input folders if not os.path.exists(calibration_ws): logging.critical('ERROR: The calibration folder does not exist. ' 'Run build_spatial_crop_params_arcpy.py, exiting') sys.exit() # Check input folders if not os.path.isdir(project_ws): logging.critical('ERROR: The project folder does not exist' '\n {}'.format(project_ws)) sys.exit() elif not os.path.isdir(gis_ws): logging.critical('ERROR: The GIS folder does not exist' '\n {}'.format(gis_ws)) sys.exit() logging.info('\nGIS Workspace: {}'.format(gis_ws)) # Check input zone type (GRIDMET ONLY FOR NOW!!!!) 
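# --- Illustrative sketch (not part of the original scripts) ------------------
# The block of try/except pairs above repeats the same lookup for every INI
# parameter. A hedged helper that wraps configparser's get() and names the
# missing parameter (assuming util.read_ini() returns a Python 3 ConfigParser):
import configparser
import logging

def get_required_param(config, section, name):
    """Return an INI parameter value, or log an error and return None."""
    try:
        return config.get(section, name)
    except (configparser.NoSectionError, configparser.NoOptionError):
        logging.error(
            '{} parameter must be set in the INI file, exiting'.format(name))
        return None
# ------------------------------------------------------------------------------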
if zone_type == 'gridmet': station_zone_field = 'GRIDMET_ID' station_id_field = 'GRIDMET_ID' # DEADBEEF - Added for testing elif zone_type == 'huc8': station_zone_field = 'HUC8' station_id_field = 'STATION_ID' else: logging.error( '\nFUNCTION ONLY SUPPORTS GRIDMET ZONE TYPE AT THIS TIME') sys.exit() arcpy.env.overwriteOutput = overwrite_flag arcpy.CheckOutExtension('Spatial') cells_dd_path = os.path.join(gis_ws, 'ETCells_dd.shp') cells_ras_path = os.path.join(gis_ws, 'ETCells_ras.img') arcpy.Project_management(et_cells_path, cells_dd_path, arcpy.SpatialReference('WGS 1984')) temp_path = os.path.join(calibration_ws, 'temp') if not os.path.exists(temp_path): os.makedirs(temp_path) temp_pt_file = os.path.join(temp_path, 'temp_pt_file.shp') # Read crop parameters using ET Demands functions/methods logging.info('\nReading Default Crop Parameters') sys.path.append(bin_ws) import crop_parameters crop_param_dict = crop_parameters.read_crop_parameters(crop_params_path) # Get list of crops specified in ET cells crop_field_list = [ field.name for field in arcpy.ListFields(et_cells_path) if re.match('CROP_\d{2}', field.name) ] logging.debug('Cell crop fields: {}'.format(', '.join(crop_field_list))) crop_number_list = [ int(f_name.split('_')[1]) for f_name in crop_field_list ] crop_number_list = [crop_num for crop_num in crop_number_list] logging.info('Cell crop numbers: {}'.format(', '.join( list(util.ranges(crop_number_list))))) # Get Crop Names for each Crop in crop_number_list crop_name_list = [] logging.debug('\nBuilding crop name list') for crop_num in crop_number_list: try: crop_param = crop_param_dict[crop_num] except: continue # logging.info('{:>2d} {}'.format(crop_num, crop_param.name)) logging.debug('{}'.format(crop_param)) # Replace other characters with spaces, then remove multiple spaces crop_name = re.sub('[-"().,/~]', ' ', str(crop_param.name).lower()) crop_name = ' '.join(crop_name.strip().split()).replace(' ', '_') crop_name_list.append(crop_name) # Set arcpy environmental parameters arcpy.env.extent = cells_dd_path arcpy.env.outputCoordinateSystem = cells_dd_path # Convert cells_dd to cells_ras # (0.041666667 taken from GEE GRIDMET tiff) HARDCODED FOR NOW arcpy.FeatureToRaster_conversion(cells_dd_path, station_id_field, cells_ras_path, 0.041666667) # Location of preliminary calibration .shp files (ADD AS INPUT ARG?) prelim_calibration_ws = os.path.join(calibration_ws, 'preliminary_calibration') logging.info('\nInterpolating calibration parameters') for crop_num, crop_name in zip(crop_number_list, crop_name_list): # Preliminary calibration .shp subset_cal_file = os.path.join( prelim_calibration_ws, 'crop_{0:02d}_{1}{2}'.format(crop_num, crop_name, '.shp')) final_cal_file = os.path.join( calibration_ws, 'crop_{0:02d}_{1}{2}'.format(crop_num, crop_name, '.shp')) if not arcpy.Exists(subset_cal_file): logging.info( '\nCrop No: {} preliminary calibration file not found. 
' 'Skipping.'.format(crop_num)) continue logging.info('\nInterpolating Crop: {:02d}'.format(crop_num)) # Polygon to Point arcpy.FeatureToPoint_management(subset_cal_file, temp_pt_file, "CENTROID") # Change Processing Extent to match final calibration file # arcpy.env.extent = cells_dd_path # arcpy.env.outputCoordinateSystem = cells_dd_path arcpy.env.snapRaster = cells_ras_path cell_size = arcpy.Raster(cells_ras_path).meanCellHeight # Params to Interpolate # Full list # param_list = ['MAD_Init', 'MAD_Mid', 'T30_CGDD', # 'PL_GU_Date', 'CGDD_Tbase', 'CGDD_EFC', # 'CGDD_Term', 'Time_EFC', 'Time_Harv', 'KillFrostC'] # Short list param_list = ['T30_CGDD', 'CGDD_EFC', 'CGDD_TERM', 'KillFrostC'] # Create final pt file based on cells raster for ExtractMultiValuesToPoints final_pt_path = os.path.join(temp_path, 'final_pt.shp') arcpy.RasterToPoint_conversion(cells_ras_path, final_pt_path, 'VALUE') # Empty list to fill with idw raster paths ras_list = [] for param in param_list: outIDW_ras = arcpy.sa.Idw(temp_pt_file, param, cell_size) outIDW_ras_path = os.path.join(temp_path, '{}{}'.format(param, '.img')) outIDW_ras.save(outIDW_ras_path) ras_list.append(outIDW_ras_path) # Extract all idw raster values to point .shp arcpy.sa.ExtractMultiValuesToPoints(final_pt_path, ras_list, 'NONE') # Read Interpolated Point Attribute table into dictionary ('GRID_CODE' is key) # https://gist.github.com/tonjadwyer/0e4162b1423c404dc2a50188c3b3c2f5 def make_attribute_dict(fc, key_field, attr_list=['*']): attdict = {} fc_field_objects = arcpy.ListFields(fc) fc_fields = [ field.name for field in fc_field_objects if field.type != 'Geometry' ] if attr_list == ['*']: valid_fields = fc_fields else: valid_fields = [ field for field in attr_list if field in fc_fields ] # Ensure that key_field is always the first field in the field list cursor_fields = [key_field ] + list(set(valid_fields) - set([key_field])) with arcpy.da.SearchCursor(fc, cursor_fields) as cursor: for row in cursor: attdict[row[0]] = dict(zip(cursor.fields, row)) return attdict cal_dict = make_attribute_dict(final_pt_path, 'GRID_CODE', param_list) # Overwrite values in calibration .shp with values from interpolated dictionary fields = ['CELL_ID'] + param_list with arcpy.da.UpdateCursor(final_cal_file, fields) as cursor: for row in cursor: for param_i, param in enumerate(param_list): row[param_i + 1] = round( cal_dict[int(row[0])][fields[param_i + 1]], 1) cursor.updateRow(row)
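# --- Illustrative sketch (not part of the original scripts) ------------------
# arcpy.sa.Idw() above performs standard inverse distance weighting: each
# output cell is a weighted mean of the calibration point values with weights
# 1 / d**power (ArcGIS defaults to power=2). A minimal pure-Python version of
# the same estimate, assuming points are given as (x, y, value) tuples:
def idw_estimate(points, x, y, power=2.0):
    """Inverse distance weighted estimate at (x, y) from (px, py, value) points."""
    num, den = 0.0, 0.0
    for px, py, value in points:
        d_sq = (px - x) ** 2 + (py - y) ** 2
        if d_sq == 0:
            return value                     # exactly on a calibration point
        weight = 1.0 / d_sq ** (power / 2.0)
        num += weight * value
        den += weight
    return num / den
# ------------------------------------------------------------------------------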
def main(ini_path, area_threshold=10, dairy_cuttings=5, beef_cuttings=4, crop_str='', overwrite_flag=False): """Build a feature class for each crop and set default crop parameters Apply the values in the CropParams.txt as defaults to every cell Parameters ---------- ini_path : str File path of the parameter INI file. area_threshold : float CDL area threshold [acres]. dairy_cuttings : int Initial number of dairy hay cuttings. beef_cuttings : int Initial number of beef hay cuttings. crop_str : str Comma separated list or range of crops to compare (no spaces, ex: 1,2,4-6) overwrite_flag : bool If True, overwrite existing output rasters. Returns ------- None """ logging.info('\nCalculating ET-Demands Spatial Crop Parameters') remove_empty_flag = True # Input paths # DEADBEEF - For now, get cropET folder from INI file # This function may eventually be moved into the main cropET code crop_et_sec = 'CROP_ET' config = util.read_ini(ini_path, section=crop_et_sec) try: project_ws = config.get(crop_et_sec, 'project_folder') except: logging.error('project_folder parameter must be set in the INI file, ' 'exiting') return False try: gis_ws = config.get(crop_et_sec, 'gis_folder') except: logging.error('gis_folder parameter must be set in the INI file, ' 'exiting') return False try: cells_path = config.get(crop_et_sec, 'cells_path') except: # cells_path = os.path.join(gis_ws, 'ETCells.shp') logging.error('et_cells_path parameter must be set in the INI file, ' 'exiting') return False try: stations_path = config.get(crop_et_sec, 'stations_path') except: logging.error('stations_path parameter must be set in the INI file, ' 'exiting') return False try: crop_params_name = config.get(crop_et_sec, 'crop_params_name') except: logging.error( 'crop_params_name parameter must be set in the INI file, ' 'exiting') return False crop_et_ws = config.get(crop_et_sec, 'crop_et_folder') bin_ws = os.path.join(crop_et_ws, 'bin') try: calibration_ws = config.get(crop_et_sec, 'spatial_cal_folder') except: calibration_ws = os.path.join(project_ws, 'calibration') # Sub folder names static_ws = os.path.join(project_ws, 'static') crop_params_path = os.path.join(static_ws, crop_params_name) # ET cells field names cell_id_field = 'CELL_ID' cell_name_field = 'CELL_NAME' crop_acres_field = 'CROP_ACRES' # Only keep the following ET Cell fields keep_field_list = [cell_id_field, cell_name_field, 'AG_ACRES'] # keep_field_list = ['CELL_ID', 'STATION_ID', 'HUC8', 'HUC10', 'GRIDMET_ID', # 'COUNTYNAME', 'AG_ACRES'] # keep_field_list = ['FIPS', 'COUNTYNAME'] # Check input folders if not os.path.isdir(crop_et_ws): logging.error('\nERROR: The INI cropET folder does not exist' '\n {}'.format(crop_et_ws)) sys.exit() elif not os.path.isdir(bin_ws): logging.error('\nERROR: The bin workspace does not exist' '\n {}'.format(bin_ws)) sys.exit() elif not os.path.isdir(project_ws): logging.error('\nERROR: The project folder does not exist' '\n {}'.format(project_ws)) sys.exit() elif not os.path.isdir(gis_ws): logging.error('\nERROR: The GIS folder does not exist' '\n {}'.format(gis_ws)) sys.exit() if '.gdb' not in calibration_ws and not os.path.isdir(calibration_ws): os.makedirs(calibration_ws) logging.info('\nGIS Workspace: {}'.format(gis_ws)) logging.info('Project Workspace: {}'.format(project_ws)) logging.info('CropET Workspace: {}'.format(crop_et_ws)) logging.info('Bin Workspace: {}'.format(bin_ws)) logging.info('Calib. 
Workspace: {}'.format(calibration_ws)) # Check input files if not os.path.isfile(crop_params_path): logging.error('\nERROR: The crop parameters file does not exist' '\n {}'.format(crop_params_path)) sys.exit() elif not os.path.isfile(cells_path): logging.error('\nERROR: The ET Cell shapefile does not exist' '\n {}'.format(cells_path)) sys.exit() elif not os.path.isfile(stations_path): logging.error('\nERROR: The weather station shapefile does not exist' '\n {}'.format(stations_path)) sys.exit() logging.debug('Crop Params Path: {}'.format(crop_params_path)) logging.debug('ET Cells Path: {}'.format(cells_path)) logging.debug('Stations Path: {}'.format(stations_path)) # For now, only allow calibration parameters in separate shapefiles ext = '.shp' # # Build output geodatabase if necessary # if calibration_ws.endswith('.gdb'): # logging.debug('GDB Path: {}'.format(calibration_ws)) # ext = '' # _arcpy.exists(calibration_ws) and overwrite_flag: # try: _arcpy.delete(calibration_ws) # except: pass # if calibration_ws is not None and not _arcpy.exists(calibration_ws): # arcpy.CreateFileGDB_management( # os.path.dirname(calibration_ws), # os.path.basename(calibration_ws)) # else: # ext = '.shp' # Field Name, Property, Field Type # Property is the string of the CropParameter class property value # It will be used to access the property using getattr dairy_cutting_field = 'Dairy_Cut' beef_cutting_field = 'Beef_Cut' param_list = [ # ['Name', 'name', ogr.OFTString], # ['ClassNum', 'class_number', ogr.OFTInteger], # ['IsAnnual', 'is_annual', 'SHORT'], # ['IrrigFlag', 'irrigation_flag', 'SHORT'], # ['IrrigDays', 'days_after_planting_irrigation', ogr.OFTInteger], # ['Crop_FW', 'crop_fw', ogr.OFTInteger], # ['WinterCov', 'winter_surface_cover_class', 'SHORT'], # ['CropKcMax', 'kc_max', ogr.OFTReal], ['MAD_Init', 'mad_initial', ogr.OFTInteger], ['MAD_Mid', 'mad_midseason', ogr.OFTInteger], # ['RootDepIni', 'rooting_depth_initial', ogr.OFTReal], # ['RootDepMax', 'rooting_depth_max', ogr.OFTReal], # ['EndRootGrw', 'end_of_root_growth_fraction_time', ogr.OFTReal], # ['HeightInit', 'height_initial', ogr.OFTReal], # ['HeightMax', 'height_max', ogr.OFTReal], # ['CurveNum', 'curve_number', ogr.OFTInteger], # ['CurveName', 'curve_name', ogr.OFTString], # ['CurveType', 'curve_type', 'SHORT'], # ['PL_GU_Flag', 'flag_for_means_to_estimate_pl_or_gu', 'SHORT'], ['T30_CGDD', 't30_for_pl_or_gu_or_cgdd', ogr.OFTReal], ['PL_GU_Date', 'date_of_pl_or_gu', ogr.OFTReal], ['CGDD_Tbase', 'tbase', ogr.OFTReal], ['CGDD_EFC', 'cgdd_for_efc', ogr.OFTInteger], ['CGDD_Term', 'cgdd_for_termination', ogr.OFTInteger], ['Time_EFC', 'time_for_efc', ogr.OFTInteger], ['Time_Harv', 'time_for_harvest', ogr.OFTInteger], ['KillFrostC', 'killing_frost_temperature', ogr.OFTReal], # ['InvokeStrs', 'invoke_stress', 'SHORT'], # ['CN_Coarse', 'cn_coarse_soil', ogr.OFTInteger], # ['CN_Medium', 'cn_medium_soil', ogr.OFTInteger], # ['CN_Fine', 'cn_fine_soil', ogr.OFTInteger] ] # if calibration_ws.endswith('.gdb'): # dairy_cutting_field = 'Dairy_Cuttings' # beef_cutting_field = 'Beef_Cuttings' # param_list = [ # # ['Name', 'name', 'STRING'], # # ['Class_Number', 'class_number', ogr.OFTInteger], # # ['Is_Annual', 'is_annual', 'SHORT'], # # ['Irrigation_Flag', 'irrigation_flag', 'SHORT'], # # ['Irrigation_Days', 'days_after_planting_irrigation', ogr.OFTInteger], # # ['Crop_FW', 'crop_fw', ogr.OFTInteger], # # ['Winter_Cover_Class', 'winter_surface_cover_class', 'SHORT'], # # ['Crop_Kc_Max', 'kc_max', ogr.OFTReal], # # ['MAD_Initial', 'mad_initial', 
ogr.OFTInteger], # # ['MAD_Midseason', 'mad_midseason', ogr.OFTInteger], # # ['Root_Depth_Ini', 'rooting_depth_initial', ogr.OFTReal], # # ['Root_Depth_Max', 'rooting_depth_max', ogr.OFTReal], # # ['End_Root_Growth', 'end_of_root_growth_fraction_time', ogr.OFTReal], # # ['Height_Initial', 'height_initial', ogr.OFTReal], # # ['Height_Maximum', 'height_max', ogr.OFTReal], # # ['Curve_Number', 'curve_number', ogr.OFTInteger], # # ['Curve_Name', 'curve_name', ogr.OFTString], # # ['Curve_Type', 'curve_type', 'SHORT'], # # ['PL_GU_Flag', 'flag_for_means_to_estimate_pl_or_gu', 'SHORT'], # ['T30_CGDD', 't30_for_pl_or_gu_or_cgdd', ogr.OFTReal], # ['PL_GU_Date', 'date_of_pl_or_gu', ogr.OFTReal], # ['CGDD_Tbase', 'tbase', ogr.OFTReal], # ['CGDD_EFC', 'cgdd_for_efc', ogr.OFTInteger], # ['CGDD_Termination', 'cgdd_for_termination', ogr.OFTInteger], # ['Time_EFC', 'time_for_efc', ogr.OFTInteger], # ['Time_Harvest', 'time_for_harvest', ogr.OFTInteger], # ['Killing_Crost_C', 'killing_frost_temperature', ogr.OFTReal], # # ['Invoke_Stress', 'invoke_stress', 'SHORT'], # # ['CN_Coarse_Soil', 'cn_coarse_soil', ogr.OFTInteger], # # ['CN_Medium_Soil', 'cn_medium_soil', ogr.OFTInteger], # # ['CN_Fine_Soil', 'cn_fine_soil', ogr.OFTInteger] # ] crop_add_list = [] if crop_str: try: crop_add_list = sorted(list(util.parse_int_set(crop_str))) # try: # crop_test_list = sorted(list(set( # crop_test_list + list(util.parse_int_set(crop_str))) except: pass # Don't build crop parameter files for non-crops crop_skip_list = sorted(list(set([44, 45, 46, 55, 56, 57]))) # crop_test_list = sorted(list(set(crop_test_list + [46]))) logging.info('\ncrop_add_list = {}'.format(crop_add_list)) # Read crop parameters using ET Demands functions/methods logging.info('\nReading default crop parameters') sys.path.append(bin_ws) import crop_parameters crop_param_dict = crop_parameters.read_crop_parameters(crop_params_path) # Get list of crops specified in ET cells # Currently this may only be crops with CDL acreage crop_field_list = sorted([ field for field in _arcpy.list_fields(cells_path) if re.match('CROP_\d{2}', field) ]) crop_number_list = [int(f.split('_')[-1]) for f in crop_field_list] logging.info('Cell crop numbers: {}'.format(', '.join( list(util.ranges(crop_number_list))))) logging.debug('Cell crop fields: {}'.format(', '.join(crop_field_list))) # Get crop acreages for each cell # DEADBEEF - Does this dict need to be keyed by crop then cell_id? # Could it be changed to cell_id, crop or fid, crop to make it easier to # write to the shapefile using update_cursor()? 
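# --- Illustrative sketch (not part of the original scripts) ------------------
# util.parse_int_set() above expands a crop_str such as '1,2,4-6' (see the
# docstring) into the integer set {1, 2, 4, 5, 6}. The real helper lives in the
# project's util module and may differ in detail; a hedged equivalent:
def parse_int_set(crop_str=''):
    """Expand a comma separated list/range string into a set of ints."""
    result = set()
    for token in crop_str.split(','):
        token = token.strip()
        if not token:
            continue
        if '-' in token:
            start, end = token.split('-')
            result.update(range(int(start), int(end) + 1))
        else:
            result.add(int(token))
    return result

# e.g. sorted(parse_int_set('1,2,4-6')) -> [1, 2, 4, 5, 6]
# ------------------------------------------------------------------------------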
crop_acreage_dict = defaultdict(dict) field_list = [cell_id_field] + crop_field_list for fid, row in _arcpy.search_cursor(cells_path, field_list).items(): for crop_field, crop_num in zip(crop_field_list, crop_number_list): if crop_skip_list and crop_num in crop_skip_list: continue elif crop_num in crop_add_list: crop_acreage_dict[crop_num][row[cell_id_field]] = 0 elif row[crop_field]: crop_acreage_dict[crop_num][ row[cell_id_field]] = row[crop_field] else: crop_acreage_dict[crop_num][row[cell_id_field]] = 0 crop_number_list = sorted(list(set(crop_number_list) | set(crop_add_list))) # Make an empty template crop feature class logging.info('') crop_template_path = os.path.join(calibration_ws, 'crop_00_template' + ext) if overwrite_flag and _arcpy.exists(crop_template_path): logging.debug('Overwriting template crop feature class') _arcpy.delete(crop_template_path) if _arcpy.exists(crop_template_path): logging.info('Template crop feature class already exists, skipping') else: logging.info('Building template crop feature class') _arcpy.copy(cells_path, crop_template_path) # Remove unneeded et cell fields for field in _arcpy.list_fields(crop_template_path): # if (field not in keep_field_list and # field.editable and not field.required): if field not in keep_field_list: logging.debug(' Delete field: {}'.format(field)) _arcpy.delete_field(crop_template_path, field) field_list = _arcpy.list_fields(crop_template_path) # Add crop acreage field if crop_acres_field not in field_list: logging.debug(' Add field: {}'.format(crop_acres_field)) _arcpy.add_field(crop_template_path, crop_acres_field, ogr.OFTReal) _arcpy.calculate_field(crop_template_path, crop_acres_field, '0') # Add crop parameter fields if necessary for param_field, param_method, param_type in param_list: logging.debug(' Add field: {}'.format(param_field)) if param_field not in field_list: _arcpy.add_field(crop_template_path, param_field, param_type) # if dairy_cutting_field not in field_list: # logging.debug(' Add field: {}'.format(dairy_cutting_field)) # _arcpy.add_field(crop_template_path, dairy_cutting_field, # ogr.OFTInteger) # _arcpy.calculate_field(crop_template_path, dairy_cutting_field, # dairy_cuttings) # if beef_cutting_field not in field_list: # logging.debug(' Add field: {}'.format(beef_cutting_field)) # _arcpy.add_field(crop_template_path, beef_cutting_field, # ogr.OFTInteger) # _arcpy.calculate_field(crop_template_path, beef_cutting_field, # beef_cuttings) # Add an empty/zero crop field for the field mappings below # if 'CROP_EMPTY' not in _arcpy.list_fields(cells_path): # _arcpy.add_field(cells_path, 'CROP_EMPTY', ogr.OFTReal) # _arcpy.calculate_field(cells_path, 'CROP_EMPTY', '0') # Process each crop logging.info('\nBuilding crop feature classes') for crop_num in crop_number_list: try: crop_param = crop_param_dict[crop_num] except: continue logging.info('{:>2d} {}'.format(crop_num, crop_param.name)) logging.debug('{}'.format(crop_param)) # Replace other characters with spaces, then remove multiple spaces crop_name = re.sub('[-"().,/~]', ' ', str(crop_param.name).lower()) crop_name = ' '.join(crop_name.strip().split()).replace(' ', '_') crop_path = os.path.join( calibration_ws, 'crop_{0:02d}_{1}{2}'.format(crop_num, crop_name, ext)) # crop_field = 'CROP_{:02d}'.format(crop_num) # Don't check crops in add list if crop_num in crop_add_list: pass # Skip if all zone crop areas are below threshold elif all( [v < area_threshold for v in crop_acreage_dict[crop_num].values()]): logging.info('** Skipping Crop {}, All crop acreages 
below' ' threshold'.format(crop_num)) continue # Remove existing shapefiles if necessary if overwrite_flag and _arcpy.exists(crop_path): logging.debug(' Overwriting: {}'.format( os.path.basename(crop_path))) _arcpy.delete(crop_path) # Don't check skip list until after existing files are removed # if ((crop_test_list and crop_num not in crop_test_list) or # _skip_list and crop_num in crop_skip_list)): # .debug(' Skipping') # Copy ET cells for each crop if needed if _arcpy.exists(crop_path): logging.debug(' Shapefile already exists, skipping') continue else: # logging.debug(' {}'.format(crop_path)) _arcpy.copy(crop_template_path, crop_path) # Remove extra fields # for field in _arcpy.list_fields(crop_path): # if field not in keep_field_list: # # logging.debug(' {}'.format(field)) # _arcpy.delete_field(crop_path, field) # Add alfalfa cutting field if crop_num in [1, 2, 3, 4]: if dairy_cutting_field not in _arcpy.list_fields(crop_path): logging.debug(' Add field: {}'.format(dairy_cutting_field)) _arcpy.add_field(crop_path, dairy_cutting_field, ogr.OFTInteger) _arcpy.calculate_field(crop_path, dairy_cutting_field, str(dairy_cuttings)) if beef_cutting_field not in _arcpy.list_fields(crop_path): logging.debug(' Add field: {}'.format(beef_cutting_field)) _arcpy.add_field(crop_path, beef_cutting_field, ogr.OFTInteger) _arcpy.calculate_field(crop_path, beef_cutting_field, str(beef_cuttings)) # Write default crop parameters to file # Note: Couldn't use _arcpy.udpate_cursor directly since the # crop_acreage_dict is keyed by crop_num then by cell_id (not FID first) input_driver = _arcpy.get_ogr_driver(crop_path) input_ds = input_driver.Open(crop_path, 1) input_lyr = input_ds.GetLayer() for input_ftr in input_lyr: cell_id = input_ftr.GetField( input_ftr.GetFieldIndex(cell_id_field)) # Don't remove zero acreage crops if in add list if crop_num in crop_add_list: pass # Skip and/or remove zones without crop acreage elif crop_acreage_dict[crop_num][cell_id] < area_threshold: if remove_empty_flag: input_lyr.DeleteFeature(input_ftr.GetFID()) continue # Write parameter values for param_field, param_method, param_type in param_list: input_ftr.SetField(input_ftr.GetFieldIndex(param_field), getattr(crop_param, param_method)) # Write crop acreage if crop_num not in crop_add_list: input_ftr.SetField(input_ftr.GetFieldIndex(crop_acres_field), crop_acreage_dict[crop_num][cell_id]) input_lyr.SetFeature(input_ftr) input_ds = None
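# --- Illustrative sketch (not part of the original scripts) ------------------
# Several of these scripts build the per-crop shapefile name the same way:
# lower-case the crop parameter name, replace punctuation with spaces, collapse
# repeated spaces, and join with underscores. Pulled out as a helper (the crop
# name used in the example is hypothetical):
import re

def crop_shp_name(crop_num, crop_param_name, ext='.shp'):
    """Build the crop_XX_<name>.shp style file name used above."""
    name = re.sub('[-"().,/~]', ' ', str(crop_param_name).lower())
    name = ' '.join(name.strip().split()).replace(' ', '_')
    return 'crop_{0:02d}_{1}{2}'.format(crop_num, name, ext)

# e.g. crop_shp_name(86, 'Some Crop (test)') -> 'crop_86_some_crop_test.shp'
# ------------------------------------------------------------------------------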
def main(ini_path, zone_type='huc8', area_threshold=10, dairy_cuttings=5, beef_cuttings=4, crop_str='', remove_empty_flag=True, overwrite_flag=False): """Build a feature class for each crop and set default crop parameters Apply the values in the CropParams.txt as defaults to every cell Args: ini_path (str): file path of the project INI file zone_type (str): Zone type (huc8, huc10, county, gridmet) area_threshold (float): CDL area threshold [acres] dairy_cuttings (int): Initial number of dairy hay cuttings beef_cuttings (int): Initial number of beef hay cuttings crop_str (str): comma separated list or range of crops to compare overwrite_flag (bool): If True, overwrite existing output rasters Returns: None """ logging.info('\nCalculating ET-Demands Spatial Crop Parameters') remove_empty_flag = True # Input paths # DEADBEEF - For now, get cropET folder from INI file # This function may eventually be moved into the main cropET code crop_et_sec = 'CROP_ET' config = util.read_ini(ini_path, section=crop_et_sec) try: project_ws = config.get(crop_et_sec, 'project_folder') except: logging.error('project_folder parameter must be set in the INI file, ' 'exiting') return False try: gis_ws = config.get(crop_et_sec, 'gis_folder') except: logging.error('gis_folder parameter must be set in the INI file, ' 'exiting') return False try: cells_path = config.get(crop_et_sec, 'cells_path') except: # cells_path = os.path.join(gis_ws, 'ETCells.shp') logging.error('et_cells_path parameter must be set in the INI file, ' 'exiting') return False try: stations_path = config.get(crop_et_sec, 'stations_path') except: logging.error('stations_path parameter must be set in the INI file, ' 'exiting') return False crop_et_ws = config.get(crop_et_sec, 'crop_et_folder') bin_ws = os.path.join(crop_et_ws, 'bin') try: calibration_ws = config.get(crop_et_sec, 'spatial_cal_folder') except: calibration_ws = os.path.join(project_ws, 'calibration') # Sub folder names static_ws = os.path.join(project_ws, 'static') # pmdata_ws = os.path.join(project_ws, 'pmdata') crop_params_path = os.path.join(static_ws, 'CropParams.txt') # ET cells field names cell_id_field = 'CELL_ID' cell_name_field = 'CELL_NAME' crop_acres_field = 'CROP_ACRES' # Only keep the following ET Cell fields keep_field_list = [cell_id_field, cell_name_field, 'AG_ACRES'] # keep_field_list = ['CELL_ID', 'STATION_ID', 'HUC8', 'HUC10', 'GRIDMET_ID, # 'COUNTYNAME', 'AG_ACRES'] # keep_field_list = ['FIPS', 'COUNTYNAME'] # Check input folders if not os.path.isdir(crop_et_ws): logging.error('\nERROR: The INI cropET folder does not exist' '\n {}'.format(crop_et_ws)) sys.exit() elif not os.path.isdir(bin_ws): logging.error('\nERROR: The bin workspace does not exist' '\n {}'.format(bin_ws)) sys.exit() elif not os.path.isdir(project_ws): logging.error('\nERROR: The project folder does not exist' '\n {}'.format(project_ws)) sys.exit() elif not os.path.isdir(gis_ws): logging.error('\nERROR: The GIS folder does not exist' '\n {}'.format(gis_ws)) sys.exit() if '.gdb' not in calibration_ws and not os.path.isdir(calibration_ws): os.makedirs(calibration_ws) logging.info('\nGIS Workspace: {}'.format(gis_ws)) logging.info('Project Workspace: {}'.format(project_ws)) logging.info('CropET Workspace: {}'.format(crop_et_ws)) logging.info('Bin Workspace: {}'.format(bin_ws)) logging.info('Calib. 
Workspace: {}'.format(calibration_ws)) # Check input files if not os.path.isfile(crop_params_path): logging.error('\nERROR: The crop parameters file does not exist' '\n {}'.format(crop_params_path)) sys.exit() elif not os.path.isfile(cells_path): logging.error('\nERROR: The ET Cell shapefile does not exist' '\n {}'.format(cells_path)) sys.exit() elif not os.path.isfile(stations_path) or not arcpy.Exists(stations_path): logging.error('\nERROR: The weather station shapefile does not exist' '\n {}'.format(stations_path)) sys.exit() logging.debug('Crop Params Path: {}'.format(crop_params_path)) logging.debug('ET Cells Path: {}'.format(cells_path)) logging.debug('Stations Path: {}'.format(stations_path)) # For now, only allow calibration parameters in separate shapefiles ext = '.shp' # # Build output geodatabase if necessary # if calibration_ws.endswith('.gdb'): # logging.debug('GDB Path: {}'.format(calibration_ws)) # ext = '' # if arcpy.Exists(calibration_ws) and overwrite_flag: # try: arcpy.Delete_management(calibration_ws) # except: pass # if calibration_ws is not None and not arcpy.Exists(calibration_ws): # arcpy.CreateFileGDB_management( # os.path.dirname(calibration_ws), # os.path.basename(calibration_ws)) # else: # ext = '.shp' # Field Name, Property, Field Type # Property is the string of the CropParameter class property value # It will be used to access the property using getattr dairy_cutting_field = 'Dairy_Cut' beef_cutting_field = 'Beef_Cut' param_list = [ # ['Name', 'name', 'STRING'], # ['ClassNum', 'class_number', 'LONG'], # ['IsAnnual', 'is_annual', 'SHORT'], # ['IrrigFlag', 'irrigation_flag', 'SHORT'], # ['IrrigDays', 'days_after_planting_irrigation', 'LONG'], # ['Crop_FW', 'crop_fw', 'LONG'], # ['WinterCov', 'winter_surface_cover_class', 'SHORT'], # ['CropKcMax', 'kc_max', 'FLOAT'], ['MAD_Init', 'mad_initial', 'LONG'], ['MAD_Mid', 'mad_midseason', 'LONG'], # ['RootDepIni', 'rooting_depth_initial', 'FLOAT'], # ['RootDepMax', 'rooting_depth_max', 'FLOAT'], # ['EndRootGrw', 'end_of_root_growth_fraction_time', 'FLOAT'], # ['HeightInit', 'height_initial', 'FLOAT'], # ['HeightMax', 'height_max', 'FLOAT'], # ['CurveNum', 'curve_number', 'LONG'], # ['CurveName', 'curve_name', 'STRING'], # ['CurveType', 'curve_type', 'SHORT'], # ['PL_GU_Flag', 'flag_for_means_to_estimate_pl_or_gu', 'SHORT'], ['T30_CGDD', 't30_for_pl_or_gu_or_cgdd', 'FLOAT'], ['PL_GU_Date', 'date_of_pl_or_gu', 'FLOAT'], ['CGDD_Tbase', 'tbase', 'FLOAT'], ['CGDD_EFC', 'cgdd_for_efc', 'LONG'], ['CGDD_Term', 'cgdd_for_termination', 'LONG'], ['Time_EFC', 'time_for_efc', 'LONG'], ['Time_Harv', 'time_for_harvest', 'LONG'], ['KillFrostC', 'killing_frost_temperature', 'FLOAT'], # ['InvokeStrs', 'invoke_stress', 'SHORT'], # ['CN_Coarse', 'cn_coarse_soil', 'LONG'], # ['CN_Medium', 'cn_medium_soil', 'LONG'], # ['CN_Fine', 'cn_fine_soil', 'LONG'] ] # if calibration_ws.endswith('.gdb'): # dairy_cutting_field = 'Dairy_Cuttings' # beef_cutting_field = 'Beef_Cuttings' # param_list = [ # # ['Name', 'name', 'STRING'], # # ['Class_Number', 'class_number', 'LONG'], # # ['Is_Annual', 'is_annual', 'SHORT'], # # ['Irrigation_Flag', 'irrigation_flag', 'SHORT'], # # ['Irrigation_Days', 'days_after_planting_irrigation', 'LONG'], # # ['Crop_FW', 'crop_fw', 'LONG'], # # ['Winter_Cover_Class', 'winter_surface_cover_class', 'SHORT'], # # ['Crop_Kc_Max', 'kc_max', 'FLOAT'], # # ['MAD_Initial', 'mad_initial', 'LONG'], # # ['MAD_Midseason', 'mad_midseason', 'LONG'], # # ['Root_Depth_Ini', 'rooting_depth_initial', 'FLOAT'], # # ['Root_Depth_Max', 
'rooting_depth_max', 'FLOAT'], # # ['End_Root_Growth', 'end_of_root_growth_fraction_time', 'FLOAT'], # # ['Height_Initial', 'height_initial', 'FLOAT'], # # ['Height_Maximum', 'height_max', 'FLOAT'], # # ['Curve_Number', 'curve_number', 'LONG'], # # ['Curve_Name', 'curve_name', 'STRING'], # # ['Curve_Type', 'curve_type', 'SHORT'], # # ['PL_GU_Flag', 'flag_for_means_to_estimate_pl_or_gu', 'SHORT'], # ['T30_CGDD', 't30_for_pl_or_gu_or_cgdd', 'FLOAT'], # ['PL_GU_Date', 'date_of_pl_or_gu', 'FLOAT'], # ['CGDD_Tbase', 'tbase', 'FLOAT'], # ['CGDD_EFC', 'cgdd_for_efc', 'LONG'], # ['CGDD_Termination', 'cgdd_for_termination', 'LONG'], # ['Time_EFC', 'time_for_efc', 'LONG'], # ['Time_Harvest', 'time_for_harvest', 'LONG'], # ['Killing_Crost_C', 'killing_frost_temperature', 'FLOAT'], # # ['Invoke_Stress', 'invoke_stress', 'SHORT'], # # ['CN_Coarse_Soil', 'cn_coarse_soil', 'LONG'], # # ['CN_Medium_Soil', 'cn_medium_soil', 'LONG'], # # ['CN_Fine_Soil', 'cn_fine_soil', 'LONG'] # ] crop_add_list = [] if crop_str: try: crop_add_list = sorted(list(util.parse_int_set(crop_str))) # try: # crop_test_list = sorted(list(set( # crop_test_list + list(util.parse_int_set(crop_str))) except: pass # Don't build crop parameter files for non-crops crop_skip_list = sorted(list(set([44, 45, 46, 55, 56, 57]))) # crop_test_list = sorted(list(set(crop_test_list + [46]))) logging.info('\ncrop_add_list = {}'.format(crop_add_list)) # Read crop parameters using ET Demands functions/methods logging.info('\nReading default crop parameters') sys.path.append(bin_ws) import crop_parameters crop_param_dict = crop_parameters.read_crop_parameters(crop_params_path) # arcpy.CheckOutExtension('Spatial') # arcpy.env.pyramid = 'NONE 0' arcpy.env.overwriteOutput = overwrite_flag arcpy.env.parallelProcessingFactor = 8 # Get list of crops specified in ET cells # Currently this may only be crops with CDL acreage crop_field_list = [ field.name for field in arcpy.ListFields(cells_path) if re.match('CROP_\d{2}', field.name)] logging.debug('Cell crop fields: {}'.format(', '.join(crop_field_list))) crop_number_list = [ int(f_name.split('_')[-1]) for f_name in crop_field_list] crop_number_list = [ crop_num for crop_num in crop_number_list if not (crop_skip_list and crop_num in crop_skip_list)] logging.info('Cell crop numbers: {}'.format( ', '.join(list(util.ranges(crop_number_list))))) # Get crop acreages for each cell crop_acreage_dict = defaultdict(dict) field_list = [cell_id_field] + crop_field_list with arcpy.da.SearchCursor(cells_path, field_list) as cursor: for row in cursor: for i, crop_num in enumerate(crop_number_list): # logging.info('{} {}'.format(crop_num, i)) if crop_num in crop_add_list: crop_acreage_dict[crop_num][row[0]] = 0 else: crop_acreage_dict[crop_num][row[0]] = row[i + 1] crop_number_list = sorted(list(set(crop_number_list) | set(crop_add_list))) # Make an empty template crop feature class logging.info('') crop_template_path = os.path.join( calibration_ws, 'crop_00_template' + ext) if overwrite_flag and arcpy.Exists(crop_template_path): logging.debug('Overwriting template crop feature class') arcpy.Delete_management(crop_template_path) if arcpy.Exists(crop_template_path): logging.info('Template crop feature class already exists, skipping') else: logging.info('Building template crop feature class') arcpy.CopyFeatures_management(cells_path, crop_template_path) # Remove unneeded et cell fields for field in arcpy.ListFields(crop_template_path): if (field.name not in keep_field_list and field.editable and not field.required): 
logging.debug(' Delete field: {}'.format(field.name)) arcpy.DeleteField_management(crop_template_path, field.name) field_list = [f.name for f in arcpy.ListFields(crop_template_path)] # Add crop acreage field if crop_acres_field not in field_list: logging.debug(' Add field: {}'.format(crop_acres_field)) arcpy.AddField_management( crop_template_path, crop_acres_field, 'Float') arcpy.CalculateField_management( crop_template_path, crop_acres_field, '0', 'PYTHON_9.3') # Add crop parameter fields if necessary for param_field, param_method, param_type in param_list: logging.debug(' Add field: {}'.format(param_field)) if param_field not in field_list: arcpy.AddField_management( crop_template_path, param_field, param_type) # if dairy_cutting_field not in field_list: # logging.debug(' Add field: {}'.format(dairy_cutting_field)) # arcpy.AddField_management(crop_template_path, dairy_cutting_field, 'Short') # arcpy.CalculateField_management( # crop_template_path, dairy_cutting_field, dairy_cuttings, 'PYTHON') # if beef_cutting_field not in field_list: # logging.debug(' Add field: {}'.format(beef_cutting_field)) # arcpy.AddField_management(crop_template_path, beef_cutting_field, 'Short') # arcpy.CalculateField_management( # crop_template_path, beef_cutting_field, beef_cuttings, 'PYTHON') # Add an empty/zero crop field for the field mappings below # if len(arcpy.ListFields(cells_path, 'CROP_EMPTY')) == 0: # arcpy.AddField_management(cells_path, 'CROP_EMPTY', 'Float') # arcpy.CalculateField_management( # cells_path, 'CROP_EMPTY', '0', 'PYTHON_9.3') # Process each crop logging.info('\nBuilding crop feature classes') for crop_num in crop_number_list: try: crop_param = crop_param_dict[crop_num] except: continue logging.info('{:>2d} {}'.format(crop_num, crop_param.name)) logging.debug('{}'.format(crop_param)) # Replace other characters with spaces, then remove multiple spaces crop_name = re.sub('[-"().,/~]', ' ', str(crop_param.name).lower()) crop_name = ' '.join(crop_name.strip().split()).replace(' ', '_') crop_path = os.path.join(calibration_ws, 'crop_{0:02d}_{1}{2}'.format( crop_num, crop_name, ext)) # crop_field = 'CROP_{:02d}'.format(crop_num) # Don't check crops in add list if crop_num in crop_add_list: pass # Skip if all zone crop areas are below threshold elif all([v < area_threshold for v in crop_acreage_dict[crop_num].values()]): logging.info(' All crop acreaeges below threshold, skipping crop') continue # Remove existing shapefiles if necessary if overwrite_flag and arcpy.Exists(crop_path): logging.debug(' Overwriting: {}'.format( os.path.basename(crop_path))) arcpy.Delete_management(crop_path) # Don't check skip list until after existing files are removed # if ((crop_test_list and crop_num not in crop_test_list) or # _skip_list and crop_num in crop_skip_list)): # .debug(' Skipping') # Copy ET cells for each crop if needed if arcpy.Exists(crop_path): logging.debug(' Shapefile already exists, skipping') continue else: # logging.debug(' {}'.format(crop_path)) arcpy.Copy_management(crop_template_path, crop_path) # Remove extra fields # for field in arcpy.ListFields(crop_path): # if field.name not in keep_field_list: # # logging.debug(' {}'.format(field.name)) # arcpy.DeleteField_management(crop_path, field.name) # Add alfalfa cutting field if crop_num in [1, 2, 3, 4]: if len(arcpy.ListFields(crop_path, dairy_cutting_field)) == 0: logging.debug(' Add field: {}'.format(dairy_cutting_field)) arcpy.AddField_management( crop_path, dairy_cutting_field, 'Short') arcpy.CalculateField_management( crop_path, 
dairy_cutting_field, dairy_cuttings, 'PYTHON') if len(arcpy.ListFields(crop_path, beef_cutting_field)) == 0: logging.debug(' Add field: {}'.format(beef_cutting_field)) arcpy.AddField_management( crop_path, beef_cutting_field, 'Short') arcpy.CalculateField_management( crop_path, beef_cutting_field, beef_cuttings, 'PYTHON') # Write default crop parameters to file field_list = [p[0] for p in param_list] + [cell_id_field, crop_acres_field] with arcpy.da.UpdateCursor(crop_path, field_list) as cursor: for row in cursor: # Don't remove zero acreage crops if in add list if crop_num in crop_add_list: pass # Skip and/or remove zones without crop acreage elif crop_acreage_dict[crop_num][row[-2]] < area_threshold: if remove_empty_flag: cursor.deleteRow() continue # Write parameter values for i, (param_field, param_method, param_type) in enumerate(param_list): row[i] = getattr(crop_param, param_method) # Write crop acreage if crop_num not in crop_add_list: row[-1] = crop_acreage_dict[crop_num][row[-2]] cursor.updateRow(row)
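# --- Illustrative sketch (not part of the original scripts) ------------------
# In both versions of this script the per-cell decision inside the update loop
# reduces to the same rule: features for force-added crops are always kept,
# cells at or above the CDL area threshold are kept and get parameters written,
# and below-threshold cells are skipped (and deleted when remove_empty_flag is
# set). A hedged summary of that rule:
def keep_cell(crop_num, cell_acres, area_threshold, crop_add_list,
              remove_empty_flag=True):
    """Return True if the crop/cell feature should remain in the crop shapefile."""
    if crop_num in crop_add_list:
        return True                      # force-added crop, always kept
    if cell_acres >= area_threshold:
        return True                      # enough CDL acreage in this cell
    return not remove_empty_flag         # below threshold: kept only if empties are retained
# ------------------------------------------------------------------------------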