def daymet_parameters(config_path, data_name='PPT',
                      overwrite_flag=False, debug_flag=False):
    """Calculate GSFLOW DAYMET Parameters

    Projects/clips the DAYMET mean monthly normal rasters to the HRU
    coordinate system and writes zonal mean statistics to the fishnet.

    Args:
        config_path (str): Project config file path
        data_name (str): DAYMET data type (ALL, PPT, TMAX, TMIN, etc.)
        overwrite_flag (bool): if True, overwrite existing files
        debug_flag (bool): if True, enable debug level logging

    Returns:
        None
    """
    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      ' config_file = {}\n'
                      ' Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'daymet_normals_log.txt'
    log_console = logging.FileHandler(
        filename=os.path.join(hru.log_ws, log_file_name), mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW DAYMET Parameters')

    # DAYMET
    # NOTE(review): the DAYMET workflow reuses the PRISM INI keys
    #   (prism_projection_method, prism_cellsize, calc_prism_jh_coef_flag);
    #   confirm this is intentional before renaming the INI options.
    daymet_ws = inputs_cfg.get('INPUTS', 'daymet_folder')
    daymet_proj_method = inputs_cfg.get('INPUTS', 'prism_projection_method')
    daymet_cs = inputs_cfg.getint('INPUTS', 'prism_cellsize')
    calc_jh_coef_flag = inputs_cfg.getboolean(
        'INPUTS', 'calc_prism_jh_coef_flag')

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()
    # Check that DAYMET folder is valid
    if not os.path.isdir(daymet_ws):
        logging.error(
            '\nERROR: DAYMET folder ({}) does not exist'.format(daymet_ws))
        sys.exit()
    proj_method_list = ['BILINEAR', 'CUBIC', 'NEAREST']
    if daymet_proj_method.upper() not in proj_method_list:
        logging.error('\nERROR: DAYMET projection method must be: {}'.format(
            ', '.join(proj_method_list)))
        sys.exit()
    logging.debug(' Projection method: {}'.format(
        daymet_proj_method.upper()))

    # Check other inputs
    if daymet_cs <= 0:
        logging.error('\nERROR: DAYMET cellsize must be greater than 0\n')
        sys.exit()

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS 0'
    env.workspace = hru.param_ws
    env.scratchWorkspace = hru.scratch_ws

    # DAYMET data names
    if data_name == 'ALL':
        data_name_list = ['PPT', 'TMAX', 'TMIN']
    else:
        data_name_list = [data_name]

    # Set month list
    month_list = ['{:02d}'.format(m) for m in range(1, 13)]

    # Check fields
    logging.info('\nAdding DAYMET fields if necessary')
    for data_name in data_name_list:
        for month in month_list:
            support.add_field_func(
                hru.polygon_path, '{}_{}'.format(data_name, month), 'DOUBLE')

    # Process each DAYMET data type
    logging.info('\nProjecting/clipping DAYMET mean monthly rasters')
    for data_name in data_name_list:
        logging.info('\n{}'.format(data_name))
        # Raw string + escaped dot (the unescaped '.' matched any character)
        daymet_normal_re = re.compile(
            r'daymet_(?P<type>%s)_30yr_normal_(?P<month>\d{2})\.img$' % data_name,
            re.IGNORECASE)

        # Search all files & subfolders in DAYMET folder
        #   for images that match the data type
        input_raster_dict = dict()
        for root, dirs, files in os.walk(daymet_ws):
            for file_name in files:
                daymet_normal_match = daymet_normal_re.match(file_name)
                if daymet_normal_match:
                    month_str = daymet_normal_match.group('month')
                    # os.walk's root already includes daymet_ws; joining
                    # daymet_ws again duplicated the folder prefix
                    input_raster_dict[month_str] = os.path.join(
                        root, file_name)
        if not input_raster_dict:
            # The original code also called logging.error() with no message
            # here, which raises a TypeError before sys.exit() is reached
            logging.error(
                ('\nERROR: No DAYMET rasters were found matching the '
                 'following pattern:\n {}\n\n').format(
                    daymet_normal_re.pattern))
            sys.exit()

        # DAYMET output data workspace
        output_ws = os.path.join(hru.param_ws, data_name.lower() + '_rasters')
        if not os.path.isdir(output_ws):
            os.mkdir(output_ws)

        # Remove all non year/month rasters in DAYMET temp folder
        logging.info(' Removing existing DAYMET files')
        for item in os.listdir(output_ws):
            if daymet_normal_re.match(item):
                os.remove(os.path.join(output_ws, item))

        # Extract, project/resample, clip
        # Process images by month
        zs_daymet_dict = dict()
        for month in month_list:
            logging.info(' Month: {}'.format(month))

            # Projected/clipped DAYMET raster
            input_raster = input_raster_dict[month]
            output_name = 'daymet_{}_normal_{}.img'.format(
                data_name.lower(), month)
            output_raster = os.path.join(output_ws, output_name)

            # Set preferred transforms
            input_sr = arcpy.sa.Raster(input_raster).spatialReference
            transform_str = support.transform_func(hru.sr, input_sr)
            if transform_str:
                logging.debug(' Transform: {}'.format(transform_str))

            # Project DAYMET rasters to HRU coordinate system
            # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
            support.project_raster_func(
                input_raster, output_raster, hru.sr,
                daymet_proj_method.upper(), daymet_cs, transform_str,
                '{} {}'.format(hru.ref_x, hru.ref_y), input_sr, hru)

            # Save parameters for calculating zonal stats
            zs_field = '{}_{}'.format(data_name, month)
            zs_daymet_dict[zs_field] = [output_raster, 'MEAN']

            # Cleanup
            del input_raster, output_raster, output_name
            del input_sr, transform_str, zs_field

        # Calculate zonal statistics per data type
        # (zs_daymet_dict is re-created for each data type above, so this
        #  must run inside the loop or only the last type would be written)
        logging.info('\nCalculating DAYMET zonal statistics')
        support.zonal_stats_func(
            zs_daymet_dict, hru.polygon_path, hru.point_path, hru)
        del zs_daymet_dict
def prism_4km_parameters(config_path, data_name='ALL',
                         overwrite_flag=False, debug_flag=False):
    """Calculate GSFLOW PRISM Parameters

    Projects/clips the PRISM 4km mean monthly normal rasters to the HRU
    coordinate system, writes zonal mean statistics to the fishnet, and
    optionally recalculates the Jensen-Haise coefficient from PRISM
    temperatures.

    Args:
        config_path (str): Project config file path
        data_name (str): PRISM data type (ALL, PPT, TMAX, TMIN, etc.)
        overwrite_flag (bool): if True, overwrite existing files
        debug_flag (bool): if True, enable debug level logging

    Returns:
        None
    """
    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      ' config_file = {}\n'
                      ' Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'prism_4km_normals_log.txt'
    log_console = logging.FileHandler(
        filename=os.path.join(hru.log_ws, log_file_name), mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW PRISM Parameters')

    # PRISM
    prism_ws = inputs_cfg.get('INPUTS', 'prism_folder')
    prism_proj_method = inputs_cfg.get('INPUTS', 'prism_projection_method')
    prism_cs = inputs_cfg.getint('INPUTS', 'prism_cellsize')
    calc_jh_coef_flag = inputs_cfg.getboolean(
        'INPUTS', 'calc_prism_jh_coef_flag')

    if calc_jh_coef_flag:
        # DEADBEEF - This could/should be moved to support_functions.py since
        #   it is in this script and in both PRISM scripts.
        # DEM Units
        dem_units = inputs_cfg.get('INPUTS', 'dem_units').lower()
        # Normalize the INI unit string.  'ft' and 'foot' are feet units
        # (they previously mapped to 'meter', which applied the meter
        # scaling to DEMs that were already in feet)
        dem_unit_types = {
            'meters': 'meter', 'm': 'meter', 'meter': 'meter',
            'feet': 'feet', 'ft': 'feet', 'foot': 'feet'}
        try:
            dem_units = dem_unit_types[dem_units]
        except KeyError:
            logging.error(
                '\nERROR: DEM unit "{}" is not supported\n'.format(dem_units))
            sys.exit()

        # Many expressions are hardcoded to units of feet
        # If dem_units are in meters, scale DEM_ADJ to get to feet
        # NOTE(review): 0.3048 is the feet->meters factor; confirm whether
        #   support.jensen_haise_func divides or multiplies by this scalar
        if dem_units == 'meter':
            dem_unit_scalar = 0.3048
        else:
            dem_unit_scalar = 1.0

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()
    # Check that PRISM folder is valid
    if not os.path.isdir(prism_ws):
        logging.error(
            '\nERROR: PRISM folder ({}) does not exist'.format(prism_ws))
        sys.exit()
    proj_method_list = ['BILINEAR', 'CUBIC', 'NEAREST']
    if prism_proj_method.upper() not in proj_method_list:
        logging.error('\nERROR: PRISM projection method must be: {}'.format(
            ', '.join(proj_method_list)))
        sys.exit()
    logging.debug(' Projection method: {}'.format(
        prism_proj_method.upper()))

    # Check other inputs
    if prism_cs <= 0:
        logging.error('\nERROR: PRISM cellsize must be greater than 0\n')
        sys.exit()

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS 0'
    env.workspace = hru.param_ws
    env.scratchWorkspace = hru.scratch_ws

    # PRISM data names
    if data_name == 'ALL':
        data_name_list = ['PPT', 'TMAX', 'TMIN']
    else:
        data_name_list = [data_name]

    # Set month list
    month_list = ['{0:02d}'.format(m) for m in range(1, 13)]

    # Check fields
    logging.info('\nAdding PRISM fields if necessary')
    for data_name in data_name_list:
        for month in month_list:
            support.add_field_func(
                hru.polygon_path, '{}_{}'.format(data_name, month), 'DOUBLE')

    # Process each PRISM data type
    logging.info('\nProjecting/clipping PRISM mean monthly rasters')
    for data_name in data_name_list:
        logging.info('\n{}'.format(data_name))
        # Raw string + escaped dots (the unescaped '.' matched any character)
        prism_normal_re = re.compile(
            r'PRISM_(?P<type>%s)_30yr_normal_4kmM2_(?P<month>\d{2})_bil\.bil$'
            % data_name, re.IGNORECASE)

        # Search all files & subfolders in prism folder
        #   for images that match the data type
        input_raster_dict = dict()
        for root, dirs, files in os.walk(prism_ws):
            for file_name in files:
                prism_normal_match = prism_normal_re.match(file_name)
                if prism_normal_match:
                    month_str = prism_normal_match.group('month')
                    input_raster_dict[month_str] = os.path.join(
                        root, file_name)
        if not input_raster_dict:
            # The original code also called logging.error() with no message
            # here, which raises a TypeError before sys.exit() is reached
            logging.error(
                '\nERROR: No PRISM rasters were found matching the '
                'following pattern:\n {}\n\nDouble check that the script '
                'and folder are for the same resolution '
                '(800m vs 4km)\n\n'.format(prism_normal_re.pattern))
            sys.exit()

        # PRISM output data workspace
        output_ws = os.path.join(hru.param_ws, data_name.lower() + '_rasters')
        if not os.path.isdir(output_ws):
            os.mkdir(output_ws)

        # Remove all non year/month rasters in PRISM temp folder
        logging.info(' Removing existing PRISM files')
        for item in os.listdir(output_ws):
            if prism_normal_re.match(item):
                os.remove(os.path.join(output_ws, item))

        # Extract, project/resample, clip
        # Process images by month
        zs_prism_dict = dict()
        for month in month_list:
            logging.info(' Month: {}'.format(month))

            # Projected/clipped PRISM raster
            input_raster = input_raster_dict[month]
            output_name = 'PRISM_{}_30yr_normal_4kmM2_{}.img'.format(
                data_name.lower(), month)
            output_raster = os.path.join(output_ws, output_name)

            # Set preferred transforms
            input_sr = arcpy.sa.Raster(input_raster).spatialReference
            transform_str = support.transform_func(hru.sr, input_sr)
            if transform_str:
                logging.debug(' Transform: {}'.format(transform_str))

            # Project PRISM rasters to HRU coordinate system
            # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
            support.project_raster_func(
                input_raster, output_raster, hru.sr,
                prism_proj_method.upper(), prism_cs, transform_str,
                '{} {}'.format(hru.ref_x, hru.ref_y), input_sr, hru)

            # Save parameters for calculating zonal stats
            zs_field = '{}_{}'.format(data_name, month)
            zs_prism_dict[zs_field] = [output_raster, 'MEAN']

            # Cleanup
            del input_raster, output_raster, output_name
            del input_sr, transform_str, zs_field

        # Calculate zonal statistics per data type
        # (zs_prism_dict is re-created for each data type above, so this
        #  must run inside the loop or only the last type would be written)
        logging.info('\nCalculating PRISM zonal statistics')
        support.zonal_stats_func(
            zs_prism_dict, hru.polygon_path, hru.point_path, hru)
        del zs_prism_dict

    # Jensen-Haise Potential ET air temperature coefficient
    # Update Jensen-Haise PET estimate using PRISM air temperature
    # DEADBEEF - First need to figure out month with highest Tmax
    #   Then get Tmin for same month
    if calc_jh_coef_flag:
        logging.info('\nRe-Calculating JH_COEF_HRU')
        logging.info(' Using PRISM temperature values')
        tmax_field_list = ['!TMAX_{:02d}!'.format(m) for m in range(1, 13)]
        tmin_field_list = ['!TMIN_{:02d}!'.format(m) for m in range(1, 13)]
        tmax_expr = 'max([{}])'.format(','.join(tmax_field_list))
        arcpy.CalculateField_management(
            hru.polygon_path, hru.jh_tmax_field, tmax_expr, 'PYTHON')
        # Sort TMAX and get TMIN for same month
        tmin_expr = 'max(zip([{}],[{}]))[1]'.format(
            ','.join(tmax_field_list), ','.join(tmin_field_list))
        arcpy.CalculateField_management(
            hru.polygon_path, hru.jh_tmin_field, tmin_expr, 'PYTHON')
        # Pass unit scalar to convert DEM_ADJ to feet if necessary
        support.jensen_haise_func(
            hru.polygon_path, hru.jh_coef_field, hru.dem_adj_field,
            hru.jh_tmin_field, hru.jh_tmax_field, dem_unit_scalar)
def impervious_parameters(config_path, overwrite_flag=False, debug_flag=False):
    """Calculate GSFLOW Impervious Parameters

    Projects/clips the impervious cover raster to the HRU coordinate
    system, writes zonal mean statistics to the fishnet, and sets the
    CAREA_MAX field from the impervious percentage.

    Args:
        config_path (str): Project config file path
        overwrite_flag (bool): if True, overwrite existing files
        debug_flag (bool): if True, enable debug level logging

    Returns:
        None
    """
    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      ' config_file = {}\n'
                      ' Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'impervious_parameters_log.txt'
    log_console = logging.FileHandler(
        filename=os.path.join(hru.log_ws, log_file_name), mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW Impervious Parameters')

    # Read parameters from config file
    imperv_orig_path = inputs_cfg.get('INPUTS', 'impervious_orig_path')
    # Projection method is hardcoded to NEAREST
    # (the impervious_projection_method INI option is intentionally ignored)
    # imperv_proj_method = inputs_cfg.get(
    #     'INPUTS', 'impervious_projection_method')
    imperv_proj_method = 'NEAREST'
    imperv_cs = inputs_cfg.getint('INPUTS', 'impervious_cellsize')
    imperv_pct_flag = inputs_cfg.getboolean('INPUTS', 'impervious_pct_flag')

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()
    # Impervious raster must exist
    if not arcpy.Exists(imperv_orig_path):
        logging.error('\nERROR: Impervious raster does not exist')
        sys.exit()

    # Check other inputs
    if imperv_cs <= 0:
        # Message previously said "soil cellsize" (copy/paste error)
        logging.error('\nERROR: impervious cellsize must be greater than 0')
        sys.exit()
    imperv_proj_method_list = ['BILINEAR', 'CUBIC', 'NEAREST']
    if imperv_proj_method.upper() not in imperv_proj_method_list:
        logging.error(
            '\nERROR: Impervious projection method must be: {}'.format(
                ', '.join(imperv_proj_method_list)))
        sys.exit()

    # Build output folder if necessary
    imperv_temp_ws = os.path.join(hru.param_ws, 'impervious_rasters')
    if not os.path.isdir(imperv_temp_ws):
        os.mkdir(imperv_temp_ws)

    # Output paths
    imperv_path = os.path.join(imperv_temp_ws, 'impervious_cover.img')

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS -1'
    env.workspace = imperv_temp_ws
    env.scratchWorkspace = hru.scratch_ws

    # Check fields
    # NOTE: CAREA_MIN is currently disabled throughout this function
    logging.info('\nAdding impervious fields if necessary')
    support.add_field_func(hru.polygon_path, hru.imperv_pct_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.carea_max_field, 'DOUBLE')

    # Project/clip impervious cover raster to the HRU coordinate system
    # (comment previously said "Available Water Capacity (AWC)" - copy/paste)
    logging.info('\nProjecting/clipping impervious cover raster')
    imperv_orig_sr = arcpy.sa.Raster(imperv_orig_path).spatialReference
    logging.debug(' Impervious GCS: {}'.format(imperv_orig_sr.GCS.name))
    # Remove existing projected raster
    if arcpy.Exists(imperv_path):
        arcpy.Delete_management(imperv_path)
    # Set preferred transforms
    transform_str = support.transform_func(hru.sr, imperv_orig_sr)
    logging.debug(' Transform: {}'.format(transform_str))
    logging.debug(' Projection method: NEAREST')
    # Project impervious raster
    # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
    support.project_raster_func(
        imperv_orig_path, imperv_path, hru.sr,
        imperv_proj_method, imperv_cs, transform_str,
        '{} {}'.format(hru.ref_x, hru.ref_y), imperv_orig_sr, hru)

    # List of rasters, fields, and stats for zonal statistics
    zs_imperv_dict = dict()
    zs_imperv_dict[hru.imperv_pct_field] = [imperv_path, 'MEAN']

    # Calculate zonal statistics
    logging.info('\nCalculating zonal statistics')
    support.zonal_stats_func(
        zs_imperv_dict, hru.polygon_path, hru.point_path, hru)

    # Calculate CAREA_MIN / CAREA_MAX
    logging.info('\nCalculating CAREA_MIN / CAREA_MAX')
    if imperv_pct_flag:
        # Convert percent impervious (0-100) to a fraction (0-1)
        arcpy.CalculateField_management(
            hru.polygon_path, hru.imperv_pct_field,
            '0.01 * !{}!'.format(hru.imperv_pct_field), 'PYTHON')
        # Copy the already-scaled fraction.  Applying '0.01 * !{}!' again
        # here (as the original code did) divided by 100 twice.
        arcpy.CalculateField_management(
            hru.polygon_path, hru.carea_max_field,
            '!{}!'.format(hru.imperv_pct_field), 'PYTHON')
    else:
        arcpy.CalculateField_management(
            hru.polygon_path, hru.carea_max_field,
            '!{}!'.format(hru.imperv_pct_field), 'PYTHON')
def veg_parameters(config_path):
    """Calculate GSFLOW Vegetation Parameters

    Reclassifies LANDFIRE vegetation type/cover rasters to PRMS
    vegetation parameters (COV_TYPE, COVDEN_SUM/WIN, *_INTCP,
    RAD_TRNCF) via ASCII remap tables and writes zonal statistics
    to the fishnet.

    Parameters
    ----------
    config_path : str
        Project configuration file (.ini) path.

    Returns
    -------
    None

    """
    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      ' config_file = {}\n'
                      ' Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'veg_parameters_log.txt'
    log_console = logging.FileHandler(filename=os.path.join(
        hru.log_ws, log_file_name), mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW Vegetation Parameters')

    # Landfire Vegetation Type
    veg_type_orig_path = inputs_cfg.get('INPUTS', 'veg_type_orig_path')
    veg_type_cs = inputs_cfg.getint('INPUTS', 'veg_type_cellsize')
    try:
        veg_type_field = inputs_cfg.get('INPUTS', 'veg_type_field')
    except ConfigParser.NoOptionError:
        veg_type_field = None
        logging.info(' Missing INI parameter, setting {} = {}'.format(
            'veg_type_field', veg_type_field))

    # Landfire Vegetation Cover
    veg_cover_orig_path = inputs_cfg.get('INPUTS', 'veg_cover_orig_path')
    veg_cover_cs = inputs_cfg.getint('INPUTS', 'veg_cover_cellsize')

    # Remap table file names (relative to the remap folder)
    remap_ws = inputs_cfg.get('INPUTS', 'remap_folder')
    cov_type_remap_name = inputs_cfg.get('INPUTS', 'cov_type_remap')
    covden_sum_remap_name = inputs_cfg.get('INPUTS', 'covden_sum_remap')
    covden_win_remap_name = inputs_cfg.get('INPUTS', 'covden_win_remap')
    snow_intcp_remap_name = inputs_cfg.get('INPUTS', 'snow_intcp_remap')
    srain_intcp_remap_name = inputs_cfg.get('INPUTS', 'srain_intcp_remap')
    wrain_intcp_remap_name = inputs_cfg.get('INPUTS', 'wrain_intcp_remap')
    root_depth_remap_name = inputs_cfg.get('INPUTS', 'root_depth_remap')

    # Get remap conversion factors (default 0.01 when missing from the INI)
    try:
        snow_intcp_remap_factor = inputs_cfg.getfloat(
            'INPUTS', 'snow_intcp_remap_factor')
    except ConfigParser.NoOptionError:
        snow_intcp_remap_factor = 0.01
        logging.info(' Missing INI parameter, setting {} = {}'.format(
            'snow_intcp_remap_factor', snow_intcp_remap_factor))
    try:
        wrain_intcp_remap_factor = inputs_cfg.getfloat(
            'INPUTS', 'wrain_intcp_remap_factor')
    except ConfigParser.NoOptionError:
        wrain_intcp_remap_factor = 0.01
        logging.info(' Missing INI parameter, setting {} = {}'.format(
            'wrain_intcp_remap_factor', wrain_intcp_remap_factor))
    try:
        srain_intcp_remap_factor = inputs_cfg.getfloat(
            'INPUTS', 'srain_intcp_remap_factor')
    except ConfigParser.NoOptionError:
        srain_intcp_remap_factor = 0.01
        logging.info(' Missing INI parameter, setting {} = {}'.format(
            'srain_intcp_remap_factor', srain_intcp_remap_factor))

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()
    # Check that both original vegetation rasters exist
    if not arcpy.Exists(veg_cover_orig_path):
        logging.error('\nERROR: Vegetation cover raster does not exist')
        sys.exit()
    if not arcpy.Exists(veg_type_orig_path):
        logging.error('\nERROR: Vegetation type raster does not exist')
        sys.exit()

    # Vegetation type can be set from another field in the raster
    # This is mostly for US_120EVT
    # Fall back to the VALUE field when veg_type_field is unset, missing
    # from the raster table, or not an integer field
    if not veg_type_field:
        logging.info('\n Using VALUE field to set vegetation type')
        veg_type_field = 'VALUE'
    elif len(arcpy.ListFields(veg_type_orig_path, veg_type_field)) == 0:
        logging.info(' veg_type_field {} does not exist\n Using VALUE '
                     'field to set vegetation type'.format(veg_type_field))
        veg_type_field = 'VALUE'
    elif arcpy.ListFields(veg_type_orig_path, veg_type_field)[0].type not in [
            'Integer', 'SmallInteger']:
        logging.info(
            ' veg_type_field {} is not an integer type\n Using VALUE '
            'field to set vegetation type'.format(veg_type_field))
        veg_type_field = 'VALUE'

    # Check that remap folder is valid
    if not os.path.isdir(remap_ws):
        logging.error('\nERROR: Remap folder does not exist')
        sys.exit()

    # Check that remap files exist and use a supported comment style
    cov_type_remap_path = os.path.join(remap_ws, cov_type_remap_name)
    covden_sum_remap_path = os.path.join(remap_ws, covden_sum_remap_name)
    covden_win_remap_path = os.path.join(remap_ws, covden_win_remap_name)
    snow_intcp_remap_path = os.path.join(remap_ws, snow_intcp_remap_name)
    srain_intcp_remap_path = os.path.join(remap_ws, srain_intcp_remap_name)
    wrain_intcp_remap_path = os.path.join(remap_ws, wrain_intcp_remap_name)
    root_depth_remap_path = os.path.join(remap_ws, root_depth_remap_name)
    remap_path_list = [
        cov_type_remap_path, covden_sum_remap_path, covden_win_remap_path,
        snow_intcp_remap_path, srain_intcp_remap_path,
        wrain_intcp_remap_path, root_depth_remap_path]
    for remap_path in remap_path_list:
        support.remap_check(remap_path)

    # Check other inputs
    if veg_type_cs <= 0:
        logging.error('\nERROR: Veg. type cellsize must be greater than 0')
        sys.exit()
    if veg_cover_cs <= 0:
        logging.error('\nERROR: Veg. cover cellsize must be greater than 0')
        sys.exit()

    # Build output folders if necesssary
    veg_temp_ws = os.path.join(hru.param_ws, 'veg_rasters')
    if not os.path.isdir(veg_temp_ws):
        os.mkdir(veg_temp_ws)

    # Output paths
    veg_cover_path = os.path.join(veg_temp_ws, 'veg_cover.img')
    veg_type_path = os.path.join(veg_temp_ws, 'veg_type.img')
    cov_type_path = os.path.join(veg_temp_ws, 'cov_type.img')
    covden_sum_path = os.path.join(veg_temp_ws, 'covden_sum.img')
    covden_win_path = os.path.join(veg_temp_ws, 'covden_win.img')
    snow_intcp_path = os.path.join(veg_temp_ws, 'snow_intcp.img')
    wrain_intcp_path = os.path.join(veg_temp_ws, 'wrain_intcp.img')
    srain_intcp_path = os.path.join(veg_temp_ws, 'srain_intcp.img')
    root_depth_path = os.path.join(veg_temp_ws, 'root_depth.img')
    rad_trncf_path = os.path.join(veg_temp_ws, 'rad_trncf.img')

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS -1'
    # env.pyramid = 'PYRAMIDS 0'
    env.workspace = veg_temp_ws
    env.scratchWorkspace = hru.scratch_ws

    # Check fields
    logging.info('\nAdding vegetation fields if necessary')
    support.add_field_func(hru.polygon_path, hru.cov_type_field, 'SHORT')
    support.add_field_func(hru.polygon_path, hru.covden_sum_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.covden_win_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.rad_trncf_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.snow_intcp_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.srain_intcp_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.wrain_intcp_field, 'DOUBLE')
    # support.add_field_func(hru.polygon_path, hru.root_depth_field, 'DOUBLE')

    # Check that remaps have all necessary values
    # NOTE(review): check_remap_keys is called unqualified while every other
    #   helper goes through support.* - confirm it is defined in this module
    logging.info('\nChecking remap tables against all raster cells'
                 ' (i.e. even those outside the study area)')
    check_remap_keys(cov_type_remap_path, veg_type_orig_path)
    check_remap_keys(covden_sum_remap_path, veg_cover_orig_path)
    check_remap_keys(root_depth_remap_path, veg_type_orig_path)

    # Project/clip veg cover to match HRU
    logging.info('\nProjecting/clipping vegetation cover raster')
    veg_cover_orig_sr = arcpy.sa.Raster(veg_cover_orig_path).spatialReference
    # Remove existing clipped/projected veg cover raster
    if arcpy.Exists(veg_cover_path):
        arcpy.Delete_management(veg_cover_path)
    # Set preferred transforms
    transform_str = support.transform_func(hru.sr, veg_cover_orig_sr)
    logging.debug(' Transform: {}'.format(transform_str))
    logging.debug(' Projection method: NEAREST')
    # Project veg cover
    # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
    support.project_raster_func(
        veg_cover_orig_path, veg_cover_path, hru.sr,
        'NEAREST', veg_cover_cs, transform_str,
        '{} {}'.format(hru.ref_x, hru.ref_y), veg_cover_orig_sr, hru)
    del transform_str, veg_cover_orig_sr

    # Project/clip veg type to match HRU
    logging.info('Projecting/clipping vegetation type raster')
    veg_type_orig_sr = arcpy.sa.Raster(veg_type_orig_path).spatialReference
    # Remove existing clipped/projected veg type raster
    if arcpy.Exists(veg_type_path):
        arcpy.Delete_management(veg_type_path)
    # Set preferred transforms
    transform_str = support.transform_func(hru.sr, veg_type_orig_sr)
    logging.debug(' Transform: {}'.format(transform_str))
    logging.debug(' Projection method: NEAREST')
    # Use a different field to calculate vegetation type
    if veg_type_field != 'VALUE':
        logging.info(' Calculating vegetation type from {} field'.format(
            veg_type_field))
        veg_type_obj = arcpy.sa.Lookup(veg_type_orig_path, veg_type_field)
    else:
        veg_type_obj = arcpy.sa.Raster(veg_type_orig_path)
    # Project veg type
    # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
    support.project_raster_func(
        veg_type_obj, veg_type_path, hru.sr,
        'NEAREST', veg_type_cs, transform_str,
        '{} {}'.format(hru.ref_x, hru.ref_y), veg_type_orig_sr, hru)
    del transform_str, veg_type_orig_sr, veg_type_obj

    # Reclassifying vegetation cover type
    logging.info('\nCalculating COV_TYPE')
    logging.debug(' Reclassifying: {}'.format(cov_type_remap_path))
    cov_type_obj = arcpy.sa.ReclassByASCIIFile(
        veg_type_path, cov_type_remap_path)
    cov_type_obj.save(cov_type_path)
    del cov_type_obj

    # Summer cover density (remap values are percent, scaled to fraction)
    logging.info('Calculating COVDEN_SUM')
    logging.debug(' Reclassifying: {}'.format(covden_sum_remap_path))
    covden_sum_obj = arcpy.sa.ReclassByASCIIFile(
        veg_cover_path, covden_sum_remap_path)
    covden_sum_obj *= 0.01
    covden_sum_obj.save(covden_sum_path)
    del covden_sum_obj

    # Winter cover density
    # Remapped from COV_TYPE (a winter fraction per vegetation type),
    # then multiplied by the summer density raster
    logging.info('Calculating COVDEN_WIN')
    logging.debug(' Reclassifying: {}'.format(covden_win_remap_path))
    covden_win_obj = arcpy.sa.ReclassByASCIIFile(
        cov_type_path, covden_win_remap_path)
    covden_win_obj *= 0.01
    covden_win_obj *= arcpy.sa.Raster(covden_sum_path)
    covden_win_obj.save(covden_win_path)
    del covden_win_obj

    # Snow interception storage capacity
    logging.info('Calculating SNOW_INTCP')
    logging.debug(' Reclassifying: {}'.format(snow_intcp_remap_path))
    snow_intcp_obj = arcpy.sa.ReclassByASCIIFile(
        cov_type_path, snow_intcp_remap_path)
    snow_intcp_obj *= snow_intcp_remap_factor
    snow_intcp_obj.save(snow_intcp_path)
    del snow_intcp_obj

    # Winter rain interception storage capacity
    logging.info('Calculating WRAIN_INTCP')
    logging.debug(' Reclassifying: {}'.format(wrain_intcp_remap_path))
    wrain_intcp_obj = arcpy.sa.ReclassByASCIIFile(
        cov_type_path, wrain_intcp_remap_path)
    wrain_intcp_obj *= wrain_intcp_remap_factor
    wrain_intcp_obj.save(wrain_intcp_path)
    del wrain_intcp_obj

    # Summer rain interception storage capacity
    logging.info('Calculating SRAIN_INTCP')
    logging.debug(' Reclassifying: {}'.format(srain_intcp_remap_path))
    srain_intcp_obj = arcpy.sa.ReclassByASCIIFile(
        cov_type_path, srain_intcp_remap_path)
    srain_intcp_obj *= srain_intcp_remap_factor
    srain_intcp_obj.save(srain_intcp_path)
    del srain_intcp_obj

    # Root depth
    logging.info('Calculating ROOT_DEPTH')
    logging.debug(' Reclassifying: {}'.format(root_depth_remap_path))
    root_depth_obj = arcpy.sa.ReclassByASCIIFile(
        veg_type_path, root_depth_remap_path)
    root_depth_obj.save(root_depth_path)
    del root_depth_obj

    # Short-wave radiation transmission coefficent
    # RAD_TRNCF = 0.9917 * exp(-2.7557 * COVDEN_WIN)
    logging.info('Calculating {}'.format(hru.rad_trncf_field))
    rad_trncf_obj = 0.9917 * arcpy.sa.Exp(
        -2.7557 * arcpy.sa.Raster(covden_win_path))
    rad_trncf_obj.save(rad_trncf_path)
    del rad_trncf_obj

    # List of rasters, fields, and stats for zonal statistics
    zs_veg_dict = dict()
    zs_veg_dict[hru.cov_type_field] = [cov_type_path, 'MAJORITY']
    zs_veg_dict[hru.covden_sum_field] = [covden_sum_path, 'MEAN']
    zs_veg_dict[hru.covden_win_field] = [covden_win_path, 'MEAN']
    zs_veg_dict[hru.snow_intcp_field] = [snow_intcp_path, 'MEAN']
    zs_veg_dict[hru.srain_intcp_field] = [srain_intcp_path, 'MEAN']
    zs_veg_dict[hru.wrain_intcp_field] = [wrain_intcp_path, 'MEAN']
    # zs_veg_dict[hru.root_depth_field] = [root_depth_path, 'MEAN']
    zs_veg_dict[hru.rad_trncf_field] = [rad_trncf_path, 'MEAN']

    # Calculate zonal statistics
    logging.info('\nCalculating vegetation zonal statistics')
    support.zonal_stats_func(
        zs_veg_dict, hru.polygon_path, hru.point_path, hru)

    # Clear vegetation parameters for lake cells (HRU_TYPE == 2)
    # and for inactive nodata cells (HRU_TYPE == 0 with DEM_ADJ == 0)
    if True:
        logging.info('\nClearing lake nodata vegetation parameters')
        hru_polygon_layer = "hru_polygon_layer"
        arcpy.MakeFeatureLayer_management(
            hru.polygon_path, hru_polygon_layer)
        arcpy.SelectLayerByAttribute_management(
            hru_polygon_layer, "NEW_SELECTION",
            '"{0}" = 2 OR ("{0}" = 0 AND "{1}" = 0)'.format(
                hru.type_field, hru.dem_adj_field))
        arcpy.CalculateField_management(
            hru_polygon_layer, hru.cov_type_field, 0, 'PYTHON')
        arcpy.CalculateField_management(
            hru_polygon_layer, hru.covden_sum_field, 0, 'PYTHON')
        arcpy.CalculateField_management(
            hru_polygon_layer, hru.covden_win_field, 0, 'PYTHON')
        arcpy.CalculateField_management(
            hru_polygon_layer, hru.snow_intcp_field, 0, 'PYTHON')
        arcpy.CalculateField_management(
            hru_polygon_layer, hru.srain_intcp_field, 0, 'PYTHON')
        arcpy.CalculateField_management(
            hru_polygon_layer, hru.wrain_intcp_field, 0, 'PYTHON')
        arcpy.CalculateField_management(
            hru_polygon_layer, hru.rad_trncf_field, 0, 'PYTHON')
        arcpy.Delete_management(hru_polygon_layer)
        del hru_polygon_layer
def hru_parameters(config_path):
    """Calculate GSFLOW HRU Parameters

    Build (or validate) the HRU centroid point shapefile, add every output
    field used by the downstream parameter scripts, and set HRU_TYPE for
    the study area (1), lakes (2), and SWALE model points (3).

    Parameters
    ----------
    config_path : str
        Project configuration file (.ini) path.

    Returns
    -------
    None

    """
    # Initialize hru parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error(
            '\nERROR: Config file could not be read, '
            'is not an input file, or does not exist\n'
            ' config_file = {}\n'
            ' Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'hru_parameters_log.txt'
    log_console = logging.FileHandler(
        filename=os.path.join(hru.log_ws, log_file_name), mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW HRU Parameters')

    # Read parameters from config file
    study_area_orig_path = inputs_cfg.get('INPUTS', 'study_area_path')
    try:
        set_lake_flag = inputs_cfg.getboolean('INPUTS', 'set_lake_flag')
    except ConfigParser.NoOptionError:
        set_lake_flag = False
        logging.info(
            ' Missing INI parameter, setting {} = {}'.format(
                'set_lake_flag', set_lake_flag))
    if set_lake_flag:
        lake_orig_path = inputs_cfg.get('INPUTS', 'lake_path')
        lake_zone_field = inputs_cfg.get('INPUTS', 'lake_zone_field')
        lake_area_pct = inputs_cfg.getfloat('INPUTS', 'lake_area_pct')

    # Model points
    model_inputs_path = inputs_cfg.get('INPUTS', 'model_points_path')
    # BUGFIX: catch only the missing-option error (matching the other INI
    # reads above) instead of a bare except that hides unrelated failures
    try:
        model_points_zone_field = inputs_cfg.get(
            'INPUTS', 'model_points_zone_field')
    except ConfigParser.NoOptionError:
        model_points_zone_field = 'FID'
        logging.info(
            ' Missing INI parameter, setting {} = {}'.format(
                'model_points_zone_field', model_points_zone_field))
    try:
        model_points_type_field = inputs_cfg.get(
            'INPUTS', 'model_points_type_field')
    except ConfigParser.NoOptionError:
        model_points_type_field = 'TYPE'
        logging.info(
            ' Missing INI parameter, setting {} = {}'.format(
                'model_points_type_field', model_points_type_field))

    # Control flags
    try:
        calc_flow_acc_dem_flag = inputs_cfg.getboolean(
            'INPUTS', 'calc_flow_acc_dem_flag')
    except ConfigParser.NoOptionError:
        calc_flow_acc_dem_flag = False
        logging.info(
            ' Missing INI parameter, setting {} = {}'.format(
                'calc_flow_acc_dem_flag', calc_flow_acc_dem_flag))
    try:
        calc_topo_index_flag = inputs_cfg.getboolean(
            'INPUTS', 'calc_topo_index_flag')
    except ConfigParser.NoOptionError:
        calc_topo_index_flag = False
        logging.info(
            ' Missing INI parameter, setting {} = {}'.format(
                'calc_topo_index_flag', calc_topo_index_flag))
    # try:
    #     set_ppt_zones_flag = inputs_cfg.getboolean(
    #         'INPUTS', 'set_ppt_zones_flag')
    # except ConfigParser.NoOptionError:
    #     set_ppt_zones_flag = False
    #     logging.info(
    #         ' Missing INI parameter, setting {} = {}'.format(
    #             'set_ppt_zones_flag', set_ppt_zones_flag))

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error(
            '\nERROR: Fishnet ({}) does not exist'.format(
                hru.polygon_path))
        sys.exit()
    if set_lake_flag:
        if not arcpy.Exists(lake_orig_path):
            logging.error(
                '\nERROR: Lake layer ({}) does not exist'.format(
                    lake_orig_path))
            sys.exit()
        # lake_path must be a polygon shapefile
        if arcpy.Describe(lake_orig_path).datasetType != 'FeatureClass':
            logging.error(
                '\nERROR: lake_path must be a polygon shapefile')
            sys.exit()
        # Check lake_zone_field
        if lake_zone_field.upper() in ['', 'FID', 'NONE']:
            # Fall back to the object ID when no zone field is given
            lake_zone_field = arcpy.Describe(lake_orig_path).OIDFieldName
            logging.warning(
                '\n NOTE: Using {} to set {}\n'.format(
                    lake_zone_field, hru.lake_id_field))
        elif not arcpy.ListFields(lake_orig_path, lake_zone_field):
            logging.error(
                '\nERROR: lake_zone_field field {} does not exist\n'.format(
                    lake_zone_field))
            sys.exit()
        # Need to check that lake_zone_field is an int type
        elif not [f.type for f in arcpy.Describe(lake_orig_path).fields
                  if (f.name == lake_zone_field and
                      f.type in ['SmallInteger', 'Integer'])]:
            logging.error(
                '\nERROR: lake_zone_field field {} must be an '
                'integer type\n'.format(lake_zone_field))
            sys.exit()

    # Check model points
    if not os.path.isfile(model_inputs_path):
        logging.error(
            '\nERROR: Model points shapefiles does not exist'
            '\nERROR: {}'.format(model_inputs_path))
        sys.exit()
    # model_points_path must be a point shapefile
    elif arcpy.Describe(model_inputs_path).datasetType != 'FeatureClass':
        logging.error(
            '\nERROR: model_points_path must be a point shapefile')
        sys.exit()

    # For now, study area has to be a polygon
    if arcpy.Describe(study_area_orig_path).datasetType != 'FeatureClass':
        logging.error(
            '\nERROR: For now, study area must be a polygon shapefile')
        sys.exit()

    # Build output folder if necessary
    hru_temp_ws = os.path.join(hru.param_ws, 'hru_temp')
    if not os.path.isdir(hru_temp_ws):
        os.mkdir(hru_temp_ws)
    # Output paths
    study_area_path = os.path.join(hru_temp_ws, 'study_area.shp')
    lake_path = os.path.join(hru_temp_ws, 'lakes.shp')
    lake_clip_path = os.path.join(hru_temp_ws, 'lake_clip.shp')
    model_points_path = os.path.join(hru_temp_ws, 'model_points.shp')

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS -1'
    # env.pyramid = 'PYRAMIDS 0'
    env.workspace = hru.param_ws
    env.scratchWorkspace = hru.scratch_ws

    # Create HRU points at polygon centroids
    if not arcpy.Exists(hru.point_path):
        logging.info('\n Building HRU point shapefile')
        # FeatureToPoint will copy all fields in hru.polygon_path
        # arcpy.FeatureToPoint_management(
        #     hru.polygon_path, hru.point_path)
        # Build point_path directly
        arcpy.CreateFeatureclass_management(
            os.path.dirname(hru.point_path),
            os.path.basename(hru.point_path), 'POINT')
        arcpy.DefineProjection_management(hru.point_path, hru.sr)
        arcpy.AddField_management(
            hru.point_path, hru.fid_field, 'LONG')
        # Copy each polygon centroid, saving the polygon OID in fid_field
        hru_centroid_list = [
            row for row in arcpy.da.SearchCursor(
                hru.polygon_path, ['OID@', 'SHAPE@XY'])]
        with arcpy.da.InsertCursor(
                hru.point_path,
                ['OID@', 'SHAPE@XY', hru.fid_field]) as update_c:
            for hru_centroid in hru_centroid_list:
                update_c.insertRow(
                    [hru_centroid[0], hru_centroid[1], hru_centroid[0]])
        del hru_centroid_list
    # Check existing HRU points
    else:
        # Remove any extra fields
        field_remove_list = [
            f.name for f in arcpy.ListFields(hru.point_path)
            if f.name not in ['FID', 'Shape', hru.fid_field]]
        # Skip if there is only one field in the shapefile
        if len(field_remove_list) > 1:
            logging.info('\n Removing HRU point fields')
            for field in field_remove_list:
                logging.debug(' {}'.format(field))
                try:
                    arcpy.DeleteField_management(hru.point_path, field)
                except Exception as e:
                    logging.debug(' Unhandled exception: {}'.format(e))
                    continue
        # Save original FID
        if not arcpy.ListFields(hru.point_path, hru.fid_field):
            arcpy.AddField_management(
                hru.point_path, hru.fid_field, 'LONG')
            arcpy.CalculateField_management(
                hru.point_path, hru.fid_field, '!FID!', 'PYTHON')
        if arcpy.ListFields(hru.point_path, 'Id'):
            arcpy.DeleteField_management(hru.point_path, 'Id')
        del field_remove_list

    # Add all output fields
    logging.info('\nAdding fields if necessary')
    logging.info(
        ' Note: You may see duplicate field names when writing to a network '
        'drive')
    # HRU/DEM Fields
    support.add_field_func(hru.polygon_path, hru.fid_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.id_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.type_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.dem_mean_field, 'DOUBLE')
    # support.add_field_func(hru.polygon_path, hru.dem_median_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_min_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_max_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_adj_field, 'DOUBLE')
    if calc_flow_acc_dem_flag:
        support.add_field_func(hru.polygon_path, hru.dem_flowacc_field, 'DOUBLE')
        support.add_field_func(hru.polygon_path, hru.dem_sum_field, 'DOUBLE')
        support.add_field_func(hru.polygon_path, hru.dem_count_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_sink_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.crt_elev_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.crt_fill_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_aspect_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.dem_slope_deg_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_slope_rad_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_slope_pct_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.area_field, 'DOUBLE')
    if calc_topo_index_flag:
        support.add_field_func(hru.polygon_path, hru.topo_index_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.row_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.col_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.x_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.y_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.lat_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.lon_field, 'DOUBLE')
    # Lake fields
    support.add_field_func(hru.polygon_path, hru.lake_id_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.lake_area_field, 'DOUBLE')
    # Stream fields
    # BUGFIX: hru.iseg_field was added twice in this list; the duplicate
    # call was removed
    support.add_field_func(hru.polygon_path, hru.iseg_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.irunbound_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.flow_dir_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.krch_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.irch_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.jrch_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.reach_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.rchlen_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.maxreach_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.outseg_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.iupseg_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.subbasin_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.segbasin_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.outflow_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.strm_top_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.strm_slope_field, 'DOUBLE')
    # Sink field
    support.add_field_func(hru.polygon_path, hru.hru_sink_field, 'LONG')
    # Precipitation zone fields
    support.add_field_func(hru.polygon_path, hru.ppt_zone_id_field, 'SHORT')
    support.add_field_func(hru.polygon_path, hru.hru_psta_field, 'SHORT')
    # Temperature zone fields
    # if temp_calc_method == 'ZONES':
    #     support.add_field_func(hru.polygon_path, hru.temp_zone_id_field, 'SHORT')
    #     support.add_field_func(hru.polygon_path, hru.hru_tsta_field, 'SHORT')
    # DEM based
    support.add_field_func(hru.polygon_path, hru.jh_tmax_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.jh_tmin_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.jh_coef_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.snarea_thresh_field, 'DOUBLE')
    # Aspect based
    support.add_field_func(hru.polygon_path, hru.tmax_adj_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.tmin_adj_field, 'DOUBLE')
    # Vegetation fields
    support.add_field_func(hru.polygon_path, hru.cov_type_field, 'SHORT')
    support.add_field_func(hru.polygon_path, hru.covden_sum_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.covden_win_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.rad_trncf_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.snow_intcp_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.srain_intcp_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.wrain_intcp_field, 'DOUBLE')
    # Soil fields
    support.add_field_func(hru.polygon_path, hru.awc_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.clay_pct_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.sand_pct_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.ksat_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.soil_type_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.soil_root_max_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.moist_init_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.moist_max_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.rechr_init_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.rechr_max_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.ssr2gw_rate_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.ssr2gw_k_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.slowcoef_lin_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.slowcoef_sq_field, 'DOUBLE')
    # Impervious fields
    support.add_field_func(hru.polygon_path, hru.imperv_pct_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.carea_max_field, 'DOUBLE')
    # PRISM mean monthly fields
    month_list = ['{:02d}'.format(m) for m in range(1, 13)]
    # month_list.extend(['14'])
    for prism_data_name in ['PPT', 'TMAX', 'TMIN']:
        for month in month_list:
            support.add_field_func(
                hru.polygon_path,
                '{}_{}'.format(prism_data_name, month), 'DOUBLE')
    # PRISM mean monthly precipitation ratio fields
    for month in month_list:
        if month == '14':
            continue
        support.add_field_func(
            hru.polygon_path, 'PPT_RT_{}'.format(month), 'DOUBLE')
    # Temperature adjust fields are added in temp_adjust_parameters.py if needed
    # for month in month_list:
    #     if month == '14':
    #         continue
    #     support.add_field_func(
    #         hru.polygon_path, 'TMX_ADJ_{}'.format(month), 'DOUBLE')
    # for month in month_list:
    #     if month == '14':
    #         continue
    #     support.add_field_func(
    #         hru.polygon_path, 'TMN_ADJ_{}'.format(month), 'DOUBLE')

    # Id field is added by default to new fishnets
    if arcpy.ListFields(hru.polygon_path, 'Id'):
        arcpy.DeleteField_management(hru.polygon_path, 'Id')

    logging.info('\nCalculating parameters')
    # Keep original FID for subsetting in zonal stats
    logging.info(' Saving original HRU FID to {}'.format(
        hru.fid_field))
    arcpy.CalculateField_management(
        hru.polygon_path, hru.fid_field, '!FID!', 'PYTHON')

    # Cell X/Y
    logging.info(' Calculating cell X/Y')
    cell_xy_func(hru.polygon_path, hru.x_field, hru.y_field)

    # Create unique ID, start at top left corner, work down rows
    # Row/Col numbered from top left corner (1's based numbering)
    logging.info(' Calculating cell ID/row/col')
    cell_id_col_row_func(
        hru.polygon_path, hru.id_field, hru.col_field, hru.row_field,
        hru.extent, hru.cs)

    # Cell Lat/Lon
    logging.info(' Calculating cell lat/lon')
    cell_lat_lon_func(
        hru.polygon_path, hru.lat_field, hru.lon_field, hru.sr.GCS)

    # Cell Area
    logging.info(' Calculating cell area (acres)')
    arcpy.CalculateField_management(
        hru.polygon_path, hru.area_field, '!SHAPE.AREA@acres!', 'PYTHON')

    # Reset HRU_TYPE
    logging.info('\nResetting {} to 0'.format(hru.type_field))
    arcpy.CalculateField_management(
        hru.polygon_path, hru.type_field, 0, 'PYTHON')

    # Reset LAKE_ID
    if set_lake_flag:
        logging.info('Resetting {} to 0'.format(hru.lake_id_field))
        arcpy.CalculateField_management(
            hru.polygon_path, hru.lake_id_field, 0, 'PYTHON')
        logging.info('Resetting {} to 0'.format(hru.lake_area_field))
        arcpy.CalculateField_management(
            hru.polygon_path, hru.lake_area_field, 0, 'PYTHON')

    # Calculate HRU Type
    logging.info('\nCalculating cell HRU Type')
    study_area_desc = arcpy.Describe(study_area_orig_path)
    study_area_sr = study_area_desc.spatialReference
    logging.debug(' Study area: {}'.format(study_area_orig_path))
    logging.debug(' Study area spat. ref.: {}'.format(
        study_area_sr.name))
    logging.debug(' Study area GCS: {}'.format(
        study_area_sr.GCS.name))

    # If study area spat_ref doesn't match hru_param spat_ref
    # Project study area to hru_param spat ref
    # Otherwise, read study_area directly
    if hru.sr.name != study_area_sr.name:
        logging.info(' Projecting study area...')
        # Set preferred transforms
        transform_str = support.transform_func(hru.sr, study_area_sr)
        logging.debug(' Transform: {}'.format(transform_str))
        # Project study area shapefile
        arcpy.Project_management(
            study_area_orig_path, study_area_path, hru.sr,
            transform_str, study_area_sr)
        del transform_str
    else:
        arcpy.Copy_management(study_area_orig_path, study_area_path)
    # Cells whose centroid falls in the study area get HRU_TYPE = 1
    support.zone_by_centroid_func(
        study_area_path, hru.type_field, 1,
        hru.polygon_path, hru.point_path, hru)

    # Calculate HRU Type for lakes (HRU_TYPE = 2)
    if set_lake_flag:
        logging.info('\nCalculating cell HRU Type & ID for lakes')
        lake_layer = 'lake_layer'
        lake_desc = arcpy.Describe(lake_orig_path)
        lake_sr = lake_desc.spatialReference
        logging.debug(' Lakes: {}'.format(lake_orig_path))
        logging.debug(' Lakes spat. ref.: {}'.format(lake_sr.name))
        logging.debug(' Lakes GCS: {}'.format(lake_sr.GCS.name))

        # If lakes spat_ref doesn't match hru_param spat_ref
        # Project lakes to hru_param spat ref
        # Otherwise, read lakes directly
        if hru.sr.name != lake_sr.name:
            logging.info(' Projecting lakes...')
            # Set preferred transforms
            transform_str = support.transform_func(hru.sr, lake_sr)
            logging.debug(' Transform: {}'.format(transform_str))
            # Project lakes shapefile
            arcpy.Project_management(
                lake_orig_path, lake_path, hru.sr, transform_str, lake_sr)
            arcpy.MakeFeatureLayer_management(lake_path, lake_layer)
            del lake_path, transform_str
        else:
            arcpy.MakeFeatureLayer_management(
                lake_orig_path, lake_layer)

        # Clip lakes by study area after projecting lakes
        logging.info(' Clipping lakes...')
        arcpy.Clip_analysis(lake_layer, study_area_path, lake_clip_path)
        # Remove all unnecesary fields
        for field in arcpy.ListFields(lake_clip_path):
            if field.name not in [lake_zone_field, 'Shape']:
                try:
                    arcpy.DeleteField_management(lake_clip_path, field.name)
                except Exception as e:
                    logging.debug(' Unhandled exception: {}'.format(e))
                    continue

        # Set lake HRU_TYPE
        logging.info(' Setting lake {}'.format(hru.type_field))
        support.zone_by_area_func(
            lake_clip_path, hru.type_field, 2, hru.polygon_path,
            hru, hru.area_field, hru.lake_area_field, lake_area_pct)
        # Set lake ID
        logging.info(' Setting {}'.format(hru.lake_id_field))
        support.zone_by_area_func(
            lake_clip_path, hru.lake_id_field, lake_zone_field,
            hru.polygon_path, hru, hru.area_field, hru.lake_area_field,
            lake_area_pct)
        # Cleanup
        del lake_layer, lake_desc, lake_sr

    # Read in model points shapefile
    logging.info('\nChecking model points shapefile')
    model_points_desc = arcpy.Describe(model_inputs_path)
    model_points_sr = model_points_desc.spatialReference
    logging.debug(' Points: {}'.format(model_inputs_path))
    logging.debug(' Points spat. ref.: {}'.format(model_points_sr.name))
    logging.debug(' Points GCS: {}'.format(model_points_sr.GCS.name))

    # If model points spat_ref doesn't match hru_param spat_ref
    # Project model points to hru_param spat ref
    # Otherwise, read model points directly
    if hru.sr.name != model_points_sr.name:
        logging.info(
            ' Model points projection does not match fishnet.\n'
            ' Projecting model points.\n')
        # Set preferred transforms
        transform_str = support.transform_func(hru.sr, model_points_sr)
        logging.debug(' Transform: {}'.format(transform_str))
        arcpy.Project_management(
            model_inputs_path, model_points_path, hru.sr,
            transform_str, model_points_sr)
    else:
        arcpy.Copy_management(model_inputs_path, model_points_path)
    model_points_lyr = 'model_points_lyr'
    arcpy.MakeFeatureLayer_management(model_points_path, model_points_lyr)

    # Check model point types
    logging.info(' Checking model point types')
    model_point_types = [str(r[0]).upper() for r in arcpy.da.SearchCursor(
        model_points_path, [model_points_type_field])]
    if not set(model_point_types).issubset(set(['OUTLET', 'SUBBASIN', 'SWALE'])):
        logging.error('\nERROR: Unsupported model point type(s) found, exiting')
        logging.error('\n Model point types: {}\n'.format(model_point_types))
        sys.exit()
    elif not set(model_point_types).intersection(set(['OUTLET', 'SWALE'])):
        logging.error(
            '\nERROR: At least one model point must be an OUTLET or SWALE, '
            'exiting\n')
        sys.exit()
    else:
        logging.debug(' {}'.format(', '.join(model_point_types)))

    # SWALE model points force the intersecting cells to HRU_TYPE = 3
    if 'SWALE' in model_point_types:
        # BUGFIX: build the where clause from model_points_type_field
        # instead of a hardcoded "TYPE" so a non-default type field
        # (read from the INI above) still selects the SWALE points
        arcpy.SelectLayerByAttribute_management(
            model_points_lyr, 'NEW_SELECTION',
            '"{}" = \'SWALE\''.format(model_points_type_field))
        logging.info(' Setting swale (sink) cells to {}=3'.format(
            hru.type_field))
        hru_polygon_lyr = 'hru_polygon_lyr'
        arcpy.MakeFeatureLayer_management(hru.polygon_path, hru_polygon_lyr)
        arcpy.SelectLayerByAttribute_management(hru_polygon_lyr, 'CLEAR_SELECTION')
        arcpy.SelectLayerByLocation_management(
            hru_polygon_lyr, 'INTERSECT', model_points_lyr)
        arcpy.CalculateField_management(
            hru_polygon_lyr, hru.type_field, 3, 'PYTHON')
        arcpy.SelectLayerByAttribute_management(hru_polygon_lyr, 'CLEAR_SELECTION')
        arcpy.SelectLayerByAttribute_management(model_points_lyr, 'CLEAR_SELECTION')
        arcpy.Delete_management(hru_polygon_lyr)
        del hru_polygon_lyr
    arcpy.Delete_management(model_points_lyr)
    del model_points_lyr

    # Setting HRU_PSTA to default value of 1
    if all([row[0] == 0 for row in arcpy.da.SearchCursor(
            hru.polygon_path, [hru.hru_psta_field])]):
        logging.info('Setting {} to 1'.format(
            hru.hru_psta_field))
        arcpy.CalculateField_management(
            hru.polygon_path, hru.hru_psta_field, '1', 'PYTHON')

    # Cleanup
    del study_area_desc, study_area_sr
def flow_parameters(config_path, overwrite_flag=False, debug_flag=False): """Calculate GSFLOW Flow Parameters Args: config_file (str): Project config file path ovewrite_flag (bool): if True, overwrite existing files debug_flag (bool): if True, enable debug level logging Returns: None """ # Initialize hru_parameters class hru = support.HRUParameters(config_path) # Open input parameter config file inputs_cfg = ConfigParser.ConfigParser() try: inputs_cfg.readfp(open(config_path)) except Exception as e: logging.error('\nERROR: Config file could not be read, ' 'is not an input file, or does not exist\n' ' config_file = {}\n' ' Exception: {}\n'.format(config_path, e)) sys.exit() # Log DEBUG to file log_file_name = 'dem_2_stream_log.txt' log_console = logging.FileHandler(filename=os.path.join( hru.log_ws, log_file_name), mode='w') log_console.setLevel(logging.DEBUG) log_console.setFormatter(logging.Formatter('%(message)s')) logging.getLogger('').addHandler(log_console) logging.info('\nGSFLOW DEM To Streams') # Check whether lake parameters should be calculated try: set_lake_flag = inputs_cfg.getboolean('INPUTS', 'set_lake_flag') except ConfigParser.NoOptionError: set_lake_flag = False logging.info(' Missing INI parameter, setting {} = {}'.format( 'set_lake_flag', set_lake_flag)) # Model points model_inputs_path = inputs_cfg.get('INPUTS', 'model_points_path') try: model_points_zone_field = inputs_cfg.get('INPUTS', 'model_points_zone_field') except: model_points_zone_field = 'FID' logging.info(' Missing INI parameter, setting {} = {}'.format( 'model_points_zone_field', model_points_zone_field)) try: model_points_type_field = inputs_cfg.get('INPUTS', 'model_points_type_field') except: model_points_type_field = 'TYPE' logging.info(' Missing INI parameter, setting {} = {}'.format( 'model_points_type_field', model_points_type_field)) # Flow parameters flow_acc_threshold = inputs_cfg.getint('INPUTS', 'flow_acc_threshold') flow_length_threshold = inputs_cfg.getint('INPUTS', 
'flow_length_threshold') try: calc_flow_dir_points_flag = inputs_cfg.getboolean( 'INPUTS', 'calc_flow_dir_points_flag') except ConfigParser.NoOptionError: calc_flow_dir_points_flag = False logging.info(' Missing INI parameter, setting {} = {}'.format( 'calc_flow_dir_points_flag', calc_flow_dir_points_flag)) try: lake_seg_offset = inputs_cfg.getint('INPUTS', 'lake_seg_offset') except ConfigParser.NoOptionError: lake_seg_offset = 0 logging.info(' Missing INI parameter, setting {} = {}'.format( 'lake_seg_offset', lake_seg_offset)) if lake_seg_offset < 0: logging.error( '\nERROR: lake_seg_offset must be an integer greater than 0') sys.exit() # Check input paths dem_temp_ws = os.path.join(hru.param_ws, 'dem_rasters') dem_path = os.path.join(dem_temp_ws, 'dem.img') if not arcpy.Exists(dem_path): logging.error( '\nERROR: Projected/clipped DEM ({}) does not exist' '\nERROR: Try rerunning dem_parameters.py'.format(dem_path)) sys.exit() if not arcpy.Exists(hru.polygon_path): logging.error('\nERROR: Fishnet ({}) does not exist'.format( hru.polygon_path)) sys.exit() # Check model points if not os.path.isfile(model_inputs_path): logging.error('\nERROR: Model points shapefiles does not exist' '\nERROR: {}'.format(model_inputs_path)) sys.exit() # model_points_path must be a point shapefile elif arcpy.Describe(model_inputs_path).datasetType != 'FeatureClass': logging.error('\nERROR: model_points_path must be a point shapefile') sys.exit() # Build output folder if necessary flow_temp_ws = os.path.join(hru.param_ws, 'flow_rasters') if not os.path.isdir(flow_temp_ws): os.mkdir(flow_temp_ws) # Output paths hru_type_path = os.path.join(flow_temp_ws, 'hru_type.img') dem_adj_path = os.path.join(flow_temp_ws, 'dem_adj.img') lake_id_path = os.path.join(flow_temp_ws, 'lake_id.img') dem_sink_path = os.path.join(flow_temp_ws, 'dem_sink.img') dem_fill_path = os.path.join(flow_temp_ws, 'dem_fill.img') flow_dir_path = os.path.join(flow_temp_ws, 'flow_dir.img') flow_dir_points = 
os.path.join(flow_temp_ws, 'flow_dir_points.shp') flow_acc_full_path = os.path.join(flow_temp_ws, 'flow_acc_full.img') flow_acc_sub_path = os.path.join(flow_temp_ws, 'flow_acc_sub.img') flow_mask_path = os.path.join(flow_temp_ws, 'flow_mask.img') stream_link_path = os.path.join(flow_temp_ws, 'stream_link.img') stream_link_a_path = os.path.join(flow_temp_ws, 'stream_link_a.img') stream_link_b_path = os.path.join(flow_temp_ws, 'stream_link_b.img') stream_order_path = os.path.join(flow_temp_ws, 'stream_order.img') stream_length_path = os.path.join(flow_temp_ws, 'stream_length.img') watersheds_path = os.path.join(flow_temp_ws, 'watersheds.img') outlet_path = os.path.join(flow_temp_ws, 'outlet.img') swale_path = os.path.join(flow_temp_ws, 'swale.img') subbasin_path = os.path.join(flow_temp_ws, 'subbasin.img') basin_path = os.path.join(flow_temp_ws, 'basin.img') streams_path = os.path.join(flow_temp_ws, 'streams.shp') model_points_path = os.path.join(flow_temp_ws, 'model_points.shp') # Set ArcGIS environment variables arcpy.CheckOutExtension('Spatial') env.overwriteOutput = True # env.pyramid = 'PYRAMIDS -1' env.pyramid = 'PYRAMIDS 0' env.workspace = flow_temp_ws env.scratchWorkspace = hru.scratch_ws # Set environment parameters env.extent = hru.extent env.cellsize = hru.cs env.outputCoordinateSystem = hru.sr # Read in model points shapefile logging.info('\nChecking model points shapefile') model_points_desc = arcpy.Describe(model_inputs_path) model_points_sr = model_points_desc.spatialReference logging.debug(' Points: {}'.format(model_inputs_path)) logging.debug(' Points spat. 
ref.: {}'.format(model_points_sr.name)) logging.debug(' Points GCS: {}'.format(model_points_sr.GCS.name)) # If model points spat_ref doesn't match hru_param spat_ref # Project model points to hru_param spat ref # Otherwise, read model points directly if hru.sr.name != model_points_sr.name: logging.info(' Model points projection does not match fishnet.\n' ' Projecting model points.\n') # Set preferred transforms transform_str = support.transform_func(hru.sr, model_points_sr) logging.debug(' Transform: {}'.format(transform_str)) arcpy.Project_management(model_inputs_path, model_points_path, hru.sr, transform_str, model_points_sr) else: arcpy.Copy_management(model_inputs_path, model_points_path) model_points_lyr = 'model_points_lyr' arcpy.MakeFeatureLayer_management(model_points_path, model_points_lyr) # Check model_points_zone_field if model_points_zone_field.upper() in ['', 'FID', 'NONE']: model_points_fid_field = arcpy.Describe(model_points_path).OIDFieldName logging.warning(' NOTE: Using {}+1 to set {}'.format( model_points_fid_field, hru.subbasin_field)) model_points_zone_field = 'ZONE_VALUE' if not arcpy.ListFields(model_points_path, model_points_zone_field): arcpy.AddField_management(model_points_path, model_points_zone_field, 'LONG') arcpy.CalculateField_management( model_points_path, model_points_zone_field, '!{}! + 1'.format(model_points_fid_field), 'PYTHON') elif not arcpy.ListFields(model_points_path, model_points_zone_field): logging.error( '\nERROR: model_points_zone_field {} does not exist\n'.format( model_points_zone_field)) sys.exit() # Need to check that model_points_zone_field is an int type elif not [ f.type for f in arcpy.Describe(model_points_path).fields if (f.name == model_points_zone_field and f.type in ['SmallInteger', 'Integer']) ]: logging.error( '\nERROR: model_points_zone_field {} must be an integer type\n'. 
format(model_points_zone_field)) sys.exit() # Need to check that model_points_zone_field is all positive values if min([ row[0] for row in arcpy.da.SearchCursor(model_points_path, [model_points_zone_field]) ]) <= 0: logging.error( '\nERROR: model_points_zone_field values must be positive\n'. format(model_points_zone_field)) sys.exit() # Check that subbasin values increment from 1 to nsub logging.info(' Checking subbasin numbering') subbasin_id_list = sorted( list( set([ row[0] for row in arcpy.da.SearchCursor( model_points_path, [model_points_zone_field]) ]))) if subbasin_id_list != range(1, len(subbasin_id_list) + 1): logging.error('\nERROR: SUB_BASINs must be sequential starting from 1' '\nERROR: {}'.format(subbasin_id_list)) sys.exit() subbasin_input_count = len(subbasin_id_list) logging.debug(' {} subbasins'.format(subbasin_input_count)) # Check model point types logging.info(' Checking model point types') model_point_types = [ str(r[0]).upper() for r in arcpy.da.SearchCursor( model_points_path, [model_points_type_field]) ] if not set(model_point_types).issubset(set(['OUTLET', 'SUBBASIN', 'SWALE' ])): logging.error( '\nERROR: Unsupported model point type(s) found, exiting') logging.error('\n Model point types: {}\n'.format(model_point_types)) sys.exit() ## elif not set(model_point_types).issubset(set(['OUTLET', 'SWALE'])): ## logging.error( ## '\nERROR: At least one model point must be an OUTLET or SWALE, ' ## 'exiting\n') sys.exit() else: logging.debug(' {}'.format(', '.join(model_point_types))) # Check DEM field logging.info('\nAdding DEM fields if necessary') support.add_field_func(hru.polygon_path, hru.iseg_field, 'LONG') support.add_field_func(hru.polygon_path, hru.irunbound_field, 'LONG') support.add_field_func(hru.polygon_path, hru.flow_dir_field, 'LONG') support.add_field_func(hru.polygon_path, hru.dem_sink_field, 'DOUBLE') support.add_field_func(hru.polygon_path, hru.outflow_field, 'DOUBLE') if set_lake_flag: # Check lake cell elevations 
logging.info('\nChecking lake cell {}'.format(hru.dem_adj_field)) lake_elev_dict = defaultdict(list) fields = [ hru.type_field, hru.lake_id_field, hru.dem_adj_field, hru.id_field ] for row in arcpy.da.SearchCursor(hru.polygon_path, fields): if int(row[0]) != 2: continue lake_elev_dict[int(row[1])].append(float(row[2])) del fields logging.info(' {:>7} {:>12} {:>12} {:>12} {:>12}'.format( 'Lake ID', 'Minimum', 'Mean', 'Maximum', 'Std. Dev.')) for lake_id, lake_elev_list in lake_elev_dict.items(): lake_elev_array = np.array(lake_elev_list) logging.info(' {:7} {:12f} {:12f} {:12f} {:12f}'.format( lake_id, np.min(lake_elev_array), np.mean(lake_elev_array), np.max(lake_elev_array), np.std(lake_elev_array))) if np.std(lake_elev_array) > 1: logging.warning( ' Please check the lake cell elevations\n' ' They may need to be manually adjusted'.format(lake_id)) raw_input(' Press ENTER to continue') del lake_elev_array # Build Lake raster logging.debug(' LAKE_ID') arcpy.PolygonToRaster_conversion(hru.polygon_path, hru.lake_id_field, lake_id_path, 'CELL_CENTER', '', hru.cs) lake_id_obj = arcpy.sa.Raster(lake_id_path) logging.info('\nExporting HRU polygon parameters to raster') logging.debug(' HRU_TYPE') arcpy.PolygonToRaster_conversion(hru.polygon_path, hru.type_field, hru_type_path, 'CELL_CENTER', '', hru.cs) hru_type_obj = arcpy.sa.Raster(hru_type_path) # Convert DEM_ADJ to raster logging.debug(' DEM_ADJ') arcpy.PolygonToRaster_conversion(hru.polygon_path, hru.dem_adj_field, dem_adj_path, 'CELL_CENTER', '', hru.cs) dem_adj_obj = arcpy.sa.Raster(dem_adj_path) # dem_adj_obj = arcpy.sa.Float(arcpy.sa.Raster(dem_adj_path)) hru_polygon_lyr = 'hru_polygon_lyr' arcpy.MakeFeatureLayer_management(hru.polygon_path, hru_polygon_lyr) arcpy.SelectLayerByAttribute_management(hru_polygon_lyr, 'CLEAR_SELECTION') arcpy.CalculateField_management(hru_polygon_lyr, hru.outflow_field, 0, 'PYTHON') if 'OUTLET' in model_point_types: arcpy.SelectLayerByAttribute_management(model_points_lyr, 
'NEW_SELECTION', '"TYPE" = \'OUTLET\'') arcpy.SelectLayerByLocation_management(hru_polygon_lyr, 'INTERSECT', model_points_lyr) arcpy.CalculateField_management(hru_polygon_lyr, hru.outflow_field, 1, 'PYTHON') # The point of all of this code is to determine the flow direction # at the outlet points since it won't be computed. # It might be easier to compute fill and flow dir. on the full raster logging.info(' Computing OUTLET point flow direction') # Get HRU values at outlet points outlet_points = [(int(r[0]), int(r[1])) for r in arcpy.da.SearchCursor( hru_polygon_lyr, [hru.col_field, hru.row_field])] # Get elevations and type of neighboring cells # Multiplying the cellsize by 1.5 is needed to get all possible # neighbors but it can return extra cells that will need to be skipped # It might be easier to use the Select tool directly arcpy.SelectLayerByLocation_management(hru_polygon_lyr, 'WITHIN_A_DISTANCE', model_points_lyr, 1.5 * hru.cs) elev_dict = dict() hru_type_dict = dict() fields = [ hru.col_field, hru.row_field, hru.dem_adj_field, hru.type_field ] for row in arcpy.da.SearchCursor(hru_polygon_lyr, fields): elev_dict[(int(row[0]), int(row[1]))] = float(row[2]) hru_type_dict[(int(row[0]), int(row[1]))] = int(row[3]) # For each outlet cell, cycle through flow directions and find ?. # Outlet cells should exit to an inactive cell or out of the grid. outlet_flowdir = {} for outlet_pt in outlet_points: logging.debug(' Outlet Point: {}'.format(outlet_pt)) outlet_slopes = [] # Search non-diagonals first. 
for fd in [1, 4, 16, 64, 2, 8, 32, 128]: if support.next_row_col(fd, outlet_pt) not in elev_dict.keys(): # Don't compute other slopes if next cell is outside the grid outlet_slopes.append([-9999, fd]) break elif hru_type_dict[support.next_row_col(fd, outlet_pt)] != 0: # Only compute slope to inactive cells continue else: # Compute slope to next cell slope = (elev_dict[support.next_row_col(fd, outlet_pt)] - elev_dict[outlet_pt]) if fd in [2, 8, 32, 128]: # For diagonals, adjust slope # I think Arc approximates root(2) to 1.5 slope /= 1.5 outlet_slopes.append([slope, fd]) logging.debug(' {:>3d} {}'.format(fd, slope)) if not outlet_slopes: logging.error('\nERROR: The OUTLET model point is not at the ' 'edge of the study area or model grid.\n' ' Col: {0} Rol: {1}'.format(*outlet_pt)) sys.exit() # Assign the flow direction with the steepest (positive) slope outlet_slope, outlet_fd = min(outlet_slopes) outlet_flowdir[outlet_pt] = outlet_fd if outlet_slope > 0: logging.warning( '\n WARNING: The OUTLET model point flow direction may ' 'be invalid') logging.debug(' Flow Direction: {}'.format(outlet_fd)) logging.info(' Building OUTLET point raster') outlet_array = np.zeros((hru.rows, hru.cols)).astype(np.uint8) for outlet_pt in outlet_points: outlet_array[outlet_pt[1] - 1, outlet_pt[0] - 1] = outlet_flowdir[outlet_pt] support.array_to_raster( outlet_array, outlet_path, arcpy.Point(hru.extent.XMin, hru.extent.YMin, 0), hru.cs, outlet_array) outlet_obj = arcpy.sa.Raster(outlet_path) if 'SWALE' in model_point_types: logging.info(' Building SWALE point raster') arcpy.SelectLayerByAttribute_management(model_points_lyr, 'NEW_SELECTION', '"TYPE" = \'SWALE\'') # DEADBEEF - Should SWALE points be written to OUTFLOWHRU.TXT? 
arcpy.SelectLayerByLocation_management(hru_polygon_lyr, 'INTERSECT', model_points_lyr) arcpy.CalculateField_management(hru_polygon_lyr, hru.outflow_field, 1, 'PYTHON') arcpy.PointToRaster_conversion(model_points_lyr, model_points_type_field, swale_path, "", "", hru.cs) swale_obj = arcpy.sa.Raster(swale_path) arcpy.SelectLayerByAttribute_management(model_points_lyr, 'CLEAR_SELECTION') arcpy.Delete_management(hru_polygon_lyr) logging.info('\nCalculating flow direction') # This will force all active cells to flow to an outlet logging.debug(' Setting DEM_ADJ values to 20000 for inactivate cells') dem_mod_obj = arcpy.sa.Con(hru_type_obj > 0, dem_adj_obj, 20000.0) if 'OUTLET' in model_point_types: logging.debug(' Setting DEM_ADJ values to NoData for OUTLET cells') dem_mod_obj = arcpy.sa.Con(arcpy.sa.IsNull(outlet_obj), dem_mod_obj) if 'SWALE' in model_point_types: logging.debug(' Setting DEM_ADJ values to NoData for SWALE cells') dem_mod_obj = arcpy.sa.Con(arcpy.sa.IsNull(swale_obj), dem_mod_obj) logging.info(' Filling DEM_ADJ (8-way)') dem_fill_obj = arcpy.sa.Fill(dem_mod_obj) del dem_mod_obj if 'OUTLET' in model_point_types: logging.debug(' Resetting OUTLET cell values') dem_fill_obj = arcpy.sa.Con(arcpy.sa.IsNull(outlet_obj), dem_fill_obj, dem_adj_obj) logging.info(' Calculating sinks (8-way)') # Threshold of 0.001 is needed to avoid noise from 32/64 bit conversion dem_sink_obj = arcpy.sa.Con(hru_type_obj > 0, dem_fill_obj - dem_adj_obj) dem_sink_obj = arcpy.sa.Con(dem_sink_obj > 0.001, dem_sink_obj) logging.info(' Calculating flow direction') flow_dir_obj = arcpy.sa.FlowDirection(dem_fill_obj, False) logging.debug(' Setting flow direction to NoData for inactive cells') flow_dir_obj = arcpy.sa.SetNull(hru_type_obj == 0, flow_dir_obj) if 'OUTLET' in model_point_types: logging.debug(' Resetting OUTLET cell flow direction') flow_dir_obj = arcpy.sa.Con(~arcpy.sa.IsNull(outlet_obj), outlet_obj, flow_dir_obj) del outlet_obj if 'SWALE' in model_point_types: logging.debug(' 
Resetting SWALE cell flow direction') flow_dir_obj = arcpy.sa.Con(~arcpy.sa.IsNull(swale_obj), 1, flow_dir_obj) del swale_obj logging.debug(' Resetting DEM_ADJ values for inactive cell') dem_fill_obj = arcpy.sa.Con(hru_type_obj == 0, dem_adj_obj, dem_fill_obj) flow_dir_obj.save(flow_dir_path) dem_fill_obj.save(dem_fill_path) dem_sink_obj.save(dem_sink_path) del dem_sink_obj # Save flow direction as points if calc_flow_dir_points_flag: logging.info('\nFlow direction points') # ArcGIS fails for raster_to_x conversions on a network path # You have to go through an in_memory file first flow_dir_temp = os.path.join('in_memory', 'flow_dir') arcpy.RasterToPoint_conversion(flow_dir_path, flow_dir_temp) try: arcpy.CopyFeatures_management(flow_dir_temp, flow_dir_points) except: time.sleep(1) logging.warning('Copy feature failed') arcpy.Delete_management(flow_dir_temp) del flow_dir_temp # Reclassify flow directions to angles, assuming 1 is 0 remap_cb = ('def Reclass(value):\n' + ' if value == 1: return 0\n' + ' elif value == 2: return 45\n' + ' elif value == 4: return 90\n' + ' elif value == 8: return 135\n' + ' elif value == 16: return 180\n' + ' elif value == 32: return 225\n' + ' elif value == 64: return 270\n' + ' elif value == 128: return 315\n') arcpy.CalculateField_management(flow_dir_points, 'grid_code', 'Reclass(!{}!)'.format('grid_code'), 'PYTHON', remap_cb) # Write flow direction to hru_polygon logging.debug(' Extracting flow direction at points') vt_list = [[flow_dir_path, hru.flow_dir_field]] mem_point_path = os.path.join('in_memory', 'hru_point') arcpy.CopyFeatures_management(hru.point_path, mem_point_path) arcpy.sa.ExtractMultiValuesToPoints(mem_point_path, vt_list, 'NONE') logging.debug(' Reading flow direction values at point') data_dict = defaultdict(dict) fields = [hru.flow_dir_field, hru.fid_field] with arcpy.da.SearchCursor(mem_point_path, fields) as s_cursor: for row in s_cursor: # Set nodata cells to 0 if row[0] is not None and row[1] is not None: 
data_dict[int(row[1])][hru.flow_dir_field] = int(row[0]) del row logging.debug(' Writing flow direction values to polygon') fields = [hru.flow_dir_field, hru.fid_field] with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor: for row in u_cursor: row_dict = data_dict.get(int(row[-1]), None) for i, field in enumerate(fields[:-1]): if row_dict: row[i] = row_dict[field] else: row[i] = 0 u_cursor.updateRow(row) del row_dict, row # DEADBEEF - This whole section seems to only be needed if the outflows # are not specified by the user. # # Subbasins # # Select the HRU cells that intersect the subbasin point cells # logging.debug(' Reading input subbasin points') # hru_polygon_lyr = 'hru_polygon_lyr' # arcpy.MakeFeatureLayer_management(hru.polygon_path, hru_polygon_lyr) # arcpy.SelectLayerByLocation_management( # hru_polygon_lyr, 'intersect', model_points_path) # input_xy_dict = dict() # fields = [hru.col_field, hru.row_field, hru.x_field, hru.y_field] # for row in arcpy.da.SearchCursor(hru_polygon_lyr, fields): # input_xy_dict[(int(row[0]), int(row[1]))] = (int(row[2]), int(row[3])) # arcpy.Delete_management(hru_polygon_lyr) # del hru_polygon_lyr # # for k,v in input_xy_dict.items(): # # logging.debug(' {} {}'.format(k,v)) # logging.info('\nBuilding all subbasin points') # # First calculate downstream cell for all cells # logging.debug(' Calculating downstream cells') # out_cell_dict = dict() # hru_type_dict = dict() # cell_xy_dict = dict() # fields = [ # hru.type_field, hru.flow_dir_field, hru.id_field, # hru.col_field, hru.row_field, hru.x_field, hru.y_field] # for row in arcpy.da.SearchCursor(hru.polygon_path, fields): # cell = (int(row[3]), int(row[4])) # out_cell_dict[cell] = support.next_row_col(int(row[1]), cell) # hru_type_dict[cell] = int(row[0]) # cell_xy_dict[cell] = (int(row[5]), int(row[6])) # # Identify all active/lake cells that exit the model # # or flow to an inactive cell # logging.debug(' Identifying active cells that exit the model') # 
out_cell_xy_list = [] # for cell, cell_xy in sorted(cell_xy_dict.items()): # # DEADBEEF - This is finding exit cells that aren't already gauges # # if cell in input_xy_dict.keys(): # # continue # # elif cell not in hru_type_dict.keys(): # if cell not in hru_type_dict.keys(): # continue # elif hru_type_dict[cell] not in [1, 2]: # continue # elif cell not in out_cell_dict.keys(): # continue # elif out_cell_dict[cell] not in hru_type_dict.keys(): # out_cell_xy_list.append(cell_xy) # elif (out_cell_dict[cell] in hru_type_dict.keys() and # hru_type_dict[out_cell_dict[cell]] not in [1, 2]): # out_cell_xy_list.append(cell_xy) # # Outflow cells exit the model to inactive cells or out of the domain # # These cells will be used to set the OUTFLOW_HRU.DAT for CRT # # in crt_fill_parameters and stream_parameters # logging.info(' Flag outflow cells') # fields = [hru.type_field, hru.x_field, hru.y_field, hru.outflow_field] # with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor: # for row in u_cursor: # cell_xy = (row[1], row[2]) # # Inactive cells can't be outflow cells # if int(row[0]) == 0: # continue # elif out_cell_xy_list and cell_xy in out_cell_xy_list: # row[3] = 1 # else: # row[3] = 0 # u_cursor.updateRow(row) # del out_cell_dict, hru_type_dict, cell_xy_dict # DEADBEEF - This was added for sinks or ocean so that there would be # subbasin points along the edge? 
# fields = ['SHAPE@XY', model_points_zone_field] # with arcpy.da.InsertCursor(model_points_path, fields) as insert_c: # for out_cell_xy in sorted(out_cell_xy_list): # insert_c.insertRow([out_cell_xy, subbasin_input_count + 1]) # del fields # del out_cell_xy_list # Flow Accumulation logging.info('\nCalculating initial flow accumulation') flow_acc_full_obj = arcpy.sa.FlowAccumulation(flow_dir_obj) logging.info(' Only keeping flow_acc >= {}'.format(flow_acc_threshold)) flow_acc_full_obj = arcpy.sa.Con(flow_acc_full_obj >= flow_acc_threshold, flow_acc_full_obj) flow_acc_full_obj.save(flow_acc_full_path) # Flow accumulation and stream link with lakes logging.info('\nCalculating flow accumulation & stream link (w/ lakes)') flow_acc_obj = arcpy.sa.Con((hru_type_obj >= 1) & (hru_type_obj <= 3), flow_acc_full_obj) stream_link_obj = arcpy.sa.StreamLink(flow_acc_obj, flow_dir_obj) stream_link_obj.save(stream_link_a_path) del flow_acc_obj, stream_link_obj # Flow accumulation and stream link without lakes logging.info('Calculating flow accumulation & stream link (w/o lakes)') flow_acc_obj = arcpy.sa.Con((hru_type_obj == 1) | (hru_type_obj == 3), flow_acc_full_obj) # flow_acc_obj.save(flow_acc_sub_path) stream_link_obj = arcpy.sa.StreamLink(flow_acc_obj, flow_dir_obj) stream_link_obj.save(stream_link_b_path) del flow_acc_obj, stream_link_obj # Initial Stream Link # logging.info('\nCalculating initial stream link') # stream_link_obj = StreamLink(flow_acc_obj, flow_dir_obj) # stream_link_obj.save(stream_link_path) # Calculate stream link with and without lakes # Initial Stream Order (w/ lakes) logging.info('Calculating stream order (w/ lakes)') logging.debug(' Using SHREVE ordering so after 1st order are removed, ' + '2nd order will only be dangles') stream_order_obj = arcpy.sa.StreamOrder(stream_link_a_path, flow_dir_obj, 'SHREVE') stream_order_obj.save(stream_order_path) # Stream Length (cell count w/o lakes) logging.info('Calculating stream length (cell count w/o lakes)') 
stream_length_obj = arcpy.sa.Lookup(stream_link_b_path, 'Count') stream_length_obj.save(stream_length_path) # Filter 1st order segments logging.info( '\nFilter all 1st order streams with length < {}' + '\nKeep all higher order streams'.format(flow_length_threshold)) # Stream length is nodata for lakes, so put lakes back in # This removes short 1st order streams off of lakes flow_mask_obj = ((hru_type_obj == 3) | (hru_type_obj == 2) | (stream_order_obj >= 2) | ((stream_order_obj == 1) & (stream_length_obj >= flow_length_threshold))) flow_mask_obj.save(flow_mask_path) flow_acc_sub_obj = arcpy.sa.Con(flow_mask_obj, flow_acc_full_obj) flow_acc_sub_obj.save(flow_acc_sub_path) del flow_mask_obj, stream_order_obj, stream_length_obj # Final Stream Link logging.info('\nCalculating final stream link') stream_link_obj = arcpy.sa.StreamLink(flow_acc_sub_obj, flow_dir_obj) # Get count of streams for automatically setting lake_seg_offset if not lake_seg_offset: lake_seg_count = int( arcpy.GetCount_management(stream_link_obj).getOutput(0)) n = 10**math.floor(math.log10(lake_seg_count)) lake_seg_offset = int(math.ceil((lake_seg_count + 1) / n)) * int(n) logging.info(' lake_segment_offset was not set in the input file\n' + ' Using automatic lake segment offset: {}'.format( lake_seg_offset)) elif set_lake_flag: logging.info( ' Using manual lake segment offset: {}'.format(lake_seg_offset)) # Include lake cells into 'stream_link' before calculating watersheds # Watershed function doesn't work for negative values # Convert lakes to large positive numbers for Watershed # ISEG needs to be negative values though if set_lake_flag: logging.info( ' Including lakes as {0} + {1}\n' ' This will allow for a watershed/subbasin for the lakes\n' ' {2} will be save as negative of {0} though'.format( hru.lake_id_field, lake_seg_offset, hru.iseg_field)) stream_link_obj = arcpy.sa.Con((hru_type_obj == 2), (lake_id_obj + lake_seg_offset), stream_link_obj) stream_link_obj.save(stream_link_path) # 
Watersheds logging.info('Calculating watersheds') watersheds_obj = arcpy.sa.Watershed(flow_dir_obj, stream_link_obj) watersheds_obj.save(watersheds_path) del stream_link_obj, watersheds_obj # Subbasins logging.info('Calculating subbasins') subbasin_obj = arcpy.sa.Watershed(flow_dir_obj, model_points_path, model_points_zone_field) subbasin_obj.save(subbasin_path) del subbasin_obj # Basins logging.info('Calculating basins') basin_obj = arcpy.sa.Basin(flow_dir_obj) basin_obj.save(basin_path) del basin_obj # Clear subbasin value if HRU_TYPE is 0 logging.info('Clearing subbasin ID for inactive cells') subbasin_obj = arcpy.sa.SetNull(hru_type_obj, arcpy.sa.Raster(subbasin_path), 'VALUE=0') subbasin_obj.save(subbasin_path) del subbasin_obj del hru_type_obj # Stream polylines logging.info('Calculating stream polylines') # ArcGIS fails for raster_to_x conversions on a network path # You have to go through an in_memory file first streams_temp = os.path.join('in_memory', 'streams') arcpy.sa.StreamToFeature(stream_link_path, flow_dir_obj, streams_temp, 'NO_SIMPLIFY') arcpy.CopyFeatures_management(streams_temp, streams_path) arcpy.Delete_management(streams_temp) del streams_temp # Write values to hru_polygon logging.info('\nExtracting stream parameters') vt_list = [ [watersheds_path, hru.irunbound_field], [stream_link_path, hru.iseg_field], # [flow_dir_path, hru.flow_dir_field], [subbasin_path, hru.subbasin_field], [hru_type_path, hru.type_field] ] mem_point_path = os.path.join('in_memory', 'hru_point') arcpy.CopyFeatures_management(hru.point_path, mem_point_path) arcpy.sa.ExtractMultiValuesToPoints(mem_point_path, vt_list, 'NONE') del vt_list # Read values from points logging.info(' Reading cell values') data_dict = defaultdict(dict) fields = [ hru.irunbound_field, hru.iseg_field, hru.subbasin_field, hru.type_field, hru.fid_field ] # fields = [ # hru.irunbound_field, hru.iseg_field, hru.flow_dir_field, # hru.subbasin_field, hru.type_field, hru.fid_field] with 
arcpy.da.SearchCursor(mem_point_path, fields) as s_cursor: for row in s_cursor: for i, field in enumerate(fields[:-1]): # Set nodata or inactive cells to 0 if row[i] is None or (int(row[-2]) == 0): data_dict[int(row[-1])][field] = 0 else: data_dict[int(row[-1])][field] = int(row[i]) del row del fields # ISEG for lake cells must be -1 * LAKE_ID, not LAKE_ID + OFFSET for k in data_dict.keys(): irunbound = data_dict[k][hru.irunbound_field] iseg = data_dict[k][hru.iseg_field] if irunbound > lake_seg_offset: data_dict[k][hru.irunbound_field] = lake_seg_offset - irunbound if iseg > lake_seg_offset: data_dict[k][hru.iseg_field] = lake_seg_offset - iseg # data_dict = dict([(k,v) for k,v in data_dict.items()]) # Write values to polygon logging.info(' Writing values to polygons') fields = [ hru.irunbound_field, hru.iseg_field, hru.subbasin_field, hru.type_field, hru.fid_field ] # fields = [ # hru.irunbound_field, hru.iseg_field, hru.flow_dir_field, # hru.subbasin_field, hru.type_field, hru.fid_field] with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor: for row in u_cursor: row_dict = data_dict.get(int(row[-1]), None) for i, field in enumerate(fields[:-1]): if row_dict: row[i] = row_dict[field] else: row[i] = 0 u_cursor.updateRow(row) del row_dict, row del fields # Write sink values to hru_polygon vt_list = [] if arcpy.Exists(dem_sink_path): vt_list.append([dem_sink_path, hru.dem_sink_field]) if vt_list: logging.info('\nExtracting sink values') for vt_item in vt_list: logging.debug(' {}: {}'.format(vt_item[1], os.path.basename(vt_item[0]))) mem_point_path = os.path.join('in_memory', 'hru_point') arcpy.CopyFeatures_management(hru.point_path, mem_point_path) arcpy.sa.ExtractMultiValuesToPoints(mem_point_path, vt_list, 'NONE') # Read sink values from points logging.info(' Reading sink values') data_dict = defaultdict(dict) fields = [field for path, field in vt_list] + [hru.fid_field] with arcpy.da.SearchCursor(mem_point_path, fields) as s_cursor: for row in 
s_cursor: for i, field in enumerate(fields[:-1]): # Set nodata or inactive cells to 0 if row[i] is None: data_dict[int(row[-1])][field] = 0 else: data_dict[int(row[-1])][field] = float(row[i]) del row # Write sink values to polygon logging.info(' Writing sink values to polygons') fields = [field for path, field in vt_list] + [hru.fid_field] with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor: for row in u_cursor: row_dict = data_dict.get(int(row[-1]), None) for i, field in enumerate(fields[:-1]): if row_dict: row[i] = row_dict[field] else: row[i] = 0 u_cursor.updateRow(row) del row_dict, row # Cleanup arcpy.Delete_management(mem_point_path) del mem_point_path, vt_list, data_dict, field # Re-Calculate HRU_ELEV # logging.info('Calculating HRU_ELEV from DEM_ADJ') # logging.info(' Converting from meters to feet') # arcpy.CalculateField_management( # hru.polygon_path, hru_elev_field, # # Convert meters to feet # '!{}! * 3.28084'.format(dem_adj_field), 'PYTHON') # Cleanup del dem_fill_obj if set_lake_flag: del lake_id_obj del flow_dir_obj del flow_acc_full_obj del flow_acc_sub_obj
def dem_parameters(config_path, overwrite_flag=False, debug_flag=False): """Calculate GSFLOW DEM Parameters Args: config_path: Project config file path overwrite_flag (bool): if True, overwrite existing files debug_flag (bool): if True, enable debug level logging Returns: None """ # Initialize hru parameters class hru = support.HRUParameters(config_path) # Open input parameter config file inputs_cfg = ConfigParser.ConfigParser() try: inputs_cfg.readfp(open(config_path)) except Exception as e: logging.error('\nERROR: Config file could not be read, ' 'is not an input file, or does not exist\n' ' config_file = {}\n' ' Exception: {}\n'.format(config_path, e)) sys.exit() # Log DEBUG to file log_file_name = 'dem_parameters_log.txt' log_console = logging.FileHandler(filename=os.path.join( hru.log_ws, log_file_name), mode='w') log_console.setLevel(logging.DEBUG) log_console.setFormatter(logging.Formatter('%(message)s')) logging.getLogger('').addHandler(log_console) logging.info('\nGSFLOW DEM Parameters') # dem_orig_path = inputs_cfg.get('INPUTS', 'dem_orig_path') # Resampling method 'BILINEAR', 'CUBIC', 'NEAREST' dem_proj_method = inputs_cfg.get('INPUTS', 'dem_projection_method').upper() dem_cs = inputs_cfg.getint('INPUTS', 'dem_cellsize') # DEADBEEF - This could/should be moved to support_functions.py since it is # in this script and in both PRISM scripts. 
# DEM Units dem_units = inputs_cfg.get('INPUTS', 'dem_units').lower() dem_unit_types = { 'meters': 'meter', 'm': 'meter', 'meter': 'meter', 'feet': 'feet', 'ft': 'meter', 'foot': 'meter', } try: dem_units = dem_unit_types[dem_units] except: logging.error( '\nERROR: DEM unit "{}" is not supported\n'.format(dem_units)) sys.exit() # Many expressions are hardcoded to units of feet # If dem_units are in meters, scale DEM_ADJ to get to feet if dem_units == 'meter': dem_unit_scalar = 0.3048 else: dem_unit_scalar = 1.0 # try: reset_dem_adj_flag = inputs_cfg.getboolean('INPUTS', 'reset_dem_adj_flag') except: reset_dem_adj_flag = False logging.info(' Missing INI parameter, setting {} = {}'.format( 'reset_dem_adj_flag', reset_dem_adj_flag)) try: calc_flow_acc_dem_flag = inputs_cfg.getboolean( 'INPUTS', 'calc_flow_acc_dem_flag') except: calc_flow_acc_dem_flag = True logging.info(' Missing INI parameter, setting {} = {}'.format( 'calc_flow_acc_dem_flag', calc_flow_acc_dem_flag)) try: dem_adj_copy_field = inputs_cfg.get('INPUTS', 'dem_adj_copy_field') except: if calc_flow_acc_dem_flag: dem_adj_copy_field = 'DEM_FLOWAC' else: dem_adj_copy_field = 'DEM_MEAN' logging.info(' Missing INI parameter, setting {} = {}'.format( 'dem_adj_copy_field', dem_adj_copy_field)) # Use PRISM temperature to set Jensen-Haise coefficient # Typically these values will not be available when dem_parameters is first run # Setting it True means that the values will remain consistent even if # dem_parameters is run again after the prism_script. 
try: calc_prism_jh_coef_flag = inputs_cfg.getboolean( 'INPUTS', 'calc_prism_jh_coef_flag') except: calc_prism_jh_coef_flag = True logging.info(' Missing INI parameter, setting {} = {}'.format( 'calc_prism_jh_coef_flag', calc_prism_jh_coef_flag)) # Calculate flow accumulation weighted elevation if calc_flow_acc_dem_flag: # Get factor for scaling dem_flowacc values to avoid 32 bit int limits try: flow_acc_dem_factor = float( inputs_cfg.get('INPUTS', 'flow_acc_dem_factor')) except: # This is a worst case for keeping flow_acc_dem from exceeding 2E9 # Assume all cells flow to 1 cell flow_acc_dem_factor = int( arcpy.GetCount_management(hru.point_path).getOutput(0)) # Assume flow acc is in every DEM cell in HRU cell flow_acc_dem_factor *= (float(hru.cs) / dem_cs)**2 # Need to account for the elevation in this worst cell # For now just make it 100 # flow_acc_dem_factor *= max_elevation flow_acc_dem_factor *= 100 # Calculate ratio of flow_acc_dem to a 32 bit int flow_acc_dem_factor /= (0.5 * 2**32) # If the ratio is less than 0.1, round up to 0.1 so factor -> 1.0 flow_acc_dem_factor = min(0.1, flow_acc_dem_factor) # Round up to next multiple of 10 just to be safe flow_acc_dem_factor = 1.0 / 10**( int(math.log10(flow_acc_dem_factor)) + 1) logging.info( ' flow_acc_dem_factor was not set in the input file\n' ' Using automatic flow_acc_dem_factor: {}'.format( flow_acc_dem_factor)) # Calc flow_acc/flow_dir # DEADBEEF - For now, set these to True only if needed # calc_flow_acc_flag = inputs_cfg.getboolean('INPUTS', 'calc_flow_acc_flag') # calc_flow_dir_flag = inputs_cfg.getboolean('INPUTS', 'calc_flow_dir_flag') if calc_flow_acc_dem_flag: calc_flow_acc_flag = True calc_flow_dir_flag = True else: calc_flow_acc_flag = False calc_flow_dir_flag = False # Remap remap_ws = inputs_cfg.get('INPUTS', 'remap_folder') temp_adj_remap_name = inputs_cfg.get('INPUTS', 'temp_adj_remap') # Check input paths if not arcpy.Exists(hru.polygon_path): logging.error('\nERROR: Fishnet ({}) does not 
exist\n'.format( hru.polygon_path)) sys.exit() # Check that either the original DEM raster exists if not arcpy.Exists(dem_orig_path): logging.error( '\nERROR: DEM ({}) raster does not exist\n'.format(dem_orig_path)) sys.exit() # Check that remap folder is valid if not os.path.isdir(remap_ws): logging.error('\nERROR: Remap folder does not exist\n') sys.exit() # Check that remap files exist # Check remap files comment style temp_adj_remap_path = os.path.join(remap_ws, temp_adj_remap_name) remap_path_list = [temp_adj_remap_path] # remap_path_list = [aspect_remap_path, temp_adj_remap_path] for remap_path in remap_path_list: support.remap_check(remap_path) # DEADBEEF - Trying out setting SWALE points before filling model_inputs_path = inputs_cfg.get('INPUTS', 'model_points_path') try: model_points_type_field = inputs_cfg.get('INPUTS', 'model_points_type_field') except: model_points_type_field = 'TYPE' logging.info(' Missing INI parameter, setting {} = {}'.format( 'model_points_type_field', model_points_type_field)) # Check model points if not os.path.isfile(model_inputs_path): logging.error('\nERROR: Model points shapefiles does not exist' '\nERROR: {}'.format(model_inputs_path)) sys.exit() # model_points_path must be a point shapefile elif arcpy.Describe(model_inputs_path).datasetType != 'FeatureClass': logging.error('\nERROR: model_points_path must be a point shapefile') sys.exit() # DEADBEEF # if not os.path.isfile(temp_adj_remap_path): # logging.error( # '\nERROR: ASCII remap file ({}) does not exist\n'.format( # os.path.basename(temp_adj_remap_path))) # sys.exit() # Check remap files comment style # if '10.2' in arcpy.GetInstallInfo()['version']: # if remap_comment_check(temp_adj_remap_path): # logging.error( # ('\nERROR: ASCII remap file ({}) has pre-ArcGIS 10.2 ' + # 'comments\n').format(os.path.basename(temp_adj_remap_path))) # sys.exit() # Check other inputs if dem_cs <= 0: logging.error('\nERROR: DEM cellsize must be greater than 0') sys.exit() 
dem_proj_method_list = ['BILINEAR', 'CUBIC', 'NEAREST'] if dem_proj_method not in dem_proj_method_list: logging.error('\nERROR: DEM projection method must be: {}'.format( ', '.join(dem_proj_method_list))) sys.exit() if reset_dem_adj_flag: logging.warning( '\nWARNING: All values in {} will be overwritten'.format( hru.dem_adj_field)) raw_input(' Press ENTER to continue') # Build output folder if necessary dem_temp_ws = os.path.join(hru.param_ws, 'dem_rasters') if not os.path.isdir(dem_temp_ws): os.mkdir(dem_temp_ws) # Output paths dem_path = os.path.join(dem_temp_ws, 'dem.img') dem_fill_path = os.path.join(dem_temp_ws, 'dem_fill.img') flow_dir_path = os.path.join(dem_temp_ws, 'flow_dir.img') flow_acc_path = os.path.join(dem_temp_ws, 'flow_acc.img') flow_acc_dem_path = os.path.join(dem_temp_ws, 'flow_acc_x_dem.img') flow_acc_filter_path = os.path.join(dem_temp_ws, 'flow_acc_filter.img') dem_integer_path = os.path.join(dem_temp_ws, 'dem_integer.img') dem_slope_path = os.path.join(dem_temp_ws, 'dem_slope.img') dem_aspect_path = os.path.join(dem_temp_ws, 'dem_aspect.img') dem_aspect_reclass_path = os.path.join(dem_temp_ws, 'aspect_reclass.img') temp_adj_path = os.path.join(dem_temp_ws, 'temp_adj.img') swale_path = os.path.join(dem_temp_ws, 'swale.img') model_points_path = os.path.join(dem_temp_ws, 'model_points.shp') # Set ArcGIS environment variables arcpy.CheckOutExtension('Spatial') env.overwriteOutput = True env.pyramid = 'PYRAMIDS -1' # env.pyramid = 'PYRAMIDS 0' # env.rasterStatistics = 'NONE' # env.extent = 'MINOF' env.workspace = dem_temp_ws env.scratchWorkspace = hru.scratch_ws # DEADBEEF - Trying out setting SWALE points before filling # Read in model points shapefile logging.info('\nChecking model points shapefile') model_points_desc = arcpy.Describe(model_inputs_path) model_points_sr = model_points_desc.spatialReference logging.debug(' Points: {}'.format(model_inputs_path)) logging.debug(' Points spat. 
ref.: {}'.format(model_points_sr.name)) logging.debug(' Points GCS: {}'.format(model_points_sr.GCS.name)) # If model points spat_ref doesn't match hru_param spat_ref # Project model points to hru_param spat ref # Otherwise, read model points directly if hru.sr.name != model_points_sr.name: logging.info(' Model points projection does not match fishnet.\n' ' Projecting model points.\n') # Set preferred transforms transform_str = support.transform_func(hru.sr, model_points_sr) logging.debug(' Transform: {}'.format(transform_str)) arcpy.Project_management(model_inputs_path, model_points_path, hru.sr, transform_str, model_points_sr) else: arcpy.Copy_management(model_inputs_path, model_points_path) model_points_lyr = 'model_points_lyr' arcpy.MakeFeatureLayer_management(model_points_path, model_points_lyr) # Check model point types logging.info(' Checking model point types') model_point_types = [ str(r[0]).upper() for r in arcpy.da.SearchCursor( model_points_path, [model_points_type_field]) ] if not set(model_point_types).issubset(set(['OUTLET', 'SUBBASIN', 'SWALE' ])): logging.error( '\nERROR: Unsupported model point type(s) found, exiting') logging.error('\n Model point types: {}\n'.format(model_point_types)) sys.exit() elif not set(model_point_types).issubset(set(['OUTLET', 'SWALE'])): logging.error( '\nERROR: At least one model point must be an OUTLET or SWALE, ' 'exiting\n') sys.exit() else: logging.debug(' {}'.format(', '.join(model_point_types))) # Check DEM field logging.info('\nAdding DEM fields if necessary') support.add_field_func(hru.polygon_path, hru.dem_mean_field, 'DOUBLE') support.add_field_func(hru.polygon_path, hru.dem_max_field, 'DOUBLE') support.add_field_func(hru.polygon_path, hru.dem_min_field, 'DOUBLE') support.add_field_func(hru.polygon_path, hru.dem_adj_field, 'DOUBLE') if calc_flow_acc_dem_flag: support.add_field_func(hru.polygon_path, hru.dem_flowacc_field, 'DOUBLE') support.add_field_func(hru.polygon_path, hru.dem_sum_field, 'DOUBLE') 
support.add_field_func(hru.polygon_path, hru.dem_count_field, 'DOUBLE') support.add_field_func(hru.polygon_path, hru.dem_sink_field, 'DOUBLE') support.add_field_func(hru.polygon_path, hru.dem_aspect_field, 'LONG') support.add_field_func(hru.polygon_path, hru.dem_slope_deg_field, 'DOUBLE') support.add_field_func(hru.polygon_path, hru.dem_slope_rad_field, 'DOUBLE') support.add_field_func(hru.polygon_path, hru.dem_slope_pct_field, 'DOUBLE') support.add_field_func(hru.polygon_path, hru.jh_tmin_field, 'DOUBLE') support.add_field_func(hru.polygon_path, hru.jh_tmax_field, 'DOUBLE') support.add_field_func(hru.polygon_path, hru.jh_coef_field, 'DOUBLE') support.add_field_func(hru.polygon_path, hru.snarea_thresh_field, 'DOUBLE') support.add_field_func(hru.polygon_path, hru.tmax_adj_field, 'DOUBLE') support.add_field_func(hru.polygon_path, hru.tmin_adj_field, 'DOUBLE') # Check that dem_adj_copy_field exists if len(arcpy.ListFields(hru.polygon_path, dem_adj_copy_field)) == 0: logging.error('\nERROR: dem_adj_copy_field {} does not exist\n'.format( dem_adj_copy_field)) sys.exit() # Assume all DEM rasters will need to be rebuilt # Check slope, aspect, and projected DEM rasters # This will check for matching spat. ref., snap point, and cellsize # If DEM is GCS, project it to 10m to match # DEADBEEF - I had originally wanted the DEM to get projected only once # but if the user wants to rerun this script, then all steps should # be rerun. 
This also allows the user to change the DEM raster # dem_flag = valid_raster_func( # dem_path, 'projected DEM', hru, dem_cs) # if arcpy.Exists(dem_orig_path) and not dem_flag: logging.info('\nProjecting DEM raster') dem_orig_sr = arcpy.sa.Raster(dem_orig_path).spatialReference logging.debug(' DEM GCS: {}'.format(dem_orig_sr.GCS.name)) # Remove existing projected DEM if arcpy.Exists(dem_path): arcpy.Delete_management(dem_path) # Set preferred transforms transform_str = support.transform_func(hru.sr, dem_orig_sr) logging.debug(' Transform: {}'.format(transform_str)) logging.debug(' Projection method: {}'.format(dem_proj_method)) # Project DEM # DEADBEEF - Arc10.2 ProjectRaster does not honor extent logging.debug(' Input SR: {}'.format(dem_orig_sr.exportToString())) logging.debug(' Output SR: {}'.format(hru.sr.exportToString())) support.project_raster_func(dem_orig_path, dem_path, hru.sr, dem_proj_method, dem_cs, transform_str, '{} {}'.format(hru.ref_x, hru.ref_y), dem_orig_sr, hru, in_memory=False) # env.extent = hru.extent # arcpy.ProjectRaster_management( # dem_orig_path, dem_path, hru.sr, # dem_proj_method, dem_cs, transform_str, # '{} {}'.format(hru.ref_x, hru.ref_y), # dem_orig_sr) # arcpy.ClearEnvironment('extent') # Check linear unit of raster # DEADBEEF - The conversion could probably be dynamic dem_obj = arcpy.sa.Raster(dem_path) linear_unit_list = ['METERS', 'METER', 'FOOT_US', 'FOOT'] linear_unit = dem_obj.spatialReference.linearUnitName.upper() if linear_unit not in linear_unit_list: logging.error( '\nERROR: The linear unit of the projected/clipped DEM must' ' be meters or feet\n {}'.format(linear_unit)) sys.exit() del dem_obj # DEADBEEF - Trying out setting SWALE points before filling hru_polygon_lyr = 'hru_polygon_lyr' arcpy.MakeFeatureLayer_management(hru.polygon_path, hru_polygon_lyr) arcpy.SelectLayerByAttribute_management(hru_polygon_lyr, 'CLEAR_SELECTION') arcpy.CalculateField_management(hru_polygon_lyr, hru.outflow_field, 0, 'PYTHON') if 'SWALE' 
in model_point_types: logging.info(' Building SWALE point raster') arcpy.SelectLayerByAttribute_management(model_points_lyr, 'NEW_SELECTION', '"TYPE" = \'SWALE\'') # DEADBEEF - Should SWALE points be written to OUTFLOWHRU.TXT? arcpy.SelectLayerByLocation_management(hru_polygon_lyr, 'INTERSECT', model_points_lyr) arcpy.CalculateField_management(hru_polygon_lyr, hru.outflow_field, 1, 'PYTHON') arcpy.PointToRaster_conversion(model_points_lyr, model_points_type_field, swale_path, "", "", hru.cs) swale_obj = arcpy.sa.Raster(swale_path) arcpy.SelectLayerByAttribute_management(model_points_lyr, 'CLEAR_SELECTION') dem_obj = arcpy.sa.Raster(dem_path) if 'SWALE' in model_point_types: logging.debug(' Setting DEM_ADJ values to NoData for SWALE cells') dem_obj = arcpy.sa.Con(arcpy.sa.IsNull(swale_obj), dem_obj) # Calculate filled DEM, flow_dir, & flow_acc logging.info('\nCalculating filled DEM raster') dem_fill_obj = arcpy.sa.Fill(dem_obj) dem_fill_obj.save(dem_fill_path) del dem_fill_obj # # Calculate filled DEM, flow_dir, & flow_acc # logging.info('\nCalculating filled DEM raster') # dem_fill_obj = arcpy.sa.Fill(dem_obj) # dem_fill_obj.save(dem_fill_path) # del dem_fill_obj if calc_flow_dir_flag: logging.info('Calculating flow direction raster') dem_fill_obj = arcpy.sa.Raster(dem_fill_path) flow_dir_obj = arcpy.sa.FlowDirection(dem_fill_obj, True) flow_dir_obj.save(flow_dir_path) del flow_dir_obj, dem_fill_obj if calc_flow_acc_flag: logging.info('Calculating flow accumulation raster') flow_dir_obj = arcpy.sa.Raster(flow_dir_path) flow_acc_obj = arcpy.sa.FlowAccumulation(flow_dir_obj) flow_acc_obj.save(flow_acc_path) del flow_acc_obj, flow_dir_obj if calc_flow_acc_dem_flag: # flow_acc_dem_obj = dem_fill_obj * flow_acc_obj # Low pass filter of flow_acc then take log10 flow_acc_filter_obj = arcpy.sa.Filter(arcpy.sa.Raster(flow_acc_path), 'LOW', 'NODATA') flow_acc_filter_obj *= flow_acc_dem_factor flow_acc_filter_obj.save(flow_acc_filter_path) flow_acc_dem_obj = 
arcpy.sa.Raster(dem_fill_path) * flow_acc_filter_obj flow_acc_dem_obj.save(flow_acc_dem_path) del flow_acc_dem_obj, flow_acc_filter_obj # Calculate an integer version of DEM for median zonal stats dem_integer_obj = arcpy.sa.Int(arcpy.sa.Raster(dem_path) * 100) dem_integer_obj.save(dem_integer_path) del dem_integer_obj # Calculate slope logging.info('Calculating slope raster') dem_slope_obj = arcpy.sa.Slope(dem_fill_path, 'DEGREE') # Setting small slopes to zero logging.info(' Setting slopes <= 0.01 to 0') dem_slope_obj = arcpy.sa.Con(dem_slope_obj <= 0.01, 0, dem_slope_obj) dem_slope_obj.save(dem_slope_path) del dem_slope_obj # Calculate aspect logging.info('Calculating aspect raster') dem_aspect_obj = arcpy.sa.Int(arcpy.sa.Aspect(dem_fill_path)) # Set small slopes to -1 aspect logging.debug(' Setting aspect for slopes <= 0.01 to -1') dem_aspect_obj = arcpy.sa.Con( arcpy.sa.Raster(dem_slope_path) > 0.01, dem_aspect_obj, -1) dem_aspect_obj.save(dem_aspect_path) del dem_aspect_obj # Temperature Aspect Adjustment logging.info('Calculating temperature aspect adjustment raster') temp_adj_obj = arcpy.sa.Float( arcpy.sa.ReclassByASCIIFile(dem_aspect_path, temp_adj_remap_path)) # temp_adj_obj = arcpy.sa.Float(arcpy.sa.ReclassByASCIIFile( # dem_aspect_reclass_path, temp_adj_remap_path)) # Since reclass can't remap to floats directly # Values are scaled by 10 and stored as integers temp_adj_obj *= 0.1 temp_adj_obj.save(temp_adj_path) del temp_adj_obj # List of rasters, fields, and stats for zonal statistics zs_dem_dict = dict() zs_dem_dict[hru.dem_mean_field] = [dem_path, 'MEAN'] if calc_flow_acc_dem_flag: zs_dem_dict[hru.dem_sum_field] = [flow_acc_dem_path, 'SUM'] zs_dem_dict[hru.dem_count_field] = [flow_acc_filter_path, 'SUM'] zs_dem_dict[hru.dem_max_field] = [dem_path, 'MAXIMUM'] zs_dem_dict[hru.dem_min_field] = [dem_path, 'MINIMUM'] zs_dem_dict[hru.dem_aspect_field] = [dem_aspect_path, 'MEAN'] zs_dem_dict[hru.dem_slope_deg_field] = [dem_slope_path, 'MEAN'] 
zs_dem_dict[hru.tmax_adj_field] = [temp_adj_path, 'MEAN'] zs_dem_dict[hru.tmin_adj_field] = [temp_adj_path, 'MEAN'] # Calculate DEM zonal statistics logging.info('\nCalculating DEM zonal statistics') support.zonal_stats_func(zs_dem_dict, hru.polygon_path, hru.point_path, hru) # Flow accumulation weighted elevation if calc_flow_acc_dem_flag: logging.info('Calculating {}'.format(hru.dem_flowacc_field)) hru_polygon_layer = 'hru_polygon_layer' arcpy.MakeFeatureLayer_management(hru.polygon_path, hru_polygon_layer) arcpy.SelectLayerByAttribute_management( hru_polygon_layer, "NEW_SELECTION", '"{}" > 0'.format(hru.dem_count_field)) arcpy.CalculateField_management( hru_polygon_layer, hru.dem_flowacc_field, 'float(!{}!) / !{}!'.format(hru.dem_sum_field, hru.dem_count_field), 'PYTHON') # Clear dem_flowacc for any cells that have zero sum or count arcpy.SelectLayerByAttribute_management( hru_polygon_layer, "NEW_SELECTION", '("{}" = 0) OR ("{}" = 0)'.format(hru.dem_count_field, hru.dem_sum_field)) arcpy.CalculateField_management(hru_polygon_layer, hru.dem_flowacc_field, 0, 'PYTHON') arcpy.Delete_management(hru_polygon_layer) # Fill DEM_ADJ if it is not set if all([ row[0] == 0 for row in arcpy.da.SearchCursor( hru.polygon_path, [hru.dem_adj_field]) ]): logging.info('Filling {} from {}'.format(hru.dem_adj_field, dem_adj_copy_field)) arcpy.CalculateField_management( hru.polygon_path, hru.dem_adj_field, 'float(!{}!)'.format(dem_adj_copy_field), 'PYTHON') elif reset_dem_adj_flag: logging.info('Filling {} from {}'.format(hru.dem_adj_field, dem_adj_copy_field)) arcpy.CalculateField_management( hru.polygon_path, hru.dem_adj_field, 'float(!{}!)'.format(dem_adj_copy_field), 'PYTHON') else: logging.info('{} appears to already have been set and ' 'will not be overwritten'.format(hru.dem_adj_field)) # HRU_SLOPE in radians logging.info('Calculating {} (Slope in Radians)'.format( hru.dem_slope_rad_field)) arcpy.CalculateField_management( hru.polygon_path, hru.dem_slope_rad_field, 'math.pi * 
!{}! / 180'.format(hru.dem_slope_deg_field), 'PYTHON') # HRU_SLOPE in percent logging.info('Calculating {} (Percent Slope)'.format( hru.dem_slope_pct_field)) arcpy.CalculateField_management( hru.polygon_path, hru.dem_slope_pct_field, 'math.tan(!{}!)'.format(hru.dem_slope_rad_field), 'PYTHON') # Jensen-Haise Potential ET air temperature coefficient logging.info('Calculating JH_COEF_HRU') # First check if PRISM TMAX/TMIN have been set # If max July value is 0, use default values if (calc_prism_jh_coef_flag and (len(arcpy.ListFields(hru.polygon_path, 'TMAX_07')) == 0 or support.field_stat_func(hru.polygon_path, 'TMAX_07', 'MAXIMUM') == 0)): calc_prism_jh_coef_flag = False # Use PRISM temperature values if calc_prism_jh_coef_flag: logging.info(' Using PRISM temperature values') tmax_field_list = ['!TMAX_{:02d}!'.format(m) for m in range(1, 13)] tmin_field_list = ['!TMIN_{:02d}!'.format(m) for m in range(1, 13)] tmax_expr = 'max([{}])'.format(','.join(tmax_field_list)) arcpy.CalculateField_management(hru.polygon_path, hru.jh_tmax_field, tmax_expr, 'PYTHON') # Get TMIN for same month as maximum TMAX tmin_expr = 'max(zip([{}],[{}]))[1]'.format(','.join(tmax_field_list), ','.join(tmin_field_list)) arcpy.CalculateField_management(hru.polygon_path, hru.jh_tmin_field, tmin_expr, 'PYTHON') # Use default temperature values else: logging.info(' setting temperature values (7 & 25)') arcpy.CalculateField_management(hru.polygon_path, hru.jh_tmax_field, 25, 'PYTHON') arcpy.CalculateField_management(hru.polygon_path, hru.jh_tmin_field, 7, 'PYTHON') # Pass unit scalar to convert DEM_ADJ to feet if necessary support.jensen_haise_func(hru.polygon_path, hru.jh_coef_field, hru.dem_adj_field, hru.jh_tmin_field, hru.jh_tmax_field, dem_unit_scalar) # SNAREA_THRESH # Convert DEM_ADJ to feet if necessary logging.info('Calculating {}'.format(hru.snarea_thresh_field)) elev_min = support.field_stat_func(hru.polygon_path, hru.dem_adj_field, 'MINIMUM') arcpy.CalculateField_management( 
hru.polygon_path, hru.snarea_thresh_field, '(!{}! - {}) * 0.005'.format(hru.dem_adj_field, elev_min * dem_unit_scalar), 'PYTHON') # Clear slope/aspect values for lake cells (HRU_TYPE == 2) # Also clear for ocean cells (HRU_TYPE == 0 and DEM_ADJ == 0) if True: logging.info('\nClearing slope/aspect parameters for lake cells') hru_polygon_layer = "hru_polygon_layer" arcpy.MakeFeatureLayer_management(hru.polygon_path, hru_polygon_layer) arcpy.SelectLayerByAttribute_management( hru_polygon_layer, "NEW_SELECTION", '"{0}" = 2 OR ("{0}" = 0 AND "{1}" = 0)'.format( hru.type_field, hru.dem_adj_field)) arcpy.CalculateField_management(hru_polygon_layer, hru.dem_aspect_field, 0, 'PYTHON') arcpy.CalculateField_management(hru_polygon_layer, hru.dem_slope_deg_field, 0, 'PYTHON') arcpy.CalculateField_management(hru_polygon_layer, hru.dem_slope_rad_field, 0, 'PYTHON') arcpy.CalculateField_management(hru_polygon_layer, hru.dem_slope_pct_field, 0, 'PYTHON') # arcpy.CalculateField_management( # hru_polygon_layer, hru.deplcrv_field, 0, 'PYTHON') # arcpy.CalculateField_management( # hru_polygon_layer, hru.snarea_field, 0, 'PYTHON') # arcpy.CalculateField_management( # hru_polygon_layer, hru.tmax_adj_field, 0, 'PYTHON') # arcpy.CalculateField_management( # hru_polygon_layer, hru.tmin_adj_field, 0, 'PYTHON') # Should JH coefficients be cleared for lakes? # logging.info('\nClearing JH parameters for ocean cells') arcpy.SelectLayerByAttribute_management( hru_polygon_layer, "NEW_SELECTION", '"{}" = 0 AND "{}" = 0'.format(hru.type_field, hru.dem_adj_field)) arcpy.CalculateField_management(hru_polygon_layer, hru.jh_coef_field, 0, 'PYTHON') arcpy.CalculateField_management(hru_polygon_layer, hru.jh_tmax_field, 0, 'PYTHON') arcpy.CalculateField_management(hru_polygon_layer, hru.jh_tmin_field, 0, 'PYTHON') arcpy.Delete_management(hru_polygon_layer) del hru_polygon_layer
def temp_adjust_parameters(config_path):
    """Calculate GSFLOW Temperature Adjustment Parameters

    Computes mean monthly TMAX/TMIN adjustment fields (TMX_ADJ_*, TMN_ADJ_*)
    on the HRU fishnet, either from a temperature-zone shapefile (ZONES) or
    from a single set of observed monthly values (1STA).

    Parameters
    ----------
    config_path : str
        Project configuration file (.ini) path.

    Returns
    -------
    None
        Returns False (early exit) when temperature_calc_method is LAPSE,
        since no adjustments are needed in that case.

    """
    # Hardcoded HRU field formats for now
    tmax_field_fmt = 'TMAX_{:02d}'
    tmin_field_fmt = 'TMIN_{:02d}'
    tmax_adj_field_fmt = 'TMX_ADJ_{:02d}'
    tmin_adj_field_fmt = 'TMN_ADJ_{:02d}'

    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      ' config_file = {}\n'
                      ' Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'temp_adjust_parameters_log.txt'
    log_console = logging.FileHandler(
        filename=os.path.join(hru.log_ws, log_file_name), mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW Temperature Adjustment Parameters')

    # Units
    temp_obs_units = support.get_param(
        'temp_obs_units', 'C', inputs_cfg).upper()
    temp_units_list = ['C', 'F', 'K']
    # Compare against the upper case of the values in the list
    # but don't modify the acceptable units list
    if temp_obs_units not in temp_units_list:
        logging.error('\nERROR: Invalid observed temperature units ({})\n '
                      'Valid units are: {}'.format(
                          temp_obs_units, ', '.join(temp_units_list)))
        sys.exit()

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()

    # Temperature calculation method
    try:
        temp_calc_method = inputs_cfg.get(
            'INPUTS', 'temperature_calc_method').upper()
    except:
        temp_calc_method = '1STA'
        logging.info('  Defaulting temperature_calc_method = {}'.format(
            temp_calc_method))
    temp_calc_options = ['ZONES', '1STA', 'LAPSE']
    if temp_calc_method not in temp_calc_options:
        logging.error(
            '\nERROR: Invalid temperature calculation method ({})\n '
            'Valid methods are: {}'.format(
                temp_calc_method, ', '.join(temp_calc_options)))
        sys.exit()
    if temp_calc_method == 'LAPSE':
        # LAPSE adjustments are computed elsewhere; nothing to do here
        logging.warning(
            '\nWARNING: If temperature calculation set to LAPSE,'
            '\n  it is not necessary to run the temp_adjust_parameters.py'
            '\n  Exiting')
        return False

    if temp_calc_method == 'ZONES':
        temp_zone_orig_path = inputs_cfg.get('INPUTS', 'temp_zone_path')
        try:
            temp_zone_id_field = inputs_cfg.get('INPUTS', 'temp_zone_id_field')
        except:
            logging.error(
                '\nERROR: temp_zone_id_field must be set in INI to apply '
                'zone specific temperature adjustments\n')
            sys.exit()
        try:
            temp_hru_id_field = inputs_cfg.get('INPUTS', 'temp_hru_id_field')
        except:
            temp_hru_id_field = None
            logging.warning(
                '  temp_hru_id_field was not set in the INI file\n'
                '  Temperature adjustments will not be changed to match station '
                'values')

        # Field name for TSTA hard coded, but could be changed to be read from
        # config file like temp_zone
        hru_tsta_field = 'HRU_TSTA'

        try:
            tmax_obs_field_fmt = inputs_cfg.get(
                'INPUTS', 'tmax_obs_field_format')
        except:
            tmax_obs_field_fmt = 'TMAX_{:02d}'
            logging.info('  Defaulting tmax_obs_field_format = {}'.format(
                tmax_obs_field_fmt))
        try:
            # BUG FIX: was reading 'temp_obs_field_format'; the tmin format
            # has its own INI key, mirroring 'tmax_obs_field_format' above
            tmin_obs_field_fmt = inputs_cfg.get(
                'INPUTS', 'tmin_obs_field_format')
        except:
            tmin_obs_field_fmt = 'TMIN_{:02d}'
            logging.info('  Defaulting tmin_obs_field_format = {}'.format(
                tmin_obs_field_fmt))

        if not arcpy.Exists(temp_zone_orig_path):
            logging.error(
                '\nERROR: Temperature Zone ({}) does not exist'.format(
                    temp_zone_orig_path))
            sys.exit()
        # temp_zone_path must be a polygon shapefile
        if arcpy.Describe(temp_zone_orig_path).datasetType != 'FeatureClass':
            logging.error(
                '\nERROR: temp_zone_path must be a polygon shapefile')
            sys.exit()

        # Check temp_zone_id_field
        if temp_zone_id_field.upper() in ['FID', 'OID']:
            # Fall back to the dataset's object ID field name
            temp_zone_id_field = arcpy.Describe(
                temp_zone_orig_path).OIDFieldName
            logging.warning('\n  NOTE: Using {} to set {}\n'.format(
                temp_zone_id_field, hru.temp_zone_id_field))
        elif not arcpy.ListFields(temp_zone_orig_path, temp_zone_id_field):
            logging.error(
                '\nERROR: temp_zone_id_field field {} does not exist\n'.format(
                    temp_zone_id_field))
            sys.exit()
        # Need to check that field is an int type
        # Should we only check active cells (HRU_TYPE > 0)?
        elif not [f.type for f in arcpy.Describe(temp_zone_orig_path).fields
                  if (f.name == temp_zone_id_field and
                      f.type in ['SmallInteger', 'Integer'])]:
            logging.error(
                '\nERROR: temp_zone_id_field field {} must be an integer type\n'
                .format(temp_zone_id_field))
            sys.exit()
        # Need to check that field values are all positive
        # Should we only check active cells (HRU_TYPE > 0)?
        elif min([row[0] for row in arcpy.da.SearchCursor(
                temp_zone_orig_path, [temp_zone_id_field])]) <= 0:
            logging.error(
                '\nERROR: temp_zone_id_field values cannot be negative\n')
            sys.exit()

        # Check hru_tsta_field
        if not arcpy.ListFields(temp_zone_orig_path, hru_tsta_field):
            logging.error(
                '\nERROR: hru_tsta_field field {} does not exist\n'.format(
                    hru_tsta_field))
            sys.exit()
        # Need to check that field is an int type
        # Only check active cells (HRU_TYPE >0)?!
        elif not [f.type for f in arcpy.Describe(temp_zone_orig_path).fields
                  if (f.name == hru_tsta_field and
                      f.type in ['SmallInteger', 'Integer'])]:
            logging.error(
                '\nERROR: hru_tsta_field field {} must be an integer type\n'.
                format(hru_tsta_field))
            sys.exit()
        # Need to check that field values are all positive
        # Only check active cells (HRU_TYPE >0)?!
        elif min([row[0] for row in arcpy.da.SearchCursor(
                temp_zone_orig_path, [hru_tsta_field])]) <= 0:
            logging.error(
                '\nERROR: hru_tsta_field values cannot be negative\n')
            sys.exit()

        # Check temp_hru_id_field
        # temp_hru_id values are checked later
        if temp_hru_id_field is not None:
            if not arcpy.ListFields(temp_zone_orig_path, temp_hru_id_field):
                logging.error(
                    '\nERROR: temp_hru_id_field field {} does not exist\n'.
                    format(temp_hru_id_field))
                sys.exit()
            # Need to check that field is an int type
            elif not [f.type
                      for f in arcpy.Describe(temp_zone_orig_path).fields
                      if (f.name == temp_hru_id_field and
                          f.type in ['SmallInteger', 'Integer'])]:
                logging.error(
                    '\nERROR: temp_hru_id_field field {} must be an integer type\n'
                    .format(temp_hru_id_field))
                sys.exit()
            # Need to check that field values are not negative (0 is okay)
            elif min([row[0] for row in arcpy.da.SearchCursor(
                    temp_zone_orig_path, [temp_hru_id_field])]) < 0:
                logging.error(
                    '\nERROR: temp_hru_id_field values cannot be negative\n')
                sys.exit()
    elif temp_calc_method == '1STA':
        # If a zone shapefile is not used, temperature must be set manually
        tmax_obs_list = inputs_cfg.get('INPUTS', 'tmax_obs_list')
        tmin_obs_list = inputs_cfg.get('INPUTS', 'tmin_obs_list')

        # Check that values are floats
        try:
            tmax_obs_list = map(float, tmax_obs_list.split(','))
        except ValueError:
            logging.error('\nERROR: tmax_obs_list (mean monthly tmax) '
                          'values could not be parsed as floats\n')
            sys.exit()
        try:
            tmin_obs_list = map(float, tmin_obs_list.split(','))
        except ValueError:
            logging.error('\nERROR: tmin_obs_list (mean monthly tmin) '
                          'values could not be parsed as floats\n')
            sys.exit()

        # Check that there are 12 values
        if len(tmax_obs_list) != 12:
            logging.error('\nERROR: There must be exactly 12 mean monthly '
                          'observed tmax values based to tmax_obs_list\n')
            sys.exit()
        logging.info(
            '  Observed mean monthly tmax ({}):\n  {}\n'
            '  (Script will assume these are listed in month order, '
            'i.e. Jan, Feb, ...)'.format(
                temp_obs_units, ', '.join(map(str, tmax_obs_list))))
        if len(tmin_obs_list) != 12:
            logging.error('\nERROR: There must be exactly 12 mean monthly '
                          'observed tmin values based to tmin_obs_list\n')
            sys.exit()
        logging.info(
            '  Observed mean monthly tmin ({}):\n  {}\n'
            '  (Script will assume these are listed in month order, '
            'i.e. Jan, Feb, ...)'.format(
                temp_obs_units, ', '.join(map(str, tmin_obs_list))))

        # Check if all the values are 0
        if tmax_obs_list == ([0.0] * 12):
            logging.error(
                '\nERROR: The observed tmax values are all 0.\n'
                '  To compute tmax adjustments, please set the tmax_obs_list '
                'parameter in the INI with\n  observed mean monthly tmax '
                'values (i.e. from a weather station)')
            sys.exit()
        if tmin_obs_list == ([0.0] * 12):
            logging.error(
                '\nERROR: The observed tmin values are all 0.\n'
                '  To compute tmin adjustments, please set the tmin_obs_list '
                'parameter in the INI with\n  observed mean monthly tmin '
                'values (i.e. from a weather station)')
            sys.exit()

        # Get the temperature HRU ID
        try:
            temp_hru_id = inputs_cfg.getint('INPUTS', 'temp_hru_id')
        except:
            temp_hru_id = 0

        # Check that the temp_hru_id is a valid cell hru_id
        # If temp_hru_id is 0, temperature adjustments will not be adjusted
        if temp_hru_id > 0:
            # Check that HRU_ID is valid
            logging.info('  Temperature HRU_ID: {}'.format(temp_hru_id))
            arcpy.MakeTableView_management(
                hru.polygon_path, "layer",
                "{} = {}".format(hru.id_field, temp_hru_id))
            if (temp_hru_id != 0 and int(
                    arcpy.GetCount_management("layer").getOutput(0)) == 0):
                logging.error(
                    '\nERROR: temp_hru_id {0} is not a valid cell hru_id'
                    '\nERROR: temp adjustments will NOT be forced to 1'
                    ' at cell {0}\n'.format(temp_hru_id))
                temp_hru_id = 0
            arcpy.Delete_management("layer")
        else:
            logging.info(
                '  Temperatures adjustments will not be adjusted to match '
                'station values\n  (temp_hru_id = 0)')

        # Could add a second check that HRU_TSTA has values >0

    # Build output folders if necessary
    temp_adj_temp_ws = os.path.join(hru.param_ws, 'temp_adjust')
    if not os.path.isdir(temp_adj_temp_ws):
        os.mkdir(temp_adj_temp_ws)
    temp_zone_path = os.path.join(temp_adj_temp_ws, 'temp_zone.shp')
    # temp_zone_clip_path = os.path.join(temp_adj_temp_ws, 'temp_zone_clip.shp')

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    # env.pyramid = 'PYRAMIDS -1'
    env.pyramid = 'PYRAMIDS 0'
    env.workspace = hru.param_ws
    env.scratchWorkspace = hru.scratch_ws

    # Set month list based on flags
    month_list = range(1, 13)
    tmax_field_list = [tmax_field_fmt.format(m) for m in month_list]
    tmin_field_list = [tmin_field_fmt.format(m) for m in month_list]
    tmax_adj_field_list = [tmax_adj_field_fmt.format(m) for m in month_list]
    tmin_adj_field_list = [tmin_adj_field_fmt.format(m) for m in month_list]

    # Check fields
    logging.info('\nAdding temperature adjust fields if necessary')
    # Temperature zone fields
    support.add_field_func(hru.polygon_path, hru.temp_zone_id_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.hru_tsta_field, 'SHORT')
    # Temperature adjustment fields
    for tmax_adj_field in tmax_adj_field_list:
        support.add_field_func(hru.polygon_path, tmax_adj_field, 'DOUBLE')
    for tmin_adj_field in tmin_adj_field_list:
        support.add_field_func(hru.polygon_path, tmin_adj_field, 'DOUBLE')

    # Calculate temperature zone ID
    if temp_calc_method == 'ZONES':
        logging.info('\nCalculating cell HRU Temperature Zone ID')
        temp_zone_desc = arcpy.Describe(temp_zone_orig_path)
        temp_zone_sr = temp_zone_desc.spatialReference
        logging.debug('  Zones: {}'.format(temp_zone_orig_path))
        logging.debug('  Projection: {}'.format(temp_zone_sr.name))
        logging.debug('  GCS: {}'.format(temp_zone_sr.GCS.name))

        # Reset temp_ZONE_ID
        logging.info('  Resetting {} to 0'.format(hru.temp_zone_id_field))
        arcpy.CalculateField_management(
            hru.polygon_path, hru.temp_zone_id_field, 0, 'PYTHON')

        # If temp_zone spat_ref doesn't match hru_param spat_ref
        # Project temp_zone to hru_param spat ref
        # Otherwise, read temp_zone directly
        if hru.sr.name != temp_zone_sr.name:
            logging.info('  Projecting temperature zones...')
            # Set preferred transforms
            transform_str = support.transform_func(hru.sr, temp_zone_sr)
            logging.debug('  Transform: {}'.format(transform_str))
            # Project temp_zone shapefile
            arcpy.Project_management(
                temp_zone_orig_path, temp_zone_path, hru.sr,
                transform_str, temp_zone_sr)
            del transform_str
        else:
            arcpy.Copy_management(temp_zone_orig_path, temp_zone_path)

        # Set temperature zone ID
        logging.info('  Setting {}'.format(hru.temp_zone_id_field))
        support.zone_by_centroid_func(
            temp_zone_path, hru.temp_zone_id_field, temp_zone_id_field,
            hru.polygon_path, hru.point_path, hru)
        # support.zone_by_area_func(
        #     temp_zone_layer, hru.temp_zone_id_field, temp_zone_id_field,
        #     hru.polygon_path, hru, hru_area_field, None, 50)

        # Set HRU_TSTA
        logging.info('  Setting {}'.format(hru.hru_tsta_field))
        support.zone_by_centroid_func(
            temp_zone_path, hru.hru_tsta_field, hru_tsta_field,
            hru.polygon_path, hru.point_path, hru)

        del temp_zone_desc, temp_zone_sr
    elif temp_calc_method == '1STA':
        # Set all cells to zone 1
        arcpy.CalculateField_management(
            hru.polygon_path, hru.temp_zone_id_field, 1, 'PYTHON')

    # Calculate adjustments
    logging.info('\nCalculating mean monthly temperature adjustments')
    if temp_calc_method == 'ZONES':
        # Read mean monthly values for each zone
        tmax_obs_dict = dict()
        tmin_obs_dict = dict()
        tmax_obs_field_list = [
            tmax_obs_field_fmt.format(m) for m in month_list]
        tmin_obs_field_list = [
            tmin_obs_field_fmt.format(m) for m in month_list]
        tmax_fields = [temp_zone_id_field] + tmax_obs_field_list
        tmin_fields = [temp_zone_id_field] + tmin_obs_field_list
        logging.debug('  Tmax Obs. Fields: {}'.format(', '.join(tmax_fields)))
        # BUG FIX: was joining tmax_fields on the tmin debug line
        logging.debug('  Tmin Obs. Fields: {}'.format(', '.join(tmin_fields)))

        with arcpy.da.SearchCursor(temp_zone_path, tmax_fields) as s_cursor:
            for row in s_cursor:
                tmax_obs_dict[int(row[0])] = map(float, row[1:13])
        with arcpy.da.SearchCursor(temp_zone_path, tmin_fields) as s_cursor:
            for row in s_cursor:
                tmin_obs_dict[int(row[0])] = map(float, row[1:13])

        # Convert values to Celsius if necessary to match PRISM
        if temp_obs_units == 'F':
            tmax_obs_dict = {
                z: [(t - 32) * (5.0 / 9) for t in t_list]
                for z, t_list in tmax_obs_dict.items()}
            tmin_obs_dict = {
                z: [(t - 32) * (5.0 / 9) for t in t_list]
                for z, t_list in tmin_obs_dict.items()}
        elif temp_obs_units == 'K':
            tmax_obs_dict = {
                z: [(t - 273.15) for t in t_list]
                for z, t_list in tmax_obs_dict.items()}
            tmin_obs_dict = {
                z: [(t - 273.15) for t in t_list]
                for z, t_list in tmin_obs_dict.items()}

        tmax_zone_list = sorted(tmax_obs_dict.keys())
        tmin_zone_list = sorted(tmin_obs_dict.keys())
        logging.debug('  Tmax Zones: {}'.format(tmax_zone_list))
        logging.debug('  Tmin Zones: {}'.format(tmin_zone_list))

        # Print the observed temperature values
        logging.debug('  Observed Tmax')
        for zone, tmax_obs in tmax_obs_dict.items():
            logging.debug('    {}: {}'.format(
                zone, ', '.join(['{:.2f}'.format(x) for x in tmax_obs])))
        logging.debug('  Observed Tmin')
        for zone, tmin_obs in tmin_obs_dict.items():
            logging.debug('    {}: {}'.format(
                zone, ', '.join(['{:.2f}'.format(x) for x in tmin_obs])))

        # Default all zones to an adjustment of 0
        tmax_adj_dict = {z: [0] * 12 for z in tmax_zone_list}
        tmin_adj_dict = {z: [0] * 12 for z in tmin_zone_list}

        # Get list of HRU_IDs for each zone
        fields = [hru.temp_zone_id_field, hru.id_field]
        zone_hru_id_dict = defaultdict(list)
        with arcpy.da.SearchCursor(hru.polygon_path, fields) as s_cursor:
            for row in s_cursor:
                zone_hru_id_dict[int(row[0])].append(int(row[1]))

        # Check that TEMP_HRU_IDs are in the correct zone
        # Default all temperature zone HRU IDs to 0
        temp_hru_id_dict = {z: 0 for z in tmax_zone_list}
        if temp_hru_id_field is not None:
            fields = [temp_zone_id_field, temp_hru_id_field]
            logging.debug(
                '  Temp Zone ID field: {}'.format(temp_zone_id_field))
            logging.debug(
                '  Temp HRU ID field: {}'.format(temp_hru_id_field))
            with arcpy.da.SearchCursor(temp_zone_path, fields) as s_cursor:
                for row in s_cursor:
                    temp_zone = int(row[0])
                    hru_id = int(row[1])
                    if hru_id == 0 or hru_id in zone_hru_id_dict[temp_zone]:
                        temp_hru_id_dict[temp_zone] = hru_id
                        logging.debug('    {}: {}'.format(temp_zone, hru_id))
                    else:
                        # BUG FIX: message reports the zone the HRU is not
                        # in; previously formatted temp_hru_id_dict[temp_zone]
                        logging.error(
                            '\nERROR: HRU_ID {} is not in temperature ZONE {}'.
                            format(hru_id, temp_zone))
                        sys.exit()

        # Get gridded tmax values for each TEMP_HRU_ID
        fields = [hru.temp_zone_id_field, hru.id_field] + tmax_field_list
        with arcpy.da.SearchCursor(hru.polygon_path, fields) as s_cursor:
            for row in s_cursor:
                temp_zone = int(row[0])
                hru_id = int(row[1])
                if hru_id == 0:
                    pass
                elif hru_id in temp_hru_id_dict.values():
                    tmax_gridded_list = map(float, row[2:14])
                    tmax_obs_list = tmax_obs_dict[temp_zone]
                    # Adjustment is observed minus gridded at the target cell
                    tmax_adj_list = [
                        float(o) - t
                        for o, t in zip(tmax_obs_list, tmax_gridded_list)]
                    tmax_adj_dict[temp_zone] = tmax_adj_list

        # Get gridded tmin values for each TEMP_HRU_ID
        fields = [hru.temp_zone_id_field, hru.id_field] + tmin_field_list
        with arcpy.da.SearchCursor(hru.polygon_path, fields) as s_cursor:
            for row in s_cursor:
                temp_zone = int(row[0])
                hru_id = int(row[1])
                if hru_id == 0:
                    pass
                elif hru_id in temp_hru_id_dict.values():
                    tmin_gridded_list = map(float, row[2:14])
                    tmin_obs_list = tmin_obs_dict[temp_zone]
                    tmin_adj_list = [
                        float(o) - t
                        for o, t in zip(tmin_obs_list, tmin_gridded_list)]
                    tmin_adj_dict[temp_zone] = tmin_adj_list
        del temp_hru_id_dict, zone_hru_id_dict, fields

        logging.debug('  Tmax Adjustment Factors:')
        for k, v in tmax_adj_dict.items():
            logging.debug('    {}: {}'.format(
                k, ', '.join(['{:.3f}'.format(x) for x in v])))
        logging.debug('  Tmin Adjustment Factors:')
        for k, v in tmin_adj_dict.items():
            logging.debug('    {}: {}'.format(
                k, ', '.join(['{:.3f}'.format(x) for x in v])))

        logging.debug('\nWriting adjustment values to hru_params')
        fields = [hru.temp_zone_id_field]
        fields.extend(tmax_field_list + tmax_adj_field_list)
        fields.extend(tmin_field_list + tmin_adj_field_list)
        with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
            for row in u_cursor:
                zone = int(row[0])
                for i, month in enumerate(month_list):
                    tmax_i = fields.index(tmax_field_fmt.format(month))
                    tmax_adj_i = fields.index(tmax_adj_field_fmt.format(month))
                    row[tmax_adj_i] = (
                        row[tmax_i] - tmax_obs_dict[zone][i] +
                        tmax_adj_dict[zone][i])
                    tmin_i = fields.index(tmin_field_fmt.format(month))
                    tmin_adj_i = fields.index(tmin_adj_field_fmt.format(month))
                    row[tmin_adj_i] = (
                        row[tmin_i] - tmin_obs_dict[zone][i] +
                        tmin_adj_dict[zone][i])
                u_cursor.updateRow(row)
            del row
    elif temp_calc_method == '1STA':
        # Get gridded temperature at temp_HRU_ID
        tmax_fields = [hru.id_field] + tmax_field_list
        tmin_fields = [hru.id_field] + tmin_field_list
        logging.debug('  Tmax Fields: {}'.format(', '.join(tmax_field_list)))
        logging.debug('  Tmin Fields: {}'.format(', '.join(tmin_field_list)))

        # Convert values to Celsius if necessary to match PRISM
        if temp_obs_units == 'F':
            tmax_obs_list = [(t - 32) * (5.0 / 9) for t in tmax_obs_list]
            tmin_obs_list = [(t - 32) * (5.0 / 9) for t in tmin_obs_list]
        elif temp_obs_units == 'K':
            tmax_obs_list = [t - 273.15 for t in tmax_obs_list]
            tmin_obs_list = [t - 273.15 for t in tmin_obs_list]
        if temp_obs_units != 'C':
            logging.info('\nConverted Mean Monthly Tmax ({}):\n  {}'.format(
                temp_obs_units, ', '.join(map(str, tmax_obs_list))))
            logging.info('Converted Mean Monthly Tmin ({}):\n  {}'.format(
                temp_obs_units, ', '.join(map(str, tmin_obs_list))))

        # Scale all adjustments so gridded temperature will match observed
        # temperature at target cell
        if temp_hru_id != 0:
            tmax_gridded_list = map(float, arcpy.da.SearchCursor(
                hru.polygon_path, tmax_fields,
                '"{}" = {}'.format(hru.id_field, temp_hru_id)).next()[1:])
            logging.debug('  Gridded Tmax: {}'.format(
                ', '.join(['{:.2f}'.format(p) for p in tmax_gridded_list])))
            tmin_gridded_list = map(float, arcpy.da.SearchCursor(
                hru.polygon_path, tmin_fields,
                '"{}" = {}'.format(hru.id_field, temp_hru_id)).next()[1:])
            logging.debug('  Gridded Tmin: {}'.format(
                ', '.join(['{:.2f}'.format(p) for p in tmin_gridded_list])))
            # Difference of MEASURED or OBSERVED TEMP to GRIDDED TEMP
            tmax_adj_list = [
                float(o) - t
                for o, t in zip(tmax_obs_list, tmax_gridded_list)]
            logging.info('  Obs./Gridded: {}'.format(
                ', '.join(['{:.3f}'.format(p) for p in tmax_adj_list])))
            tmin_adj_list = [
                float(o) - t
                for o, t in zip(tmin_obs_list, tmin_gridded_list)]
            logging.info('  Obs./Gridded: {}'.format(
                ', '.join(['{:.3f}'.format(p) for p in tmin_adj_list])))
        else:
            # No target cell: adjustments default to zero
            tmax_adj_list = [0 for p in tmax_obs_list]
            tmin_adj_list = [0 for p in tmin_obs_list]

        # Use single mean monthly tmax for all cells
        # Assume tmax_obs_list is in month order
        fields = tmax_field_list + tmax_adj_field_list
        with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
            for row in u_cursor:
                for i, month in enumerate(month_list):
                    tmax_i = fields.index(tmax_field_fmt.format(month))
                    tmax_adj_i = fields.index(tmax_adj_field_fmt.format(month))
                    row[tmax_adj_i] = (
                        row[tmax_i] - tmax_obs_list[i] + tmax_adj_list[i])
                u_cursor.updateRow(row)
            del row

        # Use single mean monthly tmin for all cells
        # Assume tmin_obs_list is in month order
        fields = tmin_field_list + tmin_adj_field_list
        with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
            for row in u_cursor:
                for i, month in enumerate(month_list):
                    tmin_i = fields.index(tmin_field_fmt.format(month))
                    tmin_adj_i = fields.index(tmin_adj_field_fmt.format(month))
                    row[tmin_adj_i] = (
                        row[tmin_i] - tmin_obs_list[i] + tmin_adj_list[i])
                u_cursor.updateRow(row)
            del row
def ppt_ratio_parameters(config_path, overwrite_flag=False, debug_flag=False):
    """Calculate GSFLOW PPT Ratio Parameters

    Args:
        config_path (str): Project config file path
        overwrite_flag (bool): if True, overwrite existing files
        debug_flag (bool): if True, enable debug level logging

    Returns:
        None
    """
    # Hardcoded HRU field formats for now
    ppt_field_format = 'PPT_{:02d}'
    ratio_field_format = 'PPT_RT_{:02d}'

    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'ppt_ratio_parameters_log.txt'
    log_console = logging.FileHandler(
        filename=os.path.join(hru.log_ws, log_file_name), mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW PPT Ratio Parameters')

    # Units
    ppt_obs_units = support.get_param(
        'ppt_obs_units', 'mm', inputs_cfg).lower()
    ppt_units_list = ['mm', 'cm', 'm', 'in', 'ft']
    # Warn (but don't exit) on unexpected units; the conversion
    # factor below falls back to 1 in that case
    if ppt_obs_units not in ppt_units_list:
        logging.warning(
            ('WARNING: Invalid PPT obs. units ({})\n  '
             'Valid units are: {}').format(
                ppt_obs_units, ', '.join(ppt_units_list)))

    # Conversion factor applied to observed values (target units are mm)
    units_factor = {
        'mm': 1, 'cm': 10, 'm': 1000, 'in': 25.4, 'ft': 304.8,
    }.get(ppt_obs_units, 1)

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()

    # PPT Zones
    set_ppt_zones_flag = inputs_cfg.getboolean('INPUTS', 'set_ppt_zones_flag')
    if set_ppt_zones_flag:
        ppt_zone_orig_path = inputs_cfg.get('INPUTS', 'ppt_zone_path')
        try:
            ppt_zone_field = inputs_cfg.get('INPUTS', 'ppt_zone_field')
        except ConfigParser.NoOptionError:
            logging.error(
                '\nERROR: ppt_zone_field must be set in INI to apply '
                'zone specific ppt ratios\n')
            sys.exit()
        try:
            ppt_hru_id_field = inputs_cfg.get('INPUTS', 'ppt_hru_id_field')
        except ConfigParser.NoOptionError:
            ppt_hru_id_field = None
            logging.warning(
                '  ppt_hru_id_field was not set in the INI file\n'
                '  PPT ratios will not be adjusted to match station values')

        # Field name for PSTA hard coded, but could be changed to be read
        # from the config file like ppt_zone
        hru_psta_field = 'HRU_PSTA'

        try:
            ppt_obs_field_format = inputs_cfg.get(
                'INPUTS', 'ppt_obs_field_format')
        except ConfigParser.NoOptionError:
            ppt_obs_field_format = 'PPT_{:02d}'
            logging.info('  Defaulting ppt_obs_field_format = {}'.format(
                ppt_obs_field_format))

        if not arcpy.Exists(ppt_zone_orig_path):
            logging.error('\nERROR: PPT Zone ({}) does not exist'.format(
                ppt_zone_orig_path))
            sys.exit()
        # ppt_zone_path must be a polygon shapefile
        if arcpy.Describe(ppt_zone_orig_path).datasetType != 'FeatureClass':
            logging.error(
                '\nERROR: ppt_zone_path must be a polygon shapefile')
            sys.exit()

        # Check ppt_zone_field
        if ppt_zone_field.upper() in ['FID', 'OID']:
            ppt_zone_field = arcpy.Describe(ppt_zone_orig_path).OIDFieldName
            logging.warning('\n  NOTE: Using {} to set {}\n'.format(
                ppt_zone_field, hru.ppt_zone_id_field))
        elif not arcpy.ListFields(ppt_zone_orig_path, ppt_zone_field):
            logging.error(
                '\nERROR: ppt_zone_field field {} does not exist\n'.format(
                    ppt_zone_field))
            sys.exit()
        # Need to check that field is an int type
        # Only check active cells (HRU_TYPE >0)?!
        elif not [f.type for f in arcpy.Describe(ppt_zone_orig_path).fields
                  if (f.name == ppt_zone_field and
                      f.type in ['SmallInteger', 'Integer'])]:
            logging.error(
                '\nERROR: ppt_zone_field field {} must be an integer '
                'type\n'.format(ppt_zone_field))
            sys.exit()
        # Need to check that field values are all positive
        # Only check active cells (HRU_TYPE >0)?!
        elif min([row[0] for row in arcpy.da.SearchCursor(
                ppt_zone_orig_path, [ppt_zone_field])]) <= 0:
            logging.error(
                '\nERROR: ppt_zone_field ({}) values must be '
                'positive\n'.format(ppt_zone_field))
            sys.exit()

        # Check hru_psta_field
        if not arcpy.ListFields(ppt_zone_orig_path, hru_psta_field):
            logging.error(
                '\nERROR: hru_psta_field field {} does not exist\n'.format(
                    hru_psta_field))
            sys.exit()
        # Need to check that field is an int type
        # Only check active cells (HRU_TYPE >0)?!
        elif not [f.type for f in arcpy.Describe(ppt_zone_orig_path).fields
                  if (f.name == hru_psta_field and
                      f.type in ['SmallInteger', 'Integer'])]:
            logging.error(
                '\nERROR: hru_psta_field field {} must be an integer '
                'type\n'.format(hru_psta_field))
            sys.exit()
        # Need to check that field values are all positive
        # Only check active cells (HRU_TYPE >0)?!
        elif min([row[0] for row in arcpy.da.SearchCursor(
                ppt_zone_orig_path, [hru_psta_field])]) <= 0:
            logging.error(
                '\nERROR: hru_psta_field ({}) values must be '
                'positive\n'.format(hru_psta_field))
            sys.exit()

        # Check ppt_hru_id_field
        # ppt_hru_id values are checked later
        if ppt_hru_id_field is not None:
            if not arcpy.ListFields(ppt_zone_orig_path, ppt_hru_id_field):
                logging.error(
                    '\nERROR: ppt_hru_id_field field {} does not '
                    'exist\n'.format(ppt_hru_id_field))
                sys.exit()
            # Need to check that field is an int type
            elif not [f.type
                      for f in arcpy.Describe(ppt_zone_orig_path).fields
                      if (f.name == ppt_hru_id_field and
                          f.type in ['SmallInteger', 'Integer'])]:
                logging.error(
                    '\nERROR: ppt_hru_id_field field {} must be an integer '
                    'type\n'.format(ppt_hru_id_field))
                sys.exit()
            # Need to check that field values are not negative (0 is okay)
            elif min([row[0] for row in arcpy.da.SearchCursor(
                    ppt_zone_orig_path, [ppt_hru_id_field])]) < 0:
                logging.error(
                    '\nERROR: ppt_hru_id_field ({}) values cannot be '
                    'negative\n'.format(ppt_hru_id_field))
                sys.exit()
    else:
        # If a zone shapefile is not used, PPT must be set manually
        ppt_obs_list = inputs_cfg.get('INPUTS', 'ppt_obs_list')

        # Check that values are floats
        try:
            ppt_obs_list = [float(p) for p in ppt_obs_list.split(',')]
        except ValueError:
            logging.error('\nERROR: ppt_obs_list (mean monthly precipitation) '
                          'values could not be parsed as floats\n')
            sys.exit()

        # Check that there are 12 values
        if len(ppt_obs_list) != 12:
            logging.error(
                '\nERROR: There must be exactly 12 mean monthly '
                'observed precipitation values based to ppt_obs_list\n')
            sys.exit()
        logging.info(('  Observed Mean Monthly PPT ({}):\n  {}\n  (Script '
                      'will assume these are listed in month order, i.e. Jan, '
                      'Feb, ...)').format(ppt_obs_units, ppt_obs_list))

        # Check if all the values are 0
        if ppt_obs_list == ([0.0] * 12):
            logging.error(
                '\nERROR: The observed precipitation values are all 0.\n'
                '  To compute PPT ratios, please set the ppt_obs_list '
                'parameter in the INI with\n  observed mean monthly PPT '
                'values (i.e. from a weather station)')
            sys.exit()

        # Adjust units (DEADBEEF - this might be better later on)
        if units_factor != 1:
            ppt_obs_list = [p * units_factor for p in ppt_obs_list]
            logging.info('\n  Converted Mean Monthly PPT ({}):\n  {}'.format(
                ppt_obs_units, ppt_obs_list))

        # Get the PPT HRU ID
        # getint can also raise ValueError on a non-integer value;
        # fall back to 0 (no adjustment) either way
        try:
            ppt_hru_id = inputs_cfg.getint('INPUTS', 'ppt_hru_id')
        except (ConfigParser.NoOptionError, ValueError):
            ppt_hru_id = 0

        # Check that the ppt_hru_id is a valid cell hru_id
        # If ppt_hru_id is 0, PPT ratios will not be adjusted
        if ppt_hru_id > 0:
            # Check that HRU_ID is valid
            logging.info('    PPT HRU_ID: {}'.format(ppt_hru_id))
            arcpy.MakeTableView_management(
                hru.polygon_path, "layer",
                "{} = {}".format(hru.id_field, ppt_hru_id))
            if (ppt_hru_id != 0 and
                    int(arcpy.GetCount_management(
                        "layer").getOutput(0)) == 0):
                # NOTE: the original format string had two placeholders but
                # only one argument and raised an IndexError here
                logging.error(
                    ('\nERROR: ppt_hru_id {0} is not a valid cell hru_id'
                     '\nERROR: ppt_ratios will NOT be forced to 1'
                     ' at cell {0}\n').format(ppt_hru_id))
                ppt_hru_id = 0
            arcpy.Delete_management("layer")
        else:
            logging.info(
                '  PPT ratios will not be adjusted to match station '
                'values\n    (ppt_hru_id = 0)')

        # Could add a second check that HRU_PSTA has values >0

    # Build output folders if necesssary
    ppt_ratio_temp_ws = os.path.join(hru.param_ws, 'ppt_ratio_temp')
    if not os.path.isdir(ppt_ratio_temp_ws):
        os.mkdir(ppt_ratio_temp_ws)
    ppt_zone_path = os.path.join(ppt_ratio_temp_ws, 'ppt_zone.shp')

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS 0'
    env.workspace = hru.param_ws
    env.scratchWorkspace = hru.scratch_ws

    # Set month list based on flags
    month_list = range(1, 13)
    ppt_field_list = [ppt_field_format.format(m) for m in month_list]
    ratio_field_list = [ratio_field_format.format(m) for m in month_list]

    # Check fields
    logging.info('\nAdding PPT fields if necessary')
    # PPT zone fields
    support.add_field_func(hru.polygon_path, hru.ppt_zone_id_field, 'LONG')
    # PPT fields
    for ppt_field in ppt_field_list:
        support.add_field_func(hru.polygon_path, ppt_field, 'DOUBLE')
    # PPT ratio fields (previously never added; the update cursors below
    # fail on a fishnet that does not already have them)
    for ratio_field in ratio_field_list:
        support.add_field_func(hru.polygon_path, ratio_field, 'DOUBLE')

    # Calculate PPT zone ID
    if set_ppt_zones_flag:
        logging.info('\nCalculating cell HRU PPT zone ID')
        ppt_zone_desc = arcpy.Describe(ppt_zone_orig_path)
        ppt_zone_sr = ppt_zone_desc.spatialReference
        logging.debug('  PPT zones: {}'.format(ppt_zone_orig_path))
        logging.debug('  PPT zones spat. ref.: {}'.format(ppt_zone_sr.name))
        logging.debug('  PPT zones GCS: {}'.format(ppt_zone_sr.GCS.name))

        # Reset PPT_ZONE_ID
        logging.info('  Resetting {} to 0'.format(hru.ppt_zone_id_field))
        arcpy.CalculateField_management(
            hru.polygon_path, hru.ppt_zone_id_field, 0, 'PYTHON')

        # If ppt_zone spat_ref doesn't match hru_param spat_ref
        # Project ppt_zone to hru_param spat ref
        # Otherwise, read ppt_zone directly
        if hru.sr.name != ppt_zone_sr.name:
            logging.info('  Projecting PPT zones...')
            # Set preferred transforms
            transform_str = support.transform_func(hru.sr, ppt_zone_sr)
            logging.debug('    Transform: {}'.format(transform_str))
            # Project ppt_zone shapefile
            arcpy.Project_management(
                ppt_zone_orig_path, ppt_zone_path, hru.sr,
                transform_str, ppt_zone_sr)
            del transform_str
        else:
            arcpy.Copy_management(ppt_zone_orig_path, ppt_zone_path)

        # Set ppt zone ID
        logging.info('  Setting {}'.format(hru.ppt_zone_id_field))
        support.zone_by_centroid_func(
            ppt_zone_path, hru.ppt_zone_id_field, ppt_zone_field,
            hru.polygon_path, hru.point_path, hru)

        # Set HRU_PSTA
        logging.info('  Setting {}'.format(hru.hru_psta_field))
        support.zone_by_centroid_func(
            ppt_zone_path, hru.hru_psta_field, hru_psta_field,
            hru.polygon_path, hru.point_path, hru)

        # Cleanup
        del ppt_zone_desc, ppt_zone_sr
    else:
        # Set all cells to PPT zone 1
        arcpy.CalculateField_management(
            hru.polygon_path, hru.ppt_zone_id_field, 1, 'PYTHON')

    # Calculate PPT ratios
    logging.info('\nCalculating mean monthly PPT ratios')
    if set_ppt_zones_flag:
        # Read mean monthly PPT values for each zone
        ppt_obs_dict = dict()
        ppt_obs_field_list = [
            ppt_obs_field_format.format(m) for m in month_list]
        fields = [ppt_zone_field] + ppt_obs_field_list
        logging.debug('  Obs. Fields: {}'.format(', '.join(fields)))
        with arcpy.da.SearchCursor(ppt_zone_path, fields) as s_cursor:
            for row in s_cursor:
                # Convert units while reading obs values
                ppt_obs_dict[int(row[0])] = [
                    float(x) * units_factor for x in row[1:13]]
        ppt_zone_list = sorted(ppt_obs_dict.keys())
        logging.debug('  PPT Zones: {}'.format(ppt_zone_list))

        # Print the observed PPT values
        logging.debug('  Observed PPT')
        for zone, ppt_obs in ppt_obs_dict.items():
            logging.debug('    {}: {}'.format(
                zone, ', '.join(['{:.2f}'.format(x) for x in ppt_obs])))

        # Default all zones to a PPT ratio of 1
        ppt_ratio_dict = {z: [1] * 12 for z in ppt_zone_list}

        # Get list of HRU_IDs for each PPT Zone
        fields = [hru.ppt_zone_id_field, hru.id_field]
        zone_hru_id_dict = defaultdict(list)
        with arcpy.da.SearchCursor(hru.polygon_path, fields) as s_cursor:
            for row in s_cursor:
                zone_hru_id_dict[int(row[0])].append(int(row[1]))

        # Check that PPT_HRU_IDs are in the correct zone
        # Default all PPT Zone HRU IDs to 0
        ppt_hru_id_dict = {z: 0 for z in ppt_zone_list}
        if ppt_hru_id_field is not None:
            fields = [ppt_zone_field, ppt_hru_id_field]
            logging.debug('  PPT Zone ID field: {}'.format(ppt_zone_field))
            logging.debug('  PPT HRU ID field: {}'.format(ppt_hru_id_field))
            with arcpy.da.SearchCursor(ppt_zone_path, fields) as s_cursor:
                for row in s_cursor:
                    ppt_zone = int(row[0])
                    hru_id = int(row[1])
                    if hru_id == 0 or hru_id in zone_hru_id_dict[ppt_zone]:
                        ppt_hru_id_dict[ppt_zone] = hru_id
                        logging.debug('    {}: {}'.format(ppt_zone, hru_id))
                    else:
                        # Report the offending zone (the original printed
                        # the previously stored HRU ID instead)
                        logging.error(
                            '\nERROR: HRU_ID {} is not in '
                            'PPT ZONE {}'.format(hru_id, ppt_zone))
                        sys.exit()

        # Get gridded PPT values for each PPT_HRU_ID
        fields = [hru.ppt_zone_id_field, hru.id_field] + ppt_field_list
        with arcpy.da.SearchCursor(hru.polygon_path, fields) as s_cursor:
            for row in s_cursor:
                ppt_zone = int(row[0])
                hru_id = int(row[1])
                if hru_id == 0:
                    pass
                elif hru_id in ppt_hru_id_dict.values():
                    ppt_gridded_list = [float(x) for x in row[2:14]]
                    ppt_obs_list = ppt_obs_dict[ppt_zone]
                    # Ratio of observed to gridded PPT (0 when gridded <= 0)
                    ppt_ratio_dict[ppt_zone] = [
                        float(o) / p if p > 0 else 0
                        for o, p in zip(ppt_obs_list, ppt_gridded_list)]
        del ppt_hru_id_dict, zone_hru_id_dict, fields
        logging.debug('  PPT Ratio Adjustment Factors:')
        for k, v in ppt_ratio_dict.items():
            logging.debug('    {}: {}'.format(
                k, ', '.join(['{:.3f}'.format(x) for x in v])))

        # DEADBEEF - ZONE_VALUE is calculated in zone_by_centroid_func
        # There is probably a cleaner way of linking these two
        fields = [hru.ppt_zone_id_field] + ppt_field_list + ratio_field_list
        # Hoist the per-month field index lookups out of the row loop
        index_list = [
            (fields.index(ppt_field_format.format(m)),
             fields.index(ratio_field_format.format(m)))
            for m in month_list]
        with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
            for row in u_cursor:
                ppt_zone = int(row[0])
                for i, (ppt_i, ratio_i) in enumerate(index_list):
                    if ppt_zone in ppt_zone_list:
                        ppt_obs = ppt_obs_dict[ppt_zone][i]
                    else:
                        ppt_obs = 0
                    if ppt_obs > 0:
                        row[ratio_i] = (
                            ppt_ratio_dict[ppt_zone][i] *
                            row[ppt_i] / ppt_obs)
                    else:
                        row[ratio_i] = 0
                u_cursor.updateRow(row)
    else:
        # Get gridded precip at PPT_HRU_ID
        fields = [hru.id_field] + ppt_field_list
        logging.debug('  Fields: {}'.format(', '.join(fields)))

        # Scale all ratios so gridded PPT will match observed PPT
        # at target cell
        if ppt_hru_id != 0:
            ppt_gridded_list = [
                float(p) for p in arcpy.da.SearchCursor(
                    hru.polygon_path, fields,
                    '"{}" = {}'.format(
                        hru.id_field, ppt_hru_id)).next()[1:]]
            logging.info('  Gridded PPT: {}'.format(', '.join(
                ['{:.2f}'.format(p) for p in ppt_gridded_list])))

            # Ratio of MEASURED or OBSERVED PPT to GRIDDED PPT
            # This will be multiplied by GRIDDED/OBSERVED below
            ppt_ratio_list = [
                float(o) / p if p > 0 else 0
                for o, p in zip(ppt_obs_list, ppt_gridded_list)]
            logging.info('  Obs./Gridded: {}'.format(', '.join(
                ['{:.3f}'.format(p) for p in ppt_ratio_list])))
        else:
            ppt_ratio_list = [1 for p in ppt_obs_list]

        # Use single mean monthly PPT for all cells
        # Assume ppt_obs_list is in month order
        fields = ppt_field_list + ratio_field_list
        # Hoist the per-month field index lookups out of the row loop
        index_list = [
            (fields.index(ppt_field_format.format(m)),
             fields.index(ratio_field_format.format(m)))
            for m in month_list]
        with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
            for row in u_cursor:
                for i, (ppt_i, ratio_i) in enumerate(index_list):
                    if ppt_obs_list[i] > 0:
                        row[ratio_i] = (
                            ppt_ratio_list[i] * row[ppt_i] /
                            ppt_obs_list[i])
                    else:
                        row[ratio_i] = 0
                u_cursor.updateRow(row)
def _project_soil_raster(orig_path, out_path, label, hru, proj_method, cs):
    """Project/clip one soil raster to the HRU grid (shared helper).

    Reads the spatial reference from the input raster itself (the
    original inline code reused a stale variable and projected the Ksat
    raster with the sand raster's spatial reference), removes any
    existing output, and projects with the preferred datum transform.

    Parameters
    ----------
    orig_path : str
        Input raster path.
    out_path : str
        Output (projected) raster path.
    label : str
        Short raster name used in log messages (e.g. 'AWC').
    hru : support.HRUParameters
        Target spatial reference, snap point, and extent.
    proj_method : str
        Resampling method (BILINEAR, CUBIC, or NEAREST).
    cs : int
        Output cellsize.

    Returns
    -------
    None

    """
    orig_sr = arcpy.sa.Raster(orig_path).spatialReference
    logging.debug('  {} GCS:  {}'.format(label, orig_sr.GCS.name))
    # Remove existing projected raster
    if arcpy.Exists(out_path):
        arcpy.Delete_management(out_path)
    # Set preferred transforms
    transform_str = support.transform_func(hru.sr, orig_sr)
    logging.debug('  Transform: {}'.format(transform_str))
    logging.debug('  Projection method: {}'.format(proj_method))
    # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
    support.project_raster_func(
        orig_path, out_path, hru.sr, proj_method, cs, transform_str,
        '{} {}'.format(hru.ref_x, hru.ref_y), orig_sr, hru)


def soil_raster_prep(config_path):
    """Prepare GSFLOW soil rasters

    Projects/clips the AWC, clay, sand, Ksat, and optional soil depth
    and ssr2gw multiplier rasters to the HRU grid, then optionally fills
    nodata values using Nibble.

    Parameters
    ----------
    config_path : str
        Project configuration file (.ini) path.

    Returns
    -------
    None

    """
    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'soil_prep_log.txt'
    log_console = logging.FileHandler(
        filename=os.path.join(hru.log_ws, log_file_name), mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nPrepare GSFLOW Soil Rasters')

    soil_orig_ws = inputs_cfg.get('INPUTS', 'soil_orig_folder')
    awc_name = inputs_cfg.get('INPUTS', 'awc_name')
    clay_pct_name = inputs_cfg.get('INPUTS', 'clay_pct_name')
    sand_pct_name = inputs_cfg.get('INPUTS', 'sand_pct_name')
    soil_proj_method = 'NEAREST'
    soil_cs = inputs_cfg.getint('INPUTS', 'soil_cellsize')
    fill_soil_nodata_flag = inputs_cfg.getboolean(
        'INPUTS', 'fill_soil_nodata_flag')

    # Use Ksat to calculate ssr2gw_rate and slowcoef_lin
    ksat_name = inputs_cfg.get('INPUTS', 'ksat_name')

    # Read and apply soil depth raster
    # Otherwise soil depth will only be derived from rooting depth
    try:
        soil_depth_flag = inputs_cfg.getboolean('INPUTS', 'soil_depth_flag')
    except ConfigParser.NoOptionError:
        soil_depth_flag = False
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'soil_depth_flag', soil_depth_flag))
    if soil_depth_flag:
        soil_depth_name = inputs_cfg.get('INPUTS', 'soil_depth_name')

    # Use geology based multipliers to adjust ssr2gw_rate
    # Otherwise default value set in config file will be used
    try:
        ssr2gw_mult_flag = inputs_cfg.getboolean(
            'INPUTS', 'ssr2gw_mult_flag')
    except ConfigParser.NoOptionError:
        ssr2gw_mult_flag = False
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'ssr2gw_mult_flag', ssr2gw_mult_flag))
    if ssr2gw_mult_flag:
        ssr2gw_mult_name = inputs_cfg.get('INPUTS', 'ssr2gw_mult_name')

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()

    # All of the soil rasters must exist
    awc_orig_path = os.path.join(soil_orig_ws, awc_name)
    clay_pct_orig_path = os.path.join(soil_orig_ws, clay_pct_name)
    sand_pct_orig_path = os.path.join(soil_orig_ws, sand_pct_name)
    ksat_orig_path = os.path.join(soil_orig_ws, ksat_name)
    if soil_depth_flag:
        soil_depth_orig_path = os.path.join(soil_orig_ws, soil_depth_name)
    if ssr2gw_mult_flag:
        ssr2gw_mult_orig_path = os.path.join(soil_orig_ws, ssr2gw_mult_name)

    # Check that either the original or projected/clipped raster exists
    if not arcpy.Exists(awc_orig_path):
        logging.error('\nERROR: AWC raster does not exist')
        sys.exit()
    if not arcpy.Exists(clay_pct_orig_path):
        logging.error('\nERROR: Clay raster does not exist')
        sys.exit()
    if not arcpy.Exists(sand_pct_orig_path):
        logging.error('\nERROR: Sand raster does not exist')
        sys.exit()
    if not arcpy.Exists(ksat_orig_path):
        logging.error('\nERROR: Ksat raster does not exist')
        sys.exit()
    if soil_depth_flag and not arcpy.Exists(soil_depth_orig_path):
        logging.error('\nERROR: Soil depth raster does not exist')
        sys.exit()
    if ssr2gw_mult_flag and not arcpy.Exists(ssr2gw_mult_orig_path):
        logging.error('\nERROR: Geology based raster for ssr2gw multiplier '
                      'does not exist')
        sys.exit()

    # Check other inputs
    if soil_cs <= 0:
        logging.error('\nERROR: soil cellsize must be greater than 0')
        sys.exit()
    soil_proj_method_list = ['BILINEAR', 'CUBIC', 'NEAREST']
    if soil_proj_method.upper() not in soil_proj_method_list:
        logging.error('\nERROR: Soil projection method must be: {}'.format(
            ', '.join(soil_proj_method_list)))
        sys.exit()

    # Build output folder if necessary
    soil_temp_ws = os.path.join(hru.param_ws, 'soil_rasters')
    if not os.path.isdir(soil_temp_ws):
        os.mkdir(soil_temp_ws)

    # Output paths
    awc_path = os.path.join(soil_temp_ws, 'awc.img')
    clay_pct_path = os.path.join(soil_temp_ws, 'clay_pct.img')
    sand_pct_path = os.path.join(soil_temp_ws, 'sand_pct.img')
    ksat_path = os.path.join(soil_temp_ws, 'ksat.img')
    soil_depth_path = os.path.join(soil_temp_ws, 'soil_depth.img')
    ssr2gw_mult_path = os.path.join(soil_temp_ws, 'ssr2gw_mult.img')

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS -1'
    # env.pyramid = 'PYRAMIDS 0'
    env.workspace = soil_temp_ws
    env.scratchWorkspace = hru.scratch_ws

    # Available Water Capacity (AWC)
    logging.info('\nProjecting/clipping AWC raster')
    _project_soil_raster(
        awc_orig_path, awc_path, 'AWC', hru, soil_proj_method, soil_cs)

    # Percent clay
    logging.info('Projecting/clipping clay raster')
    _project_soil_raster(
        clay_pct_orig_path, clay_pct_path, 'Clay', hru,
        soil_proj_method, soil_cs)

    # Percent sand
    logging.info('Projecting/clipping sand raster')
    _project_soil_raster(
        sand_pct_orig_path, sand_pct_path, 'Sand', hru,
        soil_proj_method, soil_cs)

    # Hydraulic conductivity
    # NOTE: previously projected with the sand raster's stale spatial
    # reference; the helper reads it from the Ksat raster itself
    logging.info('Projecting/clipping ksat raster')
    _project_soil_raster(
        ksat_orig_path, ksat_path, 'Ksat', hru, soil_proj_method, soil_cs)

    # Soil depth is only needed if clipping root depth
    if soil_depth_flag:
        logging.info('\nProjecting/clipping depth raster')
        _project_soil_raster(
            soil_depth_orig_path, soil_depth_path, 'Depth', hru,
            soil_proj_method, soil_cs)

    # Geology based multiplier for gravity drainage (ssr2gw multiplier)
    if ssr2gw_mult_flag:
        logging.info('\nProjecting/clipping ssr2gw multiplier raster')
        _project_soil_raster(
            ssr2gw_mult_orig_path, ssr2gw_mult_path, 'Ssr2gw mult.', hru,
            soil_proj_method, soil_cs)

    # Fill soil nodata values using nibble
    if fill_soil_nodata_flag:
        logging.info('\nFilling soil nodata values using Nibble')
        soil_raster_list = [awc_path, clay_pct_path, sand_pct_path, ksat_path]
        if soil_depth_flag:
            soil_raster_list.append(soil_depth_path)
        for soil_raster_path in soil_raster_list:
            logging.info('  {}'.format(soil_raster_path))
            # DEADBEEF - Check if there is any nodata to be filled first?
            # Scale by 1000 so fractional values survive the integer
            # Nibble, then scale back afterwards
            mask_obj = arcpy.sa.Int(1000 * arcpy.sa.SetNull(
                arcpy.sa.Raster(soil_raster_path) < 0,
                arcpy.sa.Raster(soil_raster_path)))
            input_obj = arcpy.sa.Con(arcpy.sa.IsNull(mask_obj), 0, mask_obj)
            nibble_obj = 0.001 * arcpy.sa.Nibble(
                input_obj, mask_obj, 'ALL_VALUES')
            nibble_obj.save(soil_raster_path)
            arcpy.BuildPyramids_management(soil_raster_path)