Example #1
def impervious_parameters(config_path, overwrite_flag=False, debug_flag=False):
    """Calculate GSFLOW Impervious Parameters

    Args:
        config_path (str): Project config file path
        overwrite_flag (bool): if True, overwrite existing files
        debug_flag (bool): if True, enable debug level logging

    Returns:
        None
    """

    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'impervious_parameters_log.txt'
    log_console = logging.FileHandler(filename=os.path.join(
        hru.log_ws, log_file_name),
                                      mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW Impervious Parameters')

    # Impervious cover inputs
    imperv_orig_path = inputs_cfg.get('INPUTS', 'impervious_orig_path')
    # imperv_proj_method = inputs_cfg.get('INPUTS', 'impervious_projection_method')
    imperv_proj_method = 'NEAREST'
    imperv_cs = inputs_cfg.getint('INPUTS', 'impervious_cellsize')
    imperv_pct_flag = inputs_cfg.getboolean('INPUTS', 'impervious_pct_flag')

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()
    # Impervious raster must exist
    if not arcpy.Exists(imperv_orig_path):
        logging.error('\nERROR: Impervious raster ({}) does not exist'.format(
            imperv_orig_path))
        sys.exit()

    # Check other inputs
    if imperv_cs <= 0:
        logging.error('\nERROR: Impervious cellsize must be greater than 0')
        sys.exit()
    imperv_proj_method_list = ['BILINEAR', 'CUBIC', 'NEAREST']
    if imperv_proj_method.upper() not in imperv_proj_method_list:
        logging.error(
            '\nERROR: Impervious projection method must be: {}'.format(
                ', '.join(imperv_proj_method_list)))
        sys.exit()

    # Build output folder if necessary
    imperv_temp_ws = os.path.join(hru.param_ws, 'impervious_rasters')
    if not os.path.isdir(imperv_temp_ws):
        os.mkdir(imperv_temp_ws)
    # Output paths
    imperv_path = os.path.join(imperv_temp_ws, 'impervious_cover.img')

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS -1'
    # env.pyramid = 'PYRAMIDS 0'
    env.workspace = imperv_temp_ws
    env.scratchWorkspace = hru.scratch_ws

    # Check field
    logging.info('\nAdding impervious fields if necessary')
    support.add_field_func(hru.polygon_path, hru.imperv_pct_field, 'DOUBLE')
    # add_field_func(hru.polygon_path, hru.carea_min_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.carea_max_field, 'DOUBLE')

    # Project/clip the impervious cover raster to the HRU grid
    logging.info('\nProjecting/clipping impervious cover raster')
    imperv_orig_sr = arcpy.sa.Raster(imperv_orig_path).spatialReference
    logging.debug('  Impervious GCS:  {}'.format(imperv_orig_sr.GCS.name))
    # Remove existing projected raster
    if arcpy.Exists(imperv_path):
        arcpy.Delete_management(imperv_path)
    # Set preferred transforms
    transform_str = support.transform_func(hru.sr, imperv_orig_sr)
    logging.debug('  Transform: {}'.format(transform_str))
    logging.debug('  Projection method: NEAREST')
    # Project impervious raster
    # DEADBEEF - Arc10.2 ProjectRaster does not honor the extent environment
    # env.extent = hru.extent
    support.project_raster_func(imperv_orig_path, imperv_path, hru.sr,
                                imperv_proj_method, imperv_cs, transform_str,
                                '{} {}'.format(hru.ref_x,
                                               hru.ref_y), imperv_orig_sr, hru)
    # arcpy.ProjectRaster_management(
    #    imperv_orig_path, imperv_path, hru.sr,
    #    imperv_proj_method, imperv_cs, transform_str,
    #    '{} {}'.format(hru.ref_x, hru.ref_y),
    #    imperv_orig_sr)
    # arcpy.ClearEnvironment('extent')

    # List of rasters, fields, and stats for zonal statistics
    zs_imperv_dict = dict()
    zs_imperv_dict[hru.imperv_pct_field] = [imperv_path, 'MEAN']
    # zs_imperv_dict[hru.carea_min_field] = [imperv_path, 'MEAN']
    # zs_imperv_dict[hru.carea_max_field] = [imperv_path, 'MEAN']

    # Calculate zonal statistics
    logging.info('\nCalculating zonal statistics')
    support.zonal_stats_func(zs_imperv_dict, hru.polygon_path, hru.point_path,
                             hru)

    # Calculate CAREA_MIN / CAREA_MAX
    logging.info('\nCalculating CAREA_MIN / CAREA_MAX')
    if imperv_pct_flag:
        arcpy.CalculateField_management(
            hru.polygon_path, hru.imperv_pct_field,
            '0.01 * !{}!'.format(hru.imperv_pct_field), 'PYTHON')
        # arcpy.CalculateField_management(
        #    hru.polygon_path, hru.carea_min_field,
        #    '0.01 * !{}!'.format(hru.imperv_pct_field), 'PYTHON')
        arcpy.CalculateField_management(
            hru.polygon_path, hru.carea_max_field,
            '0.01 * !{}!'.format(hru.imperv_pct_field), 'PYTHON')
    else:
        # arcpy.CalculateField_management(
        #    hru.polygon_path, hru.carea_min_field,
        #    '!{}!'.format(hru.imperv_pct_field), 'PYTHON')
        arcpy.CalculateField_management(hru.polygon_path, hru.carea_max_field,
                                        '!{}!'.format(hru.imperv_pct_field),
                                        'PYTHON')
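
# A minimal standalone sketch (hypothetical helper, not from the source) of
# what the CalculateField expressions above compute per HRU: when
# imperv_pct_flag is True the zonal mean is a 0-100 percentage and is
# rescaled to a 0-1 fraction, otherwise it is used as-is.
def carea_max_from_imperv(imperv_value, imperv_pct_flag):
    """Return CAREA_MAX for one HRU from its zonal-mean impervious value."""
    return 0.01 * imperv_value if imperv_pct_flag else imperv_value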
Example #2
def daymet_parameters(
    config_path,
    data_name='PPT',
    overwrite_flag=False,
    debug_flag=False,
):
    """Calculate GSFLOW DAYMET Parameters

    Args:
        config_path (str): Project config file path
        data_name (str): DAYMET data type (ALL, PPT, TMAX, TMIN, etc.)
        overwrite_flag (bool): if True, overwrite existing files
        debug_flag (bool): if True, enable debug level logging

    Returns:
        None
    """

    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'daymet_normals_log.txt'
    log_console = logging.FileHandler(filename=os.path.join(
        hru.log_ws, log_file_name),
                                      mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW DAYMET Parameters')

    # DAYMET
    daymet_ws = inputs_cfg.get('INPUTS', 'daymet_folder')
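    # Note: the projection method, cellsize, and JH coefficient flag are
    #   read from the PRISM INI keys and reused for the DAYMET rasters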
    daymet_proj_method = inputs_cfg.get('INPUTS', 'prism_projection_method')
    daymet_cs = inputs_cfg.getint('INPUTS', 'prism_cellsize')
    calc_jh_coef_flag = inputs_cfg.getboolean('INPUTS',
                                              'calc_prism_jh_coef_flag')

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()
    # Check that DAYMET folder is valid
    if not os.path.isdir(daymet_ws):
        logging.error(
            '\nERROR: DAYMET folder ({}) does not exist'.format(daymet_ws))
        sys.exit()
    proj_method_list = ['BILINEAR', 'CUBIC', 'NEAREST']
    if daymet_proj_method.upper() not in proj_method_list:
        logging.error('\nERROR: DAYMET projection method must be: {}'.format(
            ', '.join(proj_method_list)))
        sys.exit()
    logging.debug('  Projection method:    {}'.format(
        daymet_proj_method.upper()))

    # Check other inputs
    if daymet_cs <= 0:
        logging.error('\nERROR: DAYMET cellsize must be greater than 0\n')
        sys.exit()

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS 0'
    env.workspace = hru.param_ws
    env.scratchWorkspace = hru.scratch_ws

    # DAYMET data names
    if data_name == 'ALL':
        data_name_list = ['PPT', 'TMAX', 'TMIN']
    else:
        data_name_list = [data_name]

    # Set month list
    month_list = ['{:02d}'.format(m) for m in range(1, 13)]
    # month_list.extend(['annual'])

    # Check fields
    logging.info('\nAdding DAYMET fields if necessary')
    for data_name in data_name_list:
        for month in month_list:
            support.add_field_func(hru.polygon_path,
                                   '{}_{}'.format(data_name, month), 'DOUBLE')

    # Process each DAYMET data type
    logging.info('\nProjecting/clipping DAYMET mean monthly rasters')
    for data_name in data_name_list:
        logging.info('\n{}'.format(data_name))
        daymet_normal_re = re.compile(
            r'daymet_(?P<type>%s)_30yr_normal_(?P<month>\d{2})\.img$' %
            data_name, re.IGNORECASE)

        # Search all files & subfolders in DAYMET folder
        #   for images that match data type
        input_raster_dict = dict()
        for root, dirs, files in os.walk(daymet_ws):
            for file_name in files:
                daymet_normal_match = daymet_normal_re.match(file_name)
                if daymet_normal_match:
                    month_str = daymet_normal_match.group('month')
                    input_raster_dict[month_str] = os.path.join(
                        root, file_name)
        if not input_raster_dict:
            logging.error(
                ('\nERROR: No DAYMET rasters were found matching the ' +
                 'following pattern:\n  {}\n\n').format(
                     daymet_normal_re.pattern))
            sys.exit()

        # DAYMET input data workspace
        # input_ws = os.path.join(daymet_ws, data_name.lower())
        # if not os.path.isdir(input_ws):
        #    logging.error('\nERROR: The DAYMET {} folder does not exist'.format(
        #        data_name.lower()))
        #    sys.exit()

        # DAYMET output data workspace
        output_ws = os.path.join(hru.param_ws, data_name.lower() + '_rasters')
        if not os.path.isdir(output_ws):
            os.mkdir(output_ws)

        # Remove all non year/month rasters in DAYMET temp folder
        logging.info('  Removing existing DAYMET files')
        for item in os.listdir(output_ws):
            if daymet_normal_re.match(item):
                os.remove(os.path.join(output_ws, item))

        # Extract, project/resample, clip
        # Process images by month
        zs_daymet_dict = dict()
        # env.extent = hru.extent
        for month in month_list:
            logging.info('  Month: {}'.format(month))

            # Projected/clipped DAYMET raster
            input_raster = input_raster_dict[month]
            # input_name = 'daymet_{}_30yr_normal_800mM2_{}_bil.bil'.format(
            #    data_name.lower(), input_month)
            # input_raster = os.path.join(input_ws, input_name)
            output_name = 'daymet_{}_normal_{}.img'.format(
                data_name.lower(), month)
            output_raster = os.path.join(output_ws, output_name)

            # Set preferred transforms
            input_sr = arcpy.sa.Raster(input_raster).spatialReference
            transform_str = support.transform_func(hru.sr, input_sr)
            if transform_str:
                logging.debug('  Transform: {}'.format(transform_str))

            # Project DAYMET rasters to HRU coordinate system
            # DEADBEEF - Arc10.2 ProjectRaster does not honor the extent environment
            support.project_raster_func(input_raster, output_raster, hru.sr,
                                        daymet_proj_method.upper(), daymet_cs,
                                        transform_str,
                                        '{} {}'.format(hru.ref_x, hru.ref_y),
                                        input_sr, hru)
            # arcpy.ProjectRaster_management(
            #    input_raster, output_raster, hru.sr,
            #    daymet_proj_method.upper(), daymet_cs, transform_str,
            #    '{} {}'.format(hru.ref_x, hru.ref_y),
            #    input_sr)

            # Save parameters for calculating zonal stats
            zs_field = '{}_{}'.format(data_name, month)
            zs_daymet_dict[zs_field] = [output_raster, 'MEAN']

            # Cleanup
            del input_raster, output_raster, output_name
            del input_sr, transform_str, zs_field

        # Cleanup
        # arcpy.ClearEnvironment('extent')

        # Calculate zonal statistics
        logging.info('\nCalculating DAYMET zonal statistics')
        support.zonal_stats_func(zs_daymet_dict, hru.polygon_path,
                                 hru.point_path, hru)
        del zs_daymet_dict
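
# Illustrative check (hypothetical file names) of the monthly-normal pattern
# compiled above, shown for data_name = 'PPT':
import re

daymet_ppt_re = re.compile(
    r'daymet_(?P<type>PPT)_30yr_normal_(?P<month>\d{2})\.img$', re.IGNORECASE)
assert daymet_ppt_re.match('daymet_ppt_30yr_normal_04.img')
assert daymet_ppt_re.match('daymet_tmax_30yr_normal_04.img') is None

Example #3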
def fishnet_func(config_path, overwrite_flag=False):
    """GSFLOW Fishnet Generator

    Args:
        config_file (str): Project config file path
        ovewrite_flag (bool): if True, overwrite existing files
        debug_flag (bool): if True, enable debug level logging

    Parameters
    ----------
    config_path : str
        Project configuration file (.ini) path.
    ovewrite_flag : bool
        If True, overwrite existing files (the default is False).

    Returns
    -------
    None

    """
    # Initialize hru parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error(
            '\nERROR: Config file could not be read, '
            'is not an input file, or does not exist\n'
            '  config_file = {}\n'
            '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'fishnet_generator_log.txt'
    log_console = logging.FileHandler(
        filename=os.path.join(hru.log_ws, log_file_name), mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW Fishnet Generator')

    # Warn the user if the fishnet already exists
    # It might be better to not allow the user to do this at all and force them
    # to manually remove the file.
    if arcpy.Exists(hru.polygon_path) and not overwrite_flag:
        logging.warning('\nWARNING: The existing fishnet/grid will be '
                        'overwritten\n  {}'.format(hru.polygon_path))
        #raw_input('Press ENTER to continue')

    # Check input paths
    study_area_path = inputs_cfg.get('INPUTS', 'study_area_path')
    if not arcpy.Exists(study_area_path):
        logging.error(
            '\nERROR: Study area ({}) does not exist'.format(
                study_area_path))
        sys.exit()

    # For now, study area has to be a polygon
    if arcpy.Describe(study_area_path).datasetType != 'FeatureClass':
        logging.error(
            '\nERROR: For now, study area must be a polygon shapefile')
        sys.exit()

    # Read Fishnet specific parameters from INI
    # If ref_x and ref_y are not specified, get from the study area extent
    try:
        hru.ref_x = inputs_cfg.getfloat('INPUTS', 'hru_ref_x')
    except ConfigParser.NoOptionError:
        hru.ref_x = arcpy.Describe(study_area_path).extent.XMin
        logging.info(
            '  {0} parameter not set in INI, setting {0} = {1}'.format(
                'ref_x', hru.ref_x))
    try:
        hru.ref_y = inputs_cfg.getfloat('INPUTS', 'hru_ref_y')
    except ConfigParser.NoOptionError:
        hru.ref_y = arcpy.Describe(study_area_path).extent.YMin
        logging.info(
            '  {0} parameter not set in INI, setting {0} = {1}'.format(
                'ref_y', hru.ref_y))
    try:
        buffer_cells = inputs_cfg.getint('INPUTS', 'hru_buffer_cells')
    except ConfigParser.NoOptionError:
        buffer_cells = 2
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'buffer_cells', buffer_cells))
    try:
        snap_method = inputs_cfg.get('INPUTS', 'hru_param_snap_method')
    except ConfigParser.NoOptionError:
        snap_method = 'EXPAND'
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'snap_method', snap_method))
    snap_method_list = ['EXPAND', 'ROUND', 'SHRINK']
    if snap_method not in snap_method_list:
        logging.error('\nERROR: {} must be: {}'.format(
            'snap_method', ', '.join(snap_method_list)))
        sys.exit()

    # Log input hru parameters
    logging.info('\nFishnet Parameters')
    logging.info('  Cellsize:      {}'.format(hru.cs))
    logging.info('  Snap point:    {} {}'.format(hru.ref_x, hru.ref_y))
    logging.debug('  Buffer cells:  {}'.format(buffer_cells))

    # Read reference point as string for determining number of digits
    try:
        digits = abs(min(
            Decimal(inputs_cfg.get('INPUTS', 'hru_ref_x')).as_tuple().exponent,
            Decimal(inputs_cfg.get('INPUTS', 'hru_ref_y')).as_tuple().exponent))
    except ConfigParser.NoOptionError:
        digits = 10
    logging.debug('  Extent digits: {}'.format(digits))
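    # e.g. hru_ref_x = '295000.5' has a Decimal exponent of -1 -> digits = 1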

    # Check inputs
    if buffer_cells < 0:
        logging.error('\nERROR: Buffer cells must be greater than or equal to 0')
        sys.exit()

    # Build output folder if necessary
    fishnet_temp_ws = os.path.join(hru.param_ws, 'fishnet_temp')
    if not os.path.isdir(fishnet_temp_ws):
        os.mkdir(fishnet_temp_ws)
    # Output paths
    study_area_proj_path = os.path.join(
        fishnet_temp_ws, 'projected_study_area.shp')

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS -1'
    # env.pyramid = 'PYRAMIDS 0'
    env.workspace = hru.param_ws
    env.scratchWorkspace = hru.scratch_ws

    # Get spatial reference of study_area
    hru.sr = arcpy.Describe(study_area_path).spatialReference

    # If the study area spatial reference doesn't match the hru_param
    #   spatial reference, project the study area and get the projected extent
    # Otherwise, read the study_area extent directly
    study_area_extent = arcpy.Describe(study_area_path).extent
    logging.debug('\n  Study area: {}'.format(study_area_path))
    logging.debug('  Study area spat. ref.: {}'.format(hru.sr.name))
    logging.debug('  Study area GCS:        {}'.format(hru.sr.GCS.name))
    logging.info('  Study Area extent: {}'.format(
        support.extent_string(study_area_extent)))

    # Check if the study area shapefile is projected
    if (hru.sr.name in ['GCS_North_American_1983', 'GCS_WGS_1984'] or
            hru.sr.GCS.name == hru.sr.name):
        logging.warning(
            '\nWARNING: The study area shapefile does not appear to be projected.'
            '\n  This will likely cause problems or not work at all.'
            '\n  Projection: {}'.format(hru.sr.name))
        raw_input('Press ENTER to continue\n')

    # Buffer extent
    buffer_extent = support.buffer_extent_func(
        study_area_extent, buffer_cells * hru.cs)
    logging.info('  Buffered Extent:   {}'.format(
        support.extent_string(buffer_extent)))

    # Adjust the study area extent to the reference points
    # Set the number of digits of rounding based on the number of digits
    #   in the reference points
    hru.ref_pnt = arcpy.Point(hru.ref_x, hru.ref_y)
    hru.extent = support.adjust_extent_to_snap(
        buffer_extent, hru.ref_pnt, hru.cs,
        method=snap_method, digits=digits)
    logging.info('  Snapped Extent:    {}'.format(
        support.extent_string(hru.extent)))

    # Build hru_param
    logging.info('\nBuilding HRU parameter fishnet')
    build_fishnet_func(
        hru.polygon_path, hru.point_path, hru.extent, hru.cs, hru.sr)
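
# A minimal standalone sketch of the 'EXPAND' snap (assumed behavior, not the
# source implementation of support.adjust_extent_to_snap): each edge is moved
# outward to the nearest cell boundary aligned with the reference point.
import math

def snap_extent_expand(xmin, ymin, xmax, ymax, ref_x, ref_y, cs):
    """Grow each edge outward to the nearest ref-aligned cell boundary."""
    xmin = ref_x + math.floor((xmin - ref_x) / cs) * cs
    ymin = ref_y + math.floor((ymin - ref_y) / cs) * cs
    xmax = ref_x + math.ceil((xmax - ref_x) / cs) * cs
    ymax = ref_y + math.ceil((ymax - ref_y) / cs) * cs
    return xmin, ymin, xmax, ymax

# e.g. snap_extent_expand(101.3, 7.2, 158.9, 54.1, 100.0, 0.0, 10.0)
#   -> (100.0, 0.0, 160.0, 60.0)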
Example #4
def prism_4km_parameters(
    config_path,
    data_name='ALL',
    overwrite_flag=False,
    debug_flag=False,
):
    """Calculate GSFLOW PRISM Parameters

    Args:
        config_path (str): Project config file path
        data_name (str): PRISM data type (ALL, PPT, TMAX, TMIN, etc.)
        overwrite_flag (bool): if True, overwrite existing files
        debug_flag (bool): if True, enable debug level logging

    Returns:
        None
    """

    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'prism_4km_normals_log.txt'
    log_console = logging.FileHandler(filename=os.path.join(
        hru.log_ws, log_file_name),
                                      mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW PRISM Parameters')

    # PRISM
    prism_ws = inputs_cfg.get('INPUTS', 'prism_folder')
    prism_proj_method = inputs_cfg.get('INPUTS', 'prism_projection_method')
    prism_cs = inputs_cfg.getint('INPUTS', 'prism_cellsize')
    calc_jh_coef_flag = inputs_cfg.getboolean('INPUTS',
                                              'calc_prism_jh_coef_flag')

    if calc_jh_coef_flag:
        # DEADBEEF - This could/should be moved to support_functions.py since it is
        #   in this script and in both PRISM scripts.
        # DEM Units
        dem_units = inputs_cfg.get('INPUTS', 'dem_units').lower()
        dem_unit_types = {
            'meters': 'meter',
            'm': 'meter',
            'meter': 'meter',
            'feet': 'feet',
            'ft': 'feet',
            'foot': 'feet',
        }
        try:
            dem_units = dem_unit_types[dem_units]
        except KeyError:
            logging.error(
                '\nERROR: DEM unit "{}" is not supported\n'.format(dem_units))
            sys.exit()
        # Many expressions are hardcoded to units of feet
        # If dem_units are in meters, scale DEM_ADJ to get to feet
        if dem_units == 'meter':
            dem_unit_scalar = 0.3048
        else:
            dem_unit_scalar = 1.0

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()
    # Check that PRISM folder is valid
    if not os.path.isdir(prism_ws):
        logging.error(
            '\nERROR: PRISM folder ({}) does not exist'.format(prism_ws))
        sys.exit()
    proj_method_list = ['BILINEAR', 'CUBIC', 'NEAREST']
    if prism_proj_method.upper() not in proj_method_list:
        logging.error('\nERROR: PRISM projection method must be: {}'.format(
            ', '.join(proj_method_list)))
        sys.exit()
    logging.debug('  Projection method:    {}'.format(
        prism_proj_method.upper()))

    # Check other inputs
    if prism_cs <= 0:
        logging.error('\nERROR: PRISM cellsize must be greater than 0\n')
        sys.exit()

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS 0'
    env.workspace = hru.param_ws
    env.scratchWorkspace = hru.scratch_ws

    # PRISM data names
    if data_name == 'ALL':
        data_name_list = ['PPT', 'TMAX', 'TMIN']
    else:
        data_name_list = [data_name]

    # Set month list
    month_list = ['{0:02d}'.format(m) for m in range(1, 13)]
    # month_list.extend(['annual'])

    # Check fields
    logging.info('\nAdding PRISM fields if necessary')
    for data_name in data_name_list:
        for month in month_list:
            support.add_field_func(hru.polygon_path,
                                   '{}_{}'.format(data_name, month), 'DOUBLE')

    # Process each PRISM data type
    logging.info('\nProjecting/clipping PRISM mean monthly rasters')
    for data_name in data_name_list:
        logging.info('\n{}'.format(data_name))
        prism_normal_re = re.compile(
            r'PRISM_(?P<type>%s)_30yr_normal_4kmM2_(?P<month>\d{2})_bil\.bil$' %
            data_name, re.IGNORECASE)

        # Search all files & subfolders in prism folder
        #   for images that match data type
        input_raster_dict = dict()
        for root, dirs, files in os.walk(prism_ws):
            for file_name in files:
                prism_normal_match = prism_normal_re.match(file_name)
                if prism_normal_match:
                    month_str = prism_normal_match.group('month')
                    input_raster_dict[month_str] = os.path.join(
                        root, file_name)
        if not input_raster_dict:
            logging.error(
                '\nERROR: No PRISM rasters were found matching the '
                'following pattern:\n  {}\n\nDouble check that the script '
                'and folder are for the same resolution '
                '(800m vs 4km)\n\n'.format(prism_normal_re.pattern))
            sys.exit()

        # PRISM input data workspace
        # input_ws = os.path.join(prism_ws, data_name.lower())
        # if not os.path.isdir(input_ws):
        #    logging.error('\nERROR: The PRISM {} folder does not exist'.format(
        #        data_name.lower()))
        #    sys.exit()

        # PRISM output data workspace
        output_ws = os.path.join(hru.param_ws, data_name.lower() + '_rasters')
        if not os.path.isdir(output_ws):
            os.mkdir(output_ws)

        # Remove all non year/month rasters in PRISM temp folder
        logging.info('  Removing existing PRISM files')
        for item in os.listdir(output_ws):
            # if prism_normal_re.match(item) and overwrite_flag:
            if prism_normal_re.match(item):
                os.remove(os.path.join(output_ws, item))

        # Extract, project/resample, clip
        # Process images by month
        zs_prism_dict = dict()
        # env.extent = hru.extent
        for month in month_list:
            logging.info('  Month: {}'.format(month))

            # Projected/clipped PRISM raster
            input_raster = input_raster_dict[month]
            # input_name = 'PRISM_{}_30yr_normal_4kmM2_{}_bil.bil'.format(
            #    data_name.lower(), input_month)
            # input_raster = os.path.join(input_ws, input_name)
            output_name = 'PRISM_{}_30yr_normal_4kmM2_{}.img'.format(
                data_name.lower(), month)
            output_raster = os.path.join(output_ws, output_name)

            # Set preferred transforms
            input_sr = arcpy.sa.Raster(input_raster).spatialReference
            transform_str = support.transform_func(hru.sr, input_sr)
            if transform_str:
                logging.debug('  Transform: {}'.format(transform_str))

            # Project PRISM rasters to HRU coordinate system
            # DEADBEEF - Arc10.2 ProjectRaster does not honor the extent environment
            support.project_raster_func(input_raster, output_raster, hru.sr,
                                        prism_proj_method.upper(), prism_cs,
                                        transform_str,
                                        '{} {}'.format(hru.ref_x, hru.ref_y),
                                        input_sr, hru)
            # arcpy.ProjectRaster_management(
            #    input_raster, output_raster, hru.sr,
            #    prism_proj_method.upper(), prism_cs, transform_str,
            #    '{} {}'.format(hru.ref_x, hru.ref_y),
            #    input_sr)

            # Save parameters for calculating zonal stats
            zs_field = '{}_{}'.format(data_name, month)
            zs_prism_dict[zs_field] = [output_raster, 'MEAN']

            # Cleanup
            del input_raster, output_raster, output_name
            del input_sr, transform_str, zs_field

        # Cleanup
        # arcpy.ClearEnvironment('extent')

        # Calculate zonal statistics
        logging.info('\nCalculating PRISM zonal statistics')
        support.zonal_stats_func(zs_prism_dict, hru.polygon_path,
                                 hru.point_path, hru)
        del zs_prism_dict

    # Jensen-Haise Potential ET air temperature coefficient
    # Update Jensen-Haise PET estimate using PRISM air temperature
    # DEADBEEF - First need to figure out month with highest Tmax
    #            Then get Tmin for same month
    if calc_jh_coef_flag:
        logging.info('\nRe-Calculating JH_COEF_HRU')
        logging.info('  Using PRISM temperature values')
        tmax_field_list = ['!TMAX_{:02d}!'.format(m) for m in range(1, 13)]
        tmin_field_list = ['!TMIN_{:02d}!'.format(m) for m in range(1, 13)]
        tmax_expr = 'max([{}])'.format(','.join(tmax_field_list))
        arcpy.CalculateField_management(hru.polygon_path, hru.jh_tmax_field,
                                        tmax_expr, 'PYTHON')
        # Sort TMAX and get TMIN for same month
        tmin_expr = 'max(zip([{}],[{}]))[1]'.format(','.join(tmax_field_list),
                                                    ','.join(tmin_field_list))
        arcpy.CalculateField_management(hru.polygon_path, hru.jh_tmin_field,
                                        tmin_expr, 'PYTHON')

        # Pass unit scalar to convert DEM_ADJ to feet if necessary
        support.jensen_haise_func(hru.polygon_path, hru.jh_coef_field,
                                  hru.dem_adj_field, hru.jh_tmin_field,
                                  hru.jh_tmax_field, dem_unit_scalar)
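
# Plain-Python illustration (hypothetical temperatures) of the TMIN field
# expression above: zip() pairs each month's TMAX with its TMIN, max() picks
# the pair with the warmest TMAX, and [1] keeps that month's TMIN.
tmax = [4.0, 6.5, 11.0, 15.2, 20.1, 25.7, 29.3, 28.8, 24.0, 17.5, 9.8, 4.9]
tmin = [-6.0, -4.5, -1.0, 2.2, 6.1, 10.7, 14.3, 13.8, 9.0, 3.5, -2.2, -5.1]
jh_tmax = max(tmax)                # 29.3 (July)
jh_tmin = max(zip(tmax, tmin))[1]  # 14.3, the TMIN of the warmest month

Example #5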
def crt_fill_parameters(config_path):
    """Calculate GSFLOW CRT Fill Parameters

    Parameters
    ----------
    config_path : str
        Project configuration file (.ini) path.

    Returns
    -------
    None

    """
    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'crt_fill_parameters_log.txt'
    log_console = logging.FileHandler(filename=os.path.join(
        hru.log_ws, log_file_name),
                                      mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW CRT Fill Parameters')

    # Parameters
    exit_seg = 0

    # CRT Parameters
    try:
        use_crt_fill_flag = inputs_cfg.getboolean('INPUTS',
                                                  'use_crt_fill_flag')
    except ConfigParser.NoOptionError:
        use_crt_fill_flag = False
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'use_crt_fill_flag', use_crt_fill_flag))

    try:
        crt_hruflg = inputs_cfg.getint('INPUTS', 'crt_hruflg')
    except ConfigParser.NoOptionError:
        crt_hruflg = 0
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'crt_hruflg', crt_hruflg))
    try:
        crt_flowflg = inputs_cfg.getint('INPUTS', 'crt_flowflg')
    except ConfigParser.NoOptionError:
        crt_flowflg = 1
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'crt_flowflg', crt_flowflg))
    try:
        crt_dpit = inputs_cfg.getfloat('INPUTS', 'crt_dpit')
    except ConfigParser.NoOptionError:
        crt_dpit = 0.01
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'crt_dpit', crt_dpit))
    try:
        crt_outitmax = inputs_cfg.getint('INPUTS', 'crt_outitmax')
    except ConfigParser.NoOptionError:
        crt_outitmax = 100000
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'crt_outitmax', crt_outitmax))

    # Intentionally not allowing user to change this value
    crt_iprn = 1

    # CRT Fill Parameters
    fill_ws_name = 'fill_work'
    fill_strmflg = 0
    fill_visflg = 0
    fill_ifill = 1

    # CRT Executable
    crt_exe_path = inputs_cfg.get('INPUTS', 'crt_exe_path')
    output_name = 'outputstat.txt'

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist\n'.format(
            hru.polygon_path))
        sys.exit()
    # Check that input fields exist and have data
    # Fields generated by hru_parameters
    for f in [hru.type_field, hru.row_field, hru.col_field]:
        if not arcpy.ListFields(hru.polygon_path, f):
            logging.error(
                '\nERROR: Input field {} is not present in fishnet'
                '\nERROR: Try re-running hru_parameters.py\n'.format(f))
            sys.exit()
        elif support.field_stat_func(hru.polygon_path, f, 'MAXIMUM') == 0:
            logging.error(
                '\nERROR: Input field {} contains only 0'
                '\nERROR: Try re-running hru_parameters.py\n'.format(f))
            sys.exit()
    # Fields generated by dem_2_streams
    for f in [
            hru.irunbound_field, hru.iseg_field, hru.flow_dir_field,
            hru.outflow_field, hru.subbasin_field
    ]:
        if not arcpy.ListFields(hru.polygon_path, f):
            logging.error(
                '\nERROR: Input field {} is not present in fishnet'
                '\nERROR: Try re-running dem_2_streams.py\n'.format(f))
            sys.exit()
        elif support.field_stat_func(hru.polygon_path, f, 'MAXIMUM') == 0:
            logging.error(
                '\nERROR: Input field {} contains only 0'
                '\nERROR: Try re-running dem_2_streams.py\n'.format(f))
            sys.exit()

    # Build output folder if necessary
    fill_ws = os.path.join(hru.param_ws, fill_ws_name)
    if not os.path.isdir(fill_ws):
        os.makedirs(fill_ws)

    # Copy CRT executable if necessary
    crt_exe_name = os.path.basename(crt_exe_path)
    if not os.path.isfile(os.path.join(fill_ws, crt_exe_name)):
        shutil.copy(crt_exe_path, fill_ws)
    if not os.path.isfile(os.path.join(fill_ws, crt_exe_name)):
        logging.error('\nERROR: CRT executable ({}) does not exist\n'.format(
            os.path.join(fill_ws, crt_exe_name)))
        sys.exit()

    # Fill files
    fill_hru_casc_path = os.path.join(fill_ws, 'HRU_CASC.DAT')
    fill_outflow_hru_path = os.path.join(fill_ws, 'OUTFLOW_HRU.DAT')
    fill_land_elev_path = os.path.join(fill_ws, 'LAND_ELEV.DAT')
    fill_xy_path = os.path.join(fill_ws, 'XY.DAT')

    # Output names
    # dem_adj_raster_name = 'dem_adj'
    # hru_type_raster_name = 'hru_type'
    # lakes_raster_name = 'lakes'
    # streams_raster_name = 'streams'
    # iseg_raster_name = 'iseg'
    # irunbound_raster_name = 'irunbound'

    # Output raster paths
    # dem_adj_raster = os.path.join(fill_ws, dem_adj_raster_name + '.img')
    # hru_type_raster = os.path.join(fill_ws, hru_type_raster_name + '.img')

    # Output ascii paths
    # a_fmt = '{}_ascii.txt'
    # dem_adj_ascii = os.path.join(fill_ws, a_fmt.format(dem_adj_raster_name))
    # hru_type_ascii = os.path.join(fill_ws, a_fmt.format(hru_type_raster_name))

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    # env.pyramid = 'PYRAMIDS -1'
    env.pyramid = 'PYRAMIDS 0'
    env.workspace = fill_ws
    env.scratchWorkspace = hru.scratch_ws

    # Add fields if necessary
    logging.info('\nAdding fields if necessary')
    support.add_field_func(hru.polygon_path, hru.krch_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.irch_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.jrch_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.iseg_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.reach_field, 'LONG')
    # add_field_func(hru.polygon_path, hru.rchlen_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.maxreach_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.outseg_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.irunbound_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.crt_elev_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.crt_fill_field, 'DOUBLE')

    # Calculate KRCH, IRCH, JRCH for stream segments
    logging.info('\nKRCH, IRCH, & JRCH for streams')
    fields = [
        hru.type_field, hru.iseg_field, hru.row_field, hru.col_field,
        hru.krch_field, hru.irch_field, hru.jrch_field
    ]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as update_c:
        for row in update_c:
            if (int(row[0]) in [1, 3] and int(row[1]) > 0):
                row[4], row[5], row[6] = 1, int(row[2]), int(row[3])
            else:
                row[4], row[5], row[6] = 0, 0, 0
            update_c.updateRow(row)

    # Get list of segments and downstream cell for each stream/lake cell
    # Downstream is calculated from flow direction
    # Use IRUNBOUND instead of ISEG, since ISEG will be zeroed for lakes
    # DEADBEEF - I don't think ISEG will be zero for lakes anymore
    logging.info('Cell out-flow dictionary')
    cell_dict = dict()
    fields = [
        hru.type_field, hru.krch_field, hru.lake_id_field, hru.iseg_field,
        hru.irunbound_field, hru.dem_adj_field, hru.flow_dir_field,
        hru.col_field, hru.row_field, hru.id_field
    ]
    for row in arcpy.da.SearchCursor(hru.polygon_path, fields):
        # Skip inactive cells
        if int(row[0]) == 0:
            continue
        # Skip non-stream and non-lake cells
        elif (int(row[1]) == 0 and int(row[2]) == 0):
            continue

        # ROW / COL
        cell = (int(row[7]), int(row[8]))

        # Read in parameters
        # HRU_ID, IRUNBOUND, support.next_row_col(FLOW_DIR, CELL), DEM_ADJ,
        #   plus three placeholders for OUTSEG, REACH, and MAXREACH
        cell_dict[cell] = [
            int(row[9]),
            int(row[4]),
            support.next_row_col(int(row[6]), cell),
            float(row[5]), 0, 0, 0
        ]

    # Build list of unique segments
    iseg_list = sorted(list(set([v[1] for v in cell_dict.values()])))
    logging.debug('  Segments: {}'.format(iseg_list))

    # Calculate IREACH and OUTSEG
    logging.info('Calculate {} and {}'.format(hru.reach_field,
                                              hru.outseg_field))
    outseg_dict = dict()
    for iseg in sorted(iseg_list):
        logging.debug('    Segment: {}'.format(iseg))

        # Subset of cell_dict for current iseg
        iseg_dict = dict([(k, v) for k, v in cell_dict.items()
                          if v[1] == iseg])

        # List of all cells in current iseg
        iseg_cells = iseg_dict.keys()

        # List of out_cells for all cells in current iseg
        out_cells = [value[2] for value in iseg_dict.values()]

        # Every iseg will (should?) have one out_cell
        out_cell = list(set(out_cells) - set(iseg_cells))

        # Process streams and lakes separately
        # Streams
        if iseg > 0:
            # If there is more than one out_cell
            #   there is a problem with the stream network
            if len(out_cell) != 1:
                logging.error(
                    '\nERROR: ISEG {} has more than one output cell'
                    '\n  Out cells: {}'
                    '\n  Check for streams exiting then re-entering a lake'
                    '\n  Lake cell elevations may not be constant\n'.format(
                        iseg, out_cell))
                sys.exit()

            # If there is no output cell, assume the edge of the domain
            try:
                outseg = cell_dict[out_cell[0]][1]
            except KeyError:
                outseg = exit_seg

            # Track sub-basin outseg
            outseg_dict[iseg] = outseg

            # Calculate reach number for each cell
            reach_dict = dict()
            start_cell = list(set(iseg_cells) - set(out_cells))[0]
            for i in range(len(out_cells)):
                # logging.debug('    Reach: {}  Cell: {}'.format(i+1, start_cell))
                reach_dict[start_cell] = i + 1
                start_cell = iseg_dict[start_cell][2]
            # For each cell in iseg, save outseg, reach, & maxreach
            for iseg_cell in iseg_cells:
                cell_dict[iseg_cell][4:] = [
                    outseg, reach_dict[iseg_cell],
                    len(iseg_cells)
                ]
            del reach_dict, start_cell
        # Lakes
        else:
            # For lake cells, there can be multiple outlets if all of them
            #   are to inactive cells or out of the model
            # Otherwise, like streams, there should only be one outcell per iseg
            logging.debug('  Length: {}'.format(len(out_cells)))
            if len(out_cell) == 1:
                try:
                    outseg = cell_dict[out_cell[0]][1]
                except KeyError:
                    outseg = exit_seg
            elif (len(out_cell) != 1
                  and all(x not in cell_dict.keys() for x in out_cell)):
                outseg = exit_seg
                logging.debug('  All out cells are inactive, setting outseg '
                              'to exit_seg {}'.format(exit_seg))
            else:
                logging.error(
                    '\nERROR: ISEG {} has more than one output cell'
                    '\n  Out cells: {}'
                    '\n  Check for streams exiting then re-entering a lake'
                    '\n  Lake cell elevations may not be constant\n'.format(
                        iseg, out_cell))
                raw_input('ENTER')
                sys.exit()

            # Track sub-basin outseg
            outseg_dict[iseg] = outseg

            # For each lake segment cell, only save outseg
            # All lake cells are routed directly to the outseg
            for iseg_cell in iseg_cells:
                cell_dict[iseg_cell][4:] = [outseg, 0, 0]
            del outseg

        del iseg_dict, iseg_cells, iseg
        del out_cells, out_cell

    # Saving ireach and outseg
    logging.info('Save {} and {}'.format(hru.reach_field, hru.outseg_field))
    fields = [
        hru.type_field, hru.iseg_field, hru.col_field, hru.row_field,
        hru.outseg_field, hru.reach_field, hru.maxreach_field
    ]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as update_c:
        for row in update_c:
            # if (int(row[0]) > 0 and int(row[1]) > 0):
            # #DEADBEEF - I'm not sure why only iseg > 0 in above line
            # DEADBEEF - This should set outseg for streams and lakes
            if (int(row[0]) > 0 and int(row[1]) != 0):
                row[4:] = cell_dict[(int(row[2]), int(row[3]))][4:]
            else:
                row[4:] = [0, 0, 0]
            update_c.updateRow(row)

    # Set all lake iseg to 0
    logging.info('Lake {}'.format(hru.iseg_field))
    update_rows = arcpy.UpdateCursor(hru.polygon_path)
    for row in update_rows:
        if int(row.getValue(hru.type_field)) != 2:
            continue
        iseg = int(row.getValue(hru.iseg_field))
        if iseg < 0:
            row.setValue(hru.iseg_field, 0)
        update_rows.updateRow(row)
        del row, iseg
    del update_rows

    # Set environment parameters
    env.extent = hru.extent
    env.cellsize = hru.cs
    env.outputCoordinateSystem = hru.sr

    # # Build rasters
    # logging.info('\nOutput model grid rasters')
    # arcpy.PolygonToRaster_conversion(
    #    hru.polygon_path, hru.type_field, hru_type_raster,
    #    'CELL_CENTER', '', hru.cs)
    # arcpy.PolygonToRaster_conversion(
    #    hru.polygon_path, hru.dem_adj_field, dem_adj_raster,
    #    'CELL_CENTER', '', hru.cs)
    #
    # # Build rasters
    # logging.info('Output model grid ascii')
    # arcpy.RasterToASCII_conversion(hru_type_raster, hru_type_ascii)
    # arcpy.RasterToASCII_conversion(dem_adj_raster, dem_adj_ascii)

    logging.debug('\nRemoving existing CRT fill files')
    if os.path.isfile(fill_outflow_hru_path):
        os.remove(fill_outflow_hru_path)
    if os.path.isfile(fill_hru_casc_path):
        os.remove(fill_hru_casc_path)
    if os.path.isfile(fill_land_elev_path):
        os.remove(fill_land_elev_path)
    if os.path.isfile(fill_xy_path):
        os.remove(fill_xy_path)

    # Input parameters files for Cascade Routing Tool (CRT)
    logging.info('\nBuilding output CRT fill files')

    # Generate OUTFLOW_HRU.DAT for CRT
    # Outflow cells exit the model to inactive cells or out of the domain
    #   Outflow field is set in dem_2_streams
    logging.info('  {}'.format(os.path.basename(fill_outflow_hru_path)))
    outflow_hru_list = []
    fields = [
        hru.type_field, hru.outflow_field, hru.subbasin_field, hru.row_field,
        hru.col_field
    ]
    for row in arcpy.da.SearchCursor(hru.polygon_path, fields):
        if int(row[0]) != 0 and int(row[1]) == 1:
            outflow_hru_list.append([int(row[3]), int(row[4])])
    if outflow_hru_list:
        with open(fill_outflow_hru_path, 'w+') as f:
            f.write('{}    NUMOUTFLOWHRU\n'.format(len(outflow_hru_list)))
            for i, outflow_hru in enumerate(outflow_hru_list):
                f.write('{} {} {}   OUTFLOW_ID ROW COL\n'.format(
                    i + 1, outflow_hru[0], outflow_hru[1]))
    else:
        logging.error('\nERROR: No OUTFLOWHRU points, exiting')
        sys.exit()
    del outflow_hru_list
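
    # Illustrative OUTFLOW_HRU.DAT content (hypothetical cells) as written
    #   above, one line per outflow cell:
    #     2    NUMOUTFLOWHRU
    #     1 14 3   OUTFLOW_ID ROW COL
    #     2 15 3   OUTFLOW_ID ROW COL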

    # # DEADBEEF - Old method for setting OUTFLOW_HRU.DAT
    # #   Only streams that flow to real gauges are used
    # # Generate OUTFLOW_HRU.DAT for CRT
    # logging.info('  {}'.format(
    #    os.path.basename(fill_outflow_hru_path)))
    # outflow_hru_list = []
    # fields = [
    #    hru.type_field, hru.iseg_field, hru.outseg_field, hru.reach_field,
    #    hru.maxreach_field, hru.col_field, hru.row_field]
    # for row in arcpy.da.SearchCursor(hru.polygon_path, fields):
    #    if int(row[0]) != 1 or int(row[1]) == 0:
    #        continue
    #    if int(row[2]) == 0 and int(row[3]) == int(row[4]):
    #        outflow_hru_list.append([int(row[6]), int(row[5])])
    # if outflow_hru_list:
    #    with open(fill_outflow_hru_path, 'w+') as f:
    #        f.write('{}    NUMOUTFLOWHRU\n'.format(
    #            len(outflow_hru_list)))
    #        for i, outflow_hru in enumerate(outflow_hru_list):
    #            f.write('{} {} {}   OUTFLOW_ID ROW COL\n'.format(
    #                i+1, outflow_hru[0], outflow_hru[1]))
    #    f.close()
    # del outflow_hru_list

    # Generate HRU_CASC.DAT for CRT from hru_polygon
    logging.info('  {}'.format(os.path.basename(fill_hru_casc_path)))
    hru_type_dict = defaultdict(dict)
    for row in sorted(
            arcpy.da.SearchCursor(hru.polygon_path, [
                hru.row_field, hru.col_field, hru.type_field, hru.dem_adj_field
            ])):
        # Calculate CRT fill for all non-lake and non-ocean (elev > 0) cells
        # if row[3] > 0 and row[2] == 0:
        #    hru_type_dict[int(row[0])][int(row[1])] = 1
        # else: hru_type_dict[int(row[0])][int(row[1])] = row[2]
        # Calculate CRT fill for all active cells
        hru_type_dict[int(row[0])][int(row[1])] = row[2]
    hru_casc_header = (
        '{} {} {} {} {} {} {} {}     '
        'HRUFLG STRMFLG FLOWFLG VISFLG IPRN IFILL DPIT OUTITMAX\n').format(
            crt_hruflg, fill_strmflg, crt_flowflg, fill_visflg, crt_iprn,
            fill_ifill, crt_dpit, crt_outitmax)
    with open(fill_hru_casc_path, 'w+') as f:
        f.write(hru_casc_header)
        for row, col_data in sorted(hru_type_dict.items()):
            f.write(' '.join([str(t)
                              for c, t in sorted(col_data.items())]) + '\n')
    del hru_casc_header, hru_type_dict
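    # Illustrative HRU_CASC.DAT (hypothetical 3x4 grid) as written above,
    #   using the default CRT settings (crt_hruflg=0, fill_strmflg=0,
    #   crt_flowflg=1, fill_visflg=0, crt_iprn=1, fill_ifill=1,
    #   crt_dpit=0.01, crt_outitmax=100000):
    #   0 0 1 0 1 1 0.01 100000     HRUFLG STRMFLG FLOWFLG VISFLG IPRN IFILL DPIT OUTITMAX
    #   0 1 1 0
    #   1 1 1 1
    #   0 1 2 0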
    # # Generate HRU_CASC.DATA for CRT from raster/ascii
    # with open(hru_type_ascii, 'r') as f: ascii_data = f.readlines()
    # f.close()
    # hru_casc_header = (
    #    '{} {} {} {} {} {} {} {}     '
    #    'HRUFLG STRMFLG FLOWFLG VISFLG '
    #    'IPRN IFILL DPIT OUTITMAX\n').format(
    #        crt_hruflg, fill_strmflg, crt_flowflg, fill_visflg,
    #        crt_iprn, fill_ifill, crt_dpit, crt_outitmax)
    # with open(fill_hru_casc_path, 'w+') as f:
    #    f.write(hru_casc_header)
    #    for ascii_line in ascii_data[6:]: f.write(ascii_line)
    # f.close()
    # del hru_casc_header, ascii_data

    # Generate LAND_ELEV.DAT for CRT from hru_polygon
    logging.info('  {}'.format(os.path.basename(fill_land_elev_path)))
    dem_adj_dict = defaultdict(dict)
    for row in sorted(
            arcpy.da.SearchCursor(
                hru.polygon_path,
                [hru.row_field, hru.col_field, hru.dem_adj_field])):
        dem_adj_dict[int(row[0])][int(row[1])] = row[2]
    with open(fill_land_elev_path, 'w+') as f:
        row_first = dem_adj_dict.keys()[0]
        f.write('{} {}       NROW NCOL\n'.format(len(dem_adj_dict.keys()),
                                                 len(dem_adj_dict[row_first])))
        for row, col_data in sorted(dem_adj_dict.items()):
            f.write(' '.join(
                ['{:10.6f}'.format(t)
                 for c, t in sorted(col_data.items())]) + '\n')
    del dem_adj_dict
    # # Generate LAND_ELEV.DAT for CRT from raster/ascii
    # logging.info('  {}'.format(os.path.basename(fill_land_elev_path)))
    # with open(dem_adj_ascii, 'r') as f: ascii_data = f.readlines()
    # f.close()
    # with open(fill_land_elev_path, 'w+') as f:
    #    f.write('{} {}       NROW NCOL\n'.format(
    #        ascii_data[1].split()[1], ascii_data[0].split()[1]))
    #    for ascii_line in ascii_data[6:]: f.write(ascii_line)
    # f.close()
    # del ascii_data

    # Generate XY.DAT for CRT
    logging.info('  {}'.format(os.path.basename(fill_xy_path)))
    xy_list = [
        map(int, row) for row in sorted(
            arcpy.da.SearchCursor(hru.polygon_path,
                                  [hru.id_field, hru.x_field, hru.y_field]))
    ]
    with open(fill_xy_path, 'w+') as f:
        for line in sorted(xy_list):
            f.write(' '.join(map(str, line)) + '\n')
    del xy_list

    # Run CRT
    logging.info('\nRunning CRT')
    subprocess.check_output(crt_exe_name, cwd=fill_ws, shell=True)

    # Read in outputstat.txt and get filled DEM
    logging.info('\nReading CRT {}'.format(output_name))
    output_path = os.path.join(fill_ws, output_name)
    with open(output_path, 'r') as f:
        output_data = [l.strip() for l in f.readlines()]

    # Determine where filled data is in the file
    try:
        crt_dem_i = output_data.index(
            'CRT FILLED LAND SURFACE MODEL USED TO GENERATE CASCADES')
        crt_fill_i = output_data.index(
            'DIFFERENCES BETWEEN FILLED AND UNFILLED LAND SURFACE MODELS')
    except ValueError:
        logging.error('\nERROR: CRT didn\'t completely run\n'
                      '  Check the CRT outputstat.txt file\n')
        sys.exit()

    logging.info('  Break indices: {}, {}'.format(crt_dem_i, crt_fill_i))
    crt_dem_data = [
        r.split() for r in output_data[crt_dem_i + 1:crt_dem_i + hru.rows + 1]
    ]
    crt_fill_data = [
        r.split()
        for r in output_data[crt_fill_i + 1:crt_fill_i + hru.rows + 1]
    ]
    logging.info('  ROWS/COLS: {}/{}'.format(len(crt_dem_data),
                                             len(crt_dem_data[0])))
    logging.info('  ROWS/COLS: {}/{}'.format(len(crt_fill_data),
                                             len(crt_fill_data[0])))
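
    # Illustrative outputstat.txt fragment (hypothetical values) located by
    #   the index lookups above; the hru.rows lines after each marker hold
    #   one whitespace-delimited value per model column:
    #     CRT FILLED LAND SURFACE MODEL USED TO GENERATE CASCADES
    #     1101.25 1100.50 1099.75
    #     ...
    #     DIFFERENCES BETWEEN FILLED AND UNFILLED LAND SURFACE MODELS
    #     0.00 0.25 0.00
    #     ...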

    # crt_type_i = crt_fill_i + (crt_fill_i - crt_dem_i)
    # crt_dem_data = [
    #    r.split() for r in output_data[crt_dem_i+1: crt_dem_i+hru.rows+1]]
    # crt_fill_data = [
    #    r.split() for r in output_data[crt_fill_i+1: crt_type_i-1]]

    # Build dictionaries of the CRT data
    crt_dem_dict = defaultdict(dict)
    crt_fill_dict = defaultdict(dict)
    for i, r in enumerate(crt_dem_data):
        crt_dem_dict[i + 1] = dict([(j + 1, c)
                                    for j, c in enumerate(crt_dem_data[i])])
    for i, r in enumerate(crt_fill_data):
        crt_fill_dict[i + 1] = dict([(j + 1, c)
                                     for j, c in enumerate(crt_fill_data[i])])

    # Write CRT values to hru_polygon
    logging.info('Writing CRT data to fishnet')
    logging.debug('  {:<4s} {:<4s} {:>7s}'.format('ROW', 'COL', 'FILL'))
    fields = [
        hru.row_field, hru.col_field, hru.crt_elev_field, hru.crt_fill_field,
        hru.dem_adj_field
    ]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as update_c:
        for row in update_c:
            # DEM values too wide for CRT's fixed-width output may be written
            #   as symbols, so skip any value that is not a number
            if support.is_number(crt_dem_dict[int(row[0])][int(row[1])]):
                row[2] = float(crt_dem_dict[int(row[0])][int(row[1])])
                row[3] = float(crt_fill_dict[int(row[0])][int(row[1])])
                if float(row[3]) > 0:
                    logging.debug('  {:>4d} {:>4d} {:>7.2f}'.format(
                        row[0], row[1], float(row[3])))
                if use_crt_fill_flag and float(row[3]) > 0:
                    row[4] = row[2]
                update_c.updateRow(row)
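
# Standalone sketch (hypothetical cells) of the reach-numbering walk used in
# crt_fill_parameters above: start at the one segment cell that no other cell
# flows into, then follow each downstream pointer, numbering reaches 1..N.
downstream = {(1, 1): (1, 2), (1, 2): (2, 2), (2, 2): (3, 2)}  # cell -> out
cells = set(downstream)
start = (cells - set(downstream.values())).pop()  # (1, 1), the headwater
reach = {}
for i in range(len(cells)):
    reach[start] = i + 1
    start = downstream[start]
# reach == {(1, 1): 1, (1, 2): 2, (2, 2): 3}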
Example #6
def hru_parameters(config_path):
    """Calculate GSFLOW HRU Parameters

    Parameters
    ----------
    config_path : str
        Project configuration file (.ini) path.

    Returns
    -------
    None

    """
    # Initialize hru parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error(
            '\nERROR: Config file could not be read, '
            'is not an input file, or does not exist\n'
            '  config_file = {}\n'
            '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'hru_parameters_log.txt'
    log_console = logging.FileHandler(
        filename=os.path.join(hru.log_ws, log_file_name), mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW HRU Parameters')

    # Read parameters from config file
    study_area_orig_path = inputs_cfg.get('INPUTS', 'study_area_path')
    try:
        set_lake_flag = inputs_cfg.getboolean('INPUTS', 'set_lake_flag')
    except ConfigParser.NoOptionError:
        set_lake_flag = False
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'set_lake_flag', set_lake_flag))

    if set_lake_flag:
        lake_orig_path = inputs_cfg.get('INPUTS', 'lake_path')
        lake_zone_field = inputs_cfg.get('INPUTS', 'lake_zone_field')
        lake_area_pct = inputs_cfg.getfloat('INPUTS', 'lake_area_pct')

    # Model points
    model_inputs_path = inputs_cfg.get('INPUTS', 'model_points_path')
    try:
        model_points_zone_field = inputs_cfg.get(
            'INPUTS', 'model_points_zone_field')
    except ConfigParser.NoOptionError:
        model_points_zone_field = 'FID'
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'model_points_zone_field', model_points_zone_field))
    try:
        model_points_type_field = inputs_cfg.get(
            'INPUTS', 'model_points_type_field')
    except ConfigParser.NoOptionError:
        model_points_type_field = 'TYPE'
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'model_points_type_field', model_points_type_field))

    # Control flags
    try:
        calc_flow_acc_dem_flag = inputs_cfg.getboolean(
            'INPUTS', 'calc_flow_acc_dem_flag')
    except ConfigParser.NoOptionError:
        calc_flow_acc_dem_flag = False
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'calc_flow_acc_dem_flag', calc_flow_acc_dem_flag))

    try:
        calc_topo_index_flag = inputs_cfg.getboolean(
            'INPUTS', 'calc_topo_index_flag')
    except ConfigParser.NoOptionError:
        calc_topo_index_flag = False
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'calc_topo_index_flag', calc_topo_index_flag))

    # try:
    #     set_ppt_zones_flag = inputs_cfg.getboolean(
    #         'INPUTS', 'set_ppt_zones_flag')
    # except ConfigParser.NoOptionError:
    #     set_ppt_zones_flag = False
    #     logging.info(
    #         '  Missing INI parameter, setting {} = {}'.format(
    #             'set_ppt_zones_flag', set_ppt_zones_flag))

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error(
            '\nERROR: Fishnet ({}) does not exist'.format(
                hru.polygon_path))
        sys.exit()

    if set_lake_flag:
        if not arcpy.Exists(lake_orig_path):
            logging.error(
                '\nERROR: Lake layer ({}) does not exist'.format(
                    lake_orig_path))
            sys.exit()
        # lake_path must be a polygon shapefile
        if arcpy.Describe(lake_orig_path).datasetType != 'FeatureClass':
            logging.error(
                '\nERROR: lake_path must be a polygon shapefile')
            sys.exit()
        # Check lake_zone_field
        if lake_zone_field.upper() in ['', 'FID', 'NONE']:
            lake_zone_field = arcpy.Describe(lake_orig_path).OIDFieldName
            logging.warning(
                '\n  NOTE: Using {} to set {}\n'.format(
                    lake_zone_field, hru.lake_id_field))
        elif not arcpy.ListFields(lake_orig_path, lake_zone_field):
            logging.error(
                '\nERROR: lake_zone_field field {} does not exist\n'.format(
                    lake_zone_field))
            sys.exit()
        # Need to check that lake_zone_field is an int type
        elif not [f.type for f in arcpy.Describe(lake_orig_path).fields
                  if (f.name == lake_zone_field and
                      f.type in ['SmallInteger', 'Integer'])]:
            logging.error(
                '\nERROR: lake_zone_field field {} must be an '
                'integer type\n'.format(lake_zone_field))
            sys.exit()

    # Check model points
    if not os.path.isfile(model_inputs_path):
        logging.error(
            '\nERROR: Model points shapefile does not exist'
            '\nERROR:   {}'.format(model_inputs_path))
        sys.exit()
    # model_points_path must be a point shapefile
    elif arcpy.Describe(model_inputs_path).datasetType != 'FeatureClass':
        logging.error(
            '\nERROR: model_points_path must be a point shapefile')
        sys.exit()

    # For now, study area has to be a polygon
    if arcpy.Describe(study_area_orig_path).datasetType != 'FeatureClass':
        logging.error(
            '\nERROR: For now, study area must be a polygon shapefile')
        sys.exit()


    # Build output folder if necessary
    hru_temp_ws = os.path.join(hru.param_ws, 'hru_temp')
    if not os.path.isdir(hru_temp_ws):
        os.mkdir(hru_temp_ws)
    # Output paths
    study_area_path = os.path.join(hru_temp_ws, 'study_area.shp')
    lake_path = os.path.join(hru_temp_ws, 'lakes.shp')
    lake_clip_path = os.path.join(hru_temp_ws, 'lake_clip.shp')
    model_points_path = os.path.join(hru_temp_ws, 'model_points.shp')


    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS -1'
    # env.pyramid = 'PYRAMIDS 0'
    env.workspace = hru.param_ws
    env.scratchWorkspace = hru.scratch_ws

    # Create HRU points at polygon centroids
    if not arcpy.Exists(hru.point_path):
        logging.info('\n  Building HRU point shapefile')
        # FeatureToPoint will copy all fields in hru.polygon_path
        # arcpy.FeatureToPoint_management(
        #    hru.polygon_path, hru.point_path)
        # Build point_path directly
        arcpy.CreateFeatureclass_management(
            os.path.dirname(hru.point_path),
            os.path.basename(hru.point_path), 'POINT')
        arcpy.DefineProjection_management(hru.point_path, hru.sr)
        arcpy.AddField_management(
            hru.point_path, hru.fid_field, 'LONG')
        hru_centroid_list = [
            row for row in arcpy.da.SearchCursor(
                hru.polygon_path, ['OID@', 'SHAPE@XY'])]
        with arcpy.da.InsertCursor(
                hru.point_path,
                ['OID@', 'SHAPE@XY', hru.fid_field]) as insert_c:
            for hru_centroid in hru_centroid_list:
                insert_c.insertRow(
                    [hru_centroid[0], hru_centroid[1], hru_centroid[0]])
        del hru_centroid_list
    # Check existing HRU points
    else:
        # Remove any extra fields
        field_remove_list = [
            f.name for f in arcpy.ListFields(hru.point_path)
            if f.name not in ['FID', 'Shape', hru.fid_field]]
        # Skip if there is only one field in the shapefile
        if field_remove_list and len(field_remove_list) > 1:
            logging.info('\n  Removing HRU point fields')
            for field in field_remove_list:
                logging.debug('    {}'.format(field))
                try:
                    arcpy.DeleteField_management(hru.point_path, field)
                except Exception as e:
                    logging.debug('    Unhandled exception: {}'.format(e))
                    continue
        # Save original FID
        if len(arcpy.ListFields(hru.point_path, hru.fid_field)) == 0:
            arcpy.AddField_management(
                hru.point_path, hru.fid_field, 'LONG')
        arcpy.CalculateField_management(
            hru.point_path, hru.fid_field, '!FID!', 'PYTHON')
        if len(arcpy.ListFields(hru.point_path, 'Id')) > 0:
            arcpy.DeleteField_management(hru.point_path, 'Id')
        del field_remove_list

    # Add all output fields
    logging.info('\nAdding fields if necessary')
    logging.info(
        '  Note: You may see duplicate field names when writing to a network '
        'drive')

    # HRU/DEM Fields
    support.add_field_func(hru.polygon_path, hru.fid_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.id_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.type_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.dem_mean_field, 'DOUBLE')
    #support.add_field_func(hru.polygon_path, hru.dem_median_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_min_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_max_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_adj_field, 'DOUBLE')
    if calc_flow_acc_dem_flag:
        support.add_field_func(hru.polygon_path, hru.dem_flowacc_field, 'DOUBLE')
        support.add_field_func(hru.polygon_path, hru.dem_sum_field, 'DOUBLE')
        support.add_field_func(hru.polygon_path, hru.dem_count_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_sink_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.crt_elev_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.crt_fill_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_aspect_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.dem_slope_deg_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_slope_rad_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_slope_pct_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.area_field, 'DOUBLE')
    if calc_topo_index_flag:
        support.add_field_func(hru.polygon_path, hru.topo_index_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.row_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.col_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.x_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.y_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.lat_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.lon_field, 'DOUBLE')

    # Lake fields
    support.add_field_func(hru.polygon_path, hru.lake_id_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.lake_area_field, 'DOUBLE')

    # Stream fields
    support.add_field_func(hru.polygon_path, hru.iseg_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.irunbound_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.flow_dir_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.krch_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.irch_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.jrch_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.reach_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.rchlen_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.maxreach_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.outseg_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.iupseg_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.subbasin_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.segbasin_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.outflow_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.strm_top_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.strm_slope_field, 'DOUBLE')

    # Sink field
    support.add_field_func(hru.polygon_path, hru.hru_sink_field, 'LONG')

    # Precipitation zone fields
    support.add_field_func(hru.polygon_path, hru.ppt_zone_id_field, 'SHORT')
    support.add_field_func(hru.polygon_path, hru.hru_psta_field, 'SHORT')

    # Temperature zone fields
    # if temp_calc_method == 'ZONES':
    # support.add_field_func(hru.polygon_path, hru.temp_zone_id_field, 'SHORT')
    # support.add_field_func(hru.polygon_path, hru.hru_tsta_field, 'SHORT')

    # DEM based
    support.add_field_func(hru.polygon_path, hru.jh_tmax_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.jh_tmin_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.jh_coef_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.snarea_thresh_field, 'DOUBLE')

    # Aspect based
    support.add_field_func(hru.polygon_path, hru.tmax_adj_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.tmin_adj_field, 'DOUBLE')

    # Vegetation fields
    support.add_field_func(hru.polygon_path, hru.cov_type_field, 'SHORT')
    support.add_field_func(hru.polygon_path, hru.covden_sum_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.covden_win_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.rad_trncf_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.snow_intcp_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.srain_intcp_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.wrain_intcp_field, 'DOUBLE')

    # Soil fields
    support.add_field_func(hru.polygon_path, hru.awc_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.clay_pct_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.sand_pct_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.ksat_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.soil_type_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.soil_root_max_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.moist_init_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.moist_max_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.rechr_init_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.rechr_max_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.ssr2gw_rate_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.ssr2gw_k_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.slowcoef_lin_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.slowcoef_sq_field, 'DOUBLE')

    # Impervious fields
    support.add_field_func(hru.polygon_path, hru.imperv_pct_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.carea_max_field, 'DOUBLE')

    # PRISM mean monthly fields
    month_list = ['{:02d}'.format(m) for m in range(1, 13)]
    # month_list.extend(['14'])
    for prism_data_name in ['PPT', 'TMAX', 'TMIN']:
        for month in month_list:
            support.add_field_func(
                hru.polygon_path,
                '{}_{}'.format(prism_data_name, month), 'DOUBLE')
    # PRISM mean monthly precipitation ratio fields
    for month in month_list:
        if month == '14':
            continue
        support.add_field_func(
            hru.polygon_path, 'PPT_RT_{}'.format(month), 'DOUBLE')
    # Temperature adjust fields are added in temp_adjust_parameters.py if needed
    # for month in month_list:
    #     if month == '14':
    #         continue
    #     support.add_field_func(
    #         hru.polygon_path, 'TMX_ADJ_{}'.format(month), 'DOUBLE')
    # for month in month_list:
    #     if month == '14':
    #         continue
    #     support.add_field_func(
    #         hru.polygon_path, 'TMN_ADJ_{}'.format(month), 'DOUBLE')

    # Id field is added by default to new fishnets
    if arcpy.ListFields(hru.polygon_path, 'Id'):
        arcpy.DeleteField_management(hru.polygon_path, 'Id')

    logging.info('\nCalculating parameters')
    # Keep original FID for subsetting in zonal stats
    logging.info('  Saving original HRU FID to {}'.format(
        hru.fid_field))
    arcpy.CalculateField_management(
        hru.polygon_path, hru.fid_field, '!FID!', 'PYTHON')

    # Cell X/Y
    logging.info('  Calculating cell X/Y')
    cell_xy_func(hru.polygon_path, hru.x_field, hru.y_field)

    # Create unique ID, start at top left corner, work down rows
    # Row/Col numbered from top left corner (1's based numbering)
    logging.info('  Calculating cell ID/row/col')
    cell_id_col_row_func(
        hru.polygon_path, hru.id_field, hru.col_field, hru.row_field,
        hru.extent, hru.cs)
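    # A hedged sketch of the numbering scheme above (the actual
    # cell_id_col_row_func may differ): for a cell centroid (x, y),
    #   col = int((x - hru.extent.XMin) // hru.cs) + 1
    #   row = int((hru.extent.YMax - y) // hru.cs) + 1
    #   cell_id = (row - 1) * n_cols + col    # n_cols = fishnet column count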

    # Cell Lat/Lon
    logging.info('  Calculating cell lat/lon')
    cell_lat_lon_func(
        hru.polygon_path, hru.lat_field, hru.lon_field, hru.sr.GCS)

    # Cell Area
    logging.info('  Calculating cell area (acres)')
    arcpy.CalculateField_management(
        hru.polygon_path, hru.area_field, '!SHAPE.AREA@acres!', 'PYTHON')

    # Reset HRU_TYPE
    logging.info('\nResetting {} to 0'.format(hru.type_field))
    arcpy.CalculateField_management(
        hru.polygon_path, hru.type_field, 0, 'PYTHON')
    # Reset LAKE_ID and LAKE_AREA
    if set_lake_flag:
        logging.info('Resetting {} to 0'.format(hru.lake_id_field))
        arcpy.CalculateField_management(
            hru.polygon_path, hru.lake_id_field, 0, 'PYTHON')
        logging.info('Resetting {} to 0'.format(hru.lake_area_field))
        arcpy.CalculateField_management(
            hru.polygon_path, hru.lake_area_field, 0, 'PYTHON')

    # Calculate HRU Type
    logging.info('\nCalculating cell HRU Type')
    study_area_desc = arcpy.Describe(study_area_orig_path)
    study_area_sr = study_area_desc.spatialReference
    logging.debug('  Study area: {}'.format(study_area_orig_path))
    logging.debug('  Study area spat. ref.:  {}'.format(
        study_area_sr.name))
    logging.debug('  Study area GCS:         {}'.format(
        study_area_sr.GCS.name))
    # If study area spat_ref doesn't match hru_param spat_ref
    # Project study area to hru_param spat ref
    # Otherwise, read study_area directly
    if hru.sr.name != study_area_sr.name:
        logging.info('  Projecting study area...')
        # Set preferred transforms
        transform_str = support.transform_func(hru.sr, study_area_sr)
        logging.debug('    Transform: {}'.format(transform_str))
        # Project study area shapefile
        arcpy.Project_management(
            study_area_orig_path, study_area_path, hru.sr,
            transform_str, study_area_sr)
        del transform_str
    else:
        arcpy.Copy_management(study_area_orig_path, study_area_path)
    support.zone_by_centroid_func(
        study_area_path, hru.type_field, 1,
        hru.polygon_path, hru.point_path, hru)

    # Calculate HRU Type for lakes (HRU_TYPE = 2)
    if set_lake_flag:
        logging.info('\nCalculating cell HRU Type & ID for lakes')
        lake_layer = 'lake_layer'
        lake_desc = arcpy.Describe(lake_orig_path)
        lake_sr = lake_desc.spatialReference
        logging.debug('  Lakes: {}'.format(lake_orig_path))
        logging.debug('  Lakes spat. ref.:  {}'.format(lake_sr.name))
        logging.debug('  Lakes GCS:         {}'.format(lake_sr.GCS.name))

        # If lakes spat_ref doesn't match hru_param spat_ref
        # Project lakes to hru_param spat ref
        # Otherwise, read lakes directly
        if hru.sr.name != lake_sr.name:
            logging.info('  Projecting lakes...')
            # Set preferred transforms
            transform_str = support.transform_func(hru.sr, lake_sr)
            logging.debug('    Transform: {}'.format(transform_str))
            # Project lakes shapefile
            arcpy.Project_management(
                lake_orig_path, lake_path, hru.sr, transform_str, lake_sr)
            arcpy.MakeFeatureLayer_management(lake_path, lake_layer)
            del lake_path, transform_str
        else:
            arcpy.MakeFeatureLayer_management(
                lake_orig_path, lake_layer)

        # Clip lakes by study area after projecting lakes
        logging.info('  Clipping lakes...')
        arcpy.Clip_analysis(lake_layer, study_area_path, lake_clip_path)
        # Remove all unnecessary fields
        for field in arcpy.ListFields(lake_clip_path):
            if field.name not in [lake_zone_field, 'Shape']:
                try:
                    arcpy.DeleteField_management(lake_clip_path, field.name)
                except Exception as e:
                    logging.debug('    Unhandled exception: {}'.format(e))
                    continue

        # Set lake HRU_TYPE
        logging.info('  Setting lake {}'.format(hru.type_field))
        support.zone_by_area_func(
            lake_clip_path, hru.type_field, 2,
            hru.polygon_path, hru, hru.area_field,
            hru.lake_area_field, lake_area_pct)
        # Set lake ID
        logging.info('  Setting {}'.format(hru.lake_id_field))
        support.zone_by_area_func(
            lake_clip_path, hru.lake_id_field, lake_zone_field,
            hru.polygon_path, hru, hru.area_field,
            hru.lake_area_field, lake_area_pct)
        # Cleanup
        del lake_layer, lake_desc, lake_sr


    # Read in model points shapefile
    logging.info('\nChecking model points shapefile')
    model_points_desc = arcpy.Describe(model_inputs_path)
    model_points_sr = model_points_desc.spatialReference
    logging.debug('  Points: {}'.format(model_inputs_path))
    logging.debug('  Points spat. ref.:  {}'.format(model_points_sr.name))
    logging.debug('  Points GCS:         {}'.format(model_points_sr.GCS.name))

    # If model points spat_ref doesn't match hru_param spat_ref
    # Project model points to hru_param spat ref
    # Otherwise, read model points directly
    if hru.sr.name != model_points_sr.name:
        logging.info(
            '  Model points projection does not match fishnet.\n'
            '  Projecting model points.\n')
        # Set preferred transforms
        transform_str = support.transform_func(hru.sr, model_points_sr)
        logging.debug('    Transform: {}'.format(transform_str))
        arcpy.Project_management(
            model_inputs_path, model_points_path,
            hru.sr, transform_str, model_points_sr)
    else:
        arcpy.Copy_management(model_inputs_path, model_points_path)
    model_points_lyr = 'model_points_lyr'
    arcpy.MakeFeatureLayer_management(model_points_path, model_points_lyr)

    # Check model point types
    logging.info('  Checking model point types')
    model_point_types = [str(r[0]).upper() for r in arcpy.da.SearchCursor(
        model_points_path, [model_points_type_field])]
    if not set(model_point_types).issubset(set(['OUTLET', 'SUBBASIN', 'SWALE'])):
        logging.error('\nERROR: Unsupported model point type(s) found, exiting')
        logging.error('\n  Model point types: {}\n'.format(model_point_types))
        sys.exit()
    elif not set(model_point_types).intersection(set(['OUTLET', 'SWALE'])):
        logging.error(
            '\nERROR: At least one model point must be an OUTLET or SWALE, '
            'exiting\n')
        sys.exit()
    else:
        logging.debug('  {}'.format(', '.join(model_point_types)))

    if 'SWALE' in model_point_types:
        arcpy.SelectLayerByAttribute_management(
            model_points_lyr, 'NEW_SELECTION',
            '"{}" = \'SWALE\''.format(model_points_type_field))

        logging.info('  Setting swale (sink) cells to {}=3'.format(
            hru.type_field))
        hru_polygon_lyr = 'hru_polygon_lyr'
        arcpy.MakeFeatureLayer_management(hru.polygon_path, hru_polygon_lyr)
        arcpy.SelectLayerByAttribute_management(hru_polygon_lyr, 'CLEAR_SELECTION')
        arcpy.SelectLayerByLocation_management(
            hru_polygon_lyr, 'INTERSECT', model_points_lyr)
        arcpy.CalculateField_management(
            hru_polygon_lyr, hru.type_field, 3, 'PYTHON')
        arcpy.SelectLayerByAttribute_management(hru_polygon_lyr, 'CLEAR_SELECTION')
        arcpy.SelectLayerByAttribute_management(model_points_lyr, 'CLEAR_SELECTION')
        arcpy.Delete_management(hru_polygon_lyr)
        del hru_polygon_lyr
    arcpy.Delete_management(model_points_lyr)
    del model_points_lyr


    # Setting HRU_PSTA to default value of 1
    if all([row[0] == 0 for row in arcpy.da.SearchCursor(
            hru.polygon_path, [hru.hru_psta_field])]):
        logging.info('Setting {} to 1'.format(
            hru.hru_psta_field))
        arcpy.CalculateField_management(
            hru.polygon_path, hru.hru_psta_field, '1', 'PYTHON')

    # Cleanup
    del study_area_desc, study_area_sr
Example #7
def veg_parameters(config_path):
    """Calculate GSFLOW Vegetation Parameters

    Parameters
    ----------
    config_path : str
        Project configuration file (.ini) path.

    Returns
    -------
    None

    """
    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'veg_parameters_log.txt'
    log_console = logging.FileHandler(
        filename=os.path.join(hru.log_ws, log_file_name), mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW Vegetation Parameters')

    # Landfire Vegetation Type
    veg_type_orig_path = inputs_cfg.get('INPUTS', 'veg_type_orig_path')
    veg_type_cs = inputs_cfg.getint('INPUTS', 'veg_type_cellsize')
    try:
        veg_type_field = inputs_cfg.get('INPUTS', 'veg_type_field')
    except ConfigParser.NoOptionError:
        veg_type_field = None
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'veg_type_field', veg_type_field))

    # Landfire Vegetation Cover
    veg_cover_orig_path = inputs_cfg.get('INPUTS', 'veg_cover_orig_path')
    veg_cover_cs = inputs_cfg.getint('INPUTS', 'veg_cover_cellsize')

    # Remap
    remap_ws = inputs_cfg.get('INPUTS', 'remap_folder')
    cov_type_remap_name = inputs_cfg.get('INPUTS', 'cov_type_remap')
    covden_sum_remap_name = inputs_cfg.get('INPUTS', 'covden_sum_remap')
    covden_win_remap_name = inputs_cfg.get('INPUTS', 'covden_win_remap')
    snow_intcp_remap_name = inputs_cfg.get('INPUTS', 'snow_intcp_remap')
    srain_intcp_remap_name = inputs_cfg.get('INPUTS', 'srain_intcp_remap')
    wrain_intcp_remap_name = inputs_cfg.get('INPUTS', 'wrain_intcp_remap')
    root_depth_remap_name = inputs_cfg.get('INPUTS', 'root_depth_remap')

    # Get remap conversion factors
    try:
        snow_intcp_remap_factor = inputs_cfg.getfloat(
            'INPUTS', 'snow_intcp_remap_factor')
    except ConfigParser.NoOptionError:
        snow_intcp_remap_factor = 0.01
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'snow_intcp_remap_factor', snow_intcp_remap_factor))
    try:
        wrain_intcp_remap_factor = inputs_cfg.getfloat(
            'INPUTS', 'wrain_intcp_remap_factor')
    except ConfigParser.NoOptionError:
        wrain_intcp_remap_factor = 0.01
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'wrain_intcp_remap_factor', wrain_intcp_remap_factor))
    try:
        srain_intcp_remap_factor = inputs_cfg.getfloat(
            'INPUTS', 'srain_intcp_remap_factor')
    except ConfigParser.NoOptionError:
        srain_intcp_remap_factor = 0.01
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'srain_intcp_remap_factor', srain_intcp_remap_factor))

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()
    # Check that the original vegetation rasters exist
    if not arcpy.Exists(veg_cover_orig_path):
        logging.error('\nERROR: Vegetation cover raster does not exist')
        sys.exit()
    if not arcpy.Exists(veg_type_orig_path):
        logging.error('\nERROR: Vegetation type raster does not exist')
        sys.exit()
    # Vegetation type can be set from another field in the raster
    # This is mostly for US_120EVT
    if not veg_type_field:
        logging.info('\n  Using VALUE field to set vegetation type')
        veg_type_field = 'VALUE'
    elif len(arcpy.ListFields(veg_type_orig_path, veg_type_field)) == 0:
        logging.info('  veg_type_field {} does not exist\n  Using VALUE '
                     'field to set vegetation type'.format(veg_type_field))
        veg_type_field = 'VALUE'
    elif arcpy.ListFields(veg_type_orig_path, veg_type_field)[0].type not in [
            'Integer', 'SmallInteger'
    ]:
        logging.info(
            '  veg_type_field {} is not an integer type\n  Using VALUE '
            'field to set vegetation type'.format(veg_type_field))
        veg_type_field = 'VALUE'

    # Check that remap folder is valid
    if not os.path.isdir(remap_ws):
        logging.error('\nERROR: Remap folder does not exist')
        sys.exit()
    # Check that remap files exist
    # Check remap files comment style
    cov_type_remap_path = os.path.join(remap_ws, cov_type_remap_name)
    covden_sum_remap_path = os.path.join(remap_ws, covden_sum_remap_name)
    covden_win_remap_path = os.path.join(remap_ws, covden_win_remap_name)
    snow_intcp_remap_path = os.path.join(remap_ws, snow_intcp_remap_name)
    srain_intcp_remap_path = os.path.join(remap_ws, srain_intcp_remap_name)
    wrain_intcp_remap_path = os.path.join(remap_ws, wrain_intcp_remap_name)
    root_depth_remap_path = os.path.join(remap_ws, root_depth_remap_name)
    remap_path_list = [
        cov_type_remap_path, covden_sum_remap_path, covden_win_remap_path,
        snow_intcp_remap_path, srain_intcp_remap_path, wrain_intcp_remap_path,
        root_depth_remap_path
    ]
    for remap_path in remap_path_list:
        support.remap_check(remap_path)

    # Check other inputs
    if veg_type_cs <= 0:
        logging.error('\nERROR: Veg. type cellsize must be greater than 0')
        sys.exit()
    if veg_cover_cs <= 0:
        logging.error('\nERROR: Veg. cover cellsize must be greater than 0')
        sys.exit()

    # Build output folders if necessary
    veg_temp_ws = os.path.join(hru.param_ws, 'veg_rasters')
    if not os.path.isdir(veg_temp_ws):
        os.mkdir(veg_temp_ws)
    # Output paths
    veg_cover_path = os.path.join(veg_temp_ws, 'veg_cover.img')
    veg_type_path = os.path.join(veg_temp_ws, 'veg_type.img')
    cov_type_path = os.path.join(veg_temp_ws, 'cov_type.img')
    covden_sum_path = os.path.join(veg_temp_ws, 'covden_sum.img')
    covden_win_path = os.path.join(veg_temp_ws, 'covden_win.img')
    snow_intcp_path = os.path.join(veg_temp_ws, 'snow_intcp.img')
    wrain_intcp_path = os.path.join(veg_temp_ws, 'wrain_intcp.img')
    srain_intcp_path = os.path.join(veg_temp_ws, 'srain_intcp.img')
    root_depth_path = os.path.join(veg_temp_ws, 'root_depth.img')
    rad_trncf_path = os.path.join(veg_temp_ws, 'rad_trncf.img')

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS -1'
    # env.pyramid = 'PYRAMIDS 0'
    env.workspace = veg_temp_ws
    env.scratchWorkspace = hru.scratch_ws

    # Check fields
    logging.info('\nAdding vegetation fields if necessary')
    support.add_field_func(hru.polygon_path, hru.cov_type_field, 'SHORT')
    support.add_field_func(hru.polygon_path, hru.covden_sum_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.covden_win_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.rad_trncf_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.snow_intcp_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.srain_intcp_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.wrain_intcp_field, 'DOUBLE')
    # support.add_field_func(hru.polygon_path, hru.root_depth_field, 'DOUBLE')

    # Check that remaps have all necessary values
    logging.info('\nChecking remap tables against all raster cells\n'
                 '  (i.e. even those outside the study area)')
    check_remap_keys(cov_type_remap_path, veg_type_orig_path)
    check_remap_keys(covden_sum_remap_path, veg_cover_orig_path)
    check_remap_keys(root_depth_remap_path, veg_type_orig_path)

    # Assume all vegetation rasters will need to be rebuilt
    # Check veg cover and veg type rasters
    # This will check for matching spat. ref., snap point, and cellsize

    # Project/clip veg cover to match HRU
    logging.info('\nProjecting/clipping vegetation cover raster')
    veg_cover_orig_sr = arcpy.sa.Raster(veg_cover_orig_path).spatialReference
    # Remove existing clipped/projected veg cover raster
    if arcpy.Exists(veg_cover_path):
        arcpy.Delete_management(veg_cover_path)
    # Set preferred transforms
    transform_str = support.transform_func(hru.sr, veg_cover_orig_sr)
    logging.debug('  Transform: {}'.format(transform_str))
    logging.debug('  Projection method: NEAREST')

    # Project veg cover
    # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
    support.project_raster_func(veg_cover_orig_path, veg_cover_path, hru.sr,
                                'NEAREST', veg_cover_cs, transform_str,
                                '{} {}'.format(hru.ref_x, hru.ref_y),
                                veg_cover_orig_sr, hru)
    # env.extent = hru.extent
    # arcpy.ProjectRaster_management(
    #    veg_cover_orig_path, veg_cover_path, hru.sr,
    #    'NEAREST', veg_cover_cs, transform_str,
    #    '{} {}'.format(hru.ref_x, hru.ref_y),
    #    veg_cover_orig_sr)
    # arcpy.ClearEnvironment('extent')
    del transform_str, veg_cover_orig_sr

    # Project/clip veg type to match HRU
    logging.info('Projecting/clipping vegetation type raster')
    veg_type_orig_sr = arcpy.sa.Raster(veg_type_orig_path).spatialReference
    # Remove existing clipped/projected veg type raster
    if arcpy.Exists(veg_type_path):
        arcpy.Delete_management(veg_type_path)
    # Set preferred transforms
    transform_str = support.transform_func(hru.sr, veg_type_orig_sr)
    logging.debug('  Transform: {}'.format(transform_str))
    logging.debug('  Projection method: NEAREST')
    # Use a different field to calculate vegetation type
    if veg_type_field != 'VALUE':
        logging.info('  Calculating vegetation type from {} field'.format(
            veg_type_field))
        veg_type_obj = arcpy.sa.Lookup(veg_type_orig_path, veg_type_field)
    else:
        veg_type_obj = arcpy.sa.Raster(veg_type_orig_path)

    # Project veg type
    # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
    support.project_raster_func(veg_type_obj, veg_type_path, hru.sr, 'NEAREST',
                                veg_type_cs, transform_str,
                                '{} {}'.format(hru.ref_x, hru.ref_y),
                                veg_type_orig_sr, hru)
    # env.extent = hru.extent
    # arcpy.ProjectRaster_management(
    #    veg_type_obj, veg_type_path, hru.sr,
    #    'NEAREST', veg_type_cs, transform_str,
    #    '{} {}'.format(hru.ref_x, hru.ref_y),
    #    veg_type_orig_sr)
    # arcpy.ClearEnvironment('extent')
    del transform_str, veg_type_orig_sr, veg_type_obj

    # Reclassifying vegetation cover type
    logging.info('\nCalculating COV_TYPE')
    logging.debug('  Reclassifying: {}'.format(cov_type_remap_path))
    cov_type_obj = arcpy.sa.ReclassByASCIIFile(veg_type_path,
                                               cov_type_remap_path)
    cov_type_obj.save(cov_type_path)
    del cov_type_obj

    # Summer cover density
    logging.info('Calculating COVDEN_SUM')
    logging.debug('  Reclassifying: {}'.format(covden_sum_remap_path))
    covden_sum_obj = arcpy.sa.ReclassByASCIIFile(veg_cover_path,
                                                 covden_sum_remap_path)
    covden_sum_obj *= 0.01
    covden_sum_obj.save(covden_sum_path)
    del covden_sum_obj

    # Winter cover density
    logging.info('Calculating COVDEN_WIN')
    logging.debug('  Reclassifying: {}'.format(covden_win_remap_path))
    covden_win_obj = arcpy.sa.ReclassByASCIIFile(cov_type_path,
                                                 covden_win_remap_path)
    covden_win_obj *= 0.01
    covden_win_obj *= arcpy.sa.Raster(covden_sum_path)
    covden_win_obj.save(covden_win_path)
    del covden_win_obj

    # Snow interception storage capacity
    logging.info('Calculating SNOW_INTCP')
    logging.debug('  Reclassifying: {}'.format(snow_intcp_remap_path))
    snow_intcp_obj = arcpy.sa.ReclassByASCIIFile(cov_type_path,
                                                 snow_intcp_remap_path)
    snow_intcp_obj *= snow_intcp_remap_factor
    snow_intcp_obj.save(snow_intcp_path)
    del snow_intcp_obj

    # Winter rain interception storage capacity
    logging.info('Calculating WRAIN_INTCP')
    logging.debug('  Reclassifying: {}'.format(wrain_intcp_remap_path))
    wrain_intcp_obj = arcpy.sa.ReclassByASCIIFile(cov_type_path,
                                                  wrain_intcp_remap_path)
    wrain_intcp_obj *= wrain_intcp_remap_factor
    wrain_intcp_obj.save(wrain_intcp_path)
    del wrain_intcp_obj

    # Summer rain interception storage capacity
    logging.info('Calculating SRAIN_INTCP')
    logging.debug('  Reclassifying: {}'.format(srain_intcp_remap_path))
    srain_intcp_obj = arcpy.sa.ReclassByASCIIFile(cov_type_path,
                                                  srain_intcp_remap_path)
    srain_intcp_obj *= srain_intcp_remap_factor
    srain_intcp_obj.save(srain_intcp_path)
    del srain_intcp_obj

    # Root depth
    logging.info('Calculating ROOT_DEPTH')
    logging.debug('  Reclassifying: {}'.format(root_depth_remap_path))
    root_depth_obj = arcpy.sa.ReclassByASCIIFile(veg_type_path,
                                                 root_depth_remap_path)
    root_depth_obj.save(root_depth_path)
    del root_depth_obj

    # Short-wave radiation transmission coefficient
    logging.info('Calculating {}'.format(hru.rad_trncf_field))
    rad_trncf_obj = 0.9917 * arcpy.sa.Exp(
        -2.7557 * arcpy.sa.Raster(covden_win_path))
    rad_trncf_obj.save(rad_trncf_path)
    del rad_trncf_obj
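
    # Sanity check of the transmission curve (illustrative value): for a
    # winter cover density of 0.5, 0.9917 * exp(-2.7557 * 0.5) ~= 0.25, i.e.
    # a half-covered cell transmits roughly a quarter of incoming radiation.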

    # List of rasters, fields, and stats for zonal statistics
    zs_veg_dict = dict()
    zs_veg_dict[hru.cov_type_field] = [cov_type_path, 'MAJORITY']
    zs_veg_dict[hru.covden_sum_field] = [covden_sum_path, 'MEAN']
    zs_veg_dict[hru.covden_win_field] = [covden_win_path, 'MEAN']
    zs_veg_dict[hru.snow_intcp_field] = [snow_intcp_path, 'MEAN']
    zs_veg_dict[hru.srain_intcp_field] = [srain_intcp_path, 'MEAN']
    zs_veg_dict[hru.wrain_intcp_field] = [wrain_intcp_path, 'MEAN']
    # zs_veg_dict[hru.root_depth_field] = [root_depth_path, 'MEAN']
    zs_veg_dict[hru.rad_trncf_field] = [rad_trncf_path, 'MEAN']

    # Calculate zonal statistics
    logging.info('\nCalculating vegetation zonal statistics')
    support.zonal_stats_func(zs_veg_dict, hru.polygon_path, hru.point_path,
                             hru)

    # Short-wave radiation transmission coefficient
    # logging.info('\nCalculating {}'.format(hru.rad_trncf_field))
    # arcpy.CalculateField_management(
    #    hru.polygon_path, hru.rad_trncf_field,
    #    '0.9917 * math.exp(-2.7557 * !{}!)'.format(hru.covden_win_field),
    #    'PYTHON')

    # Clear COV_TYPE values for lake cells (HRU_TYPE == 2)
    if True:
        logging.info('\nClearing lake nodata vegetation parameters')
        # logging.info(
        #     '\nClearing vegetation parameters for lake and inactive cells')
        hru_polygon_layer = "hru_polygon_layer"
        arcpy.MakeFeatureLayer_management(hru.polygon_path, hru_polygon_layer)
        arcpy.SelectLayerByAttribute_management(
            hru_polygon_layer, "NEW_SELECTION",
            '"{0}" = 2 OR ("{0}" = 0 AND "{1}" = 0)'.format(
                hru.type_field, hru.dem_adj_field))
        arcpy.CalculateField_management(hru_polygon_layer, hru.cov_type_field,
                                        0, 'PYTHON')
        arcpy.CalculateField_management(hru_polygon_layer,
                                        hru.covden_sum_field, 0, 'PYTHON')
        arcpy.CalculateField_management(hru_polygon_layer,
                                        hru.covden_win_field, 0, 'PYTHON')
        arcpy.CalculateField_management(hru_polygon_layer,
                                        hru.snow_intcp_field, 0, 'PYTHON')
        arcpy.CalculateField_management(hru_polygon_layer,
                                        hru.srain_intcp_field, 0, 'PYTHON')
        arcpy.CalculateField_management(hru_polygon_layer,
                                        hru.wrain_intcp_field, 0, 'PYTHON')
        arcpy.CalculateField_management(hru_polygon_layer, hru.rad_trncf_field,
                                        0, 'PYTHON')
        arcpy.Delete_management(hru_polygon_layer)
        del hru_polygon_layer
Example #8
def soil_parameters(config_path, overwrite_flag=False, debug_flag=False):
    """Calculate GSFLOW Soil Parameters

    Args:
        config_path (str): Project config file path
        overwrite_flag (bool): if True, overwrite existing files
        debug_flag (bool): if True, enable debug level logging

    Returns:
        None
    """

    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error(
            '\nERROR: Config file could not be read, '
            'is not an input file, or does not exist\n'
            '  config_file = {}\n'
            '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'soil_parameters_log.txt'
    log_console = logging.FileHandler(
        filename=os.path.join(hru.log_ws, log_file_name), mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW Soil Parameters')

    # Input parameters
    try:
        soil_pct_flag = inputs_cfg.getboolean('INPUTS', 'soil_pct_flag')
    except ConfigParser.NoOptionError:
        soil_pct_flag = True
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'soil_pct_flag', soil_pct_flag))
    try:
        moist_init_ratio = inputs_cfg.getfloat('INPUTS', 'moist_init_ratio')
    except ConfigParser.NoOptionError:
        moist_init_ratio = 0.1
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'moist_init_ratio', moist_init_ratio))
    try:
        rechr_init_ratio = inputs_cfg.getfloat('INPUTS', 'rechr_init_ratio')
    except ConfigParser.NoOptionError:
        rechr_init_ratio = 0.1
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'rechr_init_ratio', rechr_init_ratio))

    # Read and apply ssr2gw multiplier raster
    # Otherwise default value will be used
    try:
        ssr2gw_mult_flag = inputs_cfg.getboolean('INPUTS', 'ssr2gw_mult_flag')
    except ConfigParser.NoOptionError:
        ssr2gw_mult_flag = False
    try:
        ssr2gw_k_default = inputs_cfg.getfloat('INPUTS', 'ssr2gw_k_default')
    except ConfigParser.NoOptionError:
        ssr2gw_k_default = 0.001
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'ssr2gw_k_default', ssr2gw_k_default))

    # Read and apply soil depth raster
    # Otherwise soil depth will only be derived from rooting depth
    try:
        soil_depth_flag = inputs_cfg.getboolean('INPUTS', 'soil_depth_flag')
    except ConfigParser.NoOptionError:
        soil_depth_flag = False
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'soil_depth_flag', soil_depth_flag))

    # Input folders
    soil_temp_ws = os.path.join(hru.param_ws, 'soil_rasters')
    if not os.path.isdir(soil_temp_ws):
        os.mkdir(soil_temp_ws)

    # Input paths
    awc_path = os.path.join(soil_temp_ws, 'awc.img')
    clay_pct_path = os.path.join(soil_temp_ws, 'clay_pct.img')
    sand_pct_path = os.path.join(soil_temp_ws, 'sand_pct.img')
    ksat_path = os.path.join(soil_temp_ws, 'ksat.img')
    soil_depth_path = os.path.join(soil_temp_ws, 'soil_depth.img')
    soil_root_max_path = os.path.join(soil_temp_ws, 'soil_root_max.img')
    ssr2gw_mult_path = os.path.join(soil_temp_ws, 'ssr2gw_mult.img')

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error(
            '\nERROR: Fishnet ({}) does not exist'.format(
                hru.polygon_path))
        sys.exit()
    # All of the soil rasters must exist
    # Check that the projected/clipped/filled raster exists
    if not arcpy.Exists(awc_path):
        logging.error('\nERROR: AWC raster does not exist')
        sys.exit()
    if not arcpy.Exists(clay_pct_path):
        logging.error('\nERROR: Clay raster does not exist')
        sys.exit()
    if not arcpy.Exists(sand_pct_path):
        logging.error('\nERROR: Sand raster does not exist')
        sys.exit()
    if not arcpy.Exists(ksat_path):
        logging.error('\nERROR: Ksat raster does not exist')
        sys.exit()
    if soil_depth_flag and not arcpy.Exists(soil_depth_path):
        logging.error('\nERROR: Soil depth raster does not exist')
        sys.exit()
    if ssr2gw_mult_flag and not arcpy.Exists(ssr2gw_mult_path):
        logging.error('\nERROR: SSR2GW multiplier raster does not exist')
        sys.exit()
    # Check soil init ratios
    if moist_init_ratio < 0 or moist_init_ratio > 1:
        logging.error('\nERROR: Soil moist_init_ratio must be between 0 & 1')
        sys.exit()
    if rechr_init_ratio < 0 or rechr_init_ratio > 1:
        logging.error('\nERROR: Soil rechr_init_ratio must be between 0 & 1')
        sys.exit()

    # DEM Slope is needed for SSR2GW_RATE
    dem_temp_ws = os.path.join(hru.param_ws, 'dem_rasters')
    dem_slope_path = os.path.join(dem_temp_ws, 'dem_slope.img')
    if not os.path.isdir(dem_temp_ws):
        logging.error(
            '\nERROR: DEM temp folder does not exist\n' +
            '\nERROR: Try re-running dem_2_stream.py')
        sys.exit()
    if not os.path.isfile(dem_slope_path):
        logging.error(
            '\nERROR: Slope raster does not exist\n' +
            '\nERROR: Try re-running dem_2_stream.py')
        sys.exit()

    # Output paths
    # soil_type_path = os.path.join(soil_temp_ws, 'soil_type.img')
    moist_max_path = os.path.join(soil_temp_ws, 'soil_moist_max.img')
    rechr_max_path = os.path.join(soil_temp_ws, 'soil_rechr_max.img')

    # Root depth is calculated by veg script
    veg_temp_ws = os.path.join(hru.param_ws, 'veg_rasters')
    root_depth_path = os.path.join(veg_temp_ws, 'root_depth.img')
    if not arcpy.Exists(root_depth_path):
        logging.error(
            '\nERROR: Root depth raster does not exist' +
            '\nERROR: Try re-running veg_parameters script\n')
        sys.exit()


    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS -1'
    # env.pyramid = 'PYRAMIDS 0'
    env.workspace = soil_temp_ws
    env.scratchWorkspace = hru.scratch_ws

    # Check field
    logging.info('\nAdding soil fields if necessary')
    support.add_field_func(hru.polygon_path, hru.awc_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.clay_pct_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.sand_pct_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.ksat_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.soil_type_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.soil_root_max_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.moist_init_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.moist_max_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.rechr_init_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.rechr_max_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.ssr2gw_rate_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.slowcoef_lin_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.slowcoef_sq_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.ssr2gw_k_field, 'DOUBLE')


    # Compute soil depth as max of root and soil depth
    if soil_depth_flag:
        logging.info('\nComputing max soil depth from root and soil depth')
        soil_depth_obj = arcpy.sa.Con(
           arcpy.sa.Raster(root_depth_path) > arcpy.sa.Raster(soil_depth_path),
           arcpy.sa.Raster(root_depth_path), arcpy.sa.Raster(soil_depth_path))
        soil_depth_obj.save(soil_root_max_path)
    else:
        soil_depth_obj = arcpy.sa.Raster(root_depth_path)
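    # Note: arcpy.sa.Con(a > b, a, b) above is an element-wise maximum, so
    # each cell keeps the deeper of root depth and mapped soil depth.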

    # Calculate maximum soil moisture
    # logging.info('\nCalculating soil {}'.format(hru.moist_max_field))
    # moist_max_obj = arcpy.sa.Raster(awc_path) * soil_depth_obj
    # moist_max_obj.save(moist_max_path)
    # del moist_max_obj

    # # Calculate soil recharge zone maximum
    # logging.info('Calculating soil {}'.format(hru.rechr_max_field))
    # # Minimum of rooting depth and 18 (inches?)
    # rechr_max_obj = arcpy.sa.Float(
    #     arcpy.sa.Con(soil_depth_obj < 18, soil_depth_obj, 18))
    # rechr_max_obj *= arcpy.sa.Raster(awc_path)
    # rechr_max_obj.save(rechr_max_path)
    # del rechr_max_obj

    # # Read in slope raster and convert to radians
    # dem_slope_obj = math.pi * arcpy.sa.Raster(dem_slope_path) / 180
    # porosity_obj = 0.475
    #
    # # Gravity drainage to groundwater reservoir linear coefficient
    # logging.info('\nCalculating SSR2GW_RATE')
    # logging.info('  Assuming slope is in degrees')
    # logging.info('  Porosity is currently fixed at: {}'.format(
    #     porosity_obj))
    # ssr2gw_rate_obj = (
    #     arcpy.sa.Raster(ksat_path) * porosity_obj * (1 - dem_slope_obj))
    # ssr2gw_rate_obj.save(ssr2gw_rate_path)
    # del ssr2gw_rate_obj
    #
    # # Gravity drainage to groundwater reservoir linear coefficient
    # logging.info('\nCalculating SLOWCOEF_L')
    # logging.info('  Assuming slope is in degrees')
    # logging.info('  Porosity is currently fixed at: {}'.format(
    # slowcoef_lin_obj = (
    #     arcpy.sa.Raster(ksat_path) * math.sin(dem_slope_obj) /
    #     (porosity_obj * hru_length_obj))
    # slowcoef_lin_obj.save(slowcoef_lin_path)
    # del slowcoef_lin_obj, hru_length_obj
    # del dem_slope_obj, porosity_obj
    # # This block ^^ could be used to perform operations on a raster level if wanted


    # List of rasters, fields, and stats for zonal statistics
    zs_soil_dict = dict()
    zs_soil_dict[hru.awc_field] = [awc_path, 'MEAN']
    zs_soil_dict[hru.clay_pct_field] = [clay_pct_path, 'MEAN']
    zs_soil_dict[hru.sand_pct_field] = [sand_pct_path, 'MEAN']
    zs_soil_dict[hru.ksat_field] = [ksat_path, 'MEAN']
    if soil_depth_flag:
        zs_soil_dict[hru.soil_root_max_field] = [soil_root_max_path, 'MEAN']
    else:
        zs_soil_dict[hru.soil_root_max_field] = [root_depth_path, 'MEAN']
    if ssr2gw_mult_flag:
        zs_soil_dict[hru.ssr2gw_k_field] = [ssr2gw_mult_path, 'MEAN']
    # zs_soil_dict[hru.moist_max_field] = [moist_max_path, 'MEAN']
    # zs_soil_dict[hru.rechr_max_field] = [rechr_max_path, 'MEAN']

    # Calculate zonal statistics
    logging.info('\nCalculating zonal statistics')
    support.zonal_stats_func(
        zs_soil_dict, hru.polygon_path, hru.point_path, hru)


    # Make a fishnet layer for calculating fields
    hru_polygon_layer = "hru_polygon_layer"
    arcpy.MakeFeatureLayer_management(
        hru.polygon_path, hru_polygon_layer)

    # Calculate maximum soil moisture
    logging.info('Calculating soil {}'.format(hru.moist_max_field))
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.moist_max_field,
        '!{}! * !{}!'.format(hru.soil_root_max_field, hru.awc_field),
        'PYTHON')

    # Calculate soil recharge zone maximum
    logging.info('Calculating soil {}'.format(hru.rechr_max_field))
    # Minimum of rooting depth and 18 (inches)
    rech_max_cb = (
        'def rech_max_func(soil_root_max, awc):\n' +
        '    if soil_root_max > 18: return 18*awc\n' +
        '    else: return soil_root_max*awc\n')
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.rechr_max_field,
        'rech_max_func(!{}!, !{}!)'.format(
            hru.soil_root_max_field, hru.awc_field),
        'PYTHON', rech_max_cb)

    # Calculate SOIL_TYPE
    logging.info('\nCalculating {}'.format(hru.soil_type_field))
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "NEW_SELECTION",
        '"{}" = 1'.format(hru.type_field))
    if soil_pct_flag:
        soil_type_pct = (50, 40)
    else:
        soil_type_pct = (0.50, 0.40)
    soil_type_cb = (
        'def soil_type_func(clay, sand):\n' +
        '    if sand > {}: return 1\n' +
        '    elif clay > {}: return 3\n' +
        '    else: return 2\n').format(*soil_type_pct)
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.soil_type_field,
        'soil_type_func(!{}!, !{}!)'.format(
            hru.clay_pct_field, hru.sand_pct_field),
        'PYTHON', soil_type_cb)
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "SWITCH_SELECTION")
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.soil_type_field, '0', 'PYTHON')
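    # The callback above maps texture percentages to PRMS soil types:
    #   sand > 50 (%) -> 1 (sand); clay > 40 (%) -> 3 (clay); else 2 (loam).
    # When soil_pct_flag is False the inputs are assumed to be fractions,
    # hence the (0.50, 0.40) thresholds.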

    # Calculate SOIL_MOIST_INIT from max values
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "NEW_SELECTION",
        '"{}" = 1 AND "{}" >= 0'.format(
            hru.type_field, hru.moist_max_field))
    logging.info('\nCalculating {0} as {2} * {1}'.format(
        hru.moist_init_field, hru.moist_max_field, moist_init_ratio))
    arcpy.CalculateField_management(
        hru.polygon_path, hru.moist_init_field,
        '!{}! * {}'.format(hru.moist_max_field, moist_init_ratio), 'PYTHON')
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "SWITCH_SELECTION")
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.moist_init_field, '0', 'PYTHON')
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.moist_max_field, '0', 'PYTHON')

    # Calculate SOIL_RECHR_INIT from max values
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "NEW_SELECTION",
        '"{}" = 1 AND "{}" >= 0'.format(
            hru.type_field, hru.rechr_max_field))
    logging.info('Calculating {0} as {2} * {1}'.format(
        hru.rechr_init_field, hru.rechr_max_field, rechr_init_ratio))
    arcpy.CalculateField_management(
        hru.polygon_path, hru.rechr_init_field,
        '!{}! * {}'.format(hru.rechr_max_field, rechr_init_ratio), 'PYTHON')
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "SWITCH_SELECTION")
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.rechr_init_field, '0', 'PYTHON')
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.rechr_max_field, '0', 'PYTHON')

    # Calculate SSR2G_KFAC from ssr2gw_mult raster
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "NEW_SELECTION",
        '"{}" = 1 AND "{}" >= 0'.format(
            hru.type_field, hru.ssr2gw_k_field))
    logging.info('Using {1} to calculate {0}'.format(
        hru.ssr2gw_k_field, ssr2gw_mult_path))
    arcpy.CalculateField_management(
        hru.polygon_path, hru.ssr2gw_k_field,
        '!{}!'.format(hru.ssr2gw_k_field), 'PYTHON')

    # Fill SSR2G_K multiplier value if field not set
    logging.info('ssr2gw_k_default = {}'.format(ssr2gw_k_default))
    if (all([row[0] == 0 for row in arcpy.da.SearchCursor(
            hru.polygon_path, [hru.ssr2gw_k_field])])):
        logging.info('Filling {} from default value in config file'.format(
            hru.ssr2gw_k_field))
        arcpy.CalculateField_management(
            hru.polygon_path, hru.ssr2gw_k_field,
            ssr2gw_k_default, 'PYTHON')
    else:
        logging.info(
            ('{} appears to already have been set and ' +
             'will not be overwritten').format(hru.ssr2gw_k_field))

    # Calculating ssr2gw_rate
    # Gravity drainage to groundwater reservoir linear coefficient
    # Default value is 0.1 (range 0-1)
    # Convert Ksat from um/s to in/day
    # ssr2gw_rate = ks / sat_threshold
    # sat_threshold = moist_max * (sand% / 100)
    logging.info('\nCalculating {}'.format(hru.ssr2gw_rate_field))
    logging.info('  assuming {} is in units of um/s'.format(hru.ksat_field))
    # porosity_flt = 0.475
    ssr2gw_exp = 1
    logging.debug('  using eqn: ssr2gw_rate = ks/sat threshold')
    # logging.debug('  default values: porosity_flt = 0.475')
    logging.debug('  default values: ssr2gw_exp = 1')
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "NEW_SELECTION",
        '"{}" = 1 AND "{}" > 0 AND "{}" > 0'.format(
            hru.type_field, hru.moist_max_field, hru.sand_pct_field))
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.ssr2gw_rate_field,
        '(!{}! * (3600 * 24 / (2.54 * 10000))) * !{}! / (!{}! * (!{}!/ 100))'.format(
            hru.ksat_field, hru.ssr2gw_k_field, hru.moist_max_field, hru.sand_pct_field),
        'PYTHON')
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "SWITCH_SELECTION")
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.ssr2gw_rate_field, '0', 'PYTHON')
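
    # Unit check for the expression above (KSAT assumed in um/s):
    # 1 um/s = 86400 um/day = 8.64 cm/day = 8.64 / 2.54 in/day, so the
    # factor 3600 * 24 / (2.54 * 10000) ~= 3.4016 converts um/s to in/day.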

    # Calculating slowcoef_lin
    # Default value is 0.015 (range 0-1)
    # Convert Ksat from um/s to m/day
    logging.info('Calculating {}'.format(hru.slowcoef_lin_field))
    logging.info('  {} must be in um/s'.format(hru.ksat_field))
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "NEW_SELECTION",
        '"{}" = 1'.format(hru.type_field))
    slowcoef_lin_cb = (
        'def slowcoef_lin(ksat, slope, cs):\n' +
        '    return 0.1 * ksat * 0.0864 * math.sin(slope) / cs\n')
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.slowcoef_lin_field,
        'slowcoef_lin(!{0}!, !{1}!, {2})'.format(
            hru.ksat_field, hru.dem_slope_rad_field, hru.cs),
        'PYTHON', slowcoef_lin_cb)
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "SWITCH_SELECTION")
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.slowcoef_lin_field, '0', 'PYTHON')

    # Calculating slowcoef_sq
    # Default value is 0.015 (range 0-1)
    # Convert Ksat from um/s to m/day
    logging.info('Calculating {}'.format(hru.slowcoef_sq_field))
    logging.info('  {} must be in um/s'.format(hru.ksat_field))
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "NEW_SELECTION",
        '"{}" = 1 AND "{}" > 0 AND "{}" > 0'.format(
            hru.type_field, hru.moist_max_field, hru.sand_pct_field))
    slowcoef_sq_cb = (
        'def slowcoef_sq(ksat, slope, moist_max, sand, cs):\n' +
        '    return 0.9 * (ksat * 0.0864 * math.sin(slope) / ' +
        '(moist_max * (sand / 100) * cs))\n')
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.slowcoef_sq_field,
        'slowcoef_sq(!{0}!, !{1}!, !{2}!, !{3}!, {4})'.format(
            hru.ksat_field, hru.dem_slope_rad_field,
            hru.moist_max_field, hru.sand_pct_field, hru.cs),
        'PYTHON', slowcoef_sq_cb)
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "SWITCH_SELECTION")
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.slowcoef_sq_field, '0', 'PYTHON')

    # Cleanup
    arcpy.Delete_management(hru_polygon_layer)
    del hru_polygon_layer
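
A short, self-contained sketch (hypothetical values; not part of the original script) of the slowcoef_lin/slowcoef_sq codeblocks above, runnable outside ArcGIS:

import math

def slowcoef_lin(ksat, slope, cs):
    # ksat in um/s, slope in radians, cs (fishnet cellsize) in meters
    return 0.1 * ksat * 0.0864 * math.sin(slope) / cs

def slowcoef_sq(ksat, slope, moist_max, sand, cs):
    # Same conversion, scaled by available water (moist_max * sand fraction)
    return 0.9 * (ksat * 0.0864 * math.sin(slope) /
                  (moist_max * (sand / 100.0) * cs))

# Hypothetical cell: ksat = 5 um/s, ~10% slope, 90 m cells
print(slowcoef_lin(5.0, math.atan(0.1), 90.0))            # ~4.8e-05
print(slowcoef_sq(5.0, math.atan(0.1), 4.0, 35.0, 90.0))  # ~3.1e-04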
Example #9
def dem_parameters(config_path, overwrite_flag=False, debug_flag=False):
    """Calculate GSFLOW DEM Parameters

    Args:
        config_path (str): Project config file path
        overwrite_flag (bool): if True, overwrite existing files
        debug_flag (bool): if True, enable debug level logging

    Returns:
        None
    """

    # Initialize hru parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'dem_parameters_log.txt'
    log_console = logging.FileHandler(filename=os.path.join(
        hru.log_ws, log_file_name),
                                      mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW DEM Parameters')

    #
    dem_orig_path = inputs_cfg.get('INPUTS', 'dem_orig_path')
    # Resampling method 'BILINEAR', 'CUBIC', 'NEAREST'
    dem_proj_method = inputs_cfg.get('INPUTS', 'dem_projection_method').upper()
    dem_cs = inputs_cfg.getint('INPUTS', 'dem_cellsize')

    # DEADBEEF - This could/should be moved to support_functions.py since it
    #   is used in this script and in both PRISM scripts.
    # DEM Units
    dem_units = inputs_cfg.get('INPUTS', 'dem_units').lower()
    dem_unit_types = {
        'meters': 'meter',
        'm': 'meter',
        'meter': 'meter',
        'feet': 'feet',
        'ft': 'feet',
        'foot': 'feet',
    }
    try:
        dem_units = dem_unit_types[dem_units]
    except KeyError:
        logging.error(
            '\nERROR: DEM unit "{}" is not supported\n'.format(dem_units))
        sys.exit()
    # Many expressions are hardcoded to units of feet
    # If dem_units are in meters, scale DEM_ADJ to get to feet
    if dem_units == 'meter':
        dem_unit_scalar = 0.3048
    else:
        dem_unit_scalar = 1.0
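    # e.g. dem_units = 'ft' normalizes to 'feet' -> dem_unit_scalar = 1.0,
    #   while 'm' / 'meters' / 'meter' -> 'meter' -> dem_unit_scalar = 0.3048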

    #
    try:
        reset_dem_adj_flag = inputs_cfg.getboolean('INPUTS',
                                                   'reset_dem_adj_flag')
    except ConfigParser.NoOptionError:
        reset_dem_adj_flag = False
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'reset_dem_adj_flag', reset_dem_adj_flag))

    try:
        calc_flow_acc_dem_flag = inputs_cfg.getboolean(
            'INPUTS', 'calc_flow_acc_dem_flag')
    except ConfigParser.NoOptionError:
        calc_flow_acc_dem_flag = True
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'calc_flow_acc_dem_flag', calc_flow_acc_dem_flag))

    try:
        dem_adj_copy_field = inputs_cfg.get('INPUTS', 'dem_adj_copy_field')
    except ConfigParser.NoOptionError:
        if calc_flow_acc_dem_flag:
            dem_adj_copy_field = 'DEM_FLOWAC'
        else:
            dem_adj_copy_field = 'DEM_MEAN'
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'dem_adj_copy_field', dem_adj_copy_field))

    # Use PRISM temperature to set Jensen-Haise coefficient
    # Typically these values will not be available when dem_parameters is first run
    # Setting it True means that the values will remain consistent even if
    #   dem_parameters is run again after the prism_script.
    try:
        calc_prism_jh_coef_flag = inputs_cfg.getboolean(
            'INPUTS', 'calc_prism_jh_coef_flag')
    except ConfigParser.NoOptionError:
        calc_prism_jh_coef_flag = True
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'calc_prism_jh_coef_flag', calc_prism_jh_coef_flag))

    # Calculate flow accumulation weighted elevation
    if calc_flow_acc_dem_flag:
        # Get factor for scaling dem_flowacc values to avoid 32 bit int limits
        try:
            flow_acc_dem_factor = float(
                inputs_cfg.get('INPUTS', 'flow_acc_dem_factor'))
        except (ConfigParser.NoOptionError, ValueError):
            # This is a worst case for keeping flow_acc_dem from exceeding 2E9
            # Assume all cells flow to 1 cell
            flow_acc_dem_factor = int(
                arcpy.GetCount_management(hru.point_path).getOutput(0))
            # Assume flow acc is in every DEM cell in HRU cell
            flow_acc_dem_factor *= (float(hru.cs) / dem_cs)**2
            # Need to account for the elevation in this worst cell
            # For now just make it 100
            # flow_acc_dem_factor *= max_elevation
            flow_acc_dem_factor *= 100
            # Calculate ratio of flow_acc_dem to a 32 bit int
            flow_acc_dem_factor /= (0.5 * 2**32)
            # If the ratio is less than 0.1, round up to 0.1 so factor -> 1.0
            flow_acc_dem_factor = max(0.1, flow_acc_dem_factor)
            # Round up to next multiple of 10 just to be safe
            flow_acc_dem_factor = 1.0 / 10**(
                int(math.log10(flow_acc_dem_factor)) + 1)
            logging.info(
                '  flow_acc_dem_factor was not set in the input file\n'
                '  Using automatic flow_acc_dem_factor: {}'.format(
                    flow_acc_dem_factor))
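            # Worked example (hypothetical): 100,000 fishnet cells,
            #   hru.cs = 90, dem_cs = 30:
            #   100000 * (90 / 30)**2 * 100 = 9e7
            #   9e7 / (0.5 * 2**32) = ~0.042 -> max(0.1, 0.042) = 0.1
            #   1.0 / 10**(int(math.log10(0.1)) + 1) = 1.0 (no scaling needed)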

    # Calc flow_acc/flow_dir
    # DEADBEEF - For now, set these to True only if needed
    # calc_flow_acc_flag = inputs_cfg.getboolean('INPUTS', 'calc_flow_acc_flag')
    # calc_flow_dir_flag = inputs_cfg.getboolean('INPUTS', 'calc_flow_dir_flag')
    if calc_flow_acc_dem_flag:
        calc_flow_acc_flag = True
        calc_flow_dir_flag = True
    else:
        calc_flow_acc_flag = False
        calc_flow_dir_flag = False

    # Remap
    remap_ws = inputs_cfg.get('INPUTS', 'remap_folder')
    temp_adj_remap_name = inputs_cfg.get('INPUTS', 'temp_adj_remap')

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist\n'.format(
            hru.polygon_path))
        sys.exit()
    # Check that either the original DEM raster exists
    if not arcpy.Exists(dem_orig_path):
        logging.error(
            '\nERROR: DEM ({}) raster does not exist\n'.format(dem_orig_path))
        sys.exit()
    # Check that remap folder is valid
    if not os.path.isdir(remap_ws):
        logging.error('\nERROR: Remap folder does not exist\n')
        sys.exit()
    # Check that remap files exist
    # Check remap files comment style
    temp_adj_remap_path = os.path.join(remap_ws, temp_adj_remap_name)
    remap_path_list = [temp_adj_remap_path]
    # remap_path_list = [aspect_remap_path, temp_adj_remap_path]
    for remap_path in remap_path_list:
        support.remap_check(remap_path)

    # DEADBEEF - Trying out setting SWALE points before filling
    model_inputs_path = inputs_cfg.get('INPUTS', 'model_points_path')
    try:
        model_points_type_field = inputs_cfg.get('INPUTS',
                                                 'model_points_type_field')
    except ConfigParser.NoOptionError:
        model_points_type_field = 'TYPE'
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'model_points_type_field', model_points_type_field))

    # Check model points
    if not os.path.isfile(model_inputs_path):
        logging.error('\nERROR: Model points shapefile does not exist'
                      '\nERROR:   {}'.format(model_inputs_path))
        sys.exit()
    # model_points_path must be a point shapefile
    elif arcpy.Describe(model_inputs_path).datasetType != 'FeatureClass':
        logging.error('\nERROR: model_points_path must be a point shapefile')
        sys.exit()

    # DEADBEEF
    # if not os.path.isfile(temp_adj_remap_path):
    #    logging.error(
    #        '\nERROR: ASCII remap file ({}) does not exist\n'.format(
    #            os.path.basename(temp_adj_remap_path)))
    #    sys.exit()
    #  Check remap files comment style
    # if '10.2' in arcpy.GetInstallInfo()['version']:
    #    if remap_comment_check(temp_adj_remap_path):
    #        logging.error(
    #            ('\nERROR: ASCII remap file ({}) has pre-ArcGIS 10.2 ' +
    #             'comments\n').format(os.path.basename(temp_adj_remap_path)))
    #        sys.exit()

    # Check other inputs
    if dem_cs <= 0:
        logging.error('\nERROR: DEM cellsize must be greater than 0')
        sys.exit()
    dem_proj_method_list = ['BILINEAR', 'CUBIC', 'NEAREST']
    if dem_proj_method not in dem_proj_method_list:
        logging.error('\nERROR: DEM projection method must be: {}'.format(
            ', '.join(dem_proj_method_list)))
        sys.exit()
    if reset_dem_adj_flag:
        logging.warning(
            '\nWARNING: All values in {} will be overwritten'.format(
                hru.dem_adj_field))
        raw_input('  Press ENTER to continue')

    # Build output folder if necessary
    dem_temp_ws = os.path.join(hru.param_ws, 'dem_rasters')
    if not os.path.isdir(dem_temp_ws):
        os.mkdir(dem_temp_ws)

    # Output paths
    dem_path = os.path.join(dem_temp_ws, 'dem.img')
    dem_fill_path = os.path.join(dem_temp_ws, 'dem_fill.img')
    flow_dir_path = os.path.join(dem_temp_ws, 'flow_dir.img')
    flow_acc_path = os.path.join(dem_temp_ws, 'flow_acc.img')
    flow_acc_dem_path = os.path.join(dem_temp_ws, 'flow_acc_x_dem.img')
    flow_acc_filter_path = os.path.join(dem_temp_ws, 'flow_acc_filter.img')
    dem_integer_path = os.path.join(dem_temp_ws, 'dem_integer.img')
    dem_slope_path = os.path.join(dem_temp_ws, 'dem_slope.img')
    dem_aspect_path = os.path.join(dem_temp_ws, 'dem_aspect.img')
    dem_aspect_reclass_path = os.path.join(dem_temp_ws, 'aspect_reclass.img')
    temp_adj_path = os.path.join(dem_temp_ws, 'temp_adj.img')
    swale_path = os.path.join(dem_temp_ws, 'swale.img')
    model_points_path = os.path.join(dem_temp_ws, 'model_points.shp')

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS -1'
    # env.pyramid = 'PYRAMIDS 0'
    # env.rasterStatistics = 'NONE'
    # env.extent = 'MINOF'
    env.workspace = dem_temp_ws
    env.scratchWorkspace = hru.scratch_ws

    # DEADBEEF - Trying out setting SWALE points before filling
    # Read in model points shapefile
    logging.info('\nChecking model points shapefile')
    model_points_desc = arcpy.Describe(model_inputs_path)
    model_points_sr = model_points_desc.spatialReference
    logging.debug('  Points: {}'.format(model_inputs_path))
    logging.debug('  Points spat. ref.:  {}'.format(model_points_sr.name))
    logging.debug('  Points GCS:         {}'.format(model_points_sr.GCS.name))

    # If model points spat_ref doesn't match hru_param spat_ref
    # Project model points to hru_param spat ref
    # Otherwise, read model points directly
    if hru.sr.name != model_points_sr.name:
        logging.info('  Model points projection does not match fishnet.\n'
                     '  Projecting model points.\n')
        # Set preferred transforms
        transform_str = support.transform_func(hru.sr, model_points_sr)
        logging.debug('    Transform: {}'.format(transform_str))
        arcpy.Project_management(model_inputs_path, model_points_path, hru.sr,
                                 transform_str, model_points_sr)
    else:
        arcpy.Copy_management(model_inputs_path, model_points_path)
    model_points_lyr = 'model_points_lyr'
    arcpy.MakeFeatureLayer_management(model_points_path, model_points_lyr)

    # Check model point types
    logging.info('  Checking model point types')
    model_point_types = [
        str(r[0]).upper() for r in arcpy.da.SearchCursor(
            model_points_path, [model_points_type_field])
    ]
    if not set(model_point_types).issubset(set(['OUTLET', 'SUBBASIN', 'SWALE'
                                                ])):
        logging.error(
            '\nERROR: Unsupported model point type(s) found, exiting')
        logging.error('\n  Model point types: {}\n'.format(model_point_types))
        sys.exit()
    elif not set(model_point_types) & set(['OUTLET', 'SWALE']):
        logging.error(
            '\nERROR: At least one model point must be an OUTLET or SWALE, '
            'exiting\n')
        sys.exit()
    else:
        logging.debug('  {}'.format(', '.join(model_point_types)))

    # Check DEM field
    logging.info('\nAdding DEM fields if necessary')
    support.add_field_func(hru.polygon_path, hru.dem_mean_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_max_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_min_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_adj_field, 'DOUBLE')
    if calc_flow_acc_dem_flag:
        support.add_field_func(hru.polygon_path, hru.dem_flowacc_field,
                               'DOUBLE')
        support.add_field_func(hru.polygon_path, hru.dem_sum_field, 'DOUBLE')
        support.add_field_func(hru.polygon_path, hru.dem_count_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_sink_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_aspect_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.dem_slope_deg_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_slope_rad_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_slope_pct_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.jh_tmin_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.jh_tmax_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.jh_coef_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.snarea_thresh_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.tmax_adj_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.tmin_adj_field, 'DOUBLE')

    # Check that dem_adj_copy_field exists
    if len(arcpy.ListFields(hru.polygon_path, dem_adj_copy_field)) == 0:
        logging.error('\nERROR: dem_adj_copy_field {} does not exist\n'.format(
            dem_adj_copy_field))
        sys.exit()

    # Assume all DEM rasters will need to be rebuilt
    # Check slope, aspect, and projected DEM rasters
    # This will check for matching spat. ref., snap point, and cellsize

    # If DEM is GCS, project it to 10m to match
    # DEADBEEF - I had originally wanted the DEM to get projected only once
    #   but if the user wants to rerun this script, then all steps should
    #   be rerun.  This also allows the user to change the DEM raster
    # dem_flag = valid_raster_func(
    #    dem_path, 'projected DEM', hru, dem_cs)
    # if arcpy.Exists(dem_orig_path) and not dem_flag:
    logging.info('\nProjecting DEM raster')
    dem_orig_sr = arcpy.sa.Raster(dem_orig_path).spatialReference
    logging.debug('  DEM GCS:   {}'.format(dem_orig_sr.GCS.name))
    # Remove existing projected DEM
    if arcpy.Exists(dem_path):
        arcpy.Delete_management(dem_path)
    # Set preferred transforms
    transform_str = support.transform_func(hru.sr, dem_orig_sr)
    logging.debug('  Transform: {}'.format(transform_str))
    logging.debug('  Projection method: {}'.format(dem_proj_method))
    # Project DEM
    # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
    logging.debug('  Input SR:  {}'.format(dem_orig_sr.exportToString()))
    logging.debug('  Output SR: {}'.format(hru.sr.exportToString()))
    support.project_raster_func(dem_orig_path,
                                dem_path,
                                hru.sr,
                                dem_proj_method,
                                dem_cs,
                                transform_str,
                                '{} {}'.format(hru.ref_x, hru.ref_y),
                                dem_orig_sr,
                                hru,
                                in_memory=False)
    # env.extent = hru.extent
    # arcpy.ProjectRaster_management(
    #    dem_orig_path, dem_path, hru.sr,
    #    dem_proj_method, dem_cs, transform_str,
    #    '{} {}'.format(hru.ref_x, hru.ref_y),
    #    dem_orig_sr)
    # arcpy.ClearEnvironment('extent')

    # Check linear unit of raster
    # DEADBEEF - The conversion could probably be dynamic
    dem_obj = arcpy.sa.Raster(dem_path)
    linear_unit_list = ['METERS', 'METER', 'FOOT_US', 'FOOT']
    linear_unit = dem_obj.spatialReference.linearUnitName.upper()
    if linear_unit not in linear_unit_list:
        logging.error(
            '\nERROR: The linear unit of the projected/clipped DEM must'
            ' be meters or feet\n  {}'.format(linear_unit))
        sys.exit()
    del dem_obj

    # DEADBEEF - Trying out setting SWALE points before filling
    hru_polygon_lyr = 'hru_polygon_lyr'
    arcpy.MakeFeatureLayer_management(hru.polygon_path, hru_polygon_lyr)
    arcpy.SelectLayerByAttribute_management(hru_polygon_lyr, 'CLEAR_SELECTION')
    arcpy.CalculateField_management(hru_polygon_lyr, hru.outflow_field, 0,
                                    'PYTHON')

    if 'SWALE' in model_point_types:
        logging.info('  Building SWALE point raster')
        arcpy.SelectLayerByAttribute_management(model_points_lyr,
                                                'NEW_SELECTION',
                                                '"TYPE" = \'SWALE\'')

        # DEADBEEF - Should SWALE points be written to OUTFLOWHRU.TXT?
        arcpy.SelectLayerByLocation_management(hru_polygon_lyr, 'INTERSECT',
                                               model_points_lyr)
        arcpy.CalculateField_management(hru_polygon_lyr, hru.outflow_field, 1,
                                        'PYTHON')

        arcpy.PointToRaster_conversion(model_points_lyr,
                                       model_points_type_field, swale_path, "",
                                       "", hru.cs)
        swale_obj = arcpy.sa.Raster(swale_path)
        arcpy.SelectLayerByAttribute_management(model_points_lyr,
                                                'CLEAR_SELECTION')

    dem_obj = arcpy.sa.Raster(dem_path)

    if 'SWALE' in model_point_types:
        logging.debug('  Setting DEM_ADJ values to NoData for SWALE cells')
        dem_obj = arcpy.sa.Con(arcpy.sa.IsNull(swale_obj), dem_obj)

    # Calculate filled DEM, flow_dir, & flow_acc
    logging.info('\nCalculating filled DEM raster')
    dem_fill_obj = arcpy.sa.Fill(dem_obj)
    dem_fill_obj.save(dem_fill_path)
    del dem_fill_obj

    if calc_flow_dir_flag:
        logging.info('Calculating flow direction raster')
        dem_fill_obj = arcpy.sa.Raster(dem_fill_path)
        flow_dir_obj = arcpy.sa.FlowDirection(dem_fill_obj, True)
        flow_dir_obj.save(flow_dir_path)
        del flow_dir_obj, dem_fill_obj
    if calc_flow_acc_flag:
        logging.info('Calculating flow accumulation raster')
        flow_dir_obj = arcpy.sa.Raster(flow_dir_path)
        flow_acc_obj = arcpy.sa.FlowAccumulation(flow_dir_obj)
        flow_acc_obj.save(flow_acc_path)
        del flow_acc_obj, flow_dir_obj
    if calc_flow_acc_dem_flag:
        # flow_acc_dem_obj = dem_fill_obj * flow_acc_obj
        # Low pass filter of flow_acc then take log10
        flow_acc_filter_obj = arcpy.sa.Filter(arcpy.sa.Raster(flow_acc_path),
                                              'LOW', 'NODATA')
        flow_acc_filter_obj *= flow_acc_dem_factor
        flow_acc_filter_obj.save(flow_acc_filter_path)
        flow_acc_dem_obj = arcpy.sa.Raster(dem_fill_path) * flow_acc_filter_obj
        flow_acc_dem_obj.save(flow_acc_dem_path)
        del flow_acc_dem_obj, flow_acc_filter_obj

    # Calculate an integer version of DEM for median zonal stats
    dem_integer_obj = arcpy.sa.Int(arcpy.sa.Raster(dem_path) * 100)
    dem_integer_obj.save(dem_integer_path)
    del dem_integer_obj

    # Calculate slope
    logging.info('Calculating slope raster')
    dem_slope_obj = arcpy.sa.Slope(dem_fill_path, 'DEGREE')
    # Setting small slopes to zero
    logging.info('  Setting slopes <= 0.01 to 0')
    dem_slope_obj = arcpy.sa.Con(dem_slope_obj <= 0.01, 0, dem_slope_obj)
    dem_slope_obj.save(dem_slope_path)
    del dem_slope_obj

    # Calculate aspect
    logging.info('Calculating aspect raster')
    dem_aspect_obj = arcpy.sa.Int(arcpy.sa.Aspect(dem_fill_path))
    # Set small slopes to -1 aspect
    logging.debug('  Setting aspect for slopes <= 0.01 to -1')
    dem_aspect_obj = arcpy.sa.Con(
        arcpy.sa.Raster(dem_slope_path) > 0.01, dem_aspect_obj, -1)
    dem_aspect_obj.save(dem_aspect_path)
    del dem_aspect_obj

    # Temperature Aspect Adjustment
    logging.info('Calculating temperature aspect adjustment raster')
    temp_adj_obj = arcpy.sa.Float(
        arcpy.sa.ReclassByASCIIFile(dem_aspect_path, temp_adj_remap_path))
    # temp_adj_obj = arcpy.sa.Float(arcpy.sa.ReclassByASCIIFile(
    #     dem_aspect_reclass_path, temp_adj_remap_path))
    # Since reclass can't remap to floats directly
    # Values are scaled by 10 and stored as integers
    temp_adj_obj *= 0.1
    temp_adj_obj.save(temp_adj_path)
    del temp_adj_obj

    # List of rasters, fields, and stats for zonal statistics
    zs_dem_dict = dict()
    zs_dem_dict[hru.dem_mean_field] = [dem_path, 'MEAN']
    if calc_flow_acc_dem_flag:
        zs_dem_dict[hru.dem_sum_field] = [flow_acc_dem_path, 'SUM']
        zs_dem_dict[hru.dem_count_field] = [flow_acc_filter_path, 'SUM']
    zs_dem_dict[hru.dem_max_field] = [dem_path, 'MAXIMUM']
    zs_dem_dict[hru.dem_min_field] = [dem_path, 'MINIMUM']
    zs_dem_dict[hru.dem_aspect_field] = [dem_aspect_path, 'MEAN']
    zs_dem_dict[hru.dem_slope_deg_field] = [dem_slope_path, 'MEAN']
    zs_dem_dict[hru.tmax_adj_field] = [temp_adj_path, 'MEAN']
    zs_dem_dict[hru.tmin_adj_field] = [temp_adj_path, 'MEAN']

    # Calculate DEM zonal statistics
    logging.info('\nCalculating DEM zonal statistics')
    support.zonal_stats_func(zs_dem_dict, hru.polygon_path, hru.point_path,
                             hru)

    # Flow accumulation weighted elevation
    if calc_flow_acc_dem_flag:
        logging.info('Calculating {}'.format(hru.dem_flowacc_field))
        hru_polygon_layer = 'hru_polygon_layer'
        arcpy.MakeFeatureLayer_management(hru.polygon_path, hru_polygon_layer)
        arcpy.SelectLayerByAttribute_management(
            hru_polygon_layer, "NEW_SELECTION",
            '"{}" > 0'.format(hru.dem_count_field))
        arcpy.CalculateField_management(
            hru_polygon_layer, hru.dem_flowacc_field,
            'float(!{}!) / !{}!'.format(hru.dem_sum_field,
                                        hru.dem_count_field), 'PYTHON')
        # Clear dem_flowacc for any cells that have zero sum or count
        arcpy.SelectLayerByAttribute_management(
            hru_polygon_layer, "NEW_SELECTION",
            '("{}" = 0) OR ("{}" = 0)'.format(hru.dem_count_field,
                                              hru.dem_sum_field))
        arcpy.CalculateField_management(hru_polygon_layer,
                                        hru.dem_flowacc_field, 0, 'PYTHON')
        arcpy.Delete_management(hru_polygon_layer)

    # Fill DEM_ADJ if it is not set
    if all([
            row[0] == 0 for row in arcpy.da.SearchCursor(
                hru.polygon_path, [hru.dem_adj_field])
    ]):
        logging.info('Filling {} from {}'.format(hru.dem_adj_field,
                                                 dem_adj_copy_field))
        arcpy.CalculateField_management(
            hru.polygon_path, hru.dem_adj_field,
            'float(!{}!)'.format(dem_adj_copy_field), 'PYTHON')
    elif reset_dem_adj_flag:
        logging.info('Filling {} from {}'.format(hru.dem_adj_field,
                                                 dem_adj_copy_field))
        arcpy.CalculateField_management(
            hru.polygon_path, hru.dem_adj_field,
            'float(!{}!)'.format(dem_adj_copy_field), 'PYTHON')
    else:
        logging.info('{} appears to already have been set and '
                     'will not be overwritten'.format(hru.dem_adj_field))

    # HRU_SLOPE in radians
    logging.info('Calculating {} (Slope in Radians)'.format(
        hru.dem_slope_rad_field))
    arcpy.CalculateField_management(
        hru.polygon_path, hru.dem_slope_rad_field,
        'math.pi * !{}! / 180'.format(hru.dem_slope_deg_field), 'PYTHON')
    # HRU_SLOPE in percent
    logging.info('Calculating {} (Percent Slope)'.format(
        hru.dem_slope_pct_field))
    arcpy.CalculateField_management(
        hru.polygon_path, hru.dem_slope_pct_field,
        'math.tan(!{}!)'.format(hru.dem_slope_rad_field), 'PYTHON')
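    # e.g. a 45 degree slope -> math.pi * 45 / 180 = ~0.785 radians,
    #   and math.tan(0.785) = ~1.0, i.e. a 100% slope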

    # Jensen-Haise Potential ET air temperature coefficient
    logging.info('Calculating JH_COEF_HRU')
    # First check if PRISM TMAX/TMIN have been set
    # If max July value is 0, use default values
    if (calc_prism_jh_coef_flag
            and (len(arcpy.ListFields(hru.polygon_path, 'TMAX_07')) == 0
                 or support.field_stat_func(hru.polygon_path, 'TMAX_07',
                                            'MAXIMUM') == 0)):
        calc_prism_jh_coef_flag = False
    # Use PRISM temperature values
    if calc_prism_jh_coef_flag:
        logging.info('  Using PRISM temperature values')
        tmax_field_list = ['!TMAX_{:02d}!'.format(m) for m in range(1, 13)]
        tmin_field_list = ['!TMIN_{:02d}!'.format(m) for m in range(1, 13)]
        tmax_expr = 'max([{}])'.format(','.join(tmax_field_list))
        arcpy.CalculateField_management(hru.polygon_path, hru.jh_tmax_field,
                                        tmax_expr, 'PYTHON')
        # Get TMIN for same month as maximum TMAX
        tmin_expr = 'max(zip([{}],[{}]))[1]'.format(','.join(tmax_field_list),
                                                    ','.join(tmin_field_list))
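        # e.g. max(zip([68, 75, 71], [40, 48, 44]))[1] -> 48, the TMIN
        #   from the month with the highest TMAX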
        arcpy.CalculateField_management(hru.polygon_path, hru.jh_tmin_field,
                                        tmin_expr, 'PYTHON')
    # Use default temperature values
    else:
        logging.info('  Using default temperature values (TMIN=7, TMAX=25)')
        arcpy.CalculateField_management(hru.polygon_path, hru.jh_tmax_field,
                                        25, 'PYTHON')
        arcpy.CalculateField_management(hru.polygon_path, hru.jh_tmin_field, 7,
                                        'PYTHON')
    # Pass unit scalar to convert DEM_ADJ to feet if necessary
    support.jensen_haise_func(hru.polygon_path, hru.jh_coef_field,
                              hru.dem_adj_field, hru.jh_tmin_field,
                              hru.jh_tmax_field, dem_unit_scalar)

    # SNAREA_THRESH
    # Convert DEM_ADJ to feet if necessary
    logging.info('Calculating {}'.format(hru.snarea_thresh_field))
    elev_min = support.field_stat_func(hru.polygon_path, hru.dem_adj_field,
                                       'MINIMUM')
    arcpy.CalculateField_management(
        hru.polygon_path, hru.snarea_thresh_field,
        '(!{}! - {}) * 0.005'.format(hru.dem_adj_field,
                                     elev_min * dem_unit_scalar), 'PYTHON')

    # Clear slope/aspect values for lake cells (HRU_TYPE == 2)
    # Also clear for ocean cells (HRU_TYPE == 0 and DEM_ADJ == 0)
    if True:
        logging.info('\nClearing slope/aspect parameters for lake cells')
        hru_polygon_layer = "hru_polygon_layer"
        arcpy.MakeFeatureLayer_management(hru.polygon_path, hru_polygon_layer)
        arcpy.SelectLayerByAttribute_management(
            hru_polygon_layer, "NEW_SELECTION",
            '"{0}" = 2 OR ("{0}" = 0 AND "{1}" = 0)'.format(
                hru.type_field, hru.dem_adj_field))
        arcpy.CalculateField_management(hru_polygon_layer,
                                        hru.dem_aspect_field, 0, 'PYTHON')
        arcpy.CalculateField_management(hru_polygon_layer,
                                        hru.dem_slope_deg_field, 0, 'PYTHON')
        arcpy.CalculateField_management(hru_polygon_layer,
                                        hru.dem_slope_rad_field, 0, 'PYTHON')
        arcpy.CalculateField_management(hru_polygon_layer,
                                        hru.dem_slope_pct_field, 0, 'PYTHON')
        # arcpy.CalculateField_management(
        #    hru_polygon_layer, hru.deplcrv_field, 0, 'PYTHON')
        # arcpy.CalculateField_management(
        #    hru_polygon_layer, hru.snarea_field, 0, 'PYTHON')
        # arcpy.CalculateField_management(
        #    hru_polygon_layer, hru.tmax_adj_field, 0, 'PYTHON')
        # arcpy.CalculateField_management(
        #    hru_polygon_layer, hru.tmin_adj_field, 0, 'PYTHON')

        # Should JH coefficients be cleared for lakes?
        # logging.info('\nClearing JH parameters for ocean cells')
        arcpy.SelectLayerByAttribute_management(
            hru_polygon_layer, "NEW_SELECTION",
            '"{}" = 0 AND "{}" = 0'.format(hru.type_field, hru.dem_adj_field))
        arcpy.CalculateField_management(hru_polygon_layer, hru.jh_coef_field,
                                        0, 'PYTHON')
        arcpy.CalculateField_management(hru_polygon_layer, hru.jh_tmax_field,
                                        0, 'PYTHON')
        arcpy.CalculateField_management(hru_polygon_layer, hru.jh_tmin_field,
                                        0, 'PYTHON')

        arcpy.Delete_management(hru_polygon_layer)
        del hru_polygon_layer
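
A minimal sketch (hypothetical values; not part of the original script) of the flow-accumulation-weighted elevation that dem_parameters computes above via zonal sums, using numpy arrays in place of the flow_acc_filter and flow_acc_x_dem rasters:

import numpy as np

# Four DEM cells inside one HRU cell; weight elevation by flow accumulation
flow_acc = np.array([1.0, 4.0, 2.0, 1.0])   # filtered, scaled flow accumulation
dem = np.array([100.0, 90.0, 95.0, 105.0])  # filled DEM elevations
dem_sum = (flow_acc * dem).sum()            # zonal SUM of flow_acc_x_dem (DEM_SUM)
dem_count = flow_acc.sum()                  # zonal SUM of flow_acc_filter (DEM_COUNT)
print(dem_sum / dem_count)                  # DEM_FLOWAC = 94.375, vs plain mean 97.5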
Example #10
def flow_parameters(config_path, overwrite_flag=False, debug_flag=False):
    """Calculate GSFLOW Flow Parameters

    Args:
        config_path (str): Project config file path
        overwrite_flag (bool): if True, overwrite existing files
        debug_flag (bool): if True, enable debug level logging

    Returns:
        None
    """

    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'dem_2_stream_log.txt'
    log_console = logging.FileHandler(filename=os.path.join(
        hru.log_ws, log_file_name),
                                      mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW DEM To Streams')

    # Check whether lake parameters should be calculated
    try:
        set_lake_flag = inputs_cfg.getboolean('INPUTS', 'set_lake_flag')
    except ConfigParser.NoOptionError:
        set_lake_flag = False
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'set_lake_flag', set_lake_flag))

    # Model points
    model_inputs_path = inputs_cfg.get('INPUTS', 'model_points_path')
    try:
        model_points_zone_field = inputs_cfg.get('INPUTS',
                                                 'model_points_zone_field')
    except ConfigParser.NoOptionError:
        model_points_zone_field = 'FID'
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'model_points_zone_field', model_points_zone_field))
    try:
        model_points_type_field = inputs_cfg.get('INPUTS',
                                                 'model_points_type_field')
    except ConfigParser.NoOptionError:
        model_points_type_field = 'TYPE'
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'model_points_type_field', model_points_type_field))

    # Flow parameters
    flow_acc_threshold = inputs_cfg.getint('INPUTS', 'flow_acc_threshold')
    flow_length_threshold = inputs_cfg.getint('INPUTS',
                                              'flow_length_threshold')
    try:
        calc_flow_dir_points_flag = inputs_cfg.getboolean(
            'INPUTS', 'calc_flow_dir_points_flag')
    except ConfigParser.NoOptionError:
        calc_flow_dir_points_flag = False
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'calc_flow_dir_points_flag', calc_flow_dir_points_flag))
    try:
        lake_seg_offset = inputs_cfg.getint('INPUTS', 'lake_seg_offset')
    except ConfigParser.NoOptionError:
        lake_seg_offset = 0
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'lake_seg_offset', lake_seg_offset))
    if lake_seg_offset < 0:
        logging.error(
            '\nERROR: lake_seg_offset must be a non-negative integer')
        sys.exit()

    # Check input paths
    dem_temp_ws = os.path.join(hru.param_ws, 'dem_rasters')
    dem_path = os.path.join(dem_temp_ws, 'dem.img')
    if not arcpy.Exists(dem_path):
        logging.error(
            '\nERROR: Projected/clipped DEM ({}) does not exist'
            '\nERROR: Try rerunning dem_parameters.py'.format(dem_path))
        sys.exit()
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()

    # Check model points
    if not os.path.isfile(model_inputs_path):
        logging.error('\nERROR: Model points shapefile does not exist'
                      '\nERROR:   {}'.format(model_inputs_path))
        sys.exit()
    # model_points_path must be a point shapefile
    elif arcpy.Describe(model_inputs_path).datasetType != 'FeatureClass':
        logging.error('\nERROR: model_points_path must be a point shapefile')
        sys.exit()

    # Build output folder if necessary
    flow_temp_ws = os.path.join(hru.param_ws, 'flow_rasters')
    if not os.path.isdir(flow_temp_ws):
        os.mkdir(flow_temp_ws)

    # Output paths
    hru_type_path = os.path.join(flow_temp_ws, 'hru_type.img')
    dem_adj_path = os.path.join(flow_temp_ws, 'dem_adj.img')
    lake_id_path = os.path.join(flow_temp_ws, 'lake_id.img')
    dem_sink_path = os.path.join(flow_temp_ws, 'dem_sink.img')
    dem_fill_path = os.path.join(flow_temp_ws, 'dem_fill.img')
    flow_dir_path = os.path.join(flow_temp_ws, 'flow_dir.img')
    flow_dir_points = os.path.join(flow_temp_ws, 'flow_dir_points.shp')
    flow_acc_full_path = os.path.join(flow_temp_ws, 'flow_acc_full.img')
    flow_acc_sub_path = os.path.join(flow_temp_ws, 'flow_acc_sub.img')
    flow_mask_path = os.path.join(flow_temp_ws, 'flow_mask.img')
    stream_link_path = os.path.join(flow_temp_ws, 'stream_link.img')
    stream_link_a_path = os.path.join(flow_temp_ws, 'stream_link_a.img')
    stream_link_b_path = os.path.join(flow_temp_ws, 'stream_link_b.img')
    stream_order_path = os.path.join(flow_temp_ws, 'stream_order.img')
    stream_length_path = os.path.join(flow_temp_ws, 'stream_length.img')
    watersheds_path = os.path.join(flow_temp_ws, 'watersheds.img')
    outlet_path = os.path.join(flow_temp_ws, 'outlet.img')
    swale_path = os.path.join(flow_temp_ws, 'swale.img')
    subbasin_path = os.path.join(flow_temp_ws, 'subbasin.img')
    basin_path = os.path.join(flow_temp_ws, 'basin.img')
    streams_path = os.path.join(flow_temp_ws, 'streams.shp')
    model_points_path = os.path.join(flow_temp_ws, 'model_points.shp')

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    # env.pyramid = 'PYRAMIDS -1'
    env.pyramid = 'PYRAMIDS 0'
    env.workspace = flow_temp_ws
    env.scratchWorkspace = hru.scratch_ws

    # Set environment parameters
    env.extent = hru.extent
    env.cellsize = hru.cs
    env.outputCoordinateSystem = hru.sr

    # Read in model points shapefile
    logging.info('\nChecking model points shapefile')
    model_points_desc = arcpy.Describe(model_inputs_path)
    model_points_sr = model_points_desc.spatialReference
    logging.debug('  Points: {}'.format(model_inputs_path))
    logging.debug('  Points spat. ref.:  {}'.format(model_points_sr.name))
    logging.debug('  Points GCS:         {}'.format(model_points_sr.GCS.name))

    # If model points spat_ref doesn't match hru_param spat_ref
    # Project model points to hru_param spat ref
    # Otherwise, read model points directly
    if hru.sr.name != model_points_sr.name:
        logging.info('  Model points projection does not match fishnet.\n'
                     '  Projecting model points.\n')
        # Set preferred transforms
        transform_str = support.transform_func(hru.sr, model_points_sr)
        logging.debug('    Transform: {}'.format(transform_str))
        arcpy.Project_management(model_inputs_path, model_points_path, hru.sr,
                                 transform_str, model_points_sr)
    else:
        arcpy.Copy_management(model_inputs_path, model_points_path)
    model_points_lyr = 'model_points_lyr'
    arcpy.MakeFeatureLayer_management(model_points_path, model_points_lyr)

    # Check model_points_zone_field
    if model_points_zone_field.upper() in ['', 'FID', 'NONE']:
        model_points_fid_field = arcpy.Describe(model_points_path).OIDFieldName
        logging.warning('  NOTE: Using {}+1 to set {}'.format(
            model_points_fid_field, hru.subbasin_field))
        model_points_zone_field = 'ZONE_VALUE'
        if not arcpy.ListFields(model_points_path, model_points_zone_field):
            arcpy.AddField_management(model_points_path,
                                      model_points_zone_field, 'LONG')
        arcpy.CalculateField_management(
            model_points_path, model_points_zone_field,
            '!{}! + 1'.format(model_points_fid_field), 'PYTHON')
    elif not arcpy.ListFields(model_points_path, model_points_zone_field):
        logging.error(
            '\nERROR: model_points_zone_field {} does not exist\n'.format(
                model_points_zone_field))
        sys.exit()
    # Need to check that model_points_zone_field is an int type
    elif not [
            f.type for f in arcpy.Describe(model_points_path).fields
            if (f.name == model_points_zone_field
                and f.type in ['SmallInteger', 'Integer'])
    ]:
        logging.error(
            '\nERROR: model_points_zone_field {} must be an integer type\n'.
            format(model_points_zone_field))
        sys.exit()

    # Need to check that model_points_zone_field is all positive values
    if min([
            row[0] for row in arcpy.da.SearchCursor(model_points_path,
                                                    [model_points_zone_field])
    ]) <= 0:
        logging.error(
            '\nERROR: model_points_zone_field {} values must be positive\n'.format(
                model_points_zone_field))
        sys.exit()

    # Check that subbasin values increment from 1 to nsub
    logging.info('  Checking subbasin numbering')
    subbasin_id_list = sorted(
        list(
            set([
                row[0] for row in arcpy.da.SearchCursor(
                    model_points_path, [model_points_zone_field])
            ])))
    if subbasin_id_list != range(1, len(subbasin_id_list) + 1):
        logging.error('\nERROR: SUB_BASINs must be sequential starting from 1'
                      '\nERROR:   {}'.format(subbasin_id_list))
        sys.exit()
    subbasin_input_count = len(subbasin_id_list)
    logging.debug('    {} subbasins'.format(subbasin_input_count))

    # Check model point types
    logging.info('  Checking model point types')
    model_point_types = [
        str(r[0]).upper() for r in arcpy.da.SearchCursor(
            model_points_path, [model_points_type_field])
    ]
    if not set(model_point_types).issubset(set(['OUTLET', 'SUBBASIN', 'SWALE'
                                                ])):
        logging.error(
            '\nERROR: Unsupported model point type(s) found, exiting')
        logging.error('\n  Model point types: {}\n'.format(model_point_types))
        sys.exit()
        # elif not set(model_point_types) & set(['OUTLET', 'SWALE']):
        #     logging.error(
        #         '\nERROR: At least one model point must be an OUTLET or SWALE, '
        #         'exiting\n')
        #     sys.exit()
    else:
        logging.debug('  {}'.format(', '.join(model_point_types)))

    # Check DEM field
    logging.info('\nAdding DEM fields if necessary')
    support.add_field_func(hru.polygon_path, hru.iseg_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.irunbound_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.flow_dir_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.dem_sink_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.outflow_field, 'DOUBLE')

    if set_lake_flag:
        # Check lake cell elevations
        logging.info('\nChecking lake cell {}'.format(hru.dem_adj_field))
        lake_elev_dict = defaultdict(list)
        fields = [
            hru.type_field, hru.lake_id_field, hru.dem_adj_field, hru.id_field
        ]
        for row in arcpy.da.SearchCursor(hru.polygon_path, fields):
            if int(row[0]) != 2:
                continue
            lake_elev_dict[int(row[1])].append(float(row[2]))
        del fields
        logging.info('  {:>7} {:>12} {:>12} {:>12} {:>12}'.format(
            'Lake ID', 'Minimum', 'Mean', 'Maximum', 'Std. Dev.'))
        for lake_id, lake_elev_list in lake_elev_dict.items():
            lake_elev_array = np.array(lake_elev_list)
            logging.info('  {:7} {:12f} {:12f} {:12f} {:12f}'.format(
                lake_id, np.min(lake_elev_array), np.mean(lake_elev_array),
                np.max(lake_elev_array), np.std(lake_elev_array)))
            if np.std(lake_elev_array) > 1:
                logging.warning(
                    '  Please check the lake {} cell elevations\n'
                    '  They may need to be manually adjusted'.format(lake_id))
                raw_input('  Press ENTER to continue')
            del lake_elev_array

        # Build Lake raster
        logging.debug('  LAKE_ID')
        arcpy.PolygonToRaster_conversion(hru.polygon_path, hru.lake_id_field,
                                         lake_id_path, 'CELL_CENTER', '',
                                         hru.cs)
        lake_id_obj = arcpy.sa.Raster(lake_id_path)

    logging.info('\nExporting HRU polygon parameters to raster')
    logging.debug('  HRU_TYPE')
    arcpy.PolygonToRaster_conversion(hru.polygon_path, hru.type_field,
                                     hru_type_path, 'CELL_CENTER', '', hru.cs)
    hru_type_obj = arcpy.sa.Raster(hru_type_path)

    # Convert DEM_ADJ to raster
    logging.debug('  DEM_ADJ')
    arcpy.PolygonToRaster_conversion(hru.polygon_path, hru.dem_adj_field,
                                     dem_adj_path, 'CELL_CENTER', '', hru.cs)
    dem_adj_obj = arcpy.sa.Raster(dem_adj_path)
    # dem_adj_obj = arcpy.sa.Float(arcpy.sa.Raster(dem_adj_path))

    hru_polygon_lyr = 'hru_polygon_lyr'
    arcpy.MakeFeatureLayer_management(hru.polygon_path, hru_polygon_lyr)
    arcpy.SelectLayerByAttribute_management(hru_polygon_lyr, 'CLEAR_SELECTION')
    arcpy.CalculateField_management(hru_polygon_lyr, hru.outflow_field, 0,
                                    'PYTHON')

    if 'OUTLET' in model_point_types:
        arcpy.SelectLayerByAttribute_management(model_points_lyr,
                                                'NEW_SELECTION',
                                                '"TYPE" = \'OUTLET\'')

        arcpy.SelectLayerByLocation_management(hru_polygon_lyr, 'INTERSECT',
                                               model_points_lyr)
        arcpy.CalculateField_management(hru_polygon_lyr, hru.outflow_field, 1,
                                        'PYTHON')

        # The point of all of this code is to determine the flow direction
        #   at the outlet points since it won't be computed.
        # It might be easier to compute fill and flow dir. on the full raster
        logging.info('  Computing OUTLET point flow direction')

        # Get HRU values at outlet points
        outlet_points = [(int(r[0]), int(r[1])) for r in arcpy.da.SearchCursor(
            hru_polygon_lyr, [hru.col_field, hru.row_field])]

        # Get elevations and type of neighboring cells
        # Multiplying the cellsize by 1.5 is needed to get all possible
        #   neighbors but it can return extra cells that will need to be skipped
        # It might be easier to use the Select tool directly
        arcpy.SelectLayerByLocation_management(hru_polygon_lyr,
                                               'WITHIN_A_DISTANCE',
                                               model_points_lyr, 1.5 * hru.cs)
        elev_dict = dict()
        hru_type_dict = dict()
        fields = [
            hru.col_field, hru.row_field, hru.dem_adj_field, hru.type_field
        ]
        for row in arcpy.da.SearchCursor(hru_polygon_lyr, fields):
            elev_dict[(int(row[0]), int(row[1]))] = float(row[2])
            hru_type_dict[(int(row[0]), int(row[1]))] = int(row[3])

        # For each outlet cell, cycle through flow directions and find the
        #   steepest (most negative) exit slope.
        # Outlet cells should exit to an inactive cell or out of the grid.
        outlet_flowdir = {}
        for outlet_pt in outlet_points:
            logging.debug('    Outlet Point: {}'.format(outlet_pt))
            outlet_slopes = []
            # Search non-diagonals first.
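            # ArcGIS D8 codes: 1=E, 2=SE, 4=S, 8=SW, 16=W, 32=NW, 64=N, 128=NE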
            for fd in [1, 4, 16, 64, 2, 8, 32, 128]:
                if support.next_row_col(fd, outlet_pt) not in elev_dict.keys():
                    # Don't compute other slopes if next cell is outside the grid
                    outlet_slopes.append([-9999, fd])
                    break
                elif hru_type_dict[support.next_row_col(fd, outlet_pt)] != 0:
                    # Only compute slope to inactive cells
                    continue
                else:
                    # Compute slope to next cell
                    slope = (elev_dict[support.next_row_col(fd, outlet_pt)] -
                             elev_dict[outlet_pt])
                    if fd in [2, 8, 32, 128]:
                        # For diagonals, adjust slope
                        # I think Arc approximates root(2) to 1.5
                        slope /= 1.5
                    outlet_slopes.append([slope, fd])
                logging.debug('    {:>3d} {}'.format(fd, slope))

            if not outlet_slopes:
                logging.error('\nERROR: The OUTLET model point is not at the '
                              'edge of the study area or model grid.\n'
                              '  Col: {0} Row: {1}'.format(*outlet_pt))
                sys.exit()

            # Assign the flow direction with the steepest (positive) slope
            outlet_slope, outlet_fd = min(outlet_slopes)
            outlet_flowdir[outlet_pt] = outlet_fd
            if outlet_slope > 0:
                logging.warning(
                    '\n  WARNING: The OUTLET model point flow direction may '
                    'be invalid')
            logging.debug('    Flow Direction: {}'.format(outlet_fd))

        logging.info('  Building OUTLET point raster')
        outlet_array = np.zeros((hru.rows, hru.cols)).astype(np.uint8)
        for outlet_pt in outlet_points:
            outlet_array[outlet_pt[1] - 1,
                         outlet_pt[0] - 1] = outlet_flowdir[outlet_pt]
        support.array_to_raster(
            outlet_array, outlet_path,
            arcpy.Point(hru.extent.XMin, hru.extent.YMin, 0), hru.cs,
            outlet_array)
        outlet_obj = arcpy.sa.Raster(outlet_path)

    if 'SWALE' in model_point_types:
        logging.info('  Building SWALE point raster')
        arcpy.SelectLayerByAttribute_management(model_points_lyr,
                                                'NEW_SELECTION',
                                                '"TYPE" = \'SWALE\'')

        # DEADBEEF - Should SWALE points be written to OUTFLOWHRU.TXT?
        arcpy.SelectLayerByLocation_management(hru_polygon_lyr, 'INTERSECT',
                                               model_points_lyr)
        arcpy.CalculateField_management(hru_polygon_lyr, hru.outflow_field, 1,
                                        'PYTHON')

        arcpy.PointToRaster_conversion(model_points_lyr,
                                       model_points_type_field, swale_path, "",
                                       "", hru.cs)
        swale_obj = arcpy.sa.Raster(swale_path)
        arcpy.SelectLayerByAttribute_management(model_points_lyr,
                                                'CLEAR_SELECTION')

    arcpy.Delete_management(hru_polygon_lyr)

    logging.info('\nCalculating flow direction')
    # This will force all active cells to flow to an outlet
    logging.debug('  Setting DEM_ADJ values to 20000 for inactive cells')
    dem_mod_obj = arcpy.sa.Con(hru_type_obj > 0, dem_adj_obj, 20000.0)
    if 'OUTLET' in model_point_types:
        logging.debug('  Setting DEM_ADJ values to NoData for OUTLET cells')
        dem_mod_obj = arcpy.sa.Con(arcpy.sa.IsNull(outlet_obj), dem_mod_obj)
    if 'SWALE' in model_point_types:
        logging.debug('  Setting DEM_ADJ values to NoData for SWALE cells')
        dem_mod_obj = arcpy.sa.Con(arcpy.sa.IsNull(swale_obj), dem_mod_obj)

    logging.info('  Filling DEM_ADJ (8-way)')
    dem_fill_obj = arcpy.sa.Fill(dem_mod_obj)
    del dem_mod_obj

    if 'OUTLET' in model_point_types:
        logging.debug('  Resetting OUTLET cell values')
        dem_fill_obj = arcpy.sa.Con(arcpy.sa.IsNull(outlet_obj), dem_fill_obj,
                                    dem_adj_obj)

    logging.info('  Calculating sinks (8-way)')
    # Threshold of 0.001 is needed to avoid noise from 32/64 bit conversion
    dem_sink_obj = arcpy.sa.Con(hru_type_obj > 0, dem_fill_obj - dem_adj_obj)
    dem_sink_obj = arcpy.sa.Con(dem_sink_obj > 0.001, dem_sink_obj)

    logging.info('  Calculating flow direction')
    flow_dir_obj = arcpy.sa.FlowDirection(dem_fill_obj, False)

    logging.debug('  Setting flow direction to NoData for inactive cells')
    flow_dir_obj = arcpy.sa.SetNull(hru_type_obj == 0, flow_dir_obj)

    if 'OUTLET' in model_point_types:
        logging.debug('  Resetting OUTLET cell flow direction')
        flow_dir_obj = arcpy.sa.Con(~arcpy.sa.IsNull(outlet_obj), outlet_obj,
                                    flow_dir_obj)
        del outlet_obj
    if 'SWALE' in model_point_types:
        logging.debug('  Resetting SWALE cell flow direction')
        flow_dir_obj = arcpy.sa.Con(~arcpy.sa.IsNull(swale_obj), 1,
                                    flow_dir_obj)
        del swale_obj

    logging.debug('  Resetting DEM_ADJ values for inactive cells')
    dem_fill_obj = arcpy.sa.Con(hru_type_obj == 0, dem_adj_obj, dem_fill_obj)

    flow_dir_obj.save(flow_dir_path)
    dem_fill_obj.save(dem_fill_path)
    dem_sink_obj.save(dem_sink_path)
    del dem_sink_obj

    # Save flow direction as points
    if calc_flow_dir_points_flag:
        logging.info('\nFlow direction points')
        # ArcGIS fails for raster_to_x conversions on a network path
        # You have to go through an in_memory file first
        flow_dir_temp = os.path.join('in_memory', 'flow_dir')
        arcpy.RasterToPoint_conversion(flow_dir_path, flow_dir_temp)
        try:
            arcpy.CopyFeatures_management(flow_dir_temp, flow_dir_points)
        except Exception:
            time.sleep(1)
            logging.warning('Copy feature failed')
        arcpy.Delete_management(flow_dir_temp)
        del flow_dir_temp

        # Reclassify flow directions to angles, assuming 1 is 0
        remap_cb = ('def Reclass(value):\n' + '    if value == 1: return 0\n' +
                    '    elif value == 2: return 45\n' +
                    '    elif value == 4: return 90\n' +
                    '    elif value == 8: return 135\n' +
                    '    elif value == 16: return 180\n' +
                    '    elif value == 32: return 225\n' +
                    '    elif value == 64: return 270\n' +
                    '    elif value == 128: return 315\n')
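        # i.e. D8 code 2**n -> 45 * n degrees, measured clockwise from the
        #   code-1 (east) direction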
        arcpy.CalculateField_management(flow_dir_points, 'grid_code',
                                        'Reclass(!{}!)'.format('grid_code'),
                                        'PYTHON', remap_cb)

    # Write flow direction to hru_polygon
    logging.debug('  Extracting flow direction at points')
    vt_list = [[flow_dir_path, hru.flow_dir_field]]
    mem_point_path = os.path.join('in_memory', 'hru_point')
    arcpy.CopyFeatures_management(hru.point_path, mem_point_path)
    arcpy.sa.ExtractMultiValuesToPoints(mem_point_path, vt_list, 'NONE')
    logging.debug('  Reading flow direction values at point')
    data_dict = defaultdict(dict)
    fields = [hru.flow_dir_field, hru.fid_field]
    with arcpy.da.SearchCursor(mem_point_path, fields) as s_cursor:
        for row in s_cursor:
            # Set nodata cells to 0
            if row[0] is not None and row[1] is not None:
                data_dict[int(row[1])][hru.flow_dir_field] = int(row[0])
            del row
    logging.debug('  Writing flow direction values to polygon')
    fields = [hru.flow_dir_field, hru.fid_field]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
        for row in u_cursor:
            row_dict = data_dict.get(int(row[-1]), None)
            for i, field in enumerate(fields[:-1]):
                if row_dict:
                    row[i] = row_dict[field]
                else:
                    row[i] = 0
            u_cursor.updateRow(row)
            del row_dict, row

    # DEADBEEF - This whole section seems to only be needed if the outflows
    #   are not specified by the user.
    # # Subbasins
    # # Select the HRU cells that intersect the subbasin point cells
    # logging.debug('  Reading input subbasin points')
    # hru_polygon_lyr = 'hru_polygon_lyr'
    # arcpy.MakeFeatureLayer_management(hru.polygon_path, hru_polygon_lyr)
    # arcpy.SelectLayerByLocation_management(
    #     hru_polygon_lyr, 'intersect', model_points_path)
    # input_xy_dict = dict()
    # fields = [hru.col_field, hru.row_field, hru.x_field, hru.y_field]
    # for row in arcpy.da.SearchCursor(hru_polygon_lyr, fields):
    #     input_xy_dict[(int(row[0]), int(row[1]))] = (int(row[2]), int(row[3]))
    # arcpy.Delete_management(hru_polygon_lyr)
    # del hru_polygon_lyr
    # # for k,v in input_xy_dict.items():
    # #    logging.debug('    {} {}'.format(k,v))

    # logging.info('\nBuilding all subbasin points')
    # # First calculate downstream cell for all cells
    # logging.debug('  Calculating downstream cells')
    # out_cell_dict = dict()
    # hru_type_dict = dict()
    # cell_xy_dict = dict()
    # fields = [
    #     hru.type_field, hru.flow_dir_field, hru.id_field,
    #     hru.col_field, hru.row_field, hru.x_field, hru.y_field]
    # for row in arcpy.da.SearchCursor(hru.polygon_path, fields):
    #     cell = (int(row[3]), int(row[4]))
    #     out_cell_dict[cell] = support.next_row_col(int(row[1]), cell)
    #     hru_type_dict[cell] = int(row[0])
    #     cell_xy_dict[cell] = (int(row[5]), int(row[6]))

    # # Identify all active/lake cells that exit the model
    # #   or flow to an inactive cell
    # logging.debug('  Identifying active cells that exit the model')
    # out_cell_xy_list = []
    # for cell, cell_xy in sorted(cell_xy_dict.items()):
    #     #  DEADBEEF - This is finding exit cells that aren't already gauges
    #     # if cell in input_xy_dict.keys():
    #     #    continue
    #     # elif cell not in hru_type_dict.keys():
    #     if cell not in hru_type_dict.keys():
    #         continue
    #     elif hru_type_dict[cell] not in [1, 2]:
    #         continue
    #     elif cell not in out_cell_dict.keys():
    #         continue
    #     elif out_cell_dict[cell] not in hru_type_dict.keys():
    #         out_cell_xy_list.append(cell_xy)
    #     elif (out_cell_dict[cell] in hru_type_dict.keys() and
    #           hru_type_dict[out_cell_dict[cell]] not in [1, 2]):
    #         out_cell_xy_list.append(cell_xy)

    # # Outflow cells exit the model to inactive cells or out of the domain
    # # These cells will be used to set the OUTFLOW_HRU.DAT for CRT
    # #   in crt_fill_parameters and stream_parameters
    # logging.info('  Flag outflow cells')
    # fields = [hru.type_field, hru.x_field, hru.y_field, hru.outflow_field]
    # with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
    #     for row in u_cursor:
    #         cell_xy = (row[1], row[2])
    #         # Inactive cells can't be outflow cells
    #         if int(row[0]) == 0:
    #             continue
    #         elif out_cell_xy_list and cell_xy in out_cell_xy_list:
    #             row[3] = 1
    #         else:
    #             row[3] = 0
    #         u_cursor.updateRow(row)
    # del out_cell_dict, hru_type_dict, cell_xy_dict

    # DEADBEEF - This was added for sinks or ocean so that there would be
    #   subbasin points along the edge?
    # fields = ['SHAPE@XY', model_points_zone_field]
    # with arcpy.da.InsertCursor(model_points_path, fields) as insert_c:
    #     for out_cell_xy in sorted(out_cell_xy_list):
    #         insert_c.insertRow([out_cell_xy, subbasin_input_count + 1])
    # del fields
    # del out_cell_xy_list

    # Flow Accumulation
    logging.info('\nCalculating initial flow accumulation')
    flow_acc_full_obj = arcpy.sa.FlowAccumulation(flow_dir_obj)
    logging.info('  Only keeping flow_acc >= {}'.format(flow_acc_threshold))
    flow_acc_full_obj = arcpy.sa.Con(flow_acc_full_obj >= flow_acc_threshold,
                                     flow_acc_full_obj)
    flow_acc_full_obj.save(flow_acc_full_path)

    # Flow accumulation and stream link with lakes
    logging.info('\nCalculating flow accumulation & stream link (w/ lakes)')
    flow_acc_obj = arcpy.sa.Con((hru_type_obj >= 1) & (hru_type_obj <= 3),
                                flow_acc_full_obj)
    stream_link_obj = arcpy.sa.StreamLink(flow_acc_obj, flow_dir_obj)
    stream_link_obj.save(stream_link_a_path)
    del flow_acc_obj, stream_link_obj

    # Flow accumulation and stream link without lakes
    logging.info('Calculating flow accumulation & stream link (w/o lakes)')
    flow_acc_obj = arcpy.sa.Con((hru_type_obj == 1) | (hru_type_obj == 3),
                                flow_acc_full_obj)
    # flow_acc_obj.save(flow_acc_sub_path)
    stream_link_obj = arcpy.sa.StreamLink(flow_acc_obj, flow_dir_obj)
    stream_link_obj.save(stream_link_b_path)
    del flow_acc_obj, stream_link_obj

    # Initial Stream Link
    # logging.info('\nCalculating initial stream link')
    # stream_link_obj = StreamLink(flow_acc_obj, flow_dir_obj)
    # stream_link_obj.save(stream_link_path)
    # Calculate stream link with and without lakes
    # Initial Stream Order (w/ lakes)
    logging.info('Calculating stream order (w/ lakes)')
    logging.debug('  Using SHREVE ordering so that after 1st order segments '
                  'are removed, 2nd order segments will only be dangles')
    stream_order_obj = arcpy.sa.StreamOrder(stream_link_a_path, flow_dir_obj,
                                            'SHREVE')
    stream_order_obj.save(stream_order_path)

    # Stream Length (cell count w/o lakes)
    logging.info('Calculating stream length (cell count w/o lakes)')
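    # Lookup on the raster attribute table's 'Count' field assigns every
    #   stream cell the total cell count of its stream link, i.e. the
    #   segment length measured in cells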
    stream_length_obj = arcpy.sa.Lookup(stream_link_b_path, 'Count')
    stream_length_obj.save(stream_length_path)

    # Filter 1st order segments
    logging.info(
        '\nFilter all 1st order streams with length < {}'
        '\nKeep all higher order streams'.format(flow_length_threshold))
    # Stream length is nodata for lakes, so put lakes back in
    # This removes short 1st order streams off of lakes
    flow_mask_obj = ((hru_type_obj == 3) | (hru_type_obj == 2) |
                     (stream_order_obj >= 2) |
                     ((stream_order_obj == 1) &
                      (stream_length_obj >= flow_length_threshold)))
    flow_mask_obj.save(flow_mask_path)
    flow_acc_sub_obj = arcpy.sa.Con(flow_mask_obj, flow_acc_full_obj)
    flow_acc_sub_obj.save(flow_acc_sub_path)
    del flow_mask_obj, stream_order_obj, stream_length_obj

    # Final Stream Link
    logging.info('\nCalculating final stream link')
    stream_link_obj = arcpy.sa.StreamLink(flow_acc_sub_obj, flow_dir_obj)
    # Get count of streams for automatically setting lake_seg_offset
    if not lake_seg_offset:
        lake_seg_count = int(
            arcpy.GetCount_management(stream_link_obj).getOutput(0))
        n = 10**math.floor(math.log10(lake_seg_count))
        lake_seg_offset = int(math.ceil((lake_seg_count + 1) / n)) * int(n)
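        # Worked example (hypothetical count): lake_seg_count = 537
        #   n = 10 ** floor(log10(537)) = 100
        #   lake_seg_offset = ceil((537 + 1) / 100) * 100 = 600
        # i.e. round the segment count up to the next multiple of its
        #   order of magnitude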
        logging.info('  lake_segment_offset was not set in the input file\n' +
                     '  Using automatic lake segment offset: {}'.format(
                         lake_seg_offset))
    elif set_lake_flag:
        logging.info(
            '  Using manual lake segment offset: {}'.format(lake_seg_offset))

    # Include lake cells into 'stream_link' before calculating watersheds
    # Watershed function doesn't work for negative values
    # Convert lakes to large positive numbers for Watershed
    # ISEG needs to be negative values though
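    # e.g. (assumed values) a lake with LAKE_ID 3 and lake_seg_offset 600
    #   is stored as 603 in stream_link here, then flipped to ISEG = -3
    #   when the point values are read back below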
    if set_lake_flag:
        logging.info(
            '  Including lakes as {0} + {1}\n'
            '  This will allow for a watershed/subbasin for the lakes\n'
            '  {2} will be saved as negative of {0} though'.format(
                hru.lake_id_field, lake_seg_offset, hru.iseg_field))
        stream_link_obj = arcpy.sa.Con((hru_type_obj == 2),
                                       (lake_id_obj + lake_seg_offset),
                                       stream_link_obj)
    stream_link_obj.save(stream_link_path)

    # Watersheds
    logging.info('Calculating watersheds')
    watersheds_obj = arcpy.sa.Watershed(flow_dir_obj, stream_link_obj)
    watersheds_obj.save(watersheds_path)
    del stream_link_obj, watersheds_obj

    # Subbasins
    logging.info('Calculating subbasins')
    subbasin_obj = arcpy.sa.Watershed(flow_dir_obj, model_points_path,
                                      model_points_zone_field)
    subbasin_obj.save(subbasin_path)
    del subbasin_obj

    # Basins
    logging.info('Calculating basins')
    basin_obj = arcpy.sa.Basin(flow_dir_obj)
    basin_obj.save(basin_path)
    del basin_obj

    # Clear subbasin value if HRU_TYPE is 0
    logging.info('Clearing subbasin ID for inactive cells')
    subbasin_obj = arcpy.sa.SetNull(hru_type_obj,
                                    arcpy.sa.Raster(subbasin_path), 'VALUE=0')
    subbasin_obj.save(subbasin_path)
    del subbasin_obj
    del hru_type_obj

    # Stream polylines
    logging.info('Calculating stream polylines')
    # ArcGIS fails for raster_to_x conversions on a network path
    # You have to go through an in_memory file first
    streams_temp = os.path.join('in_memory', 'streams')
    arcpy.sa.StreamToFeature(stream_link_path, flow_dir_obj, streams_temp,
                             'NO_SIMPLIFY')
    arcpy.CopyFeatures_management(streams_temp, streams_path)
    arcpy.Delete_management(streams_temp)
    del streams_temp

    # Write values to hru_polygon
    logging.info('\nExtracting stream parameters')
    vt_list = [
        [watersheds_path, hru.irunbound_field],
        [stream_link_path, hru.iseg_field],
        # [flow_dir_path, hru.flow_dir_field],
        [subbasin_path, hru.subbasin_field],
        [hru_type_path, hru.type_field]
    ]
    mem_point_path = os.path.join('in_memory', 'hru_point')
    arcpy.CopyFeatures_management(hru.point_path, mem_point_path)
    arcpy.sa.ExtractMultiValuesToPoints(mem_point_path, vt_list, 'NONE')
    del vt_list

    # Read values from points
    logging.info('  Reading cell values')
    data_dict = defaultdict(dict)
    fields = [
        hru.irunbound_field, hru.iseg_field, hru.subbasin_field,
        hru.type_field, hru.fid_field
    ]
    # fields = [
    #    hru.irunbound_field, hru.iseg_field, hru.flow_dir_field,
    #    hru.subbasin_field, hru.type_field, hru.fid_field]
    with arcpy.da.SearchCursor(mem_point_path, fields) as s_cursor:
        for row in s_cursor:
            for i, field in enumerate(fields[:-1]):
                # Set nodata or inactive cells to 0
                if row[i] is None or (int(row[-2]) == 0):
                    data_dict[int(row[-1])][field] = 0
                else:
                    data_dict[int(row[-1])][field] = int(row[i])
            del row
    del fields

    # ISEG for lake cells must be -1 * LAKE_ID, not LAKE_ID + OFFSET
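    # Since lake cells were stored as LAKE_ID + OFFSET above,
    #   OFFSET - (LAKE_ID + OFFSET) == -LAKE_ID recovers the negative ID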
    for k in data_dict.keys():
        irunbound = data_dict[k][hru.irunbound_field]
        iseg = data_dict[k][hru.iseg_field]
        if irunbound > lake_seg_offset:
            data_dict[k][hru.irunbound_field] = lake_seg_offset - irunbound
        if iseg > lake_seg_offset:
            data_dict[k][hru.iseg_field] = lake_seg_offset - iseg

    # data_dict = dict([(k,v) for k,v in data_dict.items()])
    # Write values to polygon
    logging.info('  Writing values to polygons')
    fields = [
        hru.irunbound_field, hru.iseg_field, hru.subbasin_field,
        hru.type_field, hru.fid_field
    ]
    # fields = [
    #    hru.irunbound_field, hru.iseg_field, hru.flow_dir_field,
    #    hru.subbasin_field, hru.type_field, hru.fid_field]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
        for row in u_cursor:
            row_dict = data_dict.get(int(row[-1]), None)
            for i, field in enumerate(fields[:-1]):
                if row_dict:
                    row[i] = row_dict[field]
                else:
                    row[i] = 0
            u_cursor.updateRow(row)
            del row_dict, row
    del fields

    # Write sink values to hru_polygon
    vt_list = []
    if arcpy.Exists(dem_sink_path):
        vt_list.append([dem_sink_path, hru.dem_sink_field])
    if vt_list:
        logging.info('\nExtracting sink values')
        for vt_item in vt_list:
            logging.debug('  {}: {}'.format(vt_item[1],
                                            os.path.basename(vt_item[0])))
        mem_point_path = os.path.join('in_memory', 'hru_point')
        arcpy.CopyFeatures_management(hru.point_path, mem_point_path)
        arcpy.sa.ExtractMultiValuesToPoints(mem_point_path, vt_list, 'NONE')

        # Read sink values from points
        logging.info('  Reading sink values')
        data_dict = defaultdict(dict)
        fields = [field for path, field in vt_list] + [hru.fid_field]
        with arcpy.da.SearchCursor(mem_point_path, fields) as s_cursor:
            for row in s_cursor:
                for i, field in enumerate(fields[:-1]):
                    # Set nodata or inactive cells to 0
                    if row[i] is None:
                        data_dict[int(row[-1])][field] = 0
                    else:
                        data_dict[int(row[-1])][field] = float(row[i])
                del row

        # Write sink values to polygon
        logging.info('  Writing sink values to polygons')
        fields = [field for path, field in vt_list] + [hru.fid_field]
        with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
            for row in u_cursor:
                row_dict = data_dict.get(int(row[-1]), None)
                for i, field in enumerate(fields[:-1]):
                    if row_dict:
                        row[i] = row_dict[field]
                    else:
                        row[i] = 0
                u_cursor.updateRow(row)
                del row_dict, row

    # Cleanup
    arcpy.Delete_management(mem_point_path)
    del mem_point_path, vt_list, data_dict, field

    # Re-Calculate HRU_ELEV
    # logging.info('Calculating HRU_ELEV from DEM_ADJ')
    # logging.info('  Converting from meters to feet')
    # arcpy.CalculateField_management(
    #    hru.polygon_path, hru_elev_field,
    #    # Convert meters to feet
    #    '!{}! * 3.28084'.format(dem_adj_field), 'PYTHON')

    # Cleanup
    del dem_fill_obj
    if set_lake_flag:
        del lake_id_obj
    del flow_dir_obj
    del flow_acc_full_obj
    del flow_acc_sub_obj
Example #11
def stream_parameters(config_path):
    """Calculate GSFLOW Stream Parameters

    Parameters
    ----------
    config_path : str
        Project configuration file (.ini) path.

    Returns
    -------
    None

    """
    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error(
            '\nERROR: Config file could not be read, '
            'is not an input file, or does not exist\n'
            '  config_file = {}\n'
            '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'stream_parameters_log.txt'
    log_console = logging.FileHandler(
        filename=os.path.join(hru.log_ws, log_file_name), mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW Stream Parameters')

    # CRT Parameters
    try:
        crt_hruflg = inputs_cfg.getint('INPUTS', 'crt_hruflg')
    except ConfigParser.NoOptionError:
        crt_hruflg = 0
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'crt_hruflg', crt_hruflg))
    try:
        crt_flowflg = inputs_cfg.getint('INPUTS', 'crt_flowflg')
    except ConfigParser.NoOptionError:
        crt_flowflg = 1
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'crt_flowflg', crt_flowflg))
    try:
        crt_dpit = inputs_cfg.getfloat('INPUTS', 'crt_dpit')
    except ConfigParser.NoOptionError:
        crt_dpit = 0.01
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'crt_dpit', crt_dpit))
    try:
        crt_outitmax = inputs_cfg.getint('INPUTS', 'crt_outitmax')
    except ConfigParser.NoOptionError:
        crt_outitmax = 100000
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'crt_outitmax', crt_outitmax))
    # Intentionally not allowing user to change this value
    crt_iprn = 1

    # CRT streams/cascade parameters
    crt_ws = os.path.join(hru.param_ws, 'cascade_work')
    crt_strmflg = 1
    crt_visflg = 1
    crt_ifill = 1

    # CRT groundwater cascades
    gw_ws = os.path.join(hru.param_ws, 'cascade_gw_work')
    gw_strmflg = 1
    gw_visflg = 1
    gw_ifill = 1

    # CRT Executable
    crt_exe_path = inputs_cfg.get('INPUTS', 'crt_exe_path')
    output_name = 'outputstat.txt'

    # Override ascii and rasters flags to generate CRT inputs
    output_ascii_flag = True
    output_rasters_flag = True

    # Parameters
    exit_seg = 0

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error(
            '\nERROR: Fishnet ({}) does not exist\n'.format(
                hru.polygon_path))
        sys.exit()
    # Streams shapefile from dem_2_streams is needed to get the length
    flow_temp_ws = os.path.join(hru.param_ws, 'flow_rasters')
    if not os.path.isdir(flow_temp_ws):
        logging.error(
             '\nERROR: Flow_rasters folder does not exist'
             '\nERROR:   {}'
             '\nERROR: Try re-running dem_2_streams.py\n'.format(
                 flow_temp_ws))
        sys.exit()
    streams_path = os.path.join(flow_temp_ws, 'streams.shp')
    if not os.path.isfile(streams_path):
        logging.error(
             '\nERROR: Stream shapefile does not exist'
             '\nERROR:   {}'
             '\nERROR: Try re-running dem_2_streams.py\n'.format(
                 streams_path))
        sys.exit()
    # Check that input fields exist and have data
    # Fields generated by hru_parameters
    for f in [hru.type_field, hru.row_field, hru.col_field]:
        if not arcpy.ListFields(hru.polygon_path, f):
            logging.error(
                '\nERROR: Input field {} is not present in fishnet'
                '\nERROR: Try re-running hru_parameters.py\n'.format(f))
            sys.exit()
        elif support.field_stat_func(hru.polygon_path, f, 'MAXIMUM') == 0:
            logging.error(
                '\nERROR: Input field {} contains only 0'
                '\nERROR: Try re-running hru_parameters.py\n'.format(f))
            sys.exit()
    # Fields generated by dem_2_streams
    for f in [hru.irunbound_field, hru.iseg_field, hru.flow_dir_field,
              hru.outflow_field, hru.subbasin_field]:
        if not arcpy.ListFields(hru.polygon_path, f):
            logging.error(
                '\nERROR: Input field {} is not present in fishnet'
                '\nERROR: Try re-running dem_2_streams.py\n'.format(f))
            sys.exit()
        elif support.field_stat_func(hru.polygon_path, f, 'MAXIMUM') == 0:
            logging.error(
                '\nERROR: Input field {} contains only 0'
                '\nERROR: Try re-running dem_2_streams.py\n'.format(f))
            sys.exit()

    # Build output folder if necessary
    stream_temp_ws = os.path.join(hru.param_ws, 'stream_rasters')
    if not os.path.isdir(stream_temp_ws):
        os.mkdir(stream_temp_ws)
    if not os.path.isdir(crt_ws):
        os.mkdir(crt_ws)
    if not os.path.isdir(gw_ws):
        os.mkdir(gw_ws)

    # Copy CRT executable if necessary
    crt_exe_name = os.path.basename(crt_exe_path)
    if not os.path.isfile(os.path.join(crt_ws, crt_exe_name)):
        shutil.copy(crt_exe_path, crt_ws)
    if not os.path.isfile(os.path.join(gw_ws, crt_exe_name)):
        shutil.copy(crt_exe_path, gw_ws)
    if not os.path.isfile(os.path.join(crt_ws, crt_exe_name)):
        logging.error(
            '\nERROR: CRT executable ({}) does not exist\n'.format(
                os.path.join(crt_ws, crt_exe_name)))
        sys.exit()

    # Cascades files
    crt_hru_casc_path = os.path.join(crt_ws, 'HRU_CASC.DAT')
    crt_outflow_hru_path = os.path.join(crt_ws, 'OUTFLOW_HRU.DAT')
    crt_land_elev_path = os.path.join(crt_ws, 'LAND_ELEV.DAT')
    crt_stream_cells_path = os.path.join(crt_ws, 'STREAM_CELLS.DAT')
    crt_xy_path = os.path.join(crt_ws, 'XY.DAT')

    # Groundwater cascades files
    gw_hru_casc_path = os.path.join(gw_ws, 'HRU_CASC.DAT')
    gw_outflow_hru_path = os.path.join(gw_ws, 'OUTFLOW_HRU.DAT')
    gw_land_elev_path = os.path.join(gw_ws, 'LAND_ELEV.DAT')
    gw_stream_cells_path = os.path.join(gw_ws, 'STREAM_CELLS.DAT')
    gw_xy_path = os.path.join(gw_ws, 'XY.DAT')

    # Output names
    dem_adj_raster_name = 'dem_adj'
    hru_type_raster_name = 'hru_type'
    iseg_raster_name = 'iseg'
    irunbound_raster_name = 'irunbound'
    subbasin_raster_name = 'sub_basins'
    segbasin_raster_name = 'seg_basins'

    # Output raster paths
    dem_adj_raster = os.path.join(stream_temp_ws, dem_adj_raster_name + '.img')
    hru_type_raster = os.path.join(stream_temp_ws, hru_type_raster_name + '.img')
    iseg_raster = os.path.join(stream_temp_ws, iseg_raster_name + '.img')
    irunbound_raster = os.path.join(stream_temp_ws, irunbound_raster_name + '.img')
    subbasin_raster = os.path.join(stream_temp_ws, subbasin_raster_name + '.img')
    segbasin_raster = os.path.join(stream_temp_ws, segbasin_raster_name + '.img')

    # Output ascii paths
    a_fmt = '{}_ascii.txt'
    dem_adj_ascii = os.path.join(stream_temp_ws, a_fmt.format(dem_adj_raster_name))
    hru_type_ascii = os.path.join(stream_temp_ws, a_fmt.format(hru_type_raster_name))
    iseg_ascii = os.path.join(stream_temp_ws, a_fmt.format(iseg_raster_name))
    irunbound_ascii = os.path.join(stream_temp_ws, a_fmt.format(irunbound_raster_name))
    subbasin_ascii = os.path.join(stream_temp_ws, a_fmt.format(subbasin_raster_name))
    segbasin_ascii = os.path.join(stream_temp_ws, a_fmt.format(segbasin_raster_name))

    # Layers
    hru_polygon_lyr = 'hru_polygon_lyr'

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    # env.pyramid = 'PYRAMIDS -1'
    env.pyramid = 'PYRAMIDS 0'
    env.workspace = stream_temp_ws
    env.scratchWorkspace = hru.scratch_ws

    # Add fields if necessary
    logging.info('\nAdding fields if necessary')
    support.add_field_func(hru.polygon_path, hru.iseg_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.irunbound_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.flow_dir_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.krch_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.irch_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.jrch_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.reach_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.rchlen_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.maxreach_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.outseg_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.iupseg_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.subbasin_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.segbasin_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.strm_top_field, 'FLOAT')
    support.add_field_func(hru.polygon_path, hru.strm_slope_field, 'FLOAT')

    # Calculate KRCH, IRCH, JRCH for stream segments
    logging.info('\nKRCH, IRCH, & JRCH for streams')
    fields = [
        hru.type_field, hru.iseg_field, hru.row_field, hru.col_field,
        hru.krch_field, hru.irch_field, hru.jrch_field]
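    # KRCH/IRCH/JRCH are the MODFLOW SFR reach layer/row/column;
    #   the layer is always 1 here, and non-stream cells are zeroed out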
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as update_c:
        for row in update_c:
            if (int(row[0]) in [1, 3] and int(row[1]) > 0):
                row[4], row[5], row[6] = 1, int(row[2]), int(row[3])
            else:
                row[4], row[5], row[6] = 0, 0, 0
            update_c.updateRow(row)

    # Get stream length for each cell
    logging.info('Stream length')
    arcpy.MakeFeatureLayer_management(hru.polygon_path, hru_polygon_lyr)
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_lyr, 'NEW_SELECTION',
        '"{}" = 1 And "{}" <> 0'.format(hru.type_field, hru.iseg_field))
    length_path = os.path.join('in_memory', 'length')
    arcpy.Intersect_analysis(
        [hru_polygon_lyr, streams_path],
        length_path, 'ALL', '', 'LINE')
    arcpy.Delete_management(hru_polygon_lyr)
    length_field = 'LENGTH'
    arcpy.AddField_management(length_path, length_field, 'LONG')
    arcpy.CalculateField_management(
        length_path, length_field, '!shape.length@meters!', 'PYTHON')
    length_dict = defaultdict(int)
    # DEADBEEF - This probably needs a maximum limit
    for row in arcpy.da.SearchCursor(
            length_path, [hru.id_field, length_field]):
        length_dict[int(row[0])] += int(row[1])
    fields = [hru.type_field, hru.iseg_field, hru.rchlen_field, hru.id_field]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as update_c:
        for row in update_c:
            if (int(row[0]) == 1 and int(row[1]) != 0):
                row[2] = length_dict[int(row[3])]
            else:
                row[2] = 0
            update_c.updateRow(row)
    del length_dict, length_field, fields, hru_polygon_lyr

    # Get list of segments and downstream cell for each stream/lake cell
    # Downstream is calculated from flow direction
    # Use IRUNBOUND instead of ISEG, since ISEG will be zeroed for lakes
    # DEADBEEF - I don't think ISEG will be zero for lakes anymore
    logging.info('Cell out-flow dictionary')
    cell_dict = dict()
    fields = [
        hru.type_field, hru.krch_field, hru.lake_id_field, hru.iseg_field,
        hru.irunbound_field, hru.dem_adj_field, hru.flow_dir_field,
        hru.col_field, hru.row_field, hru.id_field]
    for row in arcpy.da.SearchCursor(hru.polygon_path, fields):
        # Skip inactive cells
        if int(row[0]) == 0:
            continue
        # Skip if not lake and not stream
        elif (int(row[1]) == 0 and int(row[2]) == 0):
            continue

        # ROW / COL
        cell = (int(row[7]), int(row[8]))

        # Read in parameters
        # HRU_ID, IRUNBOUND (negative for lakes), next_row_col(FLOW_DIR, CELL),
        #   DEM_ADJ, then three placeholders for OUTSEG, REACH, MAXREACH
        cell_dict[cell] = [
            int(row[9]), int(row[4]), support.next_row_col(int(row[6]), cell),
            float(row[5]), 0, 0, 0]

    # Build list of unique segments
    iseg_list = sorted(list(set([v[1] for v in cell_dict.values()])))
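    # v[1] is IRUNBOUND: positive for stream segments, negative for lakes,
    #   so the loop below treats iseg > 0 as streams and iseg < 0 as lakes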

    # Calculate IREACH and OUTSEG
    logging.info('Calculate {} and {}'.format(
        hru.reach_field, hru.outseg_field))
    outseg_dict = dict()
    for iseg in sorted(iseg_list):
        logging.debug('    Segment: {}'.format(iseg))

        # Subset of cell_dict for current iseg
        iseg_dict = dict([(k, v) for k, v in cell_dict.items() if v[1] == iseg])

        # List of all cells in current iseg
        iseg_cells = iseg_dict.keys()

        # List of out_cells for all cells in current iseg
        out_cells = [value[2] for value in iseg_dict.values()]

        # Every iseg will (should?) have one out_cell
        out_cell = list(set(out_cells) - set(iseg_cells))

        # Process streams and lakes separately
        # Streams
        if iseg > 0:
            # If there is more than one out_cell
            #   there is a problem with the stream network
            if len(out_cell) != 1:
                logging.error(
                    '\nERROR: ISEG {} has more than one output cell'
                    '\n  Out cells: {}'
                    '\n  Check for streams exiting then re-entering a lake'
                    '\n  Lake cell elevations may not be constant\n'.format(
                        iseg, out_cell))
                sys.exit()

            # If no output cell, assume edge of domain
            try:
                outseg = cell_dict[out_cell[0]][1]
            except KeyError:
                outseg = exit_seg

            # Track sub-basin outseg
            outseg_dict[iseg] = outseg

            # Calculate reach number for each cell
            reach_dict = dict()
            start_cell = list(set(iseg_cells) - set(out_cells))[0]
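            # start_cell is the headwater: the one segment cell that no other
            #   cell in the segment flows into; walk downstream cell by cell,
            #   numbering reaches 1..N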
            for i in range(len(out_cells)):
                # logging.debug('    Reach: {}  Cell: {}'.format(i+1, start_cell))
                reach_dict[start_cell] = i + 1
                start_cell = iseg_dict[start_cell][2]

            # For each cell in iseg, save outseg, reach, & maxreach
            for iseg_cell in iseg_cells:
                cell_dict[iseg_cell][4:] = [
                    outseg, reach_dict[iseg_cell], len(iseg_cells)]
            del reach_dict, start_cell, outseg
        # Lakes
        else:
            # For lake cells, there can be multiple outlets if all of them
            #   are to inactive cells or out of the model
            # Otherwise, like streams, there should only be one outcell per iseg
            logging.debug('  Length: {}'.format(len(out_cells)))
            if len(out_cell) == 1:
                try:
                    outseg = cell_dict[out_cell[0]][1]
                except KeyError:
                    outseg = exit_seg
            elif (len(out_cell) != 1 and
                  all(x not in cell_dict.keys() for x in out_cell)):
                outseg = exit_seg
                logging.debug(
                    '  All out cells are inactive, setting outseg '
                    'to exit_seg {}'.format(exit_seg))
            else:
                logging.error(
                    '\nERROR: ISEG {} has more than one output cell'
                    '\n  Out cells: {}'
                    '\n  Check for streams exiting then re-entering a lake'
                    '\n  Lake cell elevations may not be constant\n'.format(
                         iseg, out_cell))
                raw_input('ENTER')

            # Track sub-basin outseg
            outseg_dict[iseg] = outseg

            # For each lake segment cell, only save outseg
            # All lake cells are routed directly to the outseg
            for iseg_cell in iseg_cells:
                cell_dict[iseg_cell][4:] = [outseg, 0, 0]
            del outseg

        del iseg_dict, iseg_cells, iseg
        del out_cells, out_cell

    # Calculate stream elevation
    logging.info('Stream elevation (DEM_ADJ - 1 for now)')
    fields = [
        hru.type_field, hru.iseg_field, hru.dem_adj_field,
        hru.strm_top_field]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as update_c:
        for row in update_c:
            if int(row[0]) == 1 and int(row[1]) != 0:
                row[3] = float(row[2]) - 1
            else:
                row[3] = 0
            update_c.updateRow(row)

    # Saving ireach and outseg
    logging.info('Save IREACH and OUTSEG')
    fields = [
        hru.type_field, hru.iseg_field, hru.col_field, hru.row_field,
        hru.outseg_field, hru.reach_field, hru.maxreach_field]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as update_c:
        for row in update_c:
            # if (int(row[0]) > 0 and int(row[1]) > 0):
            # DEADBEEF - I'm not sure why only iseg > 0 in above line
            # DEADBEEF - This should set outseg for streams and lakes
            if (int(row[0]) > 0 and int(row[1]) != 0):
                row[4:] = cell_dict[(int(row[2]), int(row[3]))][4:]
            else:
                row[4:] = [0, 0, 0]
            update_c.updateRow(row)

    # Calculate IUPSEG for all segments flowing out of lakes
    logging.info('IUPSEG for streams flowing out of lakes')
    upseg_dict = dict(
        [(v, k) for k, v in outseg_dict.iteritems() if k < 0])
    fields = [hru.type_field, hru.iseg_field, hru.iupseg_field]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as update_c:
        for row in update_c:
            if (int(row[0]) == 1 and int(row[1]) != 0 and
                    int(row[1]) in upseg_dict.keys()):
                row[2] = upseg_dict[int(row[1])]
            else:
                row[2] = 0
            update_c.updateRow(row)

    # Build dictionary of which segments flow into each segment
    # Used to calculate seg-basins (sub watersheds) for major streams
    # Also save list of all segments that pour to exit
    logging.info('Segment in/out-flow dictionary')
    inseg_dict = defaultdict(list)
    pourseg_dict = dict()
    pourseg_list = []
    for key, value in outseg_dict.iteritems():
        if key == exit_seg:
            continue
            # inseg_dict[key].append(key)
        elif value == exit_seg:
            pourseg_list.append(key)
            inseg_dict[key].append(key)
        else:
            inseg_dict[value].append(key)

    # Update pourseg for each segment, working up from initial pourseg
    # Pourseg is the final exit segment for each upstream segment
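    # e.g. (assumed topology) segments 7 -> 3 -> 1 -> exit give
    #   pourseg_list = [1] and inseg_dict = {1: [3, 1], 3: [7]}, and the
    #   walk below yields pourseg_dict = {1: 1, 3: 1, 7: 1}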
    for pourseg in pourseg_list:
        testseg_list = inseg_dict[pourseg]
        while testseg_list:
            testseg = testseg_list.pop()
            pourseg_dict[testseg] = pourseg
            if pourseg == testseg:
                continue
            testseg_list.extend(inseg_dict[testseg])
        del testseg_list

    # Calculate SEG_BASIN for all active cells
    # SEG_BASIN corresponds to the ISEG of the lowest segment
    logging.info('SEG_BASIN')
    fields = [hru.type_field, hru.irunbound_field, hru.segbasin_field]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as update_c:
        for row in update_c:
            if int(row[0]) > 0 and int(row[1]) != 0:
                row[2] = pourseg_dict[int(row[1])]
            else:
                row[2] = 0
            update_c.updateRow(row)

    # # Set all swale cells back to hru_type 2 (lake)
    # logging.info('Swale HRU_TYPE')
    # with arcpy.da.UpdateCursor(hru.polygon_path, [hru.type_field]) as update_c:
    #     for row in update_c:
    #         if int(row[0]) == 3:
    #             row[0] = 2
    #             update_c.updateRow(row)

    # Set all lake iseg to 0
    logging.info('Lake ISEG')
    fields = [hru.type_field, hru.iseg_field]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as update_c:
        for row in update_c:
            if int(row[0]) != 2:
                continue
            iseg = int(row[1])
            if iseg < 0:
                row[1] = 0
                update_c.updateRow(row)

    # Set environment parameters
    env.extent = hru.extent
    env.cellsize = hru.cs
    env.outputCoordinateSystem = hru.sr

    # Build rasters
    if output_rasters_flag:
        logging.info('\nOutput model grid rasters')
        arcpy.PolygonToRaster_conversion(
            hru.polygon_path, hru.type_field, hru_type_raster,
            'CELL_CENTER', '', hru.cs)
        arcpy.PolygonToRaster_conversion(
            hru.polygon_path, hru.dem_adj_field, dem_adj_raster,
            'CELL_CENTER', '', hru.cs)
        arcpy.PolygonToRaster_conversion(
            hru.polygon_path, hru.iseg_field, iseg_raster,
            'CELL_CENTER', '', hru.cs)
        arcpy.PolygonToRaster_conversion(
            hru.polygon_path, hru.irunbound_field, irunbound_raster,
            'CELL_CENTER', '', hru.cs)
        arcpy.PolygonToRaster_conversion(
            hru.polygon_path, hru.segbasin_field, segbasin_raster,
            'CELL_CENTER', '', hru.cs)
        arcpy.PolygonToRaster_conversion(
            hru.polygon_path, hru.subbasin_field, subbasin_raster,
            'CELL_CENTER', '', hru.cs)

    # Build rasters
    if output_ascii_flag:
        logging.info('Output model grid ascii')
        arcpy.RasterToASCII_conversion(hru_type_raster, hru_type_ascii)
        arcpy.RasterToASCII_conversion(dem_adj_raster, dem_adj_ascii)
        arcpy.RasterToASCII_conversion(iseg_raster, iseg_ascii)
        arcpy.RasterToASCII_conversion(irunbound_raster, irunbound_ascii)
        arcpy.RasterToASCII_conversion(segbasin_raster, segbasin_ascii)
        arcpy.RasterToASCII_conversion(subbasin_raster, subbasin_ascii)
        sleep(5)

    logging.debug('\nRemoving existing CRT fill files')
    if os.path.isfile(crt_hru_casc_path):
        os.remove(crt_hru_casc_path)
    if os.path.isfile(crt_outflow_hru_path):
        os.remove(crt_outflow_hru_path)
    if os.path.isfile(crt_land_elev_path):
        os.remove(crt_land_elev_path)
    if os.path.isfile(crt_stream_cells_path):
        os.remove(crt_stream_cells_path)
    if os.path.isfile(crt_xy_path):
        os.remove(crt_xy_path)

    # Input parameters files for Cascade Routing Tool (CRT)
    logging.info('\nBuilding output CRT files')

    # Generate STREAM_CELLS.DAT file for CRT
    # Include non-lake SWALES in streams file
    logging.info('  {}'.format(
        os.path.basename(crt_stream_cells_path)))
    stream_cells_list = []
    fields = [
        hru.type_field, hru.iseg_field, hru.reach_field,
        hru.col_field, hru.row_field]
    for row in arcpy.da.SearchCursor(hru.polygon_path, fields):
        if int(row[0]) in [1, 3] and int(row[1]) > 0:
            stream_cells_list.append(
                [int(row[4]), int(row[3]), int(row[1]), int(row[2]), 1])
    if stream_cells_list:
        with open(crt_stream_cells_path, 'w+') as f:
            f.write('{}    NREACH\n'.format(len(stream_cells_list)))
            for stream_cells_l in sorted(stream_cells_list):
                f.write(' '.join(map(str, stream_cells_l)) + '\n')
    del stream_cells_list

    # Generate OUTFLOW_HRU.DAT for CRT
    # Outflow cells exit the model to inactive cells or out of the domain
    #   Outflow field is set in dem_2_streams
    logging.info('  {}'.format(
        os.path.basename(crt_outflow_hru_path)))
    outflow_hru_list = []
    fields = [
        hru.type_field, hru.outflow_field, hru.subbasin_field,
        hru.row_field, hru.col_field]
    for row in arcpy.da.SearchCursor(hru.polygon_path, fields):
        if int(row[0]) != 0 and int(row[1]) == 1:
            outflow_hru_list.append([int(row[3]), int(row[4])])
    if outflow_hru_list:
        with open(crt_outflow_hru_path, 'w+') as f:
            f.write('{}    NUMOUTFLOWHRU\n'.format(
                len(outflow_hru_list)))
            for i, outflow_hru in enumerate(outflow_hru_list):
                f.write('{} {} {}   OUTFLOW_ID ROW COL\n'.format(
                    i + 1, outflow_hru[0], outflow_hru[1]))
    else:
        logging.error('\nERROR: No OUTFLOWHRU points, exiting')
        sys.exit()
    del outflow_hru_list

    #  Generate OUTFLOW_HRU.DAT for CRT
    # logging.info('  {}'.format(
    #    os.path.basename(crt_outflow_hru_path)))
    # outflow_hru_list = []
    # fields = [
    #    hru.type_field, hru.iseg_field, hru.outseg_field, hru.reach_field,
    #    hru.maxreach_field, hru.col_field, hru.row_field]
    # for row in arcpy.da.SearchCursor(hru.polygon_path, fields):
    #    if int(row[0]) != 1 or int(row[1]) == 0: continue
    #    if int(row[2]) == 0 and int(row[3]) == int(row[4]):
    #        outflow_hru_list.append([int(row[6]), int(row[5])])
    # if outflow_hru_list:
    #    with open(crt_outflow_hru_path, 'w+') as f:
    #        f.write('{}    NUMOUTFLOWHRU\n'.format(
    #            len(outflow_hru_list)))
    #        for i, outflow_hru in enumerate(outflow_hru_list):
    #            f.write('{} {} {}   OUTFLOW_ID ROW COL\n'.format(
    #                i+1, outflow_hru[0], outflow_hru[1]))
    #    f.close()
    # del outflow_hru_list

    # Generate HRU_CASC.DAT for CRT
    logging.info('  {}'.format(os.path.basename(crt_hru_casc_path)))
    with open(hru_type_ascii, 'r') as f:
        ascii_data = f.readlines()
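    # The first 6 lines of an ESRI ASCII grid are the header (ncols, nrows,
    #   xllcorner, yllcorner, cellsize, NODATA_value); only the data rows
    #   (ascii_data[6:]) are written to the CRT file (LAND_ELEV.DAT below
    #   reads its NROW/NCOL values from this same header layout)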
    hru_casc_header = (
        '{} {} {} {} {} {} {} {}     '
        'HRUFLG STRMFLG FLOWFLG VISFLG IPRN IFILL DPIT OUTITMAX\n').format(
            crt_hruflg, crt_strmflg, crt_flowflg, crt_visflg,
            crt_iprn, crt_ifill, crt_dpit, crt_outitmax)
    with open(crt_hru_casc_path, 'w+') as f:
        f.write(hru_casc_header)
        for ascii_line in ascii_data[6:]:
            f.write(ascii_line)
    del hru_casc_header, ascii_data

    # Generate LAND_ELEV.DAT for CRT
    logging.info('  {}'.format(os.path.basename(crt_land_elev_path)))
    with open(dem_adj_ascii, 'r') as f:
        ascii_data = f.readlines()
    with open(crt_land_elev_path, 'w+') as f:
        f.write('{} {}       NROW NCOL\n'.format(
            ascii_data[1].split()[1], ascii_data[0].split()[1]))
        for ascii_line in ascii_data[6:]:
            f.write(ascii_line)
    del ascii_data

    # Generate XY.DAT for CRT
    logging.info('  {}'.format(os.path.basename(crt_xy_path)))
    xy_list = [
        map(int, row)
        for row in sorted(arcpy.da.SearchCursor(
            hru.polygon_path, [hru.id_field, hru.x_field, hru.y_field]))]
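    # XY.DAT pairs each HRU ID with its cell center coordinates; map(int, ...)
    #   truncates the coordinates, which is assumed acceptable for CRT's
    #   visualization output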
    with open(crt_xy_path, 'w+') as f:
        for line in sorted(xy_list):
            f.write(' '.join(map(str, line)) + '\n')

    # Run CRT
    logging.info('\nRunning CRT')
    subprocess.check_output(crt_exe_name, cwd=crt_ws, shell=True)

    # Read in outputstat.txt to check for errors
    logging.info('\nReading CRT {}'.format(output_name))
    output_path = os.path.join(crt_ws, output_name)
    with open(output_path, 'r') as f:
        output_data = [l.strip() for l in f.readlines()]

    # Check if there are errors
    if 'CRT FOUND UNDECLARED SWALE HRUS' in output_data:
        logging.error(
            '\nERROR: CRT found undeclared swale HRUs (sinks)\n'
            '  All sinks must be filled before generating cascades\n'
            '  Check the CRT outputstat.txt file\n')
        sys.exit()
    elif 'CRT EXECUTION COMPLETE' not in output_data:
        logging.error('\nERROR: CRT did not successfully complete\n')
        sys.exit()




    # Rerun CRT without lakes to build groundwater cascades
    # This is only needed if there are lakes in the model
    # For now the input files are being copied from the cascade_work folder
    # (except HRU_CASC.DAT)
    logging.debug('\nRemoving existing CRT fill files')
    if os.path.isfile(gw_hru_casc_path):
        os.remove(gw_hru_casc_path)
    if os.path.isfile(gw_outflow_hru_path):
        os.remove(gw_outflow_hru_path)
    if os.path.isfile(gw_land_elev_path):
        os.remove(gw_land_elev_path)
    if os.path.isfile(gw_stream_cells_path):
        os.remove(gw_stream_cells_path)
    if os.path.isfile(gw_xy_path):
        os.remove(gw_xy_path)

    logging.info('\nCopying cascade CRT files (except HRU_CASC.DAT)')
    shutil.copy(crt_outflow_hru_path, gw_outflow_hru_path)
    shutil.copy(crt_land_elev_path, gw_land_elev_path)
    shutil.copy(crt_stream_cells_path, gw_stream_cells_path)
    shutil.copy(crt_xy_path, gw_xy_path)

    # Input parameters files for Cascade Routing Tool (CRT)
    logging.info('\nBuilding groundwater cascade CRT files')

    # Generate HRU_CASC.DAT for CRT
    logging.info('  {}'.format(os.path.basename(gw_hru_casc_path)))
    with open(hru_type_ascii, 'r') as f:
        ascii_data = f.readlines()
    hru_casc_header = (
        '{} {} {} {} {} {} {} {}     '
        'HRUFLG STRMFLG FLOWFLG VISFLG IPRN IFILL DPIT OUTITMAX\n').format(
            crt_hruflg, crt_strmflg, crt_flowflg, crt_visflg,
            crt_iprn, crt_ifill, crt_dpit, crt_outitmax)
    with open(gw_hru_casc_path, 'w+') as f:
        f.write(hru_casc_header)
        for ascii_line in ascii_data[6:]:
            # Convert all lakes to active
            # Should swales (type 3) be converted also?
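            # (assumes HRU_TYPE codes are single digits 0-3; a multi-digit
            #   value in the data rows would make this replace too greedy)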
            f.write(ascii_line.replace('2', '1'))
    del hru_casc_header, ascii_data

    # Run CRT
    logging.info('\nRunning CRT for groundwater cascades')
    subprocess.check_output(crt_exe_name, cwd=gw_ws, shell=True)

    # Read in outputstat.txt to check for errors
    logging.info('\nReading CRT {}'.format(output_name))
    output_path = os.path.join(gw_ws, output_name)
    with open(output_path, 'r') as f:
        output_data = [l.strip() for l in f.readlines()]

    # Check if there are errors
    if 'CRT FOUND UNDECLARED SWALE HRUS' in output_data:
        logging.error(
            '\nERROR: CRT found undeclared swale HRUs (sinks)\n'
            '  All sinks must be filled before generating cascades\n'
            '  Check the CRT outputstat.txt file\n')
        sys.exit()
    elif 'CRT EXECUTION COMPLETE' not in output_data:
        logging.error('\nERROR: CRT did not successfully complete\n')
        sys.exit()


def temp_adjust_parameters(config_path):
    """Calculate GSFLOW Temperature Adjustment Parameters

    Parameters
    ----------
    config_path : str
        Project configuration file (.ini) path.

    Returns
    -------
    None

    """
    # Hardcoded HRU field formats for now
    tmax_field_fmt = 'TMAX_{:02d}'
    tmin_field_fmt = 'TMIN_{:02d}'
    tmax_adj_field_fmt = 'TMX_ADJ_{:02d}'
    tmin_adj_field_fmt = 'TMN_ADJ_{:02d}'

    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'temp_adjust_parameters_log.txt'
    log_console = logging.FileHandler(filename=os.path.join(
        hru.log_ws, log_file_name),
                                      mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW Temperature Adjustment Parameters')

    # Units
    temp_obs_units = support.get_param('temp_obs_units', 'C',
                                       inputs_cfg).upper()
    temp_units_list = ['C', 'F', 'K']
    # Compare against the upper case of the values in the list
    #   but don't modify the acceptable units list
    if temp_obs_units not in temp_units_list:
        logging.error('\nERROR: Invalid observed temperature units ({})\n  '
                      'Valid units are: {}'.format(temp_obs_units,
                                                   ', '.join(temp_units_list)))
        sys.exit()

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()

    # Temperature calculation method
    try:
        temp_calc_method = inputs_cfg.get('INPUTS',
                                          'temperature_calc_method').upper()
    except:
        temp_calc_method = '1STA'
        logging.info('  Defaulting temperature_calc_method = {}'.format(
            temp_calc_method))

    temp_calc_options = ['ZONES', '1STA', 'LAPSE']
    if temp_calc_method not in temp_calc_options:
        logging.error(
            '\nERROR: Invalid temperature calculation method ({})\n  '
            'Valid methods are: {}'.format(temp_calc_method,
                                           ', '.join(temp_calc_options)))
        sys.exit()
    if temp_calc_method == 'LAPSE':
        logging.warning(
            '\nWARNING: If the temperature calculation method is LAPSE,'
            '\n  it is not necessary to run temp_adjust_parameters.py'
            '\n  Exiting')
        return False

    if temp_calc_method == 'ZONES':
        temp_zone_orig_path = inputs_cfg.get('INPUTS', 'temp_zone_path')
        try:
            temp_zone_id_field = inputs_cfg.get('INPUTS', 'temp_zone_id_field')
        except:
            logging.error(
                '\nERROR: temp_zone_id_field must be set in INI to apply '
                'zone specific temperature adjustments\n')
            sys.exit()

        try:
            temp_hru_id_field = inputs_cfg.get('INPUTS', 'temp_hru_id_field')
        except:
            temp_hru_id_field = None
            logging.warning(
                '  temp_hru_id_field was not set in the INI file\n'
                '  Temperature adjustments will not be changed to match station '
                'values')

        # Field name for TSTA hard coded, but could be changed to be read from
        # config file like temp_zone
        hru_tsta_field = 'HRU_TSTA'

        try:
            tmax_obs_field_fmt = inputs_cfg.get('INPUTS',
                                                'tmax_obs_field_format')
        except:
            tmax_obs_field_fmt = 'TMAX_{:02d}'
            logging.info('  Defaulting tmax_obs_field_format = {}'.format(
                tmax_obs_field_fmt))

        try:
            tmin_obs_field_fmt = inputs_cfg.get('INPUTS',
                                                'tmin_obs_field_format')
        except:
            tmin_obs_field_fmt = 'TMIN_{:02d}'
            logging.info('  Defaulting tmin_obs_field_format = {}'.format(
                tmin_obs_field_fmt))

        if not arcpy.Exists(temp_zone_orig_path):
            logging.error(
                '\nERROR: Temperature Zone ({}) does not exist'.format(
                    temp_zone_orig_path))
            sys.exit()
        # temp_zone_path must be a polygon shapefile
        if arcpy.Describe(temp_zone_orig_path).datasetType != 'FeatureClass':
            logging.error(
                '\nERROR: temp_zone_path must be a polygon shapefile')
            sys.exit()

        # Check temp_zone_id_field
        if temp_zone_id_field.upper() in ['FID', 'OID']:
            temp_zone_id_field = arcpy.Describe(
                temp_zone_orig_path).OIDFieldName
            logging.warning('\n  NOTE: Using {} to set {}\n'.format(
                temp_zone_id_field, hru.temp_zone_id_field))
        elif not arcpy.ListFields(temp_zone_orig_path, temp_zone_id_field):
            logging.error(
                '\nERROR: temp_zone_id_field field {} does not exist\n'.format(
                    temp_zone_id_field))
            sys.exit()
        # Need to check that field is an int type
        # Should we only check active cells (HRU_TYPE > 0)?
        elif not [
                f.type for f in arcpy.Describe(temp_zone_orig_path).fields
                if (f.name == temp_zone_id_field
                    and f.type in ['SmallInteger', 'Integer'])
        ]:
            logging.error(
                '\nERROR: temp_zone_id_field field {} must be an integer type\n'
                .format(temp_zone_id_field))
            sys.exit()
        # Need to check that field values are all positive
        # Should we only check active cells (HRU_TYPE > 0)?
        elif min([
                row[0] for row in arcpy.da.SearchCursor(
                    temp_zone_orig_path, [temp_zone_id_field])
        ]) <= 0:
            logging.error(
                '\nERROR: temp_zone_id_field values cannot be negative\n')
            sys.exit()

        # Check hru_tsta_field
        if not arcpy.ListFields(temp_zone_orig_path, hru_tsta_field):
            logging.error(
                '\nERROR: hru_tsta_field field {} does not exist\n'.format(
                    hru_tsta_field))
            sys.exit()
        # Need to check that field is an int type
        # Only check active cells (HRU_TYPE > 0)?
        elif not [
                f.type for f in arcpy.Describe(temp_zone_orig_path).fields
                if (f.name == hru_tsta_field
                    and f.type in ['SmallInteger', 'Integer'])
        ]:
            logging.error(
                '\nERROR: hru_tsta_field field {} must be an integer type\n'.
                format(hru_tsta_field))
            sys.exit()
        # Need to check that field values are all positive
        # Only check active cells (HRU_TYPE > 0)?
        elif min([
                row[0] for row in arcpy.da.SearchCursor(
                    temp_zone_orig_path, [hru_tsta_field])
        ]) <= 0:
            logging.error(
                '\nERROR: hru_tsta_field values cannot be negative\n')
            sys.exit()

        # Check temp_hru_id_field
        # temp_hru_id values are checked later
        if temp_hru_id_field is not None:
            if not arcpy.ListFields(temp_zone_orig_path, temp_hru_id_field):
                logging.error(
                    '\nERROR: temp_hru_id_field field {} does not exist\n'.
                    format(temp_hru_id_field))
                sys.exit()
            # Need to check that field is an int type
            elif not [
                    f.type for f in arcpy.Describe(temp_zone_orig_path).fields
                    if (f.name == temp_hru_id_field
                        and f.type in ['SmallInteger', 'Integer'])
            ]:
                logging.error(
                    '\nERROR: temp_hru_id_field field {} must be an integer type\n'
                    .format(temp_hru_id_field))
                sys.exit()
            # Need to check that field values are not negative (0 is okay)
            elif min([
                    row[0] for row in arcpy.da.SearchCursor(
                        temp_zone_orig_path, [temp_hru_id_field])
            ]) < 0:
                logging.error(
                    '\nERROR: temp_hru_id_field values cannot be negative\n')
                sys.exit()
    elif temp_calc_method == '1STA':
        # If a zone shapefile is not used, temperature must be set manually
        tmax_obs_list = inputs_cfg.get('INPUTS', 'tmax_obs_list')
        tmin_obs_list = inputs_cfg.get('INPUTS', 'tmin_obs_list')

        # Check that values are floats
        try:
            tmax_obs_list = map(float, tmax_obs_list.split(','))
        except ValueError:
            logging.error('\nERROR: tmax_obs_list (mean monthly tmax) '
                          'values could not be parsed as floats\n')
            sys.exit()
        try:
            tmin_obs_list = map(float, tmin_obs_list.split(','))
        except ValueError:
            logging.error('\nERROR: tmin_obs_list (mean monthly tmin) '
                          'values could not be parsed as floats\n')
            sys.exit()

        # Check that there are 12 values
        if len(tmax_obs_list) != 12:
            logging.error('\nERROR: There must be exactly 12 mean monthly '
                          'observed tmax values in tmax_obs_list\n')
            sys.exit()
        logging.info(
            '  Observed mean monthly tmax ({}):\n    {}\n'
            '    (Script will assume these are listed in month order, '
            'i.e. Jan, Feb, ...)'.format(temp_obs_units,
                                         ', '.join(map(str, tmax_obs_list))))

        if len(tmin_obs_list) != 12:
            logging.error('\nERROR: There must be exactly 12 mean monthly '
                          'observed tmin values in tmin_obs_list\n')
            sys.exit()
        logging.info(
            '  Observed mean monthly tmin ({}):\n    {}\n'
            '    (Script will assume these are listed in month order, '
            'i.e. Jan, Feb, ...)'.format(temp_obs_units,
                                         ', '.join(map(str, tmin_obs_list))))

        # Check if all the values are 0
        if tmax_obs_list == ([0.0] * 12):
            logging.error(
                '\nERROR: The observed tmax values are all 0.\n'
                '  To compute tmax adjustments, please set the tmax_obs_list '
                'parameter in the INI with\n  observed mean monthly tmax '
                'values (i.e. from a weather station)')
            sys.exit()
        if tmin_obs_list == ([0.0] * 12):
            logging.error(
                '\nERROR: The observed tmin values are all 0.\n'
                '  To compute tmin adjustments, please set the tmin_obs_list '
                'parameter in the INI with\n  observed mean monthly tmin '
                'values (i.e. from a weather station)')
            sys.exit()

        # Get the temperature HRU ID
        try:
            temp_hru_id = inputs_cfg.getint('INPUTS', 'temp_hru_id')
        except:
            temp_hru_id = 0

        # Check that the temp_hru_id is a valid cell hru_id
        # If temp_hru_id is 0, temperature adjustments will not be adjusted
        if temp_hru_id > 0:
            # Check that HRU_ID is valid
            logging.info('    Temperature HRU_ID: {}'.format(temp_hru_id))
            arcpy.MakeTableView_management(
                hru.polygon_path, "layer",
                "{} = {}".format(hru.id_field, temp_hru_id))
            if (temp_hru_id != 0 and int(
                    arcpy.GetCount_management("layer").getOutput(0)) == 0):
                logging.error(
                    '\nERROR: temp_hru_id {0} is not a valid cell hru_id'
                    '\nERROR: temp adjustments will NOT be forced to match'
                    ' station values at cell {0}\n'.format(temp_hru_id))
                temp_hru_id = 0
            arcpy.Delete_management("layer")
        else:
            logging.info(
                '  Temperature adjustments will not be matched to '
                'station values\n    (temp_hru_id = 0)')

        # Could add a second check that HRU_TSTA has values >0

    # Build output folders if necessary
    temp_adj_temp_ws = os.path.join(hru.param_ws, 'temp_adjust')
    if not os.path.isdir(temp_adj_temp_ws):
        os.mkdir(temp_adj_temp_ws)
    temp_zone_path = os.path.join(temp_adj_temp_ws, 'temp_zone.shp')
    # temp_zone_clip_path = os.path.join(temp_adj_temp_ws, 'temp_zone_clip.shp')

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    # env.pyramid = 'PYRAMIDS -1'
    env.pyramid = 'PYRAMIDS 0'
    env.workspace = hru.param_ws
    env.scratchWorkspace = hru.scratch_ws

    # Set month list based on flags
    month_list = range(1, 13)
    tmax_field_list = [tmax_field_fmt.format(m) for m in month_list]
    tmin_field_list = [tmin_field_fmt.format(m) for m in month_list]
    tmax_adj_field_list = [tmax_adj_field_fmt.format(m) for m in month_list]
    tmin_adj_field_list = [tmin_adj_field_fmt.format(m) for m in month_list]

    # Check fields
    logging.info('\nAdding temperature adjust fields if necessary')
    # Temperature zone fields
    support.add_field_func(hru.polygon_path, hru.temp_zone_id_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.hru_tsta_field, 'SHORT')
    # Temperature adjustment fields
    for tmax_adj_field in tmax_adj_field_list:
        support.add_field_func(hru.polygon_path, tmax_adj_field, 'DOUBLE')
    for tmin_adj_field in tmin_adj_field_list:
        support.add_field_func(hru.polygon_path, tmin_adj_field, 'DOUBLE')

    # Calculate temperature zone ID
    if temp_calc_method == 'ZONES':
        logging.info('\nCalculating cell HRU Temperature Zone ID')
        temp_zone_desc = arcpy.Describe(temp_zone_orig_path)
        temp_zone_sr = temp_zone_desc.spatialReference
        logging.debug('  Zones:      {}'.format(temp_zone_orig_path))
        logging.debug('  Projection: {}'.format(temp_zone_sr.name))
        logging.debug('  GCS:        {}'.format(temp_zone_sr.GCS.name))

        # Reset TEMP_ZONE_ID
        logging.info('  Resetting {} to 0'.format(hru.temp_zone_id_field))
        arcpy.CalculateField_management(hru.polygon_path,
                                        hru.temp_zone_id_field, 0, 'PYTHON')

        # If temp_zone spat_ref doesn't match hru_param spat_ref
        # Project temp_zone to hru_param spat ref
        # Otherwise, read temp_zone directly
        if hru.sr.name != temp_zone_sr.name:
            logging.info('  Projecting temperature zones...')
            # Set preferred transforms
            transform_str = support.transform_func(hru.sr, temp_zone_sr)
            logging.debug('    Transform: {}'.format(transform_str))
            # Project temp_zone shapefile
            arcpy.Project_management(temp_zone_orig_path, temp_zone_path,
                                     hru.sr, transform_str, temp_zone_sr)
            del transform_str
        else:
            arcpy.Copy_management(temp_zone_orig_path, temp_zone_path)

        # # Remove all unnecessary fields
        # for field in arcpy.ListFields(temp_zone_path):
        #     skip_field_list = temp_obs_field_list + [temp_zone_id_field, 'Shape']
        #     if field.name not in skip_field_list:
        #         try:
        #             arcpy.DeleteField_management(temp_zone_path, field.name)
        #         except:
        #             pass

        # Set temperature zone ID
        logging.info('  Setting {}'.format(hru.temp_zone_id_field))
        support.zone_by_centroid_func(temp_zone_path, hru.temp_zone_id_field,
                                      temp_zone_id_field, hru.polygon_path,
                                      hru.point_path, hru)
        # support.zone_by_area_func(
        #    temp_zone_layer, hru.temp_zone_id_field, temp_zone_id_field,
        #    hru.polygon_path, hru, hru_area_field, None, 50)

        # Set HRU_TSTA
        logging.info('  Setting {}'.format(hru.hru_tsta_field))
        support.zone_by_centroid_func(temp_zone_path, hru.hru_tsta_field,
                                      hru_tsta_field, hru.polygon_path,
                                      hru.point_path, hru)

        del temp_zone_desc, temp_zone_sr
    elif temp_calc_method == '1STA':
        # Set all cells to zone 1
        arcpy.CalculateField_management(hru.polygon_path,
                                        hru.temp_zone_id_field, 1, 'PYTHON')

    # Calculate adjustments
    logging.info('\nCalculating mean monthly temperature adjustments')
    if temp_calc_method == 'ZONES':
        # Read mean monthly values for each zone
        tmax_obs_dict = dict()
        tmin_obs_dict = dict()
        tmax_obs_field_list = [
            tmax_obs_field_fmt.format(m) for m in month_list
        ]
        tmin_obs_field_list = [
            tmin_obs_field_fmt.format(m) for m in month_list
        ]
        tmax_fields = [temp_zone_id_field] + tmax_obs_field_list
        tmin_fields = [temp_zone_id_field] + tmin_obs_field_list
        logging.debug('  Tmax Obs. Fields: {}'.format(', '.join(tmax_fields)))
        logging.debug('  Tmin Obs. Fields: {}'.format(', '.join(tmin_fields)))

        with arcpy.da.SearchCursor(temp_zone_path, tmax_fields) as s_cursor:
            for row in s_cursor:
                tmax_obs_dict[int(row[0])] = map(float, row[1:13])
        with arcpy.da.SearchCursor(temp_zone_path, tmin_fields) as s_cursor:
            for row in s_cursor:
                tmin_obs_dict[int(row[0])] = map(float, row[1:13])

        # Convert values to Celsius if necessary to match PRISM
        if temp_obs_units == 'F':
            tmax_obs_dict = {
                z: [(t - 32) * (5.0 / 9) for t in t_list]
                for z, t_list in tmax_obs_dict.items()
            }
            tmin_obs_dict = {
                z: [(t - 32) * (5.0 / 9) for t in t_list]
                for z, t_list in tmin_obs_dict.items()
            }
        elif temp_obs_units == 'K':
            tmax_obs_dict = {
                z: [(t - 273.15) for t in t_list]
                for z, t_list in tmax_obs_dict.items()
            }
            tmin_obs_dict = {
                z: [(t - 273.15) for t in t_list]
                for z, t_list in tmin_obs_dict.items()
            }
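        # Example conversion (hypothetical values): 68.0 F -> (68.0 - 32) * 5/9
        # = 20.0 C, and 293.15 K -> 293.15 - 273.15 = 20.0 C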

        tmax_zone_list = sorted(tmax_obs_dict.keys())
        tmin_zone_list = sorted(tmin_obs_dict.keys())
        logging.debug('  Tmax Zones: {}'.format(tmax_zone_list))
        logging.debug('  Tmin Zones: {}'.format(tmin_zone_list))

        # Print the observed temperature values
        logging.debug('  Observed Tmax')
        for zone, tmax_obs in tmax_obs_dict.items():
            logging.debug('    {}: {}'.format(
                zone, ', '.join(['{:.2f}'.format(x) for x in tmax_obs])))
        logging.debug('  Observed Tmin')
        for zone, tmin_obs in tmin_obs_dict.items():
            logging.debug('    {}: {}'.format(
                zone, ', '.join(['{:.2f}'.format(x) for x in tmin_obs])))

        # Default all zones to an adjustment of 0
        tmax_adj_dict = {z: [0] * 12 for z in tmax_zone_list}
        tmin_adj_dict = {z: [0] * 12 for z in tmin_zone_list}

        # Get list of HRU_IDs for each zone
        fields = [hru.temp_zone_id_field, hru.id_field]
        zone_hru_id_dict = defaultdict(list)
        with arcpy.da.SearchCursor(hru.polygon_path, fields) as s_cursor:
            for row in s_cursor:
                zone_hru_id_dict[int(row[0])].append(int(row[1]))

        # Check that TEMP_HRU_IDs are in the correct zone
        # Default all temperature zone HRU IDs to 0
        temp_hru_id_dict = {z: 0 for z in tmax_zone_list}
        if temp_hru_id_field is not None:
            fields = [temp_zone_id_field, temp_hru_id_field]
            logging.debug(
                '  Temp Zone ID field: {}'.format(temp_zone_id_field))
            logging.debug('  Temp HRU ID field: {}'.format(temp_hru_id_field))
            with arcpy.da.SearchCursor(temp_zone_path, fields) as s_cursor:
                for row in s_cursor:
                    temp_zone = int(row[0])
                    hru_id = int(row[1])
                    if hru_id == 0 or hru_id in zone_hru_id_dict[temp_zone]:
                        temp_hru_id_dict[temp_zone] = hru_id
                        logging.debug('    {}: {}'.format(temp_zone, hru_id))
                    else:
                        logging.error(
                            '\nERROR: HRU_ID {} is not in temperature ZONE {}'.
                            format(hru_id, temp_zone))
                        sys.exit()

        # Get gridded tmax values for each TEMP_HRU_ID
        fields = [hru.temp_zone_id_field, hru.id_field] + tmax_field_list
        with arcpy.da.SearchCursor(hru.polygon_path, fields) as s_cursor:
            for row in s_cursor:
                temp_zone = int(row[0])
                hru_id = int(row[1])
                if hru_id == 0:
                    pass
                elif hru_id in temp_hru_id_dict.values():
                    tmax_gridded_list = map(float, row[2:14])
                    tmax_obs_list = tmax_obs_dict[temp_zone]
                    tmax_adj_list = [
                        float(o) - t
                        for o, t in zip(tmax_obs_list, tmax_gridded_list)
                    ]
                    tmax_adj_dict[temp_zone] = tmax_adj_list

        # Get gridded tmin values for each TEMP_HRU_ID
        fields = [hru.temp_zone_id_field, hru.id_field] + tmin_field_list
        with arcpy.da.SearchCursor(hru.polygon_path, fields) as s_cursor:
            for row in s_cursor:
                temp_zone = int(row[0])
                hru_id = int(row[1])
                if hru_id == 0:
                    pass
                elif hru_id in temp_hru_id_dict.values():
                    tmin_gridded_list = map(float, row[2:14])
                    tmin_obs_list = tmin_obs_dict[temp_zone]
                    tmin_adj_list = [
                        float(o) - t
                        for o, t in zip(tmin_obs_list, tmin_gridded_list)
                    ]
                    tmin_adj_dict[temp_zone] = tmin_adj_list
        del temp_hru_id_dict, zone_hru_id_dict, fields
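        # Example (hypothetical values): observed tmax 20.0 C vs. gridded tmax
        # 18.5 C at the station cell -> zone adjustment = 20.0 - 18.5 = +1.5 C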

        logging.debug('  Tmax Adjustment Factors:')
        for k, v in tmax_adj_dict.items():
            logging.debug('    {}: {}'.format(
                k, ', '.join(['{:.3f}'.format(x) for x in v])))

        logging.debug('  Tmin Adjustment Factors:')
        for k, v in tmin_adj_dict.items():
            logging.debug('    {}: {}'.format(
                k, ', '.join(['{:.3f}'.format(x) for x in v])))

        logging.debug('\nWriting adjustment values to hru_params')
        fields = [hru.temp_zone_id_field]
        fields.extend(tmax_field_list + tmax_adj_field_list)
        fields.extend(tmin_field_list + tmin_adj_field_list)
        with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
            for row in u_cursor:
                zone = int(row[0])
                for i, month in enumerate(month_list):
                    tmax_i = fields.index(tmax_field_fmt.format(month))
                    tmax_adj_i = fields.index(tmax_adj_field_fmt.format(month))
                    row[tmax_adj_i] = (row[tmax_i] - tmax_obs_dict[zone][i] +
                                       tmax_adj_dict[zone][i])

                    tmin_i = fields.index(tmin_field_fmt.format(month))
                    tmin_adj_i = fields.index(tmin_adj_field_fmt.format(month))
                    row[tmin_adj_i] = (row[tmin_i] - tmin_obs_dict[zone][i] +
                                       tmin_adj_dict[zone][i])
                u_cursor.updateRow(row)
            del row

    elif temp_calc_method == '1STA':
        # Get gridded temperature at temp_HRU_ID
        tmax_fields = [hru.id_field] + tmax_field_list
        tmin_fields = [hru.id_field] + tmin_field_list
        logging.debug('  Tmax Fields: {}'.format(', '.join(tmax_field_list)))
        logging.debug('  Tmin Fields: {}'.format(', '.join(tmin_field_list)))

        # Convert values to Celsius if necessary to match PRISM
        if temp_obs_units == 'F':
            tmax_obs_list = [(t - 32) * (5.0 / 9) for t in tmax_obs_list]
            tmin_obs_list = [(t - 32) * (5.0 / 9) for t in tmin_obs_list]
        elif temp_obs_units == 'K':
            tmax_obs_list = [t - 273.15 for t in tmax_obs_list]
            tmin_obs_list = [t - 273.15 for t in tmin_obs_list]
        if temp_obs_units != 'C':
            logging.info('\nConverted Mean Monthly Tmax (C):\n  {}'.format(
                ', '.join(map(str, tmax_obs_list))))
            logging.info('Converted Mean Monthly Tmin (C):\n  {}'.format(
                ', '.join(map(str, tmin_obs_list))))

        # Scale all adjustments so gridded temperature will match observed
        # temperature at target cell
        if temp_hru_id != 0:
            tmax_gridded_list = map(
                float,
                arcpy.da.SearchCursor(
                    hru.polygon_path, tmax_fields,
                    '"{}" = {}'.format(hru.id_field, temp_hru_id)).next()[1:])
            logging.debug('  Gridded Tmax: {}'.format(', '.join(
                ['{:.2f}'.format(p) for p in tmax_gridded_list])))

            tmin_gridded_list = map(
                float,
                arcpy.da.SearchCursor(
                    hru.polygon_path, tmin_fields,
                    '"{}" = {}'.format(hru.id_field, temp_hru_id)).next()[1:])
            logging.debug('  Gridded Tmin: {}'.format(', '.join(
                ['{:.2f}'.format(p) for p in tmin_gridded_list])))

            # Difference of MEASURED or OBSERVED TEMP to GRIDDED TEMP
            tmax_adj_list = [
                float(o) - t for o, t in zip(tmax_obs_list, tmax_gridded_list)
            ]
            logging.info('  Tmax Obs. - Gridded: {}'.format(', '.join(
                ['{:.3f}'.format(p) for p in tmax_adj_list])))

            tmin_adj_list = [
                float(o) - t for o, t in zip(tmin_obs_list, tmin_gridded_list)
            ]
            logging.info('  Tmin Obs. - Gridded: {}'.format(', '.join(
                ['{:.3f}'.format(p) for p in tmin_adj_list])))
        else:
            tmax_adj_list = [0 for p in tmax_obs_list]
            tmin_adj_list = [0 for p in tmin_obs_list]

        # Use single mean monthly tmax for all cells
        # Assume tmax_obs_list is in month order
        fields = tmax_field_list + tmax_adj_field_list
        with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
            for row in u_cursor:
                for i, month in enumerate(month_list):
                    tmax_i = fields.index(tmax_field_fmt.format(month))
                    tmax_adj_i = fields.index(tmax_adj_field_fmt.format(month))
                    row[tmax_adj_i] = (row[tmax_i] - tmax_obs_list[i] +
                                       tmax_adj_list[i])
                u_cursor.updateRow(row)
            del row

        # Use single mean monthly tmin for all cells
        # Assume tmin_obs_list is in month order
        fields = tmin_field_list + tmin_adj_field_list
        with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
            for row in u_cursor:
                for i, month in enumerate(month_list):
                    tmin_i = fields.index(tmin_field_fmt.format(month))
                    tmin_adj_i = fields.index(tmin_adj_field_fmt.format(month))
                    row[tmin_adj_i] = (row[tmin_i] - tmin_obs_list[i] +
                                       tmin_adj_list[i])
                u_cursor.updateRow(row)
            del row
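
# A minimal standalone sketch (hypothetical values, plain Python, no arcpy) of
# the 1STA update above: each cell's monthly adjustment written to the
# TMX_ADJ_* fields is gridded_cell - observed + station_offset.
tmax_obs = [8.0, 9.5, 12.0, 15.5, 19.0, 24.0, 28.5, 27.5, 23.0, 17.0, 11.5, 8.5]
tmax_adj = [1.5] * 12  # obs - gridded at the target cell (temp_hru_id)
tmax_cell = [7.0, 8.5, 11.0, 14.5, 18.0, 23.0, 27.5, 26.5, 22.0, 16.0, 10.5, 7.5]
adjusted = [c - o + a for c, o, a in zip(tmax_cell, tmax_obs, tmax_adj)]
# adjusted == [0.5] * 12 here; at the target cell itself the result would be 0.0
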
Example #13
def ppt_ratio_parameters(config_path, overwrite_flag=False, debug_flag=False):
    """Calculate GSFLOW PPT Ratio Parameters

    Args:
        config_path (str): Project config file path
        overwrite_flag (bool): if True, overwrite existing files
        debug_flag (bool): if True, enable debug level logging

    Returns:
        None
    """

    # Hardcoded HRU field formats for now
    ppt_field_format = 'PPT_{:02d}'
    ratio_field_format = 'PPT_RT_{:02d}'

    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'ppt_ratio_parameters_log.txt'
    log_console = logging.FileHandler(filename=os.path.join(
        hru.log_ws, log_file_name),
                                      mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW PPT Ratio Parameters')

    # Units
    ppt_obs_units = support.get_param('ppt_obs_units', 'mm',
                                      inputs_cfg).lower()
    ppt_units_list = ['mm', 'cm', 'm', 'in', 'ft']
    # Compare the lower-cased value against the list
    #   but don't modify the acceptable units list
    if ppt_obs_units not in ppt_units_list:
        logging.warning(
            ('WARNING: Invalid PPT obs. units ({})\n  '
             'Valid units are: {}').format(ppt_obs_units,
                                           ', '.join(ppt_units_list)))
    # Convert units while reading obs values
    if ppt_obs_units == 'mm':
        units_factor = 1
    elif ppt_obs_units == 'cm':
        units_factor = 10
    elif ppt_obs_units == 'm':
        units_factor = 1000
    elif ppt_obs_units == 'in':
        units_factor = 25.4
    elif ppt_obs_units == 'ft':
        units_factor = 304.8
    else:
        units_factor = 1
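    # Example (hypothetical): 2.5 in of observed PPT -> 2.5 * 25.4 = 63.5 mm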

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()

    # PPT Zones
    set_ppt_zones_flag = inputs_cfg.getboolean('INPUTS', 'set_ppt_zones_flag')
    if set_ppt_zones_flag:
        ppt_zone_orig_path = inputs_cfg.get('INPUTS', 'ppt_zone_path')
        try:
            ppt_zone_field = inputs_cfg.get('INPUTS', 'ppt_zone_field')
        except:
            logging.error(
                '\nERROR: ppt_zone_field must be set in INI to apply '
                'zone specific ppt ratios\n')
            sys.exit()
        try:
            ppt_hru_id_field = inputs_cfg.get('INPUTS', 'ppt_hru_id_field')
        except:
            ppt_hru_id_field = None
            logging.warning(
                '  ppt_hru_id_field was not set in the INI file\n'
                '  PPT ratios will not be adjusted to match station values')

        # Field name for PSTA hard coded, but could be changed to be read from
        # config file like ppt_zone
        hru_psta_field = 'HRU_PSTA'

        try:
            ppt_obs_field_format = inputs_cfg.get('INPUTS',
                                                  'ppt_obs_field_format')
        except:
            ppt_obs_field_format = 'PPT_{:02d}'
            logging.info('  Defaulting ppt_obs_field_format = {}'.format(
                ppt_obs_field_format))

        if not arcpy.Exists(ppt_zone_orig_path):
            logging.error('\nERROR: PPT Zone ({}) does not exist'.format(
                ppt_zone_orig_path))
            sys.exit()
        # ppt_zone_path must be a polygon shapefile
        if arcpy.Describe(ppt_zone_orig_path).datasetType != 'FeatureClass':
            logging.error('\nERROR: ppt_zone_path must be a polygon shapefile')
            sys.exit()

        # Check ppt_zone_field
        if ppt_zone_field.upper() in ['FID', 'OID']:
            ppt_zone_field = arcpy.Describe(ppt_zone_orig_path).OIDFieldName
            logging.warning('\n  NOTE: Using {} to set {}\n'.format(
                ppt_zone_field, hru.ppt_zone_id_field))
        elif not arcpy.ListFields(ppt_zone_orig_path, ppt_zone_field):
            logging.error(
                '\nERROR: ppt_zone_field field {} does not exist\n'.format(
                    ppt_zone_field))
            sys.exit()
        # Need to check that field is an int type
        # Only check active cells (HRU_TYPE >0)?!
        elif not [
                f.type for f in arcpy.Describe(ppt_zone_orig_path).fields
                if (f.name == ppt_zone_field
                    and f.type in ['SmallInteger', 'Integer'])
        ]:
            logging.error(
                '\nERROR: ppt_zone_field field {} must be an integer type\n'.
                format(ppt_zone_field))
            sys.exit()
        # Need to check that field values are all positive
        # Only check active cells (HRU_TYPE >0)?!
        elif min([
                row[0] for row in arcpy.da.SearchCursor(
                    ppt_zone_orig_path, [ppt_zone_field])
        ]) <= 0:
            logging.error(
                '\nERROR: {} values must be positive\n'.format(
                    ppt_zone_field))
            sys.exit()

        # Check hru_psta_field
        if not arcpy.ListFields(ppt_zone_orig_path, hru_psta_field):
            logging.error(
                '\nERROR: hru_psta_field field {} does not exist\n'.format(
                    hru_psta_field))
            sys.exit()
        # Need to check that field is an int type
        # Only check active cells (HRU_TYPE >0)?!
        elif not [
                f.type for f in arcpy.Describe(ppt_zone_orig_path).fields
                if (f.name == hru_psta_field
                    and f.type in ['SmallInteger', 'Integer'])
        ]:
            logging.error(
                '\nERROR: hru_psta_field field {} must be an integer type\n'.
                format(hru_psta_field))
            sys.exit()
        # Need to check that field values are all positive
        # Only check active cells (HRU_TYPE >0)?!
        elif min([
                row[0] for row in arcpy.da.SearchCursor(
                    ppt_zone_orig_path, [hru_psta_field])
        ]) <= 0:
            logging.error(
                '\nERROR: {} values must be positive\n'.format(
                    hru_psta_field))
            sys.exit()

        # Check ppt_hru_id_field
        # ppt_hru_id values are checked later
        if ppt_hru_id_field is not None:
            if not arcpy.ListFields(ppt_zone_orig_path, ppt_hru_id_field):
                logging.error(
                    '\nERROR: ppt_hru_id_field field {} does not exist\n'.
                    format(ppt_hru_id_field))
                sys.exit()
            # Need to check that field is an int type
            elif not [
                    f.type for f in arcpy.Describe(ppt_zone_orig_path).fields
                    if (f.name == ppt_hru_id_field
                        and f.type in ['SmallInteger', 'Integer'])
            ]:
                logging.error(
                    '\nERROR: ppt_hru_id_field field {} must be an integer type\n'
                    .format(ppt_hru_id_field))
                sys.exit()
            # Need to check that field values are not negative (0 is okay)
            elif min([
                    row[0] for row in arcpy.da.SearchCursor(
                        ppt_zone_orig_path, [ppt_hru_id_field])
            ]) < 0:
                logging.error(
                    '\nERROR: {} values cannot be negative\n'.
                    format(ppt_hru_id_field))
                sys.exit()
    else:
        # If a zone shapefile is not used, PPT must be set manually
        ppt_obs_list = inputs_cfg.get('INPUTS', 'ppt_obs_list')

        # Check that values are floats
        try:
            ppt_obs_list = map(float, ppt_obs_list.split(','))
        except ValueError:
            logging.error('\nERROR: ppt_obs_list (mean monthly precipitation) '
                          'values could not be parsed as floats\n')
            sys.exit()

        # Check that there are 12 values
        if len(ppt_obs_list) != 12:
            logging.error(
                '\nERROR: There must be exactly 12 mean monthly '
                'observed precipitation values provided in ppt_obs_list\n')
            sys.exit()
        logging.info(('  Observed Mean Monthly PPT ({}):\n    {}\n    (Script '
                      'will assume these are listed in month order, i.e. Jan, '
                      'Feb, ...)').format(ppt_obs_units, ppt_obs_list))

        # Check if all the values are 0
        if ppt_obs_list == ([0.0] * 12):
            logging.error(
                '\nERROR: The observed precipitation values are all 0.\n'
                '  To compute PPT ratios, please set the ppt_obs_list '
                'parameter in the INI with\n  observed mean monthly PPT '
                'values (i.e. from a weather station)')
            sys.exit()

        # Adjust units (DEADBEEF - this might be better later on)
        if units_factor != 1:
            ppt_obs_list = [p * units_factor for p in ppt_obs_list]
            logging.info('\n  Converted Mean Monthly PPT (mm):\n    {}'.format(
                ppt_obs_list))

        # Get the PPT HRU ID
        try:
            ppt_hru_id = inputs_cfg.getint('INPUTS', 'ppt_hru_id')
        except:
            ppt_hru_id = 0

        # Check that the ppt_hru_id is a valid cell hru_id
        # If ppt_hru_id is 0, PPT ratios will not be adjusted
        if ppt_hru_id > 0:
            # Check that HRU_ID is valid
            logging.info('    PPT HRU_ID: {}'.format(ppt_hru_id))
            arcpy.MakeTableView_management(
                hru.polygon_path, "layer",
                "{} = {}".format(hru.id_field, ppt_hru_id))
            if (ppt_hru_id != 0 and int(
                    arcpy.GetCount_management("layer").getOutput(0)) == 0):
                logging.error(
                    ('\nERROR: ppt_hru_id {0} is not a valid cell hru_id'
                     '\nERROR: ppt_ratios will NOT be forced to 1'
                     ' at cell {0}\n').format(ppt_hru_id))
                ppt_hru_id = 0
            arcpy.Delete_management("layer")
        else:
            logging.info(
                '  PPT ratios will not be adjusted to match station values\n'
                '    (ppt_hru_id = 0)')

        # Could add a second check that HRU_PSTA has values >0

    # Build output folders if necessary
    ppt_ratio_temp_ws = os.path.join(hru.param_ws, 'ppt_ratio_temp')
    if not os.path.isdir(ppt_ratio_temp_ws):
        os.mkdir(ppt_ratio_temp_ws)
    ppt_zone_path = os.path.join(ppt_ratio_temp_ws, 'ppt_zone.shp')
    # ppt_zone_clip_path = os.path.join(ppt_ratio_temp_ws, 'ppt_zone_clip.shp')

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    # env.pyramid = 'PYRAMIDS -1'
    env.pyramid = 'PYRAMIDS 0'
    env.workspace = hru.param_ws
    env.scratchWorkspace = hru.scratch_ws

    # Set month list based on flags
    month_list = range(1, 13)
    ppt_field_list = [ppt_field_format.format(m) for m in month_list]
    ratio_field_list = [ratio_field_format.format(m) for m in month_list]

    # Check fields
    logging.info('\nAdding PPT fields if necessary')
    # PPT zone fields
    support.add_field_func(hru.polygon_path, hru.ppt_zone_id_field, 'LONG')
    # PPT ratio fields
    for ppt_field in ppt_field_list:
        support.add_field_func(hru.polygon_path, ppt_field, 'DOUBLE')

    # Calculate PPT zone ID
    if set_ppt_zones_flag:
        logging.info('\nCalculating cell HRU PPT zone ID')
        ppt_zone_desc = arcpy.Describe(ppt_zone_orig_path)
        ppt_zone_sr = ppt_zone_desc.spatialReference
        logging.debug('  PPT zones: {}'.format(ppt_zone_orig_path))
        logging.debug('  PPT zones spat. ref.:  {}'.format(ppt_zone_sr.name))
        logging.debug('  PPT zones GCS:         {}'.format(
            ppt_zone_sr.GCS.name))
        # Reset PPT_ZONE_ID
        logging.info('  Resetting {} to 0'.format(hru.ppt_zone_id_field))
        arcpy.CalculateField_management(hru.polygon_path,
                                        hru.ppt_zone_id_field, 0, 'PYTHON')
        # If ppt_zone spat_ref doesn't match hru_param spat_ref
        # Project ppt_zone to hru_param spat ref
        # Otherwise, read ppt_zone directly
        if hru.sr.name != ppt_zone_sr.name:
            logging.info('  Projecting PPT zones...')
            # Set preferred transforms
            transform_str = support.transform_func(hru.sr, ppt_zone_sr)
            logging.debug('    Transform: {}'.format(transform_str))
            # Project ppt_zone shapefile
            arcpy.Project_management(ppt_zone_orig_path, ppt_zone_path, hru.sr,
                                     transform_str, ppt_zone_sr)
            del transform_str
        else:
            arcpy.Copy_management(ppt_zone_orig_path, ppt_zone_path)

        # # Remove all unnecessary fields
        # for field in arcpy.ListFields(ppt_zone_path):
        #     skip_field_list = ppt_obs_field_list + [ppt_zone_field, 'Shape']
        #     if field.name not in skip_field_list:
        #         try:
        #             arcpy.DeleteField_management(ppt_zone_path, field.name)
        #         except:
        #             pass

        # Set ppt zone ID
        logging.info('  Setting {}'.format(hru.ppt_zone_id_field))
        support.zone_by_centroid_func(ppt_zone_path, hru.ppt_zone_id_field,
                                      ppt_zone_field, hru.polygon_path,
                                      hru.point_path, hru)
        # support.zone_by_area_func(
        #    ppt_zone_layer, hru.ppt_zone_id_field, ppt_zone_field,
        #    hru.polygon_path, hru, hru_area_field, None, 50)

        # Set HRU_PSTA
        logging.info('  Setting {}'.format(hru.hru_psta_field))
        support.zone_by_centroid_func(ppt_zone_path, hru.hru_psta_field,
                                      hru_psta_field, hru.polygon_path,
                                      hru.point_path, hru)

        # Cleanup
        del ppt_zone_desc, ppt_zone_sr
    else:
        # Set all cells to PPT zone 1
        arcpy.CalculateField_management(hru.polygon_path,
                                        hru.ppt_zone_id_field, 1, 'PYTHON')

    # Calculate PPT ratios
    logging.info('\nCalculating mean monthly PPT ratios')
    if set_ppt_zones_flag:
        # Read mean monthly PPT values for each zone
        ppt_obs_dict = dict()
        ppt_obs_field_list = [
            ppt_obs_field_format.format(m) for m in month_list
        ]
        fields = [ppt_zone_field] + ppt_obs_field_list
        logging.debug('  Obs. Fields: {}'.format(', '.join(fields)))
        with arcpy.da.SearchCursor(ppt_zone_path, fields) as s_cursor:
            for row in s_cursor:
                # Convert units while reading obs values
                ppt_obs_dict[int(row[0])] = map(
                    lambda x: float(x) * units_factor, row[1:13])
                # ppt_obs_dict[row[0]] = map(float, row[1:13])
        ppt_zone_list = sorted(ppt_obs_dict.keys())
        logging.debug('  PPT Zones: {}'.format(ppt_zone_list))

        # Print the observed PPT values
        logging.debug('  Observed PPT')
        for zone, ppt_obs in ppt_obs_dict.items():
            logging.debug('    {}: {}'.format(
                zone, ', '.join(['{:.2f}'.format(x) for x in ppt_obs])))

        # Default all zones to a PPT ratio of 1
        ppt_ratio_dict = {z: [1] * 12 for z in ppt_zone_list}
        # ppt_ratio_dict[0] = [1] * 12
        # ppt_ratio_dict[0] = 1

        # Get list of HRU_IDs for each PPT Zone
        fields = [hru.ppt_zone_id_field, hru.id_field]
        zone_hru_id_dict = defaultdict(list)
        with arcpy.da.SearchCursor(hru.polygon_path, fields) as s_cursor:
            for row in s_cursor:
                zone_hru_id_dict[int(row[0])].append(int(row[1]))

        # Check that PPT_HRU_IDs are in the correct zone
        # Default all PPT Zone HRU IDs to 0
        ppt_hru_id_dict = {z: 0 for z in ppt_zone_list}
        if ppt_hru_id_field is not None:
            fields = [ppt_zone_field, ppt_hru_id_field]
            logging.debug('  PPT Zone ID field: {}'.format(ppt_zone_field))
            logging.debug('  PPT HRU ID field: {}'.format(ppt_hru_id_field))
            with arcpy.da.SearchCursor(ppt_zone_path, fields) as s_cursor:
                for row in s_cursor:
                    ppt_zone = int(row[0])
                    hru_id = int(row[1])
                    if hru_id == 0 or hru_id in zone_hru_id_dict[ppt_zone]:
                        ppt_hru_id_dict[ppt_zone] = hru_id
                        logging.debug('    {}: {}'.format(ppt_zone, hru_id))
                    else:
                        logging.error(
                            '\nERROR: HRU_ID {} is not in PPT ZONE {}'.format(
                                hru_id, ppt_zone))
                        sys.exit()

        # Get gridded PPT values for each PPT_HRU_ID
        fields = [hru.ppt_zone_id_field, hru.id_field] + ppt_field_list
        # ppt_ratio_dict = dict()
        with arcpy.da.SearchCursor(hru.polygon_path, fields) as s_cursor:
            for row in s_cursor:
                ppt_zone = int(row[0])
                hru_id = int(row[1])
                if hru_id == 0:
                    pass
                elif hru_id in ppt_hru_id_dict.values():
                    ppt_gridded_list = map(float, row[2:14])
                    ppt_obs_list = ppt_obs_dict[ppt_zone]
                    ppt_ratio_list = [
                        float(o) / p if p > 0 else 0
                        for o, p in zip(ppt_obs_list, ppt_gridded_list)
                    ]
                    ppt_ratio_dict[ppt_zone] = ppt_ratio_list
        del ppt_hru_id_dict, zone_hru_id_dict, fields
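        # Example (hypothetical values): observed PPT 63.5 mm vs. gridded PPT
        # 50.8 mm at the station cell -> zone ratio = 63.5 / 50.8 = 1.25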

        logging.debug('  PPT Ratio Adjustment Factors:')
        for k, v in ppt_ratio_dict.items():
            logging.debug('    {}: {}'.format(
                k, ', '.join(['{:.3f}'.format(x) for x in v])))

        # DEADBEEF - ZONE_VALUE is calculated in zone_by_centroid_func
        # There is probably a cleaner way of linking these two
        fields = [hru.ppt_zone_id_field] + ppt_field_list + ratio_field_list
        with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
            for row in u_cursor:
                ppt_zone = int(row[0])
                for i, month in enumerate(month_list):
                    ppt_i = fields.index(ppt_field_format.format(month))
                    ratio_i = fields.index(ratio_field_format.format(month))

                    if ppt_zone in ppt_zone_list:
                        ppt_obs = ppt_obs_dict[ppt_zone][i]
                    else:
                        ppt_obs = 0

                    if ppt_obs > 0:
                        row[ratio_i] = (ppt_ratio_dict[ppt_zone][i] *
                                        row[ppt_i] / ppt_obs)
                    else:
                        row[ratio_i] = 0
                u_cursor.updateRow(row)
            del row
    else:
        # Get gridded precip at PPT_HRU_ID
        fields = [hru.id_field] + ppt_field_list
        logging.debug('  Fields: {}'.format(', '.join(fields)))

        # Scale all ratios so gridded PPT will match observed PPT at target cell
        if ppt_hru_id != 0:
            ppt_gridded_list = map(
                float,
                arcpy.da.SearchCursor(
                    hru.polygon_path, fields,
                    '"{}" = {}'.format(hru.id_field, ppt_hru_id)).next()[1:])
            logging.info('  Gridded PPT: {}'.format(', '.join(
                ['{:.2f}'.format(p) for p in ppt_gridded_list])))
            # Ratio of MEASURED or OBSERVED PPT to GRIDDED PPT
            # This will be multiplied by GRIDDED/OBSERVED below
            ppt_ratio_list = [
                float(o) / p if p > 0 else 0
                for o, p in zip(ppt_obs_list, ppt_gridded_list)
            ]
            logging.info('  Obs./Gridded: {}'.format(', '.join(
                ['{:.3f}'.format(p) for p in ppt_ratio_list])))
        else:
            ppt_ratio_list = [1 for p in ppt_obs_list]

        # Use single mean monthly PPT for all cells
        # Assume ppt_obs_list is in month order
        fields = ppt_field_list + ratio_field_list
        with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
            for row in u_cursor:
                for i, month in enumerate(month_list):
                    ppt_i = fields.index(ppt_field_format.format(month))
                    ratio_i = fields.index(ratio_field_format.format(month))

                    if ppt_obs_list[i] > 0:
                        row[ratio_i] = (ppt_ratio_list[i] * row[ppt_i] /
                                        ppt_obs_list[i])
                    else:
                        row[ratio_i] = 0
                u_cursor.updateRow(row)
            del row
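
# A minimal standalone sketch (hypothetical values, plain Python, no arcpy) of
# the ratio written to the PPT_RT_* fields above:
# cell_ratio = station_ratio * gridded_cell_ppt / observed_ppt.
ppt_obs = 63.5        # observed mean monthly PPT for the zone (mm)
station_ratio = 1.25  # observed / gridded at the target cell (ppt_hru_id)
cell_ppt = 40.0       # gridded PPT at some other cell (mm)
cell_ratio = station_ratio * cell_ppt / ppt_obs if ppt_obs > 0 else 0
# cell_ratio == ~0.787; the target cell itself gets a ratio of exactly 1.0
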
Example #14
def soil_raster_prep(config_path):
    """Prepare GSFLOW soil rasters

    Parameters
    ----------
    config_path : str
        Project configuration file (.ini) path.

    Returns
    -------
    None

    """
    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'soil_prep_log.txt'
    log_console = logging.FileHandler(filename=os.path.join(
        hru.log_ws, log_file_name),
                                      mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nPrepare GSFLOW Soil Rasters')

    soil_orig_ws = inputs_cfg.get('INPUTS', 'soil_orig_folder')
    awc_name = inputs_cfg.get('INPUTS', 'awc_name')
    clay_pct_name = inputs_cfg.get('INPUTS', 'clay_pct_name')
    sand_pct_name = inputs_cfg.get('INPUTS', 'sand_pct_name')
    soil_proj_method = 'NEAREST'
    soil_cs = inputs_cfg.getint('INPUTS', 'soil_cellsize')
    fill_soil_nodata_flag = inputs_cfg.getboolean('INPUTS',
                                                  'fill_soil_nodata_flag')

    # Use Ksat to calculate ssr2gw_rate and slowcoef_lin
    ksat_name = inputs_cfg.get('INPUTS', 'ksat_name')

    # Read and apply soil depth raster
    # Otherwise soil depth will only be derived from rooting depth
    try:
        soil_depth_flag = inputs_cfg.getboolean('INPUTS', 'soil_depth_flag')
    except ConfigParser.NoOptionError:
        soil_depth_flag = False
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'soil_depth_flag', soil_depth_flag))
    if soil_depth_flag:
        soil_depth_name = inputs_cfg.get('INPUTS', 'soil_depth_name')

    # Use geology based multipliers to adjust ssr2gw_rate
    # Otherwise default value set in config file will be used
    try:
        ssr2gw_mult_flag = inputs_cfg.getboolean('INPUTS', 'ssr2gw_mult_flag')
    except ConfigParser.NoOptionError:
        ssr2gw_mult_flag = False
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'ssr2gw_mult_flag', ssr2gw_mult_flag))
    if ssr2gw_mult_flag:
        ssr2gw_mult_name = inputs_cfg.get('INPUTS', 'ssr2gw_mult_name')

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()

    # All of the soil rasters must exist
    awc_orig_path = os.path.join(soil_orig_ws, awc_name)
    clay_pct_orig_path = os.path.join(soil_orig_ws, clay_pct_name)
    sand_pct_orig_path = os.path.join(soil_orig_ws, sand_pct_name)
    ksat_orig_path = os.path.join(soil_orig_ws, ksat_name)
    if soil_depth_flag:
        soil_depth_orig_path = os.path.join(soil_orig_ws, soil_depth_name)
    if ssr2gw_mult_flag:
        ssr2gw_mult_orig_path = os.path.join(soil_orig_ws, ssr2gw_mult_name)

    # Check that either the original or projected/clipped raster exists
    if not arcpy.Exists(awc_orig_path):
        logging.error('\nERROR: AWC raster does not exist')
        sys.exit()
    if not arcpy.Exists(clay_pct_orig_path):
        logging.error('\nERROR: Clay raster does not exist')
        sys.exit()
    if not arcpy.Exists(sand_pct_orig_path):
        logging.error('\nERROR: Sand raster does not exist')
        sys.exit()
    if not arcpy.Exists(ksat_orig_path):
        logging.error('\nERROR: Ksat raster does not exist')
        sys.exit()
    if soil_depth_flag and not arcpy.Exists(soil_depth_orig_path):
        logging.error('\nERROR: Soil depth raster does not exist')
        sys.exit()
    if ssr2gw_mult_flag and not arcpy.Exists(ssr2gw_mult_orig_path):
        logging.error('\nERROR: Geology based raster for ssr2gw multiplier '
                      'does not exist')
        sys.exit()

    # Check other inputs
    if soil_cs <= 0:
        logging.error('\nERROR: soil cellsize must be greater than 0')
        sys.exit()
    soil_proj_method_list = ['BILINEAR', 'CUBIC', 'NEAREST']
    if soil_proj_method.upper() not in soil_proj_method_list:
        logging.error('\nERROR: Soil projection method must be: {}'.format(
            ', '.join(soil_proj_method_list)))
        sys.exit()

    # Build output folder if necessary
    soil_temp_ws = os.path.join(hru.param_ws, 'soil_rasters')
    if not os.path.isdir(soil_temp_ws):
        os.mkdir(soil_temp_ws)
    # Output paths
    awc_path = os.path.join(soil_temp_ws, 'awc.img')
    clay_pct_path = os.path.join(soil_temp_ws, 'clay_pct.img')
    sand_pct_path = os.path.join(soil_temp_ws, 'sand_pct.img')
    ksat_path = os.path.join(soil_temp_ws, 'ksat.img')
    soil_depth_path = os.path.join(soil_temp_ws, 'soil_depth.img')
    ssr2gw_mult_path = os.path.join(soil_temp_ws, 'ssr2gw_mult.img')

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS -1'
    # env.pyramid = 'PYRAMIDS 0'
    env.workspace = soil_temp_ws
    env.scratchWorkspace = hru.scratch_ws

    # Available Water Capacity (AWC)
    logging.info('\nProjecting/clipping AWC raster')
    soil_orig_sr = arcpy.sa.Raster(awc_orig_path).spatialReference
    logging.debug('  AWC GCS:  {}'.format(soil_orig_sr.GCS.name))
    # Remove existing projected raster
    if arcpy.Exists(awc_path):
        arcpy.Delete_management(awc_path)
    # Set preferred transforms
    transform_str = support.transform_func(hru.sr, soil_orig_sr)
    logging.debug('  Transform: {}'.format(transform_str))
    logging.debug('  Projection method: NEAREST')
    # Project soil raster
    # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
    support.project_raster_func(awc_orig_path, awc_path, hru.sr,
                                soil_proj_method, soil_cs, transform_str,
                                '{} {}'.format(hru.ref_x,
                                               hru.ref_y), soil_orig_sr, hru)
    # env.extent = hru.extent
    # arcpy.ProjectRaster_management(
    #    awc_orig_path, awc_path, hru.sr,
    #    soil_proj_method, soil_cs, transform_str,
    #    '{} {}'.format(hru.ref_x, hru.ref_y),
    #    soil_orig_sr)
    # arcpy.ClearEnvironment('extent')

    # Percent clay
    logging.info('Projecting/clipping clay raster')
    soil_orig_sr = arcpy.sa.Raster(clay_pct_orig_path).spatialReference
    logging.debug('  Clay GCS: {}'.format(soil_orig_sr.GCS.name))
    # Remove existing projected raster
    if arcpy.Exists(clay_pct_path):
        arcpy.Delete_management(clay_pct_path)
    # Set preferred transforms
    transform_str = support.transform_func(hru.sr, soil_orig_sr)
    logging.debug('  Transform: {}'.format(transform_str))
    logging.debug('  Projection method: NEAREST')
    # Project soil raster
    # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
    support.project_raster_func(clay_pct_orig_path, clay_pct_path, hru.sr,
                                soil_proj_method, soil_cs, transform_str,
                                '{} {}'.format(hru.ref_x,
                                               hru.ref_y), soil_orig_sr, hru)
    # env.extent = hru.extent
    # arcpy.ProjectRaster_management(
    #    clay_pct_orig_path, clay_pct_path, hru.sr,
    #    soil_proj_method, soil_cs, transform_str,
    #    '{} {}'.format(hru.ref_x, hru.ref_y),
    #    soil_orig_sr)
    # arcpy.ClearEnvironment('extent')

    # Percent sand
    logging.info('Projecting/clipping sand raster')
    soil_orig_sr = arcpy.sa.Raster(sand_pct_orig_path).spatialReference
    logging.debug('  Sand GCS: {}'.format(soil_orig_sr.GCS.name))
    # Remove existing projected raster
    if arcpy.Exists(sand_pct_path):
        arcpy.Delete_management(sand_pct_path)
    # Set preferred transforms
    transform_str = support.transform_func(hru.sr, soil_orig_sr)
    logging.debug('  Transform: {}'.format(transform_str))
    logging.debug('  Projection method: NEAREST')
    # Project soil raster
    # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
    support.project_raster_func(sand_pct_orig_path, sand_pct_path, hru.sr,
                                soil_proj_method, soil_cs, transform_str,
                                '{} {}'.format(hru.ref_x,
                                               hru.ref_y), soil_orig_sr, hru)
    # env.extent = hru.extent
    # arcpy.ProjectRaster_management(
    #    sand_pct_orig_path, sand_pct_path, hru.sr,
    #    soil_proj_method, soil_cs, transform_str,
    #    '{} {}'.format(hru.ref_x, hru.ref_y),
    #    soil_orig_sr)
    # arcpy.ClearEnvironment('extent')

    # Hydraulic conductivity
    logging.info('Projecting/clipping ksat raster')
    ksat_orig_sr = arcpy.sa.Raster(ksat_orig_path).spatialReference
    logging.debug('  Ksat GCS: {}'.format(ksat_orig_sr.GCS.name))
    # Remove existing projected raster
    if arcpy.Exists(ksat_path):
        arcpy.Delete_management(ksat_path)
    # Set preferred transforms
    transform_str = support.transform_func(hru.sr, ksat_orig_sr)
    logging.debug('  Transform: {}'.format(transform_str))
    logging.debug('  Projection method: NEAREST')
    # Project ksat raster
    # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
    support.project_raster_func(ksat_orig_path, ksat_path, hru.sr,
                                soil_proj_method, soil_cs, transform_str,
                                '{} {}'.format(hru.ref_x,
                                               hru.ref_y), ksat_orig_sr, hru)
    # env.extent = hru.extent
    # arcpy.ProjectRaster_management(
    #    ksat_orig_path, ksat_path, hru.sr,
    #    soil_proj_method, soil_cs, transform_str,
    #    '{} {}'.format(hru.ref_x, hru.ref_y),
    #    soil_orig_sr)
    # arcpy.ClearEnvironment('extent')

    # Soil depth is only needed if clipping root depth
    if soil_depth_flag:
        logging.info('\nProjecting/clipping depth raster')
        soil_orig_sr = arcpy.sa.Raster(soil_depth_orig_path).spatialReference
        logging.debug('  Depth GCS: {}'.format(soil_orig_sr.GCS.name))
        # Remove existing projected raster
        if arcpy.Exists(soil_depth_path):
            arcpy.Delete_management(soil_depth_path)
        # Set preferred transforms
        transform_str = support.transform_func(hru.sr, soil_orig_sr)
        logging.debug('  Transform: {}'.format(transform_str))
        logging.debug('  Projection method: NEAREST')
        # Project soil raster
        # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
        support.project_raster_func(soil_depth_orig_path, soil_depth_path,
                                    hru.sr, soil_proj_method, soil_cs,
                                    transform_str,
                                    '{} {}'.format(hru.ref_x, hru.ref_y),
                                    soil_orig_sr, hru)
        # env.extent = hru.extent
        # arcpy.ProjectRaster_management(
        #    soil_depth_orig_path, soil_depth_path, hru.sr,
        #    soil_proj_method, soil_cs, transform_str,
        #    '{} {}'.format(hru.ref_x, hru.ref_y),
        #    soil_orig_sr)
        # arcpy.ClearEnvironment('extent')

    # Geology based multiplier for gravity drainage (ssr2gw multiplier)
    if ssr2gw_mult_flag:
        logging.info('\nProjecting/clipping ssr2gw multiplier raster')
        soil_orig_sr = arcpy.sa.Raster(ssr2gw_mult_orig_path).spatialReference
        logging.debug('  Ssr2gw mult GCS: {}'.format(soil_orig_sr.GCS.name))
        # Remove existing projected raster
        if arcpy.Exists(ssr2gw_mult_path):
            arcpy.Delete_management(ssr2gw_mult_path)
        # Set preferred transforms
        transform_str = support.transform_func(hru.sr, soil_orig_sr)
        logging.debug('  Transform: {}'.format(transform_str))
        logging.debug('  Projection method: NEAREST')
        # Project soil raster
        # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
        support.project_raster_func(ssr2gw_mult_orig_path, ssr2gw_mult_path,
                                    hru.sr, soil_proj_method, soil_cs,
                                    transform_str,
                                    '{} {}'.format(hru.ref_x, hru.ref_y),
                                    soil_orig_sr, hru)

    # Fill soil nodata values using nibble
    if fill_soil_nodata_flag:
        logging.info('\nFilling soil nodata values using Nibble')
        soil_raster_list = [awc_path, clay_pct_path, sand_pct_path, ksat_path]
        if soil_depth_flag:
            soil_raster_list.append(soil_depth_path)
        for soil_raster_path in soil_raster_list:
            logging.info('  {}'.format(soil_raster_path))
            # DEADBEEF - Check if there is any nodata to be filled first?
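            # Scale by 1000 and cast to Int because arcpy.sa.Nibble requires an
            # integer raster; values < 0 are treated as nodata in the mask,
            # filled from the nearest valid cell, then scaled back by 0.001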
            mask_obj = arcpy.sa.Int(1000 * arcpy.sa.SetNull(
                arcpy.sa.Raster(soil_raster_path) < 0,
                arcpy.sa.Raster(soil_raster_path)))
            input_obj = arcpy.sa.Con(arcpy.sa.IsNull(mask_obj), 0, mask_obj)
            nibble_obj = 0.001 * arcpy.sa.Nibble(input_obj, mask_obj,
                                                 'ALL_VALUES')
            nibble_obj.save(soil_raster_path)
            arcpy.BuildPyramids_management(soil_raster_path)
def prms_template_fill(config_path):
    """Fill PRMS Parameter Template File

    Parameters
    ----------
    config_path : str
        Project configuration file (.ini) path.

    Returns
    -------
    None

    """
    param_formats = {1: '{:d}', 2: '{:f}', 3: '{:f}', 4: '{}'}

    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'prms_template_log.txt'
    log_console = logging.FileHandler(filename=os.path.join(
        hru.log_ws, log_file_name),
                                      mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nFilling PRMS Parameter File Template')

    # Read parameters from config file
    hru.polygon_path = inputs_cfg.get('INPUTS', 'hru_fishnet_path')
    hru.fid_field = inputs_cfg.get('INPUTS', 'orig_fid_field')
    parameter_ws = inputs_cfg.get('INPUTS', 'parameter_folder')
    try:
        prms_parameter_ws = inputs_cfg.get('INPUTS', 'prms_parameter_folder')
    except ConfigParser.NoOptionError:
        prms_parameter_ws = inputs_cfg.get('INPUTS', 'parameter_folder')
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'prms_parameter_ws', prms_parameter_ws))
    prms_dimen_csv_path = inputs_cfg.get('INPUTS', 'prms_dimen_csv_path')
    prms_param_csv_path = inputs_cfg.get('INPUTS', 'prms_param_csv_path')

    # Get input DEM units and desired output HRU_ELEV units
    dem_units = inputs_cfg.get('INPUTS', 'dem_units').lower()
    dem_unit_types = {
        'meters': 'meter',
        'm': 'meter',
        'meter': 'meter',
        'feet': 'feet',
        'ft': 'feet',
        'foot': 'feet',
    }
    try:
        dem_units = dem_unit_types[dem_units]
    except:
        logging.error(
            '\nERROR: DEM unit "{}" is not supported\n'.format(dem_units))
        sys.exit()
    elev_units = inputs_cfg.getint('INPUTS', 'elev_units')
    elev_unit_types = {0: 'feet', 1: 'meter'}
    try:
        elev_units = elev_unit_types[elev_units]
    except:
        logging.error(
            '\nERROR: elev_units "{}" is not supported\n'.format(elev_units))
        sys.exit()
    if dem_units == 'feet' and elev_units == 'meter':
        elev_unit_scalar = 0.3048
    elif dem_units == 'meter' and elev_units == 'feet':
        elev_unit_scalar = (1.0 / 0.3048)
    else:
        elev_unit_scalar = 1.0
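    # Example (hypothetical): a 1200 ft DEM value with elev_units = 1 (meter)
    #   -> 1200 * 0.3048 = 365.76 m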

    # Temperature calculation method
    try:
        temp_calc_method = inputs_cfg.get('INPUTS',
                                          'temperature_calc_method').upper()
    except:
        temp_calc_method = '1STA'
        logging.info('  Defaulting temperature_calc_method = {}'.format(
            temp_calc_method))
    temp_calc_options = ['ZONES', 'LAPSE', '1STA']
    if temp_calc_method not in temp_calc_options:
        logging.error(
            '\nERROR: Invalid temperature calculation method ({})\n  '
            'Valid methods are: {}'.format(temp_calc_method,
                                           ', '.join(temp_calc_options)))
        sys.exit()

    # Write parameter/dimensions to separate files based on "PARAM_FILE"
    #   value in prms_parameters.csv and prms_dimensions.csv
    try:
        single_param_file_flag = inputs_cfg.getboolean(
            'INPUTS', 'single_param_file_flag')
    except ConfigParser.NoOptionError:
        single_param_file_flag = False
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'single_param_file_flag', single_param_file_flag))
    if single_param_file_flag:
        try:
            single_param_file_name = inputs_cfg.get('INPUTS',
                                                    'single_param_file_name')
        except ConfigParser.NoOptionError:
            single_param_file_name = 'prms_inputs.param'
            logging.info('  Missing INI parameter, setting {} = {}'.format(
                'single_param_file_name', single_param_file_name))

    # Write nhru gridded parameters as single column or array
    try:
        param_column_flag = inputs_cfg.getboolean('INPUTS',
                                                  'param_column_flag')
    except ConfigParser.NoOptionError:
        param_column_flag = False
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'param_column_flag', param_column_flag))

    # Scratch workspace
    try:
        scratch_name = inputs_cfg.get('INPUTS', 'scratch_name')
    except ConfigParser.NoOptionError:
        scratch_name = 'in_memory'
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'scratch_name', scratch_name))

    # Cascades
    crt_ws = os.path.join(parameter_ws, 'cascade_work')
    gw_ws = os.path.join(parameter_ws, 'cascade_gw_work')
    crt_dimension_path = os.path.join(crt_ws, 'parameter_dimensions.txt')
    crt_parameter_path = os.path.join(crt_ws, 'cascade.param')
    crt_gw_dimension_path = os.path.join(gw_ws, 'parameter_dimensions.txt')
    crt_gw_parameter_path = os.path.join(gw_ws, 'groundwater_cascade.param')

    # Strings to search PRMS parameter file for
    # Newline character is required after title
    file_header_str = 'PRMS parameter file generated with gsflow-arcpy-tools version X\n'
    # file_header_str = 'Default file generated by model\nVersion: 1.7'
    dimen_header_str = '** Dimensions **'
    param_header_str = '** Parameters **'
    break_str = '####'
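    # An abbreviated sketch of the expected parameter file layout (assumed
    # from the search strings above, not a definitive PRMS spec):
    #   PRMS parameter file generated with gsflow-arcpy-tools version X
    #   ** Dimensions **
    #   ####
    #   nhru
    #   12
    #   ** Parameters **
    #   ####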

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: The fishnet does not exist\n  {}'.format(
            hru.polygon_path))
        sys.exit()
    # if not os.path.isfile(prms_template_path):
    #    logging.error('\nERROR: The template parameter file does not exist\n')
    #    sys.exit()
    if not os.path.isfile(prms_dimen_csv_path):
        logging.error(
            '\nERROR: The dimensions CSV file does not exist\n  {}'.format(
                prms_dimen_csv_path))
        sys.exit()
    if not os.path.isfile(prms_param_csv_path):
        logging.error(
            '\nERROR: The parameters CSV file does not exist\n  {}'.format(
                prms_param_csv_path))
        sys.exit()

    if not os.path.isdir(crt_ws):
        logging.error(
            '\nERROR: Cascades folder does not exist'
            '\nERROR:   {}'
            '\nERROR: Try re-running CRT using stream_parameters.py\n'.format(
                crt_ws))
        sys.exit()
    elif not os.path.isfile(crt_dimension_path):
        logging.error(
            '\nERROR: Cascades dimension file does not exist'
            '\nERROR:   {}'
            '\nERROR: Try re-running CRT using stream_parameters.py\n'.format(
                crt_dimension_path))
        sys.exit()
    elif not os.path.isfile(crt_parameter_path):
        logging.error(
            '\nERROR: Cascades parameter file does not exist'
            '\nERROR:   {}'
            '\nERROR: Try re-running CRT using stream_parameters.py\n'.format(
                crt_parameter_path))
        sys.exit()

    if not os.path.isdir(gw_ws):
        logging.error(
            '\nERROR: Groundwater cascades folder does not exist'
            '\nERROR:   {}'
            '\nERROR: Try re-running CRT using stream_parameters.py\n'.format(
                gw_ws))
        sys.exit()
    elif not os.path.isfile(crt_gw_dimension_path):
        logging.error(
            '\nERROR: Groundwater cascades dimension file does not exist'
            '\nERROR:   {}'
            '\nERROR: Try re-running CRT using stream_parameters.py\n'.format(
                crt_gw_dimension_path))
        sys.exit()
    elif not os.path.isfile(crt_gw_parameter_path):
        logging.error(
            '\nERROR: Groundwater cascades parameter file does not exist'
            '\nERROR:   {}'
            '\nERROR: Try re-running CRT using stream_parameters\n'.format(
                crt_gw_parameter_path))
        sys.exit()

    # Get number of cells in fishnet
    fishnet_count = int(
        arcpy.GetCount_management(hru.polygon_path).getOutput(0))
    logging.info('  Fishnet cells: {}'.format(fishnet_count))

    # Read in dimensions from CSV
    logging.info('\nReading dimensions CSV')
    dimen_names = dict()
    dimen_files = dict()
    dimen_sizes = dict()
    with open(prms_dimen_csv_path, 'r') as input_f:
        dimen_lines = input_f.readlines()
    # Dimensions can be set to a value, a field, or not set
    dimen_lines = [l.strip().split(',') for l in dimen_lines]
    header = dimen_lines[0]
    for line in dimen_lines[1:]:
        dimen_name = line[header.index('NAME')]
        dimen_names[dimen_name] = dimen_name
        logging.debug('  {}'.format(dimen_name))

        # What should the default parameter file name be if not set?
        if single_param_file_flag:
            dimen_file = os.path.join(prms_parameter_ws,
                                      single_param_file_name)
        elif 'PARAM_FILE' not in header:
            dimen_file = os.path.join(prms_parameter_ws, 'prms_inputs.param')
            logging.info('  PARAM_FILE field not in dimensions CSV\n'
                         '  Defaulting to {}'.format(dimen_file))
        elif line[header.index('PARAM_FILE')] == '':
            dimen_file = os.path.join(prms_parameter_ws, 'prms_inputs.param')
            logging.info('  PARAM_FILE value not set for dimension: {}\n'
                         '  Defaulting to {}'.format(dimen_name, dimen_file))
        else:
            dimen_file = os.path.join(
                prms_parameter_ws, line[header.index('PARAM_FILE')] + '.param')
        dimen_files[dimen_name] = dimen_file

        dimen_size = line[header.index('SIZE')]
        if dimen_size.lower() in ['calculated', 'config_file']:
            dimen_sizes[dimen_name] = dimen_size
        elif not dimen_size:
            dimen_sizes[dimen_name] = ''
        else:
            # Don't force to integer type unless necessary since values are
            # written back out as strings
            dimen_sizes[dimen_name] = dimen_size
            # dimen_sizes[dimen_name] = int(dimen_size)
        del dimen_size
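
    # Illustrative dimensions CSV rows (hypothetical values) showing a
    # literal size, a CALCULATED size, a config_file size, and the
    # optional PARAM_FILE column:
    #   NAME,SIZE,PARAM_FILE
    #   nmonths,12,prms_dimensions
    #   nhru,CALCULATED,prms_dimensions
    #   ndays,config_file,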

    # Set CALCULATED dimension values
    # These parameters equal the fishnet cell count
    for dimen_name in ['ngw', 'ngwcell', 'nhru', 'nhrucell', 'nssr']:
        if dimen_sizes[dimen_name].lower() == 'calculated':
            dimen_sizes[dimen_name] = fishnet_count
            logging.info('  {} = {}'.format(dimen_name,
                                            dimen_sizes[dimen_name]))

    # Getting number of lakes
    if dimen_sizes['nlake'].lower() == 'calculated':
        logging.info('\nCalculating number of lakes')
        #logging.info('  Lake cells are {} >= 0'.format(hru.lake_id_field))
        value_fields = (hru.id_field, hru.lake_id_field)
        with arcpy.da.SearchCursor(hru.polygon_path, value_fields) as s_cursor:
            dimen_sizes['nlake'] = max(
                list([int(row[1]) for row in s_cursor if int(row[1]) >= 0]))
        logging.info('  nlake = {}'.format(dimen_sizes['nlake']))

    # Getting number of lake cells
    if dimen_sizes['nlake_hrus'].lower() == 'calculated':
        logging.info('\nCalculating number of lake cells')
        logging.info('  Lake cells are {} >= 0'.format(hru.lake_id_field))
        value_fields = (hru.id_field, hru.lake_id_field)
        with arcpy.da.SearchCursor(hru.polygon_path, value_fields) as s_cursor:
            dimen_sizes['nlake_hrus'] = len(
                list([int(row[1]) for row in s_cursor if int(row[1]) >= 0]))
        logging.info('  nlake cells = {}'.format(dimen_sizes['nlake_hrus']))

    # Getting number of stream cells
    if dimen_sizes['nreach'].lower() == 'calculated':
        logging.info('\nCalculating number of stream cells')
        logging.info('  Stream cells are {} > 0'.format(hru.krch_field))
        value_fields = (hru.id_field, hru.krch_field)
        with arcpy.da.SearchCursor(hru.polygon_path, value_fields) as s_cursor:
            dimen_sizes['nreach'] = len(
                list([int(row[1]) for row in s_cursor if int(row[1]) > 0]))
        logging.info('  nreach = {}'.format(dimen_sizes['nreach']))

    # Getting number of stream segments
    if dimen_sizes['nsegment'].lower() == 'calculated':
        logging.info('\nCalculating number of unique stream segments')
        logging.info('  Stream segments are {} > 0'.format(hru.iseg_field))
        value_fields = (hru.id_field, hru.iseg_field)
        with arcpy.da.SearchCursor(hru.polygon_path, value_fields) as s_cursor:
            dimen_sizes['nsegment'] = len(
                list(set([int(row[1]) for row in s_cursor
                          if int(row[1]) > 0])))
        logging.info('  nsegment = {}'.format(dimen_sizes['nsegment']))

    # Getting number of subbasins
    if dimen_sizes['nsub'].lower() == 'calculated':
        logging.info('\nCalculating number of unique subbasins')
        logging.info('  Subbasins are {} > 0'.format(hru.subbasin_field))
        value_fields = (hru.id_field, hru.subbasin_field)
        with arcpy.da.SearchCursor(hru.polygon_path, value_fields) as s_cursor:
            dimen_sizes['nsub'] = len(
                list(set([int(row[1]) for row in s_cursor
                          if int(row[1]) > 0])))
        logging.info('  nsub = {}'.format(dimen_sizes['nsub']))

    # Read in CRT cascade dimensions
    if dimen_sizes['ncascade'].lower() == 'calculated':
        logging.info('\nReading CRT dimensions')
        logging.debug('  {}'.format(crt_dimension_path))
        with open(crt_dimension_path, 'r') as input_f:
            crt_dimen_lines = [line.strip() for line in input_f.readlines()]
        if not crt_dimen_lines:
            logging.error('\nERROR: The CRT dimensions file is empty\n')
            sys.exit()
        crt_dimen_break_i_list = [
            i for i, x in enumerate(crt_dimen_lines) if x == break_str
        ]
        for i in crt_dimen_break_i_list:
            if crt_dimen_lines[i + 1] not in ['ncascade']:
                continue
            logging.info('  {} = {}'.format(crt_dimen_lines[i + 1],
                                            crt_dimen_lines[i + 2]))
            dimen_sizes[crt_dimen_lines[i + 1]] = int(crt_dimen_lines[i + 2])
        del crt_dimen_lines, crt_dimen_break_i_list
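
        # The scan above expects '####'-delimited blocks in the CRT
        # dimensions file, e.g. (hypothetical size):
        #   ####
        #   ncascade
        #   12345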

    # Read in CRT groundwater cascade dimensions
    if dimen_sizes['ncascdgw'].lower() == 'calculated':
        logging.info('\nReading CRT groundwater cascade dimensions')
        logging.debug('  {}'.format(crt_gw_dimension_path))
        with open(crt_gw_dimension_path, 'r') as input_f:
            crt_dimen_lines = [line.strip() for line in input_f.readlines()]
        if not crt_dimen_lines:
            logging.error(
                '\nERROR: The CRT groundwater dimensions file is empty\n')
            sys.exit()
        crt_dimen_break_i_list = [
            i for i, x in enumerate(crt_dimen_lines) if x == break_str
        ]
        for i in crt_dimen_break_i_list:
            if crt_dimen_lines[i + 1] not in ['ncascdgw']:
                continue
            logging.info('  {} = {}'.format(crt_dimen_lines[i + 1],
                                            crt_dimen_lines[i + 2]))
            dimen_sizes[crt_dimen_lines[i + 1]] = int(crt_dimen_lines[i + 2])
        del crt_dimen_lines, crt_dimen_break_i_list

    # Set CONFIG file dimension values
    config_file_dimensions = [
        d_name for d_name, d_size in sorted(dimen_sizes.items())
        if type(d_size) is str and d_size.lower() == 'config_file'
    ]
    if config_file_dimensions:
        logging.info('Reading configuration file dimensions')
        for dimen_name in config_file_dimensions:
            logging.info('  {}'.format(dimen_name))
            try:
                dimen_sizes[dimen_name] = inputs_cfg.getint(
                    'INPUTS', dimen_name)
            except ConfigParser.NoOptionError:
                logging.error(
                    '  Dimension set to "config_file" in {} but not found in '
                    'config file, exiting'.format(
                        os.path.basename(prms_dimen_csv_path)))
                sys.exit()

    # Link HRU fishnet field names to parameter names in '.param'
    param_names = dict()
    param_files = dict()
    param_dimen_counts = dict()
    param_dimen_names = dict()
    param_value_counts = dict()
    param_types = dict()
    param_defaults = dict()
    param_values = defaultdict(dict)

    # Read in parameters from CSV
    logging.info('\nReading parameters CSV')
    with open(prms_param_csv_path, 'r') as input_f:
        param_lines = input_f.readlines()
    param_lines = [l.strip().split(',') for l in param_lines]
    header = param_lines[0]
    for line in param_lines[1:]:
        # Get parameters from CSV line
        param_name = line[header.index('NAME')]
        logging.debug('  {}'.format(param_name))
        # Multiple dimensions are assumed to be separated by semicolons
        # (use a separate name here to avoid shadowing the dimen_names
        #  dict built from the dimensions CSV above)
        param_dimen_list = line[header.index('DIMENSION_NAMES')].split(';')

        # What should the default parameter file name be if not set?
        if single_param_file_flag:
            param_file = os.path.join(prms_parameter_ws,
                                      single_param_file_name)
        elif 'PARAM_FILE' not in header:
            param_file = os.path.join(prms_parameter_ws, 'prms_inputs.param')
            logging.info('  PARAM_FILE field not in parameters CSV\n'
                         '  Defaulting to {}'.format(param_file))
        elif line[header.index('PARAM_FILE')] == '':
            param_file = os.path.join(prms_parameter_ws, 'prms_inputs.param')
            logging.info('  PARAM_FILE value not set for parameter: {}\n'
                         '  Defaulting to {}'.format(param_name, param_file))
        else:
            param_file = os.path.join(
                prms_parameter_ws, line[header.index('PARAM_FILE')] + '.param')

        # Check that parameter type is 1, 2, 3, or 4
        param_type = int(line[header.index('TYPE')])
        if param_type not in [1, 2, 3, 4]:
            logging.error('\nERROR: Parameter type {} is invalid'
                          '\nERROR: {}'.format(param_type, line))
            sys.exit()

        # This will initially read defaults in as a list
        param_default = line[header.index('DEFAULT_VALUE'):]

        # Remove empty strings so the int/float checks below only see
        # actual values
        param_default = [l for l in param_default if l]

        # For empty lists, set to none
        if not param_default:
            param_default = None
        # For single value lists, get first value
        # Check that param_default is a number or field name
        elif len(param_default) == 1:
            param_default = param_default[0]
            if isfloat(param_default) and param_type == 1:
                param_default = int(param_default)
            elif isfloat(param_default) and param_type in [2, 3]:
                param_default = float(param_default)
            elif param_default.lower() in [
                    'calculated', 'config_file', 'crt_file'
            ]:
                pass
            elif arcpy.ListFields(hru.polygon_path, param_default):
                pass
            else:
                logging.error('\nERROR: Default value {} was not parsed'
                              '\nERROR: {}'.format(param_default, line))
                sys.exit()
        # For multi-value lists, convert values to int/float
        elif len(param_default) >= 2:
            if param_type == 1:
                param_default = map(int, param_default)
            elif param_type in [2, 3]:
                param_default = map(float, param_default)
            else:
                logging.error('\nERROR: Default value {} was not parsed'
                              '\nERROR: {}'.format(param_default, line))
                sys.exit()

        # Check that dimension names are valid
        for dimen_name in param_dimen_list:
            if dimen_name not in dimen_sizes:
                logging.error('\nERROR: The dimension {} is not set in the '
                              'dimension CSV file'.format(dimen_name))
                sys.exit()

        # Calculate number of dimensions
        dimen_count = str(len(param_dimen_list))

        # Calculate number of values
        values_count = prod(
            [int(dimen_sizes[dn]) for dn in param_dimen_list
             if dimen_sizes[dn]])

        # Write parameter to dictionaries
        param_names[param_name] = param_name
        param_files[param_name] = param_file
        param_dimen_counts[param_name] = dimen_count
        param_dimen_names[param_name] = param_dimen_list
        param_value_counts[param_name] = values_count
        param_types[param_name] = param_type
        param_defaults[param_name] = param_default
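
    # Illustrative parameters CSV rows (hypothetical values);
    # DIMENSION_NAMES holds semicolon-separated names, and DEFAULT_VALUE
    # must be the last column since any trailing cells are read in as a
    # list default:
    #   NAME,PARAM_FILE,DIMENSION_NAMES,TYPE,DEFAULT_VALUE
    #   hru_elev,prms_parameters,nhru,2,DEM_ADJ
    #   tmax_allsnow,prms_parameters,one,2,32.0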

    # Apply default values to full dimension
    logging.info('\nSetting static parameters from defaults')
    for param_name, param_default in param_defaults.items():
        param_value_count = param_value_counts[param_name]
        # Skip if not set
        if param_default is None:
            continue
        # Skip if still a string (field names)
        elif type(param_default) is str:
            continue
        # For float/int, apply default across dimension size
        elif type(param_default) is float or type(param_default) is int:
            for i in range(param_value_count):
                param_values[param_name][i] = param_default
        # For lists of floats, match up one-to-one for now
        elif len(param_default) == param_value_count:
            for i in range(param_value_count):
                param_values[param_name][i] = param_default[i]
        else:
            logging.error('\nERROR: The default value(s) ({0}) could not be '
                          'broadcast to the dimension length ({1})'.format(
                              param_default, param_value_count))
            sys.exit()

    # Set CONFIG file parameter values
    config_file_parameters = [
        p_name for p_name, p_value in sorted(param_defaults.items())
        if type(p_value) is str and p_value.lower() == 'config_file'
    ]
    if config_file_parameters:
        logging.info('Reading configuration file parameters')
        for param_name in config_file_parameters:
            logging.info('  {}'.format(param_name))
            try:
                values = inputs_cfg.get('INPUTS', param_name)
            except ConfigParser.NoOptionError:
                logging.error(
                    '  Parameter set to "config_file" in {} but not found in '
                    'config file, exiting'.format(
                        os.path.basename(prms_param_csv_path)))
                sys.exit()

            # Convert comma-separated strings to lists
            param_values[param_name] = {
                i: v
                for i, v in enumerate(values.split(','))
            }

            # Convert the strings to the appropriate type
            if param_types[param_name] == 1:
                param_values[param_name] = {
                    k: int(v)
                    for k, v in param_values[param_name].items()
                }
            elif param_types[param_name] in [2, 3]:
                param_values[param_name] = {
                    k: float(v)
                    for k, v in param_values[param_name].items()
                }

            # Try and honor dimension value from CSV
            # Repeat values if actual value count doesn't match expected count
            #   (from dimensions)
            # For now, only apply to INI parameters with a single value
            #   and dimensions greater than 1
            param_value_count = param_value_counts[param_name]
            if ((len(param_values[param_name]) != param_value_count)
                    and (len(param_values[param_name]) == 1)
                    and (param_value_count > 1)):
                value = param_values[param_name].copy()
                param_values[param_name] = {}
                for i in range(param_value_count):
                    param_values[param_name][i] = value[0]
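
            # e.g. (hypothetical): a single INI value 1.0 for a parameter
            # dimensioned nmonths (12) is repeated so that
            # param_values[param_name] = {0: 1.0, 1: 1.0, ..., 11: 1.0}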

    # Read in HRU parameter data from fishnet polygon
    logging.info('\nReading in variable parameters from fishnet')
    param_fields = {
        k: v
        for k, v in param_defaults.items()
        if (type(v) is str
            and v.lower() not in ['calculated', 'config_file', 'crt_file'])
    }
    # Note: for an unmodified dict, items() and values() return matching
    # order, so field_i in the cursor loop below lines up with value_fields
    value_fields = param_fields.values()

    # Use HRU_ID to uniquely identify each cell
    if hru.id_field not in value_fields:
        value_fields.append(hru.id_field)
    hru_id_i = value_fields.index(hru.id_field)

    # Read in each cell parameter value
    with arcpy.da.SearchCursor(hru.polygon_path, value_fields) as s_cursor:
        for row in s_cursor:
            for field_i, (param, field) in enumerate(param_fields.items()):
                if param_types[param] == 1:
                    param_values[param][row[hru_id_i]] = int(row[field_i])
                elif param_types[param] in [2, 3]:
                    param_values[param][row[hru_id_i]] = float(row[field_i])
                elif param_types[param] == 4:
                    param_values[param][row[hru_id_i]] = row[field_i]
                # param_values[param][row[hru_id_i]] = row[field_i]

    # Calculate number of columns
    with arcpy.da.SearchCursor(hru.polygon_path,
                               (hru.id_field, hru.col_field)) as s_cursor:
        ncol = len(list(set([int(row[1]) for row in s_cursor])))

    # # DEADBEEF - Per Rich this is not needed anymore
    # # The following will override the parameter CSV values
    # # Calculate basin_area from active cells (land and lake)
    # logging.info('\nCalculating basin area')
    # param_names['basin_area'] = 'basin_area'
    # param_dimen_counts['basin_area'] = 1
    # param_dimen_names['basin_area'] = ['one']
    # param_value_counts['basin_area'] = dimen_sizes['one']
    # param_types['basin_area'] = 2
    # value_fields = (hru.id_field, hru.type_field, hru.area_field)
    # with arcpy.da.SearchCursor(hru.polygon_path, value_fields) as s_cursor:
    #     param_values['basin_area'][0] = sum(
    #         [float(row[2]) for row in s_cursor if int(row[1]) >= 1])
    # logging.info('  basin_area = {} acres'.format(
    #     param_values['basin_area'][0]))

    # Convert DEM_ADJ units (if necessary)
    if elev_unit_scalar != 1.0:
        logging.info('\nScaling DEM_ADJ units')
        logging.info('  DEM Units:  {}'.format(dem_units))
        logging.info('  Elev Units: {}'.format(elev_units))
        logging.info('  Multiplier: {}'.format(elev_unit_scalar))
        param_values['hru_elev'] = {
            k: v * elev_unit_scalar
            for k, v in param_values['hru_elev'].items()
        }
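        # e.g. dem_units='meter' with elev_units='feet' gives a scalar of
        # 1.0 / 0.3048 ~= 3.2808, so a 100.0 m value becomes ~328.08 ft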

    # Calculate mean monthly maximum temperature for all active cells
    logging.info('\nCalculating tmax_index')
    logging.info('  Converting Celsius to Fahrenheit')
    param_names['tmax_index'] = 'tmax_index'
    param_dimen_counts['tmax_index'] = 1
    param_dimen_names['tmax_index'] = ['nmonths']
    param_value_counts['tmax_index'] = int(dimen_sizes['nmonths'])
    param_types['tmax_index'] = 2
    tmax_field_list = ['TMAX_{:02d}'.format(m) for m in range(1, 13)]
    for i, tmax_field in enumerate(tmax_field_list):
        tmax_values = [
            row[1] for row in arcpy.da.SearchCursor(
                hru.polygon_path, (hru.type_field, tmax_field),
                where_clause='"{}" >= 1'.format(hru.type_field))
        ]
        # Use float() so the mean is not truncated under Python 2
        tmax_c = sum(tmax_values) / float(len(tmax_values))
        tmax_f = 1.8 * tmax_c + 32
        param_values['tmax_index'][i] = tmax_f
        logging.info('  {} = {}'.format(tmax_field,
                                        param_values['tmax_index'][i]))
        del tmax_values

    logging.info('\nCalculating tmax_adj/tmin_adj')
    param_names['tmax_adj'] = 'tmax_adj'
    param_names['tmin_adj'] = 'tmin_adj'
    param_types['tmax_adj'] = 2
    param_types['tmin_adj'] = 2
    if temp_calc_method in ['ZONES']:
        param_dimen_counts['tmax_adj'] = 2
        param_dimen_counts['tmin_adj'] = 2
        param_dimen_names['tmax_adj'] = ['nhru', 'nmonths']
        param_dimen_names['tmin_adj'] = ['nhru', 'nmonths']
        param_value_counts['tmax_adj'] = 12 * fishnet_count
        param_value_counts['tmin_adj'] = 12 * fishnet_count

        # Read the Tmax/Tmin adjust values from the shapefile
        # This could probably be simplified to a single search cursor pass
        tmax_adj_values = []
        tmin_adj_values = []
        tmax_adj_field_list = [
            'TMX_ADJ_{:02d}'.format(m) for m in range(1, 13)
        ]
        tmin_adj_field_list = [
            'TMN_ADJ_{:02d}'.format(m) for m in range(1, 13)
        ]
        for i, tmax_adj_field in enumerate(tmax_adj_field_list):
            tmax_adj_values.extend([
                float(row[1]) for row in sorted(
                    arcpy.da.SearchCursor(hru.polygon_path, (hru.id_field,
                                                             tmax_adj_field)))
            ])
        for i, tmin_adj_field in enumerate(tmin_adj_field_list):
            tmin_adj_values.extend([
                float(row[1]) for row in sorted(
                    arcpy.da.SearchCursor(hru.polygon_path, (hru.id_field,
                                                             tmin_adj_field)))
            ])
        for i, value in enumerate(tmax_adj_values):
            param_values['tmax_adj'][i] = value
        for i, value in enumerate(tmin_adj_values):
            param_values['tmin_adj'][i] = value
        del tmax_adj_values, tmin_adj_values

        # # This needs to be tested/compared with values from the above approach
        # # Process the tmax/tmin values in one pass of the search cursor
        # fields = [hru.id_field] + tmax_adj_field_list + tmin_adj_field_list
        # with arcpy.da.SearchCursor(hru.polygon_path, fields) as search_c:
        #     for r_i, row in enumerate(sorted(search_c)):
        #         for f_i in range(12):
        #             param_values['tmax_adj'][r_i * f_i] = float(row[f_i + 1])
        #             param_values['tmin_adj'][r_i * f_i] = float(row[f_i + 13])
        #         # for f_i in range(len(tmax_adj_field_list):

        # Set/override hru_tsta using HRU_TSTA field
        param_names['hru_tsta'] = 'hru_tsta'
        param_dimen_counts['hru_tsta'] = 1
        param_dimen_names['hru_tsta'] = ['nhru']
        param_value_counts['hru_tsta'] = fishnet_count
        param_types['hru_tsta'] = 1
        fields = (hru.id_field, 'HRU_TSTA')
        with arcpy.da.SearchCursor(hru.polygon_path, fields) as search_c:
            for row_i, row in enumerate(sorted(search_c)):
                param_values['hru_tsta'][row_i] = int(row[1])

        # DEADBEEF - Do these parameters need to be set or overridden
        # ntemp, elev_units, basin_tsta, hru_tlaps, tsta_elev

    elif temp_calc_method in ['1STA', 'LAPSE']:
        # Set the tmax_adj/tmin_adj dimensions
        param_dimen_counts['tmax_adj'] = 1
        param_dimen_counts['tmin_adj'] = 1
        param_dimen_names['tmax_adj'] = ['nhru']
        param_dimen_names['tmin_adj'] = ['nhru']
        param_value_counts['tmax_adj'] = fishnet_count
        param_value_counts['tmin_adj'] = fishnet_count

        # Read the tmax_adj/tmin_adj parameter values from the shapefile
        fields = (hru.id_field, 'TMAX_ADJ', 'TMIN_ADJ')
        with arcpy.da.SearchCursor(hru.polygon_path, fields) as search_c:
            for row_i, row in enumerate(sorted(search_c)):
                param_values['tmax_adj'][row_i] = float(row[1])
                param_values['tmin_adj'][row_i] = float(row[2])

    logging.info('\nCalculating rain_adj/snow_adj')
    ratio_field_list = ['PPT_RT_{:02d}'.format(m) for m in range(1, 13)]
    param_names['rain_adj'] = 'rain_adj'
    param_dimen_counts['rain_adj'] = 2
    param_dimen_names['rain_adj'] = ['nhru', 'nmonths']
    param_value_counts['rain_adj'] = 12 * fishnet_count
    param_types['rain_adj'] = 2

    param_names['snow_adj'] = 'snow_adj'
    param_dimen_counts['snow_adj'] = 2
    param_dimen_names['snow_adj'] = ['nhru', 'nmonths']
    param_value_counts['snow_adj'] = 12 * fishnet_count
    param_types['snow_adj'] = 2

    ratio_values = []
    for i, ratio_field in enumerate(ratio_field_list):
        ratio_values.extend([
            float(row[1]) for row in sorted(
                arcpy.da.SearchCursor(hru.polygon_path, (hru.id_field,
                                                         ratio_field)))
        ])
    for i, value in enumerate(ratio_values):
        param_values['rain_adj'][i] = value
        param_values['snow_adj'][i] = value
    del ratio_values

    logging.info('\nCalculating subbasin_down')
    param_names['subbasin_down'] = 'subbasin_down'
    param_dimen_counts['subbasin_down'] = 1
    param_dimen_names['subbasin_down'] = ['nsub']
    param_value_counts['subbasin_down'] = dimen_sizes['nsub']
    param_types['subbasin_down'] = 1
    # Get list of subbasins and downstream cell for each stream/lake cell
    # Downstream is calculated from flow direction
    # logging.info('Cell out-flow dictionary')
    cell_dict = dict()
    fields = [
        hru.type_field, hru.krch_field, hru.lake_id_field, hru.subbasin_field,
        hru.flow_dir_field, hru.col_field, hru.row_field, hru.id_field
    ]
    for row in arcpy.da.SearchCursor(hru.polygon_path, fields):
        # Skip inactive cells
        if int(row[0]) == 0:
            continue
        # Skip non-lake and non-stream cells
        elif (int(row[1]) == 0 and int(row[2]) == 0):
            continue
        # Read in parameters
        cell = (int(row[5]), int(row[6]))
        # support.next_row_col(FLOW_DIR, CELL)
        # HRU_ID, SUBBASIN, NEXT_CELL
        cell_dict[cell] = [
            int(row[7]),
            int(row[3]),
            support.next_row_col(int(row[4]), cell)
        ]
        del cell
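
    # At this point cell_dict maps (col, row) -> [HRU_ID, SUBBASIN, NEXT_CELL]
    # e.g. (hypothetical): cell_dict[(10, 42)] = [4217, 3, (10, 43)]
    # with NEXT_CELL computed by support.next_row_col() from the flow direction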

    # Get subset of cells if subbasin != next_subbasin
    subbasin_list = []
    # CELL, (HRU_ID, SUBBASIN, NEXT_CELL)
    # for cell, row in cell_dict.items():
    for cell, (hru_id, subbasin, next_cell) in cell_dict.items():
        # Skip cells that are already subbasin 0 (inactive?)
        # If next cell isn't in list, assume next cell is out of the model
        #   and set exit gauge subbasin to 0
        # If the subbasin of the current cell doesn't match the subbasin
        #   of the next cell, save the down subbasin
        if subbasin == 0:
            continue
        elif next_cell not in cell_dict:
            if [subbasin, 0] not in subbasin_list:
                subbasin_list.append([subbasin, 0])
        elif subbasin != cell_dict[next_cell][1]:
            subbasin_list.append([subbasin, cell_dict[next_cell][1]])
    for i, (subbasin, subbasin_down) in enumerate(sorted(subbasin_list)):
        param_values['subbasin_down'][i] = subbasin_down
        logging.debug('  {}'.format(param_values['subbasin_down'][i]))
    del subbasin_list

    # Switch SWALE points back to hru_type 1 or 2
    logging.info('\nResetting SWALE point HRU_TYPE')
    fields = [hru.type_field, hru.id_field, hru.lake_id_field]
    for row in arcpy.da.SearchCursor(hru.polygon_path, fields):
        # Skip inactive cells
        if int(row[0]) != 3:
            continue
        elif int(row[2]) > 0:
            param_values['hru_type'][row[1]] = 2
        else:
            param_values['hru_type'][row[1]] = 1

    # # DEADBEEF - lake_hru is not used in PRMS 3.0.X or gsflow
    # #   It is used in PRMS 4.0 though
    # # lake_hru parameter
    # logging.info('\nCalculating LAKE_HRU from HRU_ID for all lake HRU\'s')
    # param_names['lake_hru'] = 'lake_hru'
    # param_dimen_counts['lake_hru'] = 1
    # param_dimen_names['lake_hru'] = ['nlake']
    # param_value_counts['lake_hru'] = dimen_sizes['nlake']
    # param_types['lake_hru'] = 1
    # lake_hru_id_list = [
    #    row[1] for row in arcpy.da.SearchCursor(
    #        hru.polygon_path, (hru.type_field, hru.id_field))
    #    if int(row[0]) == 2]
    # for i,lake_hru_id in enumerate(sorted(lake_hru_id_list)):
    #    # logging.debug('  {} {}'.format(i, lake_hru_id))
    #    param_values['lake_hru'][i] = lake_hru_id

    # Read in CRT parameters
    logging.info('\nReading CRT parameters')
    with open(crt_parameter_path, 'r') as input_f:
        crt_param_lines = [line.strip() for line in input_f.readlines()]
    # Wrap the lines in an enumerate iterator so its .next() method can
    # advance through the file manually
    crt_param_enumerate = enumerate(crt_param_lines)
    for crt_param_line in crt_param_enumerate:
        if crt_param_line[1] == break_str:
            # Skip break string
            crt_param_line = crt_param_enumerate.next()
            # Read parameter name and get next line
            param_name = crt_param_line[1]
            param_names[param_name] = param_name
            crt_param_line = crt_param_enumerate.next()
            # Read dimension count and get next line
            param_dimen_counts[param_name] = int(crt_param_line[1])
            crt_param_line = crt_param_enumerate.next()
            # For each dimen (based on count) read in dimension name
            param_dimen_names[param_name] = []
            for dimen_i in range(param_dimen_counts[param_name]):
                param_dimen_names[param_name].append(crt_param_line[1])
                crt_param_line = crt_param_enumerate.next()
            # Read in number of parameter values
            param_value_counts[param_name] = int(crt_param_line[1])
            crt_param_line = crt_param_enumerate.next()
            # Read in parameter type
            param_types[param_name] = int(crt_param_line[1])
            # Read in parameter values
            # Calling .next() at the top of the loop is intentional;
            # calling it after reading the value would skip the next break
            for i in range(param_value_counts[param_name]):
                crt_param_line = crt_param_enumerate.next()
                if param_types[param_name] == 1:
                    param_values[param_name][i] = int(crt_param_line[1])
                if param_types[param_name] in [2, 3]:
                    param_values[param_name][i] = float(crt_param_line[1])
                if param_types[param_name] == 4:
                    param_values[param_name][i] = crt_param_line[1]
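
    # Each CRT block consumed above follows the same '####'-delimited
    # layout as the parameter file sketch earlier, e.g. (hypothetical
    # name and values):
    #   ####
    #   hru_up_id
    #   1
    #   ncascade
    #   3
    #   1
    #   10
    #   11
    #   12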

    # Read in CRT groundwater parameters
    logging.info('Reading CRT groundwater parameters')
    with open(crt_gw_parameter_path, 'r') as input_f:
        crt_param_lines = [line.strip() for line in input_f.readlines()]
    # Wrap the lines in an enumerate iterator so its .next() method can
    # advance through the file manually
    crt_param_enumerate = enumerate(crt_param_lines)
    for crt_param_line in crt_param_enumerate:
        if crt_param_line[1] == break_str:
            # Skip break string
            crt_param_line = crt_param_enumerate.next()
            # Read parameter name and get next line
            param_name = crt_param_line[1]
            param_names[param_name] = param_name
            crt_param_line = crt_param_enumerate.next()
            # Read dimension count and get next line
            param_dimen_counts[param_name] = int(crt_param_line[1])
            crt_param_line = crt_param_enumerate.next()
            # For each dimen (based on count) read in dimension name
            param_dimen_names[param_name] = []
            for dimen_i in range(param_dimen_counts[param_name]):
                param_dimen_names[param_name].append(crt_param_line[1])
                crt_param_line = crt_param_enumerate.next()
            # Read in number of parameter values
            param_value_counts[param_name] = int(crt_param_line[1])
            crt_param_line = crt_param_enumerate.next()
            # Read in parameter type
            param_types[param_name] = int(crt_param_line[1])
            # Read in parameter values
            # Calling .next() at the top of the loop is intentional;
            # calling it after reading the value would skip the next break
            for i in range(param_value_counts[param_name]):
                crt_param_line = crt_param_enumerate.next()
                if param_types[param_name] == 1:
                    param_values[param_name][i] = int(crt_param_line[1])
                if param_types[param_name] in [2, 3]:
                    param_values[param_name][i] = float(crt_param_line[1])
                if param_types[param_name] == 4:
                    param_values[param_name][i] = crt_param_line[1]
    del crt_param_enumerate, crt_param_lines

    # # Add lake HRU's to groundwater cascades
    # logging.info('Modifying CRT groundwater parameters for all lake HRU\'s')
    # logging.info('  gw_up_id = HRU_ID (lake)')
    # logging.info('  gw_down_id = 0')
    # # logging.info('  gw_strmseg_down_id = OUTSEG')
    # logging.info('  gw_strmseg_down_id = 2')
    # logging.info('  gw_pct_up = 1')
    # field_list = [hru.type_field, hru.id_field, hru.outseg_field,
    #              hru.outflow_field]
    # lake_hru_id_dict = dict([
    #    (row[1], row[2])
    #    for row in arcpy.da.SearchCursor(hru.polygon_path, field_list)
    #    if int(row[0]) == 2 and int(row[3]) == 0])
    # for lake_hru_id, outseg in sorted(lake_hru_id_dict.items()):
    #    # if lake_hru_id == 9128:
    #        # print lake_hru_id, outseg
    #    # raw_input('ENTER')
    #    i = dimen_sizes['ncascdgw']
    #    dimen_sizes['ncascdgw'] += 1
    #    param_values['gw_up_id'][i] = lake_hru_id
    #    param_values['gw_down_id'][i] = 0
    #    # DEADBEEF - PRMS didn't like when set to OUTSEG, but 2 worked?
    #    # param_values['gw_strmseg_down_id'][i] = outseg
    #    param_values['gw_strmseg_down_id'][i] = 2
    #    # DEADBEEF - Trying 0
    #    # param_values['gw_strmseg_down_id'][i] = 0
    #    param_values['gw_pct_up'][i] = 1.00
    #    # print param_values['gw_up_id'][i]
    #    # print param_values['gw_down_id'][i]
    #    # print param_values['gw_strmseg_down_id'][i]
    #    # print param_values['gw_pct_up'][i]
    # param_value_counts['gw_up_id'] = int(dimen_sizes['ncascdgw'])
    # param_value_counts['gw_down_id'] = int(dimen_sizes['ncascdgw'])
    # param_value_counts['gw_strmseg_down_id'] = int(dimen_sizes['ncascdgw'])
    # param_value_counts['gw_pct_up'] = int(dimen_sizes['ncascdgw'])
    # logging.info('  ncascade = {}'.format(dimen_sizes['ncascade']))
    # logging.info('  ncascdgw = {}'.format(dimen_sizes['ncascdgw']))
    # # raw_input('ENTER')

    # DEADBEEF
    # Override -999 values
    # logging.info('\nChanging SOIL_MOIST_MAX nodata (-999) to 2')
    # for i,v in param_values['soil_moist_max'].items():
    #    if v == -999: param_values['soil_moist_max'][i] = 2
    # logging.info('Changing SOIL_RECHR_MAX nodata (-999) to 1')
    # for i,v in param_values['soil_rechr_max'].items():
    #    if v == -999: param_values['soil_rechr_max'][i] = 1
    # logging.info('Changing SAT_THRESHOLD nodata (-999) to 4')
    # for i,v in param_values['sat_threshold'].items():
    #    if v == -999: param_values['sat_threshold'][i] = 4

    # Override negative values
    # logging.info('Changing negative SSR2GW_RATE (< 0) to 0.1 (PRMS default)')
    # for i,v in param_values['ssr2gw_rate'].items():
    #    if v < 0: param_values['ssr2gw_rate'][i] = 0.1
    # raw_input('ENTER')

    # Write dimensions/parameters to PRMS param file
    logging.info('\nWriting parameter file(s)')
    prms_parameter_paths = sorted(
        list(set(param_files.values() + dimen_files.values())))

    for prms_parameter_path in prms_parameter_paths:
        logging.info('{}'.format(prms_parameter_path))
        if os.path.isfile(prms_parameter_path):
            logging.debug('  Removing existing file')
            os.remove(prms_parameter_path)
        # Get parameters and dimensions for each file
        param_name_list = sorted([
            p_name for p_name, p_file in param_files.items()
            if p_file == prms_parameter_path
        ])
        dimen_name_list = sorted([
            d_name for d_name, d_file in dimen_files.items()
            if d_file == prms_parameter_path
        ])

        with open(prms_parameter_path, 'w') as output_f:
            output_f.write(file_header_str + '\n')

            # Write dimensions
            if dimen_name_list:
                output_f.write(dimen_header_str + '\n')
                logging.debug('  Set dimensions')
            for dimen_name in dimen_name_list:
                try:
                    dimen_size = dimen_sizes[dimen_name]
                except KeyError:
                    continue
                if (type(dimen_size) is str
                        and dimen_size.lower() in ['calculated']):
                    logging.debug(
                        '    Dimension {} not calculated'.format(dimen_name))
                    continue
                logging.debug('    {}'.format(dimen_name))
                output_f.write(break_str + '\n')
                output_f.write(dimen_name + '\n')
                output_f.write(str(dimen_size) + '\n')

            # Then write set parameters
            if param_name_list:
                output_f.write(param_header_str + '\n')
                logging.debug('  Set parameters')
            for param_name in param_name_list:
                if param_name not in param_values:
                    # logging.debug(param_name)
                    continue
                logging.debug('    {}'.format(param_name))

                output_f.write(break_str + '\n')
                output_f.write('{}\n'.format(param_name))
                output_f.write('{}\n'.format(param_dimen_counts[param_name]))
                for dimen_name in param_dimen_names[param_name]:
                    output_f.write(dimen_name + '\n')
                output_f.write(str(param_value_counts[param_name]) + '\n')
                param_type = param_types[param_name]
                output_f.write(str(param_type) + '\n')

                # Get list of values sorted by parameter name
                sorted_param_values = [
                    v for i, v in sorted(param_values[param_name].items())
                ]

                # If dimension is "nhru", write values as an array.
                # Write blocks of values for each row
                if ('nhru' in param_dimen_names[param_name]
                        and not param_column_flag):
                    n = ncol
                else:
                    n = 1
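
                # e.g. (hypothetical): ncol = 4 and 8 nhru values gives
                # two lines of four values each, reproducing the grid;
                # with param_column_flag set (or no nhru dimension) each
                # value is written on its own line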

                for i in range(0, len(sorted_param_values), n):
                    values_str = ' '.join([
                        param_formats[param_type].format(v)
                        for v in sorted_param_values[i:i + n]
                    ])
                    output_f.write(values_str + '\n')
