Example #1
def daymet_parameters(
    config_path,
    data_name='PPT',
    overwrite_flag=False,
    debug_flag=False,
):
    """Calculate GSFLOW DAYMET Parameters

    Args:
        config_path (str): Project config file path
        data_name (str): DAYMET data type (ALL, PPT, TMAX, TMIN, etc.)
        overwrite_flag (bool): if True, overwrite existing files
        debug_flag (bool): if True, enable debug level logging

    Returns:
        None
    """

    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'daymet_normals_log.txt'
    log_console = logging.FileHandler(
        filename=os.path.join(hru.log_ws, log_file_name), mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW DAYMET Parameters')

    # DAYMET (projection method, cellsize, and JH flag reuse the PRISM INI keys)
    daymet_ws = inputs_cfg.get('INPUTS', 'daymet_folder')
    daymet_proj_method = inputs_cfg.get('INPUTS', 'prism_projection_method')
    daymet_cs = inputs_cfg.getint('INPUTS', 'prism_cellsize')
    calc_jh_coef_flag = inputs_cfg.getboolean('INPUTS',
                                              'calc_prism_jh_coef_flag')

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()
    # Check that DAYMET folder is valid
    if not os.path.isdir(daymet_ws):
        logging.error(
            '\nERROR: DAYMET folder ({}) does not exist'.format(daymet_ws))
        sys.exit()
    proj_method_list = ['BILINEAR', 'CUBIC', 'NEAREST']
    if daymet_proj_method.upper() not in proj_method_list:
        logging.error('\nERROR: DAYMET projection method must be: {}'.format(
            ', '.join(proj_method_list)))
        sys.exit()
    logging.debug('  Projection method:    {}'.format(
        daymet_proj_method.upper()))

    # Check other inputs
    if daymet_cs <= 0:
        logging.error('\nERROR: DAYMET cellsize must be greater than 0\n')
        sys.exit()

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS 0'
    env.workspace = hru.param_ws
    env.scratchWorkspace = hru.scratch_ws

    # DAYMET data names
    if data_name == 'ALL':
        data_name_list = ['PPT', 'TMAX', 'TMIN']
    else:
        data_name_list = [data_name]

    # Set month list
    month_list = ['{:02d}'.format(m) for m in range(1, 13)]
    # month_list.extend(['annual'])

    # Check fields
    logging.info('\nAdding DAYMET fields if necessary')
    for data_name in data_name_list:
        for month in month_list:
            support.add_field_func(hru.polygon_path,
                                   '{}_{}'.format(data_name, month), 'DOUBLE')

    # Process each DAYMET data type
    logging.info('\nProjecting/clipping DAYMET mean monthly rasters')
    for data_name in data_name_list:
        logging.info('\n{}'.format(data_name))
        daymet_normal_re = re.compile(
            r'daymet_(?P<type>%s)_30yr_normal_(?P<month>\d{2})\.img$' %
            data_name, re.IGNORECASE)

        # Search all files & subfolders in DAYMET folder
        #   for images that match data type
        input_raster_dict = dict()
        for root, dirs, files in os.walk(daymet_ws):
            for file_name in files:
                daymet_normal_match = daymet_normal_re.match(file_name)
                if daymet_normal_match:
                    month_str = daymet_normal_match.group('month')
                    input_raster_dict[month_str] = os.path.join(
                        root, file_name)
        if not input_raster_dict:
            logging.error(
                ('\nERROR: No DAYMET rasters were found matching the ' +
                 'following pattern:\n  {}\n\n').format(
                     daymet_normal_re.pattern))
            sys.exit()

        # DAYMET input data workspace
        # input_ws = os.path.join(daymet_ws, data_name.lower())
        # if not os.path.isdir(input_ws):
        #    logging.error('\nERROR: The DAYMET {} folder does not exist'.format(
        #        data_name.lower()))
        #    sys.exit()

        # DAYMET output data workspace
        output_ws = os.path.join(hru.param_ws, data_name.lower() + '_rasters')
        if not os.path.isdir(output_ws):
            os.mkdir(output_ws)

        # Remove all non year/month rasters in DAYMET temp folder
        logging.info('  Removing existing DAYMET files')
        for item in os.listdir(output_ws):
            if daymet_normal_re.match(item):
                os.remove(os.path.join(output_ws, item))

        # Extract, project/resample, clip
        # Process images by month
        zs_daymet_dict = dict()
        # env.extent = hru.extent
        for month in month_list:
            logging.info('  Month: {}'.format(month))

            # Projected/clipped DAYMET raster
            input_raster = input_raster_dict[month]
            # input_name = 'daymet_{}_30yr_normal_800mM2_{}_bil.bil'.format(
            #    data_name.lower(), input_month)
            # input_raster = os.path.join(input_ws, input_name)
            output_name = 'daymet_{}_normal_{}.img'.format(
                data_name.lower(), month)
            output_raster = os.path.join(output_ws, output_name)

            # Set preferred transforms
            input_sr = arcpy.sa.Raster(input_raster).spatialReference
            transform_str = support.transform_func(hru.sr, input_sr)
            if transform_str:
                logging.debug('  Transform: {}'.format(transform_str))

            # Project DAYMET rasters to HRU coordinate system
            # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
            support.project_raster_func(input_raster, output_raster, hru.sr,
                                        daymet_proj_method.upper(), daymet_cs,
                                        transform_str,
                                        '{} {}'.format(hru.ref_x, hru.ref_y),
                                        input_sr, hru)
            # arcpy.ProjectRaster_management(
            #    input_raster, output_raster, hru.sr,
            #    daymet_proj_method.upper(), daymet_cs, transform_str,
            #    '{} {}'.format(hru.ref_x, hru.ref_y),
            #    input_sr)

            # Save parameters for calculating zonal stats
            zs_field = '{}_{}'.format(data_name, month)
            zs_daymet_dict[zs_field] = [output_raster, 'MEAN']

            # Cleanup
            del input_raster, output_raster, output_name
            del input_sr, transform_str, zs_field

        # Cleanup
        # arcpy.ClearEnvironment('extent')

        # Calculate zonal statistics
        logging.info('\nCalculating DAYMET zonal statistics')
        support.zonal_stats_func(zs_daymet_dict, hru.polygon_path,
                                 hru.point_path, hru)
        del zs_daymet_dict
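

# Illustrative sketch only (not called by the workflow above): the DAYMET
# raster discovery in daymet_parameters() keys each file on the 2-digit month
# captured by the named 'month' group.  The filename below is hypothetical.
def _parse_daymet_month(file_name, data_name='PPT'):
    """Sketch: return the month string from a DAYMET normal filename, or None.

    >>> _parse_daymet_month('daymet_ppt_30yr_normal_07.img')
    '07'
    """
    import re
    daymet_normal_re = re.compile(
        r'daymet_(?P<type>%s)_30yr_normal_(?P<month>\d{2})\.img$' % data_name,
        re.IGNORECASE)
    match = daymet_normal_re.match(file_name)
    return match.group('month') if match else None
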
Example #2
def impervious_parameters(config_path, overwrite_flag=False, debug_flag=False):
    """Calculate GSFLOW Impervious Parameters

    Args:
        config_path (str): Project config file path
        overwrite_flag (bool): if True, overwrite existing files
        debug_flag (bool): if True, enable debug level logging

    Returns:
        None
    """

    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'impervious_parameters_log.txt'
    log_console = logging.FileHandler(
        filename=os.path.join(hru.log_ws, log_file_name), mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW Impervious Parameters')

    # Impervious cover inputs
    imperv_orig_path = inputs_cfg.get('INPUTS', 'impervious_orig_path')
    # imperv_proj_method = inputs_cfg.get('INPUTS', 'impervious_projection_method')
    imperv_proj_method = 'NEAREST'
    imperv_cs = inputs_cfg.getint('INPUTS', 'impervious_cellsize')
    imperv_pct_flag = inputs_cfg.getboolean('INPUTS', 'impervious_pct_flag')

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()
    # Impervious raster must exist
    if not arcpy.Exists(imperv_orig_path):
        logging.error('\nERROR: Impervious raster does not exist')
        sys.exit()

    # Check other inputs
    if imperv_cs <= 0:
        logging.error('\nERROR: Impervious cellsize must be greater than 0')
        sys.exit()
    imperv_proj_method_list = ['BILINEAR', 'CUBIC', 'NEAREST']
    if imperv_proj_method.upper() not in imperv_proj_method_list:
        logging.error(
            '\nERROR: Impervious projection method must be: {}'.format(
                ', '.join(imperv_proj_method_list)))
        sys.exit()

    # Build output folder if necessary
    imperv_temp_ws = os.path.join(hru.param_ws, 'impervious_rasters')
    if not os.path.isdir(imperv_temp_ws):
        os.mkdir(imperv_temp_ws)
    # Output paths
    imperv_path = os.path.join(imperv_temp_ws, 'impervious_cover.img')

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS -1'
    # env.pyramid = 'PYRAMIDS 0'
    env.workspace = imperv_temp_ws
    env.scratchWorkspace = hru.scratch_ws

    # Check field
    logging.info('\nAdding impervious fields if necessary')
    support.add_field_func(hru.polygon_path, hru.imperv_pct_field, 'DOUBLE')
    # add_field_func(hru.polygon_path, hru.carea_min_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.carea_max_field, 'DOUBLE')

    # Impervious cover
    logging.info('\nProjecting/clipping impervious cover raster')
    imperv_orig_sr = arcpy.sa.Raster(imperv_orig_path).spatialReference
    logging.debug('  Impervious GCS:  {}'.format(imperv_orig_sr.GCS.name))
    # Remove existing projected raster
    if arcpy.Exists(imperv_path):
        arcpy.Delete_management(imperv_path)
    # Set preferred transforms
    transform_str = support.transform_func(hru.sr, imperv_orig_sr)
    logging.debug('  Transform: {}'.format(transform_str))
    logging.debug('  Projection method: NEAREST')
    # Project impervious raster
    # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
    # env.extent = hru.extent
    support.project_raster_func(imperv_orig_path, imperv_path, hru.sr,
                                imperv_proj_method, imperv_cs, transform_str,
                                '{} {}'.format(hru.ref_x,
                                               hru.ref_y), imperv_orig_sr, hru)
    # arcpy.ProjectRaster_management(
    #    imperv_orig_path, imperv_path, hru.sr,
    #    imperv_proj_method, imperv_cs, transform_str,
    #    '{} {}'.format(hru.ref_x, hru.ref_y),
    #    imperv_orig_sr)
    # arcpy.ClearEnvironment('extent')

    # List of rasters, fields, and stats for zonal statistics
    zs_imperv_dict = dict()
    zs_imperv_dict[hru.imperv_pct_field] = [imperv_path, 'MEAN']
    # zs_imperv_dict[hru.carea_min_field] = [imperv_path, 'MEAN']
    # zs_imperv_dict[hru.carea_max_field] = [imperv_path, 'MEAN']

    # Calculate zonal statistics
    logging.info('\nCalculating zonal statistics')
    support.zonal_stats_func(zs_imperv_dict, hru.polygon_path, hru.point_path,
                             hru)

    # Calculate CAREA_MIN / CAREA_MAX
    logging.info('\nCalculating CAREA_MIN / CAREA_MAX')
    if imperv_pct_flag:
        arcpy.CalculateField_management(
            hru.polygon_path, hru.imperv_pct_field,
            '0.01 * !{}!'.format(hru.imperv_pct_field), 'PYTHON')
        # arcpy.CalculateField_management(
        #    hru.polygon_path, hru.carea_min_field,
        #    '0.01 * !{}!'.format(hru.imperv_pct_field), 'PYTHON')
        arcpy.CalculateField_management(
            hru.polygon_path, hru.carea_max_field,
            '0.01 * !{}!'.format(hru.imperv_pct_field), 'PYTHON')
    else:
        # arcpy.CalculateField_management(
        #    hru.polygon_path, hru.carea_min_field,
        #    '!{}!'.format(hru.imperv_pct_field), 'PYTHON')
        arcpy.CalculateField_management(hru.polygon_path, hru.carea_max_field,
                                        '!{}!'.format(hru.imperv_pct_field),
                                        'PYTHON')
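

# A plain-Python restatement of the two CalculateField branches above: when
# the impervious raster stores whole percents (imperv_pct_flag), IMPERV_PCT
# and CAREA_MAX are rescaled by 0.01 to fractions; otherwise the zonal mean
# is used as-is.  The helper name is illustrative and not part of the workflow.
def _carea_max_sketch(imperv_zonal_mean, imperv_pct_flag):
    """Sketch: CAREA_MAX for a single HRU from its mean impervious value."""
    if imperv_pct_flag:
        return 0.01 * imperv_zonal_mean   # percent (0-100) -> fraction (0-1)
    else:
        return imperv_zonal_mean          # value is already a fraction (0-1)
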
Example #3
def prism_4km_parameters(
    config_path,
    data_name='ALL',
    overwrite_flag=False,
    debug_flag=False,
):
    """Calculate GSFLOW PRISM Parameters

    Args:
        config_path (str): Project config file path
        data_name (str): PRISM data type (ALL, PPT, TMAX, TMIN, etc.)
        overwrite_flag (bool): if True, overwrite existing files
        debug_flag (bool): if True, enable debug level logging

    Returns:
        None
    """

    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'prism_4km_normals_log.txt'
    log_console = logging.FileHandler(
        filename=os.path.join(hru.log_ws, log_file_name), mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW PRISM Parameters')

    # PRISM
    prism_ws = inputs_cfg.get('INPUTS', 'prism_folder')
    prism_proj_method = inputs_cfg.get('INPUTS', 'prism_projection_method')
    prism_cs = inputs_cfg.getint('INPUTS', 'prism_cellsize')
    calc_jh_coef_flag = inputs_cfg.getboolean('INPUTS',
                                              'calc_prism_jh_coef_flag')

    if calc_jh_coef_flag:
        # DEADBEEF - This could/should be moved to support_functions.py since it is
        #   in this script and in both PRISM scripts.
        # DEM Units
        dem_units = inputs_cfg.get('INPUTS', 'dem_units').lower()
        dem_unit_types = {
            'meters': 'meter',
            'm': 'meter',
            'meter': 'meter',
            'feet': 'feet',
            'ft': 'feet',
            'foot': 'feet',
        }
        try:
            dem_units = dem_unit_types[dem_units]
        except KeyError:
            logging.error(
                '\nERROR: DEM unit "{}" is not supported\n'.format(dem_units))
            sys.exit()
        # Many expressions are hardcoded to units of feet
        # If dem_units are in meters, scale DEM_ADJ to get to feet
        if dem_units == 'meter':
            dem_unit_scalar = 0.3048
        else:
            dem_unit_scalar = 1.0

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()
    # Check that PRISM folder is valid
    if not os.path.isdir(prism_ws):
        logging.error(
            '\nERROR: PRISM folder ({}) does not exist'.format(prism_ws))
        sys.exit()
    proj_method_list = ['BILINEAR', 'CUBIC', 'NEAREST']
    if prism_proj_method.upper() not in proj_method_list:
        logging.error('\nERROR: PRISM projection method must be: {}'.format(
            ', '.join(proj_method_list)))
        sys.exit()
    logging.debug('  Projection method:    {}'.format(
        prism_proj_method.upper()))

    # Check other inputs
    if prism_cs <= 0:
        logging.error('\nERROR: PRISM cellsize must be greater than 0\n')
        sys.exit()

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS 0'
    env.workspace = hru.param_ws
    env.scratchWorkspace = hru.scratch_ws

    # PRISM data names
    if data_name == 'ALL':
        data_name_list = ['PPT', 'TMAX', 'TMIN']
    else:
        data_name_list = [data_name]

    # Set month list
    month_list = ['{0:02d}'.format(m) for m in range(1, 13)]
    # month_list.extend(['annual'])

    # Check fields
    logging.info('\nAdding PRISM fields if necessary')
    for data_name in data_name_list:
        for month in month_list:
            support.add_field_func(hru.polygon_path,
                                   '{}_{}'.format(data_name, month), 'DOUBLE')

    # Process each PRISM data type
    logging.info('\nProjecting/clipping PRISM mean monthly rasters')
    for data_name in data_name_list:
        logging.info('\n{}'.format(data_name))
        prism_normal_re = re.compile(
            r'PRISM_(?P<type>%s)_30yr_normal_4kmM2_(?P<month>\d{2})_bil\.bil$' %
            data_name, re.IGNORECASE)

        # Search all files & subfolders in prism folder
        #   for images that match data type
        input_raster_dict = dict()
        for root, dirs, files in os.walk(prism_ws):
            for file_name in files:
                prism_normal_match = prism_normal_re.match(file_name)
                if prism_normal_match:
                    month_str = prism_normal_match.group('month')
                    input_raster_dict[month_str] = os.path.join(
                        root, file_name)
        if not input_raster_dict:
            logging.error(
                '\nERROR: No PRISM rasters were found matching the '
                'following pattern:\n  {}\n\nDouble check that the script '
                'and folder are for the same resolution '
                '(800m vs 4km)\n\n'.format(prism_normal_re.pattern))
            sys.exit()

        # PRISM input data workspace
        # input_ws = os.path.join(prism_ws, data_name.lower())
        # if not os.path.isdir(input_ws):
        #    logging.error('\nERROR: The PRISM {} folder does not exist'.format(
        #        data_name.lower()))
        #    sys.exit()

        # PRISM output data workspace
        output_ws = os.path.join(hru.param_ws, data_name.lower() + '_rasters')
        if not os.path.isdir(output_ws):
            os.mkdir(output_ws)

        # Remove all non year/month rasters in PRISM temp folder
        logging.info('  Removing existing PRISM files')
        for item in os.listdir(output_ws):
            # if prism_normal_re.match(item) and overwrite_flag:
            if prism_normal_re.match(item):
                os.remove(os.path.join(output_ws, item))

        # Extract, project/resample, clip
        # Process images by month
        zs_prism_dict = dict()
        # env.extent = hru.extent
        for month in month_list:
            logging.info('  Month: {}'.format(month))

            # Projected/clipped PRISM raster
            input_raster = input_raster_dict[month]
            # input_name = 'PRISM_{}_30yr_normal_4kmM2_{1}_bil.bil'.format(
            #    data_name.lower(), input_month)
            # input_raster = os.path.join(input_ws, input_name)
            output_name = 'PRISM_{}_30yr_normal_4kmM2_{}.img'.format(
                data_name.lower(), month)
            output_raster = os.path.join(output_ws, output_name)

            # Set preferred transforms
            input_sr = arcpy.sa.Raster(input_raster).spatialReference
            transform_str = support.transform_func(hru.sr, input_sr)
            if transform_str:
                logging.debug('  Transform: {}'.format(transform_str))

            # Project PRISM rasters to HRU coordinate system
            # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
            support.project_raster_func(input_raster, output_raster, hru.sr,
                                        prism_proj_method.upper(), prism_cs,
                                        transform_str,
                                        '{} {}'.format(hru.ref_x, hru.ref_y),
                                        input_sr, hru)
            # arcpy.ProjectRaster_management(
            #    input_raster, output_raster, hru.sr,
            #    prism_proj_method.upper(), prism_cs, transform_str,
            #    '{} {}'.format(hru.ref_x, hru.ref_y),
            #    input_sr)

            # Save parameters for calculating zonal stats
            zs_field = '{}_{}'.format(data_name, month)
            zs_prism_dict[zs_field] = [output_raster, 'MEAN']

            # Cleanup
            del input_raster, output_raster, output_name
            del input_sr, transform_str, zs_field

        # Cleanup
        # arcpy.ClearEnvironment('extent')

        # Calculate zonal statistics
        logging.info('\nCalculating PRISM zonal statistics')
        support.zonal_stats_func(zs_prism_dict, hru.polygon_path,
                                 hru.point_path, hru)
        del zs_prism_dict

    # Jensen-Haise Potential ET air temperature coefficient
    # Update Jensen-Haise PET estimate using PRISM air temperature
    # DEADBEEF - First need to figure out month with highest Tmax
    #            Then get Tmin for same month
    if calc_jh_coef_flag:
        logging.info('\nRe-Calculating JH_COEF_HRU')
        logging.info('  Using PRISM temperature values')
        tmax_field_list = ['!TMAX_{:02d}!'.format(m) for m in range(1, 13)]
        tmin_field_list = ['!TMIN_{:02d}!'.format(m) for m in range(1, 13)]
        tmax_expr = 'max([{}])'.format(','.join(tmax_field_list))
        arcpy.CalculateField_management(hru.polygon_path, hru.jh_tmax_field,
                                        tmax_expr, 'PYTHON')
        # Sort TMAX and get TMIN for same month
        tmin_expr = 'max(zip([{}],[{}]))[1]'.format(','.join(tmax_field_list),
                                                    ','.join(tmin_field_list))
        arcpy.CalculateField_management(hru.polygon_path, hru.jh_tmin_field,
                                        tmin_expr, 'PYTHON')

        # Pass unit scalar to convert DEM_ADJ to feet if necessary
        support.jensen_haise_func(hru.polygon_path, hru.jh_coef_field,
                                  hru.dem_adj_field, hru.jh_tmin_field,
                                  hru.jh_tmax_field, dem_unit_scalar)
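

# The JH_TMAX / JH_TMIN field expressions above lean on a small Python idiom:
# max() over zip(tmax, tmin) returns the (tmax, tmin) pair for the month with
# the highest TMAX, and [1] pulls out the TMIN of that same month.  A
# standalone sketch with made-up monthly values (not model output):
def _jh_tmax_tmin_sketch(tmax_by_month, tmin_by_month):
    """Sketch: (warmest-month TMAX, TMIN of that same month)."""
    jh_tmax = max(tmax_by_month)
    jh_tmin = max(zip(tmax_by_month, tmin_by_month))[1]
    return jh_tmax, jh_tmin

# _jh_tmax_tmin_sketch([10, 20, 30, 25], [1, 5, 12, 9]) returns (30, 12)
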
Example #4
def soil_parameters(config_path, overwrite_flag=False, debug_flag=False):
    """Calculate GSFLOW Soil Parameters

    Args:
        config_path (str): Project config file path
        overwrite_flag (bool): if True, overwrite existing files
        debug_flag (bool): if True, enable debug level logging

    Returns:
        None
    """

    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error(
            '\nERROR: Config file could not be read, '
            'is not an input file, or does not exist\n'
            '  config_file = {}\n'
            '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'soil_parameters_log.txt'
    log_console = logging.FileHandler(
        filename=os.path.join(hru.log_ws, log_file_name), mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW Soil Parameters')

    # Input parameters
    try:
        soil_pct_flag = inputs_cfg.getboolean('INPUTS', 'soil_pct_flag')
    except ConfigParser.NoOptionError:
        soil_pct_flag = True
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'soil_pct_flag', soil_pct_flag))
    try:
        moist_init_ratio = inputs_cfg.getfloat('INPUTS', 'moist_init_ratio')
    except ConfigParser.NoOptionError:
        moist_init_ratio = 0.1
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'moist_init_ratio', moist_init_ratio))
    try:
        rechr_init_ratio = inputs_cfg.getfloat('INPUTS', 'rechr_init_ratio')
    except ConfigParser.NoOptionError:
        rechr_init_ratio = 0.1
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'rechr_init_ratio', rechr_init_ratio))

    # Read and apply ssr2gw multiplier raster
    # Otherwise default value will be used
    try:
        ssr2gw_mult_flag = inputs_cfg.getboolean('INPUTS', 'ssr2gw_mult_flag')
    except ConfigParser.NoOptionError:
        ssr2gw_mult_flag = False
    try:
        ssr2gw_k_default = inputs_cfg.getfloat('INPUTS', 'ssr2gw_k_default')
    except ConfigParser.NoOptionError:
        ssr2gw_k_default = 0.001
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'ssr2gw_k_default', ssr2gw_k_default))

    # Read and apply soil depth raster
    # Otherwise soil depth will only be derived from rooting depth
    try:
        soil_depth_flag = inputs_cfg.getboolean('INPUTS', 'soil_depth_flag')
    except ConfigParser.NoOptionError:
        soil_depth_flag = False
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'soil_depth_flag', soil_depth_flag))

    # Input folders
    soil_temp_ws = os.path.join(hru.param_ws, 'soil_rasters')
    if not os.path.isdir(soil_temp_ws):
        os.mkdir(soil_temp_ws)

    # Input paths
    awc_path = os.path.join(soil_temp_ws, 'awc.img')
    clay_pct_path = os.path.join(soil_temp_ws, 'clay_pct.img')
    sand_pct_path = os.path.join(soil_temp_ws, 'sand_pct.img')
    ksat_path = os.path.join(soil_temp_ws, 'ksat.img')
    soil_depth_path = os.path.join(soil_temp_ws, 'soil_depth.img')
    soil_root_max_path = os.path.join(soil_temp_ws, 'soil_root_max.img')
    ssr2gw_mult_path = os.path.join(soil_temp_ws, 'ssr2gw_mult.img')

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error(
            '\nERROR: Fishnet ({}) does not exist'.format(
                hru.polygon_path))
        sys.exit()
    # All of the soil rasters must exist
    # Check that the projected/clipped/filled raster exists
    if not arcpy.Exists(awc_path):
        logging.error('\nERROR: AWC raster does not exist')
        sys.exit()
    if not arcpy.Exists(clay_pct_path):
        logging.error('\nERROR: Clay raster does not exist')
        sys.exit()
    if not arcpy.Exists(sand_pct_path):
        logging.error('\nERROR: Sand raster does not exist')
        sys.exit()
    if not arcpy.Exists(ksat_path):
        logging.error('\nERROR: Ksat raster does not exist')
        sys.exit()
    if soil_depth_flag and not arcpy.Exists(soil_depth_path):
        logging.error('\nERROR: Soil depth raster does not exist')
        sys.exit()
    if ssr2gw_mult_flag and not arcpy.Exists(ssr2gw_mult_path):
        logging.error('\nERROR: SSR2GW multiplier raster does not exist')
        sys.exit()
    # Check soil init ratios
    if moist_init_ratio < 0 or moist_init_ratio > 1:
        logging.error('\nERROR: Soil moist_init_ratio must be between 0 & 1')
        sys.exit()
    if rechr_init_ratio < 0 or rechr_init_ratio > 1:
        logging.error('\nERROR: Soil rechr_init_ratio must be between 0 & 1')
        sys.exit()

    # DEM Slope is needed for SSR2GW_RATE
    dem_temp_ws = os.path.join(hru.param_ws, 'dem_rasters')
    dem_slope_path = os.path.join(dem_temp_ws, 'dem_slope.img')
    if not os.path.isdir(dem_temp_ws):
        logging.error(
            '\nERROR: DEM temp folder does not exist\n' +
            '\nERROR: Try re-running dem_2_stream.py')
        sys.exit()
    if not os.path.isfile(dem_slope_path):
        logging.error(
            '\nERROR: Slope raster does not exist\n' +
            '\nERROR: Try re-running dem_2_stream.py')
        sys.exit()

    # Output paths
    # soil_type_path = os.path.join(soil_temp_ws, 'soil_type.img')
    moist_max_path = os.path.join(soil_temp_ws, 'soil_moist_max.img')
    rechr_max_path = os.path.join(soil_temp_ws, 'soil_rechr_max.img')

    # Root depth is calculated by veg script
    veg_temp_ws = os.path.join(hru.param_ws, 'veg_rasters')
    root_depth_path = os.path.join(veg_temp_ws, 'root_depth.img')
    if not arcpy.Exists(root_depth_path):
        logging.error(
            '\nERROR: Root depth raster does not exist' +
            '\nERROR: Try re-running veg_parameters script\n')
        sys.exit()


    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS -1'
    # env.pyramid = 'PYRAMIDS 0'
    env.workspace = soil_temp_ws
    env.scratchWorkspace = hru.scratch_ws

    # Check field
    logging.info('\nAdding soil fields if necessary')
    support.add_field_func(hru.polygon_path, hru.awc_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.clay_pct_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.sand_pct_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.ksat_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.soil_type_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.soil_root_max_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.moist_init_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.moist_max_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.rechr_init_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.rechr_max_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.ssr2gw_rate_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.slowcoef_lin_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.slowcoef_sq_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.ssr2gw_k_field, 'DOUBLE')


    # Compute soil depth as max of root and soil depth
    if soil_depth_flag:
        logging.info('\nComputing max soil depth from root and soil depth')
        soil_depth_obj = arcpy.sa.Con(
           arcpy.sa.Raster(root_depth_path) > arcpy.sa.Raster(soil_depth_path),
           arcpy.sa.Raster(root_depth_path), arcpy.sa.Raster(soil_depth_path))
        soil_depth_obj.save(soil_root_max_path)
    else:
        soil_depth_obj = arcpy.sa.Raster(root_depth_path)

    # Calculate maximum soil moisture
    # logging.info('\nCalculating soil {}'.format(hru.moist_max_field))
    # moist_max_obj = arcpy.sa.Raster(awc_path) * soil_depth_obj
    # moist_max_obj.save(moist_max_path)
    # del moist_max_obj

    # # Calculate soil recharge zone maximum
    # logging.info('Calculating soil {}'.format(hru.rechr_max_field))
    # # Minimum of rooting depth and 18 (inches?)
    # rechr_max_obj = arcpy.sa.Float(
    #     arcpy.sa.Con(soil_depth_obj < 18, soil_depth_obj, 18))
    # rechr_max_obj *= arcpy.sa.Raster(awc_path)
    # rechr_max_obj.save(rechr_max_path)
    # del rechr_max_obj

    # # Read in slope raster and convert to radians
    # dem_slope_obj = math.pi * arcpy.sa.Raster(dem_slope_path) / 180
    # porosity_obj = 0.475
    #
    # # Gravity drainage to groundwater reservoir linear coefficient
    # logging.info('\nCalculating SSR2GW_RATE')
    # logging.info('  Assuming slope is in degrees')
    # logging.info('  Porosity is currently fixed at: {}'.format(
    #     porosity_obj))
    # ssr2gw_rate_obj = (
    #     arcpy.sa.Raster(ksat_path) * porosity_obj * (1 - dem_slope_obj))
    # ssr2gw_rate_obj.save(ssr2gw_rate_path)
    # del ssr2gw_rate_obj
    #
    # # Gravity drainage to groundwater reservoir linear coefficient
    # logging.info('\nCalculating SLOWCOEF_L')
    # logging.info('  Assuming slope is in degrees')
    # logging.info('  Porosity is currently fixed at: {}'.format(
    # slowcoef_lin_obj = (
    #     arcpy.sa.Raster(ksat_path) * math.sin(dem_slope_obj) /
    #     (porosity_obj * hru_length_obj))
    # slowcoef_lin_obj.save(slowcoef_lin_path)
    # del slowcoef_lin_obj, hru_length_obj
    # del dem_slope_obj, porosity_obj
    # # This block ^^ could be used to perform operations on a raster level if wanted


    # List of rasters, fields, and stats for zonal statistics
    zs_soil_dict = dict()
    zs_soil_dict[hru.awc_field] = [awc_path, 'MEAN']
    zs_soil_dict[hru.clay_pct_field] = [clay_pct_path, 'MEAN']
    zs_soil_dict[hru.sand_pct_field] = [sand_pct_path, 'MEAN']
    zs_soil_dict[hru.ksat_field] = [ksat_path, 'MEAN']
    if soil_depth_flag:
        zs_soil_dict[hru.soil_root_max_field] = [soil_root_max_path, 'MEAN']
    else:
        zs_soil_dict[hru.soil_root_max_field] = [root_depth_path, 'MEAN']
    if ssr2gw_mult_flag:
        zs_soil_dict[hru.ssr2gw_k_field] = [ssr2gw_mult_path, 'MEAN']
    # zs_soil_dict[hru.moist_max_field] = [moist_max_path, 'MEAN']
    # zs_soil_dict[hru.rechr_max_field] = [rechr_max_path, 'MEAN']

    # Calculate zonal statistics
    logging.info('\nCalculating zonal statistics')
    support.zonal_stats_func(
        zs_soil_dict, hru.polygon_path, hru.point_path, hru)


    # Make a fishnet layer for calculating fields
    hru_polygon_layer = "hru_polygon_layer"
    arcpy.MakeFeatureLayer_management(
        hru.polygon_path, hru_polygon_layer)

    # Calculate maximum soil moisture
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.moist_max_field,
        '!{}! * !{}!'.format(hru.soil_root_max_field, hru.awc_field),
        'PYTHON')

    # Calculate soil recharge zone maximum
    logging.info('Calculating soil {}'.format(hru.rechr_max_field))
    # Minimum of rooting depth and 18 (inches)
    rech_max_cb = (
        'def rech_max_func(soil_root_max, awc):\n' +
        '    if soil_root_max > 18: return 18*awc\n' +
        '    else: return soil_root_max*awc\n')
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.rechr_max_field,
        'rech_max_func(!{}!, !{}!)'.format(
            hru.soil_root_max_field, hru.awc_field),
        'PYTHON', rech_max_cb)

    # Calculate SOIL_TYPE
    logging.info('\nCalculating {}'.format(hru.soil_type_field))
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "NEW_SELECTION",
        '"{}" = 1'.format(hru.type_field))
    if soil_pct_flag:
        soil_type_pct = (50, 40)
    else:
        soil_type_pct = (0.50, 0.40)
    soil_type_cb = (
        'def soil_type_func(clay, sand):\n' +
        '    if sand > {}: return 1\n' +
        '    elif clay > {}: return 3\n' +
        '    else: return 2\n').format(*soil_type_pct)
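    # The 1/2/3 codes written below follow the PRMS soil_type convention
    # (1 = sand, 2 = loam, 3 = clay); the 50/40 cutoffs above are this
    # script's assumed thresholds for percent sand and percent clay.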
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.soil_type_field,
        'soil_type_func(!{}!, !{}!)'.format(
            hru.clay_pct_field, hru.sand_pct_field),
        'PYTHON', soil_type_cb)
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "SWITCH_SELECTION")
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.soil_type_field, '0', 'PYTHON')

    # Calculate SOIL_MOIST_INIT from SOIL_MOIST_MAX
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "NEW_SELECTION",
        '"{}" = 1 AND "{}" >= 0'.format(
            hru.type_field, hru.moist_max_field))
    logging.info('\nCalculating {0} as {2} * {1}'.format(
        hru.moist_init_field, hru.moist_max_field, moist_init_ratio))
    arcpy.CalculateField_management(
        hru.polygon_path, hru.moist_init_field,
        '!{}! * {}'.format(hru.moist_max_field, moist_init_ratio), 'PYTHON')
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "SWITCH_SELECTION")
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.moist_init_field, '0', 'PYTHON')
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.moist_max_field, '0', 'PYTHON')

    # Calculate SOIL_RECHR_INIT from SOIL_RECHR_MAX
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "NEW_SELECTION",
        '"{}" = 1 AND "{}" >= 0'.format(
            hru.type_field, hru.rechr_max_field))
    logging.info('Calculating {0} as {2} * {1}'.format(
        hru.rechr_init_field, hru.rechr_max_field, rechr_init_ratio))
    arcpy.CalculateField_management(
        hru.polygon_path, hru.rechr_init_field,
        '!{}! * {}'.format(hru.rechr_max_field, rechr_init_ratio), 'PYTHON')
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "SWITCH_SELECTION")
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.rechr_init_field, '0', 'PYTHON')
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.rechr_max_field, '0', 'PYTHON')

    # Calculate SSR2G_KFAC from ssr2gw_mult raster
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "NEW_SELECTION",
        '"{}" = 1 AND "{}" >= 0'.format(
            hru.type_field, hru.ssr2gw_k_field))
    logging.info('Using {1} to calculate {0}'.format(
        hru.ssr2gw_k_field, ssr2gw_mult_path))
    arcpy.CalculateField_management(
        hru.polygon_path, hru.ssr2gw_k_field,
        '!{}!'.format(hru.ssr2gw_k_field), 'PYTHON')

    # Fill SSR2G_K multiplier value if field not set
    logging.info('ssr2gw_k_default = {}'.format(ssr2gw_k_default))
    if (all([row[0] == 0 for row in arcpy.da.SearchCursor(
            hru.polygon_path, [hru.ssr2gw_k_field])])):
        logging.info('Filling {} from default value in config file'.format(
            hru.ssr2gw_k_field))
        arcpy.CalculateField_management(
            hru.polygon_path, hru.ssr2gw_k_field,
            ssr2gw_k_default, 'PYTHON')
    else:
        logging.info(
            ('{} appears to already have been set and ' +
             'will not be overwritten').format(hru.ssr2gw_k_field))

    # Calculating ssr2gw_rate
    # Gravity drainage to groundwater reservoir linear coefficient
    # Default value is 0.1 (range 0-1)
    # Convert Ksat from um/s to in/day
    # ssr2gw_rate = ks / sat_threshold
    # sat_threshold = moist_max * (sand% / 100)
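    # For reference: 1 um/s = 3600 * 24 = 86400 um/day and 1 in = 2.54 * 10000 um,
    # so the (3600 * 24 / (2.54 * 10000)) factor below is ~3.4016 (in/day) per (um/s)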
    logging.info('\nCalculating {}'.format(hru.ssr2gw_rate_field))
    logging.info('  assuming {} is in units of um/s'.format(hru.ksat_field))
    # porosity_flt = 0.475
    ssr2gw_exp = 1
    logging.debug('  using eqn: ssr2gw_rate = ks/sat threshold')
    # logging.debug('  default values: porosity_flt = 0.475')
    logging.debug('  default values: ssr2gw_exp = 1')
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "NEW_SELECTION",
        '"{}" = 1 AND "{}" > 0 AND "{}" > 0'.format(
            hru.type_field, hru.moist_max_field, hru.sand_pct_field))
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.ssr2gw_rate_field,
        '(!{}! * (3600 * 24 / (2.54 * 10000))) * !{}! / (!{}! * (!{}!/ 100))'.format(
            hru.ksat_field, hru.ssr2gw_k_field, hru.moist_max_field, hru.sand_pct_field),
        'PYTHON')
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "SWITCH_SELECTION")
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.ssr2gw_rate_field, '0', 'PYTHON')

    # Calculating slowcoef_lin
    # Default value is 0.015 (range 0-1)
    # Convert Ksat from um/s to m/day
    logging.info('Calculating {}'.format(hru.slowcoef_lin_field))
    logging.info('  {} must be in um/s'.format(hru.ksat_field))
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "NEW_SELECTION",
        '"{}" = 1'.format(hru.type_field))
    slowcoef_lin_cb = (
        'def slowcoef_lin(ksat, slope, cs):\n' +
        '    return 0.1 * ksat * 0.0864 * math.sin(slope) / cs\n')
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.slowcoef_lin_field,
        'slowcoef_lin(!{0}!, !{1}!, {2})'.format(
            hru.ksat_field, hru.dem_slope_rad_field, hru.cs),
        'PYTHON', slowcoef_lin_cb)
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "SWITCH_SELECTION")
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.slowcoef_lin_field, '0', 'PYTHON')

    # Calculating slowcoef_sq
    # Default value is 0.015 (range 0-1)
    # Convert Ksat from um/s to m/day
    logging.info('Calculating {}'.format(hru.slowcoef_sq_field))
    logging.info('  {} must be in um/s'.format(hru.ksat_field))
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "NEW_SELECTION",
        '"{}" = 1 AND "{}" > 0 AND "{}" > 0'.format(
            hru.type_field, hru.moist_max_field, hru.sand_pct_field))
    slowcoef_sq_cb = (
        'def slowcoef_sq(ksat, slope, moist_max, sand, cs):\n' +
        '    return 0.9 * (ksat * 0.0864 * math.sin(slope) / ' +
        '(moist_max * (sand / 100) * cs))\n')
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.slowcoef_sq_field,
        'slowcoef_sq(!{0}!, !{1}!, !{2}!, !{3}!, {4})'.format(
            hru.ksat_field, hru.dem_slope_rad_field,
            hru.moist_max_field, hru.sand_pct_field, hru.cs),
        'PYTHON', slowcoef_sq_cb)
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_layer, "SWITCH_SELECTION")
    arcpy.CalculateField_management(
        hru_polygon_layer, hru.slowcoef_sq_field, '0', 'PYTHON')

    # Cleanup
    arcpy.Delete_management(hru_polygon_layer)
    del hru_polygon_layer
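

# A plain-Python restatement of the SLOWCOEF field-calculator expressions
# above, with the unit factor spelled out: 1 um/s = 1e-6 m/s * 86400 s/day =
# 0.0864 m/day.  It assumes, as the expressions do, that slope is already in
# radians and that cs is the HRU cellsize in the fishnet's linear units; the
# helper names are illustrative only and are not called by soil_parameters().
import math


def _slowcoef_lin_sketch(ksat_um_s, slope_rad, cs):
    """Sketch: linear gravity-drainage coefficient for one HRU."""
    ksat_m_day = ksat_um_s * 0.0864
    return 0.1 * ksat_m_day * math.sin(slope_rad) / cs


def _slowcoef_sq_sketch(ksat_um_s, slope_rad, moist_max, sand_pct, cs):
    """Sketch: squared gravity-drainage coefficient for one HRU."""
    ksat_m_day = ksat_um_s * 0.0864
    return 0.9 * ksat_m_day * math.sin(slope_rad) / (
        moist_max * (sand_pct / 100.0) * cs)
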
Example #5
def veg_parameters(config_path):
    """Calculate GSFLOW Vegetation Parameters

    Parameters
    ----------
    config_path : str
        Project configuration file (.ini) path.

    Returns
    -------
    None

    """
    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'veg_parameters_log.txt'
    log_console = logging.FileHandler(
        filename=os.path.join(hru.log_ws, log_file_name), mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW Vegetation Parameters')

    # Landfire Vegetation Type
    veg_type_orig_path = inputs_cfg.get('INPUTS', 'veg_type_orig_path')
    veg_type_cs = inputs_cfg.getint('INPUTS', 'veg_type_cellsize')
    try:
        veg_type_field = inputs_cfg.get('INPUTS', 'veg_type_field')
    except ConfigParser.NoOptionError:
        veg_type_field = None
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'veg_type_field', veg_type_field))

    # Landfire Vegetation Cover
    veg_cover_orig_path = inputs_cfg.get('INPUTS', 'veg_cover_orig_path')
    veg_cover_cs = inputs_cfg.getint('INPUTS', 'veg_cover_cellsize')

    # Remap
    remap_ws = inputs_cfg.get('INPUTS', 'remap_folder')
    cov_type_remap_name = inputs_cfg.get('INPUTS', 'cov_type_remap')
    covden_sum_remap_name = inputs_cfg.get('INPUTS', 'covden_sum_remap')
    covden_win_remap_name = inputs_cfg.get('INPUTS', 'covden_win_remap')
    snow_intcp_remap_name = inputs_cfg.get('INPUTS', 'snow_intcp_remap')
    srain_intcp_remap_name = inputs_cfg.get('INPUTS', 'srain_intcp_remap')
    wrain_intcp_remap_name = inputs_cfg.get('INPUTS', 'wrain_intcp_remap')
    root_depth_remap_name = inputs_cfg.get('INPUTS', 'root_depth_remap')

    # Get remap conversion factors
    try:
        snow_intcp_remap_factor = inputs_cfg.getfloat(
            'INPUTS', 'snow_intcp_remap_factor')
    except ConfigParser.NoOptionError:
        snow_intcp_remap_factor = 0.01
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'snow_intcp_remap_factor', snow_intcp_remap_factor))
    try:
        wrain_intcp_remap_factor = inputs_cfg.getfloat(
            'INPUTS', 'wrain_intcp_remap_factor')
    except ConfigParser.NoOptionError:
        wrain_intcp_remap_factor = 0.01
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'wrain_intcp_remap_factor', wrain_intcp_remap_factor))
    try:
        srain_intcp_remap_factor = inputs_cfg.getfloat(
            'INPUTS', 'srain_intcp_remap_factor')
    except ConfigParser.NoOptionError:
        srain_intcp_remap_factor = 0.01
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'srain_intcp_remap_factor', srain_intcp_remap_factor))

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()
    # Check that the original vegetation rasters exist
    if not arcpy.Exists(veg_cover_orig_path):
        logging.error('\nERROR: Vegetation cover raster does not exist')
        sys.exit()
    if not arcpy.Exists(veg_type_orig_path):
        logging.error('\nERROR: Vegetation type raster does not exist')
        sys.exit()
    # Vegetation cover can be set from another field in the raster
    # This is mostly for US_120EVT
    if not veg_type_field:
        logging.info('\n  Using VALUE field to set vegetation type')
        veg_type_field = 'VALUE'
    elif len(arcpy.ListFields(veg_type_orig_path, veg_type_field)) == 0:
        logging.info('  veg_type_field {} does not exist\n  Using VALUE '
                     'field to set vegetation type'.format(veg_type_field))
        veg_type_field = 'VALUE'
    elif arcpy.ListFields(veg_type_orig_path, veg_type_field)[0].type not in [
            'Integer', 'SmallInteger'
    ]:
        logging.info(
            '  veg_type_field {} is not an integer type\n  Using VALUE '
            'field to set vegetation type'.format(veg_type_field))
        veg_type_field = 'VALUE'

    # Check that remap folder is valid
    if not os.path.isdir(remap_ws):
        logging.error('\nERROR: Remap folder does not exist')
        sys.exit()
    # Check that remap files exist
    # Check remap files comment style
    cov_type_remap_path = os.path.join(remap_ws, cov_type_remap_name)
    covden_sum_remap_path = os.path.join(remap_ws, covden_sum_remap_name)
    covden_win_remap_path = os.path.join(remap_ws, covden_win_remap_name)
    snow_intcp_remap_path = os.path.join(remap_ws, snow_intcp_remap_name)
    srain_intcp_remap_path = os.path.join(remap_ws, srain_intcp_remap_name)
    wrain_intcp_remap_path = os.path.join(remap_ws, wrain_intcp_remap_name)
    root_depth_remap_path = os.path.join(remap_ws, root_depth_remap_name)
    remap_path_list = [
        cov_type_remap_path, covden_sum_remap_path, covden_win_remap_path,
        snow_intcp_remap_path, srain_intcp_remap_path, wrain_intcp_remap_path,
        root_depth_remap_path
    ]
    for remap_path in remap_path_list:
        support.remap_check(remap_path)

    # Check other inputs
    if veg_type_cs <= 0:
        logging.error('\nERROR: Veg. type cellsize must be greater than 0')
        sys.exit()
    if veg_cover_cs <= 0:
        logging.error('\nERROR: Veg. cover cellsize must be greater than 0')
        sys.exit()

    # Build output folders if necessary
    veg_temp_ws = os.path.join(hru.param_ws, 'veg_rasters')
    if not os.path.isdir(veg_temp_ws):
        os.mkdir(veg_temp_ws)
    # Output paths
    veg_cover_path = os.path.join(veg_temp_ws, 'veg_cover.img')
    veg_type_path = os.path.join(veg_temp_ws, 'veg_type.img')
    cov_type_path = os.path.join(veg_temp_ws, 'cov_type.img')
    covden_sum_path = os.path.join(veg_temp_ws, 'covden_sum.img')
    covden_win_path = os.path.join(veg_temp_ws, 'covden_win.img')
    snow_intcp_path = os.path.join(veg_temp_ws, 'snow_intcp.img')
    wrain_intcp_path = os.path.join(veg_temp_ws, 'wrain_intcp.img')
    srain_intcp_path = os.path.join(veg_temp_ws, 'srain_intcp.img')
    root_depth_path = os.path.join(veg_temp_ws, 'root_depth.img')
    rad_trncf_path = os.path.join(veg_temp_ws, 'rad_trncf.img')

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS -1'
    # env.pyramid = 'PYRAMIDS 0'
    env.workspace = veg_temp_ws
    env.scratchWorkspace = hru.scratch_ws

    # Check fields
    logging.info('\nAdding vegetation fields if necessary')
    support.add_field_func(hru.polygon_path, hru.cov_type_field, 'SHORT')
    support.add_field_func(hru.polygon_path, hru.covden_sum_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.covden_win_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.rad_trncf_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.snow_intcp_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.srain_intcp_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.wrain_intcp_field, 'DOUBLE')
    # support.add_field_func(hru.polygon_path, hru.root_depth_field, 'DOUBLE')

    # Check that remaps have all necessary values
    logging.info('\nChecking remap tables against all raster cells\n'
                 '  (i.e. even those outside the study area)')
    check_remap_keys(cov_type_remap_path, veg_type_orig_path)
    check_remap_keys(covden_sum_remap_path, veg_cover_orig_path)
    check_remap_keys(root_depth_remap_path, veg_type_orig_path)

    # Assume all vegetation rasters will need to be rebuilt
    # Check veg cover and veg type rasters
    # This will check for matching spat. ref., snap point, and cellsize

    # Project/clip veg cover to match HRU
    logging.info('\nProjecting/clipping vegetation cover raster')
    veg_cover_orig_sr = arcpy.sa.Raster(veg_cover_orig_path).spatialReference
    # Remove existing clipped/projected veg cover raster
    if arcpy.Exists(veg_cover_path):
        arcpy.Delete_management(veg_cover_path)
    # Set preferred transforms
    transform_str = support.transform_func(hru.sr, veg_cover_orig_sr)
    logging.debug('  Transform: {}'.format(transform_str))
    logging.debug('  Projection method: NEAREST')

    # Project veg cover
    # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
    support.project_raster_func(veg_cover_orig_path, veg_cover_path, hru.sr,
                                'NEAREST', veg_cover_cs, transform_str,
                                '{} {}'.format(hru.ref_x, hru.ref_y),
                                veg_cover_orig_sr, hru)
    # env.extent = hru.extent
    # arcpy.ProjectRaster_management(
    #    veg_cover_orig_path, veg_cover_path, hru.sr,
    #    'NEAREST', veg_cover_cs, transform_str,
    #    '{} {}'.format(hru.ref_x, hru.ref_y),
    #    veg_cover_orig_sr)
    # arcpy.ClearEnvironment('extent')
    del transform_str, veg_cover_orig_sr

    # Project/clip veg type to match HRU
    logging.info('Projecting/clipping vegetation type raster')
    veg_type_orig_sr = arcpy.sa.Raster(veg_type_orig_path).spatialReference
    # Remove existing clipped/projected veg type raster
    if arcpy.Exists(veg_type_path):
        arcpy.Delete_management(veg_type_path)
    # Set preferred transforms
    transform_str = support.transform_func(hru.sr, veg_type_orig_sr)
    logging.debug('  Transform: {}'.format(transform_str))
    logging.debug('  Projection method: NEAREST')
    # Use a different field to calculate vegetation type
    if veg_type_field != 'VALUE':
        logging.info('  Calculating vegetation type from {} field'.format(
            veg_type_field))
        veg_type_obj = arcpy.sa.Lookup(veg_type_orig_path, veg_type_field)
    else:
        veg_type_obj = arcpy.sa.Raster(veg_type_orig_path)

    # Project veg type
    # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
    support.project_raster_func(veg_type_obj, veg_type_path, hru.sr, 'NEAREST',
                                veg_type_cs, transform_str,
                                '{} {}'.format(hru.ref_x, hru.ref_y),
                                veg_type_orig_sr, hru)
    # env.extent = hru.extent
    # arcpy.ProjectRaster_management(
    #    veg_type_obj, veg_type_path, hru.sr,
    #    'NEAREST', veg_type_cs, transform_str,
    #    '{} {}'.format(hru.ref_x, hru.ref_y),
    #    veg_type_orig_sr)
    # arcpy.ClearEnvironment('extent')
    del transform_str, veg_type_orig_sr, veg_type_obj

    # Reclassifying vegetation cover type
    logging.info('\nCalculating COV_TYPE')
    logging.debug('  Reclassifying: {}'.format(cov_type_remap_path))
    cov_type_obj = arcpy.sa.ReclassByASCIIFile(veg_type_path,
                                               cov_type_remap_path)
    cov_type_obj.save(cov_type_path)
    del cov_type_obj

    # Summer cover density
    logging.info('Calculating COVDEN_SUM')
    logging.debug('  Reclassifying: {}'.format(covden_sum_remap_path))
    covden_sum_obj = arcpy.sa.ReclassByASCIIFile(veg_cover_path,
                                                 covden_sum_remap_path)
    covden_sum_obj *= 0.01
    covden_sum_obj.save(covden_sum_path)
    del covden_sum_obj

    # Winter cover density
    logging.info('Calculating COVDEN_WIN')
    logging.debug('  Reclassifying: {}'.format(covden_win_remap_path))
    covden_win_obj = arcpy.sa.ReclassByASCIIFile(cov_type_path,
                                                 covden_win_remap_path)
    covden_win_obj *= 0.01
    covden_win_obj *= arcpy.sa.Raster(covden_sum_path)
    covden_win_obj.save(covden_win_path)
    del covden_win_obj

    # Snow interception storage capacity
    logging.info('Calculating SNOW_INTCP')
    logging.debug('  Reclassifying: {}'.format(snow_intcp_remap_path))
    snow_intcp_obj = arcpy.sa.ReclassByASCIIFile(cov_type_path,
                                                 snow_intcp_remap_path)
    snow_intcp_obj *= snow_intcp_remap_factor
    snow_intcp_obj.save(snow_intcp_path)
    del snow_intcp_obj

    # Winter rain interception storage capacity
    logging.info('Calculating WRAIN_INTCP')
    logging.debug('  Reclassifying: {}'.format(wrain_intcp_remap_path))
    wrain_intcp_obj = arcpy.sa.ReclassByASCIIFile(cov_type_path,
                                                  wrain_intcp_remap_path)
    wrain_intcp_obj *= wrain_intcp_remap_factor
    wrain_intcp_obj.save(wrain_intcp_path)
    del wrain_intcp_obj

    # Summer rain interception storage capacity
    logging.info('Calculating SRAIN_INTCP')
    logging.debug('  Reclassifying: {}'.format(srain_intcp_remap_path))
    srain_intcp_obj = arcpy.sa.ReclassByASCIIFile(cov_type_path,
                                                  srain_intcp_remap_path)
    srain_intcp_obj *= srain_intcp_remap_factor
    srain_intcp_obj.save(srain_intcp_path)
    del srain_intcp_obj

    # Root depth
    logging.info('Calculating ROOT_DEPTH')
    logging.debug('  Reclassifying: {}'.format(root_depth_remap_path))
    root_depth_obj = arcpy.sa.ReclassByASCIIFile(veg_type_path,
                                                 root_depth_remap_path)
    root_depth_obj.save(root_depth_path)
    del root_depth_obj

    # Short-wave radiation transmission coefficient
    logging.info('Calculating {}'.format(hru.rad_trncf_field))
    rad_trncf_obj = 0.9917 * arcpy.sa.Exp(
        -2.7557 * arcpy.sa.Raster(covden_win_path))
    rad_trncf_obj.save(rad_trncf_path)
    del rad_trncf_obj
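    # For a sense of scale: covden_win = 0.0 gives rad_trncf ~= 0.99, while
    # covden_win = 0.5 gives 0.9917 * exp(-2.7557 * 0.5) ~= 0.25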

    # List of rasters, fields, and stats for zonal statistics
    zs_veg_dict = dict()
    zs_veg_dict[hru.cov_type_field] = [cov_type_path, 'MAJORITY']
    zs_veg_dict[hru.covden_sum_field] = [covden_sum_path, 'MEAN']
    zs_veg_dict[hru.covden_win_field] = [covden_win_path, 'MEAN']
    zs_veg_dict[hru.snow_intcp_field] = [snow_intcp_path, 'MEAN']
    zs_veg_dict[hru.srain_intcp_field] = [srain_intcp_path, 'MEAN']
    zs_veg_dict[hru.wrain_intcp_field] = [wrain_intcp_path, 'MEAN']
    # zs_veg_dict[hru.root_depth_field] = [root_depth_path, 'MEAN']
    zs_veg_dict[hru.rad_trncf_field] = [rad_trncf_path, 'MEAN']

    # Calculate zonal statistics
    logging.info('\nCalculating vegetation zonal statistics')
    support.zonal_stats_func(zs_veg_dict, hru.polygon_path, hru.point_path,
                             hru)

    # Short-wave radiation transmission coefficient
    # logging.info('\nCalculating {}'.format(hru.rad_trncf_field))
    # arcpy.CalculateField_management(
    #    hru.polygon_path, hru.rad_trncf_field,
    #    '0.9917 * math.exp(-2.7557 * !{}!)'.format(hru.covden_win_field),
    #    'PYTHON')

    # Clear vegetation parameters for lake cells (HRU_TYPE == 2)
    # Also clear for inactive/ocean cells (HRU_TYPE == 0 and DEM_ADJ == 0)
    if True:
        logging.info(
            '\nClearing vegetation parameters for lake and inactive cells')
        hru_polygon_layer = "hru_polygon_layer"
        arcpy.MakeFeatureLayer_management(hru.polygon_path, hru_polygon_layer)
        arcpy.SelectLayerByAttribute_management(
            hru_polygon_layer, "NEW_SELECTION",
            '"{0}" = 2 OR ("{0}" = 0 AND "{1}" = 0)'.format(
                hru.type_field, hru.dem_adj_field))
        arcpy.CalculateField_management(hru_polygon_layer, hru.cov_type_field,
                                        0, 'PYTHON')
        arcpy.CalculateField_management(hru_polygon_layer,
                                        hru.covden_sum_field, 0, 'PYTHON')
        arcpy.CalculateField_management(hru_polygon_layer,
                                        hru.covden_win_field, 0, 'PYTHON')
        arcpy.CalculateField_management(hru_polygon_layer,
                                        hru.snow_intcp_field, 0, 'PYTHON')
        arcpy.CalculateField_management(hru_polygon_layer,
                                        hru.srain_intcp_field, 0, 'PYTHON')
        arcpy.CalculateField_management(hru_polygon_layer,
                                        hru.wrain_intcp_field, 0, 'PYTHON')
        arcpy.CalculateField_management(hru_polygon_layer, hru.rad_trncf_field,
                                        0, 'PYTHON')
        arcpy.Delete_management(hru_polygon_layer)
        del hru_polygon_layer
Example #6
0
def dem_parameters(config_path, overwrite_flag=False, debug_flag=False):
    """Calculate GSFLOW DEM Parameters

    Args:
        config_path: Project config file path
        overwrite_flag (bool): if True, overwrite existing files
        debug_flag (bool): if True, enable debug level logging

    Returns:
        None
    """

    # Initialize hru parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'dem_parameters_log.txt'
    log_console = logging.FileHandler(filename=os.path.join(
        hru.log_ws, log_file_name),
                                      mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW DEM Parameters')

    #
    dem_orig_path = inputs_cfg.get('INPUTS', 'dem_orig_path')
    # Resampling method 'BILINEAR', 'CUBIC', 'NEAREST'
    dem_proj_method = inputs_cfg.get('INPUTS', 'dem_projection_method').upper()
    dem_cs = inputs_cfg.getint('INPUTS', 'dem_cellsize')

    # DEADBEEF - This could/should be moved to support_functions.py since it is
    #   in this script and in both PRISM scripts.
    # DEM Units
    dem_units = inputs_cfg.get('INPUTS', 'dem_units').lower()
    dem_unit_types = {
        'meters': 'meter',
        'm': 'meter',
        'meter': 'meter',
        'feet': 'feet',
        'ft': 'feet',
        'foot': 'feet',
    }
    try:
        dem_units = dem_unit_types[dem_units]
    except:
        logging.error(
            '\nERROR: DEM unit "{}" is not supported\n'.format(dem_units))
        sys.exit()
    # Many expressions are hardcoded to units of feet
    # If dem_units are in meters, scale DEM_ADJ to get to feet
    if dem_units == 'meter':
        dem_unit_scalar = 0.3048
    else:
        dem_unit_scalar = 1.0

    #
    try:
        reset_dem_adj_flag = inputs_cfg.getboolean('INPUTS',
                                                   'reset_dem_adj_flag')
    except:
        reset_dem_adj_flag = False
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'reset_dem_adj_flag', reset_dem_adj_flag))

    try:
        calc_flow_acc_dem_flag = inputs_cfg.getboolean(
            'INPUTS', 'calc_flow_acc_dem_flag')
    except:
        calc_flow_acc_dem_flag = True
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'calc_flow_acc_dem_flag', calc_flow_acc_dem_flag))

    try:
        dem_adj_copy_field = inputs_cfg.get('INPUTS', 'dem_adj_copy_field')
    except:
        if calc_flow_acc_dem_flag:
            dem_adj_copy_field = 'DEM_FLOWAC'
        else:
            dem_adj_copy_field = 'DEM_MEAN'
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'dem_adj_copy_field', dem_adj_copy_field))

    # Use PRISM temperature to set Jensen-Haise coefficient
    # Typically these values will not be available when dem_parameters is first run
    # Setting it True means that the values will remain consistent even if
    #   dem_parameters is run again after the prism_script.
    try:
        calc_prism_jh_coef_flag = inputs_cfg.getboolean(
            'INPUTS', 'calc_prism_jh_coef_flag')
    except:
        calc_prism_jh_coef_flag = True
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'calc_prism_jh_coef_flag', calc_prism_jh_coef_flag))

    # Calculate flow accumulation weighted elevation
    if calc_flow_acc_dem_flag:
        # Get factor for scaling dem_flowacc values to avoid 32 bit int limits
        try:
            flow_acc_dem_factor = float(
                inputs_cfg.get('INPUTS', 'flow_acc_dem_factor'))
        except:
            # This is a worst case for keeping flow_acc_dem from exceeding 2E9
            # Assume all cells flow to 1 cell
            flow_acc_dem_factor = int(
                arcpy.GetCount_management(hru.point_path).getOutput(0))
            # Assume the flow accumulation covers every DEM cell in each HRU cell
            flow_acc_dem_factor *= (float(hru.cs) / dem_cs)**2
            # Need to account for the elevation in this worst cell
            # For now just make it 100
            # flow_acc_dem_factor *= max_elevation
            flow_acc_dem_factor *= 100
            # Calculate ratio of flow_acc_dem to a 32 bit int
            flow_acc_dem_factor /= (0.5 * 2**32)
            # If the ratio is less than 0.1, round up to 0.1 so the factor -> 1.0
            flow_acc_dem_factor = max(0.1, flow_acc_dem_factor)
            # Round up to next multiple of 10 just to be safe
            flow_acc_dem_factor = 1.0 / 10**(
                int(math.log10(flow_acc_dem_factor)) + 1)
            logging.info(
                '  flow_acc_dem_factor was not set in the input file\n'
                '  Using automatic flow_acc_dem_factor: {}'.format(
                    flow_acc_dem_factor))
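            # Worked example with assumed inputs (not from the source):
            #   10,000 HRU points with hru.cs = 270 and dem_cs = 30 gives
            #   (270 / 30)**2 = 81 DEM cells per HRU, a worst case of
            #   10,000 * 81 * 100 = 8.1e7, and a ratio of
            #   8.1e7 / (0.5 * 2**32) ~= 0.038, which is clamped up to 0.1
            #   so the automatic factor works out to 1.0 (no rescaling).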

    # Calc flow_acc/flow_dir
    # DEADBEEF - For now, set these to True only if needed
    # calc_flow_acc_flag = inputs_cfg.getboolean('INPUTS', 'calc_flow_acc_flag')
    # calc_flow_dir_flag = inputs_cfg.getboolean('INPUTS', 'calc_flow_dir_flag')
    if calc_flow_acc_dem_flag:
        calc_flow_acc_flag = True
        calc_flow_dir_flag = True
    else:
        calc_flow_acc_flag = False
        calc_flow_dir_flag = False

    # Remap
    remap_ws = inputs_cfg.get('INPUTS', 'remap_folder')
    temp_adj_remap_name = inputs_cfg.get('INPUTS', 'temp_adj_remap')

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist\n'.format(
            hru.polygon_path))
        sys.exit()
    # Check that the original DEM raster exists
    if not arcpy.Exists(dem_orig_path):
        logging.error(
            '\nERROR: DEM ({}) raster does not exist\n'.format(dem_orig_path))
        sys.exit()
    # Check that remap folder is valid
    if not os.path.isdir(remap_ws):
        logging.error('\nERROR: Remap folder does not exist\n')
        sys.exit()
    # Check that remap files exist
    # Check remap files comment style
    temp_adj_remap_path = os.path.join(remap_ws, temp_adj_remap_name)
    remap_path_list = [temp_adj_remap_path]
    # remap_path_list = [aspect_remap_path, temp_adj_remap_path]
    for remap_path in remap_path_list:
        support.remap_check(remap_path)

    # DEADBEEF - Trying out setting SWALE points before filling
    model_inputs_path = inputs_cfg.get('INPUTS', 'model_points_path')
    try:
        model_points_type_field = inputs_cfg.get('INPUTS',
                                                 'model_points_type_field')
    except:
        model_points_type_field = 'TYPE'
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'model_points_type_field', model_points_type_field))

    # Check model points
    if not os.path.isfile(model_inputs_path):
        logging.error('\nERROR: Model points shapefile does not exist'
                      '\nERROR:   {}'.format(model_inputs_path))
        sys.exit()
    # model_points_path must be a point shapefile
    elif arcpy.Describe(model_inputs_path).datasetType != 'FeatureClass':
        logging.error('\nERROR: model_points_path must be a point shapefile')
        sys.exit()

    # DEADBEEF
    # if not os.path.isfile(temp_adj_remap_path):
    #    logging.error(
    #        '\nERROR: ASCII remap file ({}) does not exist\n'.format(
    #            os.path.basename(temp_adj_remap_path)))
    #    sys.exit()
    #  Check remap files comment style
    # if '10.2' in arcpy.GetInstallInfo()['version']:
    #    if remap_comment_check(temp_adj_remap_path):
    #        logging.error(
    #            ('\nERROR: ASCII remap file ({}) has pre-ArcGIS 10.2 ' +
    #             'comments\n').format(os.path.basename(temp_adj_remap_path)))
    #        sys.exit()

    # Check other inputs
    if dem_cs <= 0:
        logging.error('\nERROR: DEM cellsize must be greater than 0')
        sys.exit()
    dem_proj_method_list = ['BILINEAR', 'CUBIC', 'NEAREST']
    if dem_proj_method not in dem_proj_method_list:
        logging.error('\nERROR: DEM projection method must be: {}'.format(
            ', '.join(dem_proj_method_list)))
        sys.exit()
    if reset_dem_adj_flag:
        logging.warning(
            '\nWARNING: All values in {} will be overwritten'.format(
                hru.dem_adj_field))
        raw_input('  Press ENTER to continue')

    # Build output folder if necessary
    dem_temp_ws = os.path.join(hru.param_ws, 'dem_rasters')
    if not os.path.isdir(dem_temp_ws):
        os.mkdir(dem_temp_ws)

    # Output paths
    dem_path = os.path.join(dem_temp_ws, 'dem.img')
    dem_fill_path = os.path.join(dem_temp_ws, 'dem_fill.img')
    flow_dir_path = os.path.join(dem_temp_ws, 'flow_dir.img')
    flow_acc_path = os.path.join(dem_temp_ws, 'flow_acc.img')
    flow_acc_dem_path = os.path.join(dem_temp_ws, 'flow_acc_x_dem.img')
    flow_acc_filter_path = os.path.join(dem_temp_ws, 'flow_acc_filter.img')
    dem_integer_path = os.path.join(dem_temp_ws, 'dem_integer.img')
    dem_slope_path = os.path.join(dem_temp_ws, 'dem_slope.img')
    dem_aspect_path = os.path.join(dem_temp_ws, 'dem_aspect.img')
    dem_aspect_reclass_path = os.path.join(dem_temp_ws, 'aspect_reclass.img')
    temp_adj_path = os.path.join(dem_temp_ws, 'temp_adj.img')
    swale_path = os.path.join(dem_temp_ws, 'swale.img')
    model_points_path = os.path.join(dem_temp_ws, 'model_points.shp')

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS -1'
    # env.pyramid = 'PYRAMIDS 0'
    # env.rasterStatistics = 'NONE'
    # env.extent = 'MINOF'
    env.workspace = dem_temp_ws
    env.scratchWorkspace = hru.scratch_ws

    # DEADBEEF - Trying out setting SWALE points before filling
    # Read in model points shapefile
    logging.info('\nChecking model points shapefile')
    model_points_desc = arcpy.Describe(model_inputs_path)
    model_points_sr = model_points_desc.spatialReference
    logging.debug('  Points: {}'.format(model_inputs_path))
    logging.debug('  Points spat. ref.:  {}'.format(model_points_sr.name))
    logging.debug('  Points GCS:         {}'.format(model_points_sr.GCS.name))

    # If model points spat_ref doesn't match hru_param spat_ref
    # Project model points to hru_param spat ref
    # Otherwise, read model points directly
    if hru.sr.name != model_points_sr.name:
        logging.info('  Model points projection does not match fishnet.\n'
                     '  Projecting model points.\n')
        # Set preferred transforms
        transform_str = support.transform_func(hru.sr, model_points_sr)
        logging.debug('    Transform: {}'.format(transform_str))
        arcpy.Project_management(model_inputs_path, model_points_path, hru.sr,
                                 transform_str, model_points_sr)
    else:
        arcpy.Copy_management(model_inputs_path, model_points_path)
    model_points_lyr = 'model_points_lyr'
    arcpy.MakeFeatureLayer_management(model_points_path, model_points_lyr)

    # Check model point types
    logging.info('  Checking model point types')
    model_point_types = [
        str(r[0]).upper() for r in arcpy.da.SearchCursor(
            model_points_path, [model_points_type_field])
    ]
    if not set(model_point_types).issubset(set(['OUTLET', 'SUBBASIN', 'SWALE'
                                                ])):
        logging.error(
            '\nERROR: Unsupported model point type(s) found, exiting')
        logging.error('\n  Model point types: {}\n'.format(model_point_types))
        sys.exit()
    elif not set(model_point_types) & set(['OUTLET', 'SWALE']):
        logging.error(
            '\nERROR: At least one model point must be an OUTLET or SWALE, '
            'exiting\n')
        sys.exit()
    else:
        logging.debug('  {}'.format(', '.join(model_point_types)))

    # Check DEM field
    logging.info('\nAdding DEM fields if necessary')
    support.add_field_func(hru.polygon_path, hru.dem_mean_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_max_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_min_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_adj_field, 'DOUBLE')
    if calc_flow_acc_dem_flag:
        support.add_field_func(hru.polygon_path, hru.dem_flowacc_field,
                               'DOUBLE')
        support.add_field_func(hru.polygon_path, hru.dem_sum_field, 'DOUBLE')
        support.add_field_func(hru.polygon_path, hru.dem_count_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_sink_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_aspect_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.dem_slope_deg_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_slope_rad_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.dem_slope_pct_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.jh_tmin_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.jh_tmax_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.jh_coef_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.snarea_thresh_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.tmax_adj_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.tmin_adj_field, 'DOUBLE')

    # Check that dem_adj_copy_field exists
    if len(arcpy.ListFields(hru.polygon_path, dem_adj_copy_field)) == 0:
        logging.error('\nERROR: dem_adj_copy_field {} does not exist\n'.format(
            dem_adj_copy_field))
        sys.exit()

    # Assume all DEM rasters will need to be rebuilt
    # Check slope, aspect, and projected DEM rasters
    # This will check for matching spat. ref., snap point, and cellsize

    # If DEM is GCS, project it to 10m to match
    # DEADBEEF - I had originally wanted the DEM to get projected only once
    #   but if the user wants to rerun this script, then all steps should
    #   be rerun.  This also allows the user to change the DEM raster
    # dem_flag = valid_raster_func(
    #    dem_path, 'projected DEM', hru, dem_cs)
    # if arcpy.Exists(dem_orig_path) and not dem_flag:
    logging.info('\nProjecting DEM raster')
    dem_orig_sr = arcpy.sa.Raster(dem_orig_path).spatialReference
    logging.debug('  DEM GCS:   {}'.format(dem_orig_sr.GCS.name))
    # Remove existing projected DEM
    if arcpy.Exists(dem_path):
        arcpy.Delete_management(dem_path)
    # Set preferred transforms
    transform_str = support.transform_func(hru.sr, dem_orig_sr)
    logging.debug('  Transform: {}'.format(transform_str))
    logging.debug('  Projection method: {}'.format(dem_proj_method))
    # Project DEM
    # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
    logging.debug('  Input SR:  {}'.format(dem_orig_sr.exportToString()))
    logging.debug('  Output SR: {}'.format(hru.sr.exportToString()))
    support.project_raster_func(dem_orig_path,
                                dem_path,
                                hru.sr,
                                dem_proj_method,
                                dem_cs,
                                transform_str,
                                '{} {}'.format(hru.ref_x, hru.ref_y),
                                dem_orig_sr,
                                hru,
                                in_memory=False)
    # env.extent = hru.extent
    # arcpy.ProjectRaster_management(
    #    dem_orig_path, dem_path, hru.sr,
    #    dem_proj_method, dem_cs, transform_str,
    #    '{} {}'.format(hru.ref_x, hru.ref_y),
    #    dem_orig_sr)
    # arcpy.ClearEnvironment('extent')

    # Check linear unit of raster
    # DEADBEEF - The conversion could probably be dynamic
    dem_obj = arcpy.sa.Raster(dem_path)
    linear_unit_list = ['METERS', 'METER', 'FOOT_US', 'FOOT']
    linear_unit = dem_obj.spatialReference.linearUnitName.upper()
    if linear_unit not in linear_unit_list:
        logging.error(
            '\nERROR: The linear unit of the projected/clipped DEM must'
            ' be meters or feet\n  {}'.format(linear_unit))
        sys.exit()
    del dem_obj

    # DEADBEEF - Trying out setting SWALE points before filling
    hru_polygon_lyr = 'hru_polygon_lyr'
    arcpy.MakeFeatureLayer_management(hru.polygon_path, hru_polygon_lyr)
    arcpy.SelectLayerByAttribute_management(hru_polygon_lyr, 'CLEAR_SELECTION')
    arcpy.CalculateField_management(hru_polygon_lyr, hru.outflow_field, 0,
                                    'PYTHON')

    if 'SWALE' in model_point_types:
        logging.info('  Building SWALE point raster')
        arcpy.SelectLayerByAttribute_management(model_points_lyr,
                                                'NEW_SELECTION',
                                                '"TYPE" = \'SWALE\'')

        # DEADBEEF - Should SWALE points be written to OUTFLOWHRU.TXT?
        arcpy.SelectLayerByLocation_management(hru_polygon_lyr, 'INTERSECT',
                                               model_points_lyr)
        arcpy.CalculateField_management(hru_polygon_lyr, hru.outflow_field, 1,
                                        'PYTHON')

        arcpy.PointToRaster_conversion(model_points_lyr,
                                       model_points_type_field, swale_path, "",
                                       "", hru.cs)
        swale_obj = arcpy.sa.Raster(swale_path)
        arcpy.SelectLayerByAttribute_management(model_points_lyr,
                                                'CLEAR_SELECTION')

    dem_obj = arcpy.sa.Raster(dem_path)

    if 'SWALE' in model_point_types:
        logging.debug('  Setting DEM values to NoData for SWALE cells')
        dem_obj = arcpy.sa.Con(arcpy.sa.IsNull(swale_obj), dem_obj)
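        # With only two arguments, Con() keeps the DEM where the SWALE raster
        # is NoData and returns NoData at SWALE cells, so the Fill step below
        # treats those cells like outlets instead of filling across them.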

    # Calculate filled DEM, flow_dir, & flow_acc
    logging.info('\nCalculating filled DEM raster')
    dem_fill_obj = arcpy.sa.Fill(dem_obj)
    dem_fill_obj.save(dem_fill_path)
    del dem_fill_obj

    if calc_flow_dir_flag:
        logging.info('Calculating flow direction raster')
        dem_fill_obj = arcpy.sa.Raster(dem_fill_path)
        flow_dir_obj = arcpy.sa.FlowDirection(dem_fill_obj, True)
        flow_dir_obj.save(flow_dir_path)
        del flow_dir_obj, dem_fill_obj
    if calc_flow_acc_flag:
        logging.info('Calculating flow accumulation raster')
        flow_dir_obj = arcpy.sa.Raster(flow_dir_path)
        flow_acc_obj = arcpy.sa.FlowAccumulation(flow_dir_obj)
        flow_acc_obj.save(flow_acc_path)
        del flow_acc_obj, flow_dir_obj
    if calc_flow_acc_dem_flag:
        # flow_acc_dem_obj = dem_fill_obj * flow_acc_obj
        # Low pass filter of flow_acc then take log10
        flow_acc_filter_obj = arcpy.sa.Filter(arcpy.sa.Raster(flow_acc_path),
                                              'LOW', 'NODATA')
        flow_acc_filter_obj *= flow_acc_dem_factor
        flow_acc_filter_obj.save(flow_acc_filter_path)
        flow_acc_dem_obj = arcpy.sa.Raster(dem_fill_path) * flow_acc_filter_obj
        flow_acc_dem_obj.save(flow_acc_dem_path)
        del flow_acc_dem_obj, flow_acc_filter_obj
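        # These two rasters are the pieces of a flow-accumulation weighted
        # elevation: flow_acc_x_dem is the per-cell numerator (filled DEM times
        # filtered flow accumulation) and flow_acc_filter is the denominator.
        # They are summed per HRU below (hru.dem_sum_field and
        # hru.dem_count_field) and divided to produce hru.dem_flowacc_field.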

    # Calculate an integer version of DEM for median zonal stats
    dem_integer_obj = arcpy.sa.Int(arcpy.sa.Raster(dem_path) * 100)
    dem_integer_obj.save(dem_integer_path)
    del dem_integer_obj
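    # Elevations are stored as integer "centi-units" (e.g. 1234.56 -> 123456)
    # because integer rasters are needed for MEDIAN/MAJORITY zonal statistics;
    # presumably the values are divided back by 100 wherever they are used.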

    # Calculate slope
    logging.info('Calculating slope raster')
    dem_slope_obj = arcpy.sa.Slope(dem_fill_path, 'DEGREE')
    # Setting small slopes to zero
    logging.info('  Setting slopes <= 0.01 to 0')
    dem_slope_obj = arcpy.sa.Con(dem_slope_obj <= 0.01, 0, dem_slope_obj)
    dem_slope_obj.save(dem_slope_path)
    del dem_slope_obj

    # Calculate aspect
    logging.info('Calculating aspect raster')
    dem_aspect_obj = arcpy.sa.Int(arcpy.sa.Aspect(dem_fill_path))
    # Set small slopes to -1 aspect
    logging.debug('  Setting aspect for slopes <= 0.01 to -1')
    dem_aspect_obj = arcpy.sa.Con(
        arcpy.sa.Raster(dem_slope_path) > 0.01, dem_aspect_obj, -1)
    dem_aspect_obj.save(dem_aspect_path)
    del dem_aspect_obj
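    # Aspect() already returns -1 for perfectly flat cells; the Con above
    # extends that convention to near-flat cells (slope <= 0.01 degrees) so the
    # temperature adjustment remap can treat them as having no dominant aspect.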

    # Temperature Aspect Adjustment
    logging.info('Calculating temperature aspect adjustment raster')
    temp_adj_obj = arcpy.sa.Float(
        arcpy.sa.ReclassByASCIIFile(dem_aspect_path, temp_adj_remap_path))
    # temp_adj_obj = arcpy.sa.Float(arcpy.sa.ReclassByASCIIFile(
    #     dem_aspect_reclass_path, temp_adj_remap_path))
    # Since reclass can't remap to floats directly
    # Values are scaled by 10 and stored as integers
    temp_adj_obj *= 0.1
    temp_adj_obj.save(temp_adj_path)
    del temp_adj_obj
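    # Worked example (illustrative): an ASCII remap value of 35 becomes an
    # adjustment of 3.5 in the model's temperature units after the 0.1 scaling.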

    # List of rasters, fields, and stats for zonal statistics
    zs_dem_dict = dict()
    zs_dem_dict[hru.dem_mean_field] = [dem_path, 'MEAN']
    if calc_flow_acc_dem_flag:
        zs_dem_dict[hru.dem_sum_field] = [flow_acc_dem_path, 'SUM']
        zs_dem_dict[hru.dem_count_field] = [flow_acc_filter_path, 'SUM']
    zs_dem_dict[hru.dem_max_field] = [dem_path, 'MAXIMUM']
    zs_dem_dict[hru.dem_min_field] = [dem_path, 'MINIMUM']
    zs_dem_dict[hru.dem_aspect_field] = [dem_aspect_path, 'MEAN']
    zs_dem_dict[hru.dem_slope_deg_field] = [dem_slope_path, 'MEAN']
    zs_dem_dict[hru.tmax_adj_field] = [temp_adj_path, 'MEAN']
    zs_dem_dict[hru.tmin_adj_field] = [temp_adj_path, 'MEAN']

    # Calculate DEM zonal statistics
    logging.info('\nCalculating DEM zonal statistics')
    support.zonal_stats_func(zs_dem_dict, hru.polygon_path, hru.point_path,
                             hru)

    # Flow accumulation weighted elevation
    if calc_flow_acc_dem_flag:
        logging.info('Calculating {}'.format(hru.dem_flowacc_field))
        hru_polygon_layer = 'hru_polygon_layer'
        arcpy.MakeFeatureLayer_management(hru.polygon_path, hru_polygon_layer)
        arcpy.SelectLayerByAttribute_management(
            hru_polygon_layer, "NEW_SELECTION",
            '"{}" > 0'.format(hru.dem_count_field))
        arcpy.CalculateField_management(
            hru_polygon_layer, hru.dem_flowacc_field,
            'float(!{}!) / !{}!'.format(hru.dem_sum_field,
                                        hru.dem_count_field), 'PYTHON')
        # Clear dem_flowacc for any cells that have zero sum or count
        arcpy.SelectLayerByAttribute_management(
            hru_polygon_layer, "NEW_SELECTION",
            '("{}" = 0) OR ("{}" = 0)'.format(hru.dem_count_field,
                                              hru.dem_sum_field))
        arcpy.CalculateField_management(hru_polygon_layer,
                                        hru.dem_flowacc_field, 0, 'PYTHON')
        arcpy.Delete_management(hru_polygon_layer)
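        # Worked example (illustrative): a zonal sum of 8.1e6 with a filtered
        # flow accumulation sum of 4.0e3 gives 8.1e6 / 4.0e3 = 2025 in the
        # DEM's elevation units.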

    # Fill DEM_ADJ if it is not already set or if reset_dem_adj_flag is set
    if reset_dem_adj_flag or all([
            row[0] == 0 for row in arcpy.da.SearchCursor(
                hru.polygon_path, [hru.dem_adj_field])
    ]):
        logging.info('Filling {} from {}'.format(hru.dem_adj_field,
                                                 dem_adj_copy_field))
        arcpy.CalculateField_management(
            hru.polygon_path, hru.dem_adj_field,
            'float(!{}!)'.format(dem_adj_copy_field), 'PYTHON')
    else:
        logging.info('{} appears to already have been set and '
                     'will not be overwritten'.format(hru.dem_adj_field))

    # HRU_SLOPE in radians
    logging.info('Calculating {} (Slope in Radians)'.format(
        hru.dem_slope_rad_field))
    arcpy.CalculateField_management(
        hru.polygon_path, hru.dem_slope_rad_field,
        'math.pi * !{}! / 180'.format(hru.dem_slope_deg_field), 'PYTHON')
    # HRU_SLOPE in percent
    logging.info('Calculating {} (Percent Slope)'.format(
        hru.dem_slope_pct_field))
    arcpy.CalculateField_management(
        hru.polygon_path, hru.dem_slope_pct_field,
        'math.tan(!{}!)'.format(hru.dem_slope_rad_field), 'PYTHON')
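    # Worked example: a 30 degree mean slope becomes pi * 30 / 180 ~= 0.524 rad
    # and tan(0.524) ~= 0.577; note the "percent" field stores rise/run as a
    # fraction (0.577), not a value multiplied by 100.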

    # Jensen-Haise Potential ET air temperature coefficient
    logging.info('Calculating JH_COEF_HRU')
    # First check if PRISM TMAX/TMIN have been set
    # If max July value is 0, use default values
    if (calc_prism_jh_coef_flag
            and (len(arcpy.ListFields(hru.polygon_path, 'TMAX_07')) == 0
                 or support.field_stat_func(hru.polygon_path, 'TMAX_07',
                                            'MAXIMUM') == 0)):
        calc_prism_jh_coef_flag = False
    # Use PRISM temperature values
    if calc_prism_jh_coef_flag:
        logging.info('  Using PRISM temperature values')
        tmax_field_list = ['!TMAX_{:02d}!'.format(m) for m in range(1, 13)]
        tmin_field_list = ['!TMIN_{:02d}!'.format(m) for m in range(1, 13)]
        tmax_expr = 'max([{}])'.format(','.join(tmax_field_list))
        arcpy.CalculateField_management(hru.polygon_path, hru.jh_tmax_field,
                                        tmax_expr, 'PYTHON')
        # Get TMIN for same month as maximum TMAX
        tmin_expr = 'max(zip([{}],[{}]))[1]'.format(','.join(tmax_field_list),
                                                    ','.join(tmin_field_list))
        arcpy.CalculateField_management(hru.polygon_path, hru.jh_tmin_field,
                                        tmin_expr, 'PYTHON')
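        # The zip/max trick works because Python compares the (TMAX, TMIN)
        # tuples element-wise, so max() picks the pair from the month with the
        # highest TMAX and [1] returns that month's TMIN (e.g. if TMAX_07 is
        # the annual maximum, the JH TMIN field gets TMIN_07).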
    # Use default temperature values
    else:
        logging.info('  Using default temperature values (TMIN=7, TMAX=25)')
        arcpy.CalculateField_management(hru.polygon_path, hru.jh_tmax_field,
                                        25, 'PYTHON')
        arcpy.CalculateField_management(hru.polygon_path, hru.jh_tmin_field, 7,
                                        'PYTHON')
    # Pass unit scalar to convert DEM_ADJ to feet if necessary
    support.jensen_haise_func(hru.polygon_path, hru.jh_coef_field,
                              hru.dem_adj_field, hru.jh_tmin_field,
                              hru.jh_tmax_field, dem_unit_scalar)

    # SNAREA_THRESH
    # Convert DEM_ADJ to feet if necessary
    logging.info('Calculating {}'.format(hru.snarea_thresh_field))
    elev_min = support.field_stat_func(hru.polygon_path, hru.dem_adj_field,
                                       'MINIMUM')
    arcpy.CalculateField_management(
        hru.polygon_path, hru.snarea_thresh_field,
        '(!{}! - {}) * 0.005'.format(hru.dem_adj_field,
                                     elev_min * dem_unit_scalar), 'PYTHON')
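    # Worked example (illustrative, assuming DEM_ADJ and the basin minimum are
    # already in the same units): DEM_ADJ = 2000 with a minimum of 1500 gives
    # SNAREA_THRESH = (2000 - 1500) * 0.005 = 2.5.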

    # Clear slope/aspect values for lake cells (HRU_TYPE == 2)
    # Also clear for ocean cells (HRU_TYPE == 0 and DEM_ADJ == 0)
    if True:
        logging.info(
            '\nClearing slope/aspect parameters for lake and inactive cells')
        hru_polygon_layer = "hru_polygon_layer"
        arcpy.MakeFeatureLayer_management(hru.polygon_path, hru_polygon_layer)
        arcpy.SelectLayerByAttribute_management(
            hru_polygon_layer, "NEW_SELECTION",
            '"{0}" = 2 OR ("{0}" = 0 AND "{1}" = 0)'.format(
                hru.type_field, hru.dem_adj_field))
        arcpy.CalculateField_management(hru_polygon_layer,
                                        hru.dem_aspect_field, 0, 'PYTHON')
        arcpy.CalculateField_management(hru_polygon_layer,
                                        hru.dem_slope_deg_field, 0, 'PYTHON')
        arcpy.CalculateField_management(hru_polygon_layer,
                                        hru.dem_slope_rad_field, 0, 'PYTHON')
        arcpy.CalculateField_management(hru_polygon_layer,
                                        hru.dem_slope_pct_field, 0, 'PYTHON')
        # arcpy.CalculateField_management(
        #    hru_polygon_layer, hru.deplcrv_field, 0, 'PYTHON')
        # arcpy.CalculateField_management(
        #    hru_polygon_layer, hru.snarea_field, 0, 'PYTHON')
        # arcpy.CalculateField_management(
        #    hru_polygon_layer, hru.tmax_adj_field, 0, 'PYTHON')
        # arcpy.CalculateField_management(
        #    hru_polygon_layer, hru.tmin_adj_field, 0, 'PYTHON')

        # Should JH coefficients be cleared for lakes?
        # logging.info('\nClearing JH parameters for ocean cells')
        arcpy.SelectLayerByAttribute_management(
            hru_polygon_layer, "NEW_SELECTION",
            '"{}" = 0 AND "{}" = 0'.format(hru.type_field, hru.dem_adj_field))
        arcpy.CalculateField_management(hru_polygon_layer, hru.jh_coef_field,
                                        0, 'PYTHON')
        arcpy.CalculateField_management(hru_polygon_layer, hru.jh_tmax_field,
                                        0, 'PYTHON')
        arcpy.CalculateField_management(hru_polygon_layer, hru.jh_tmin_field,
                                        0, 'PYTHON')

        arcpy.Delete_management(hru_polygon_layer)
        del hru_polygon_layer