def crt_fill_parameters(config_path):
    """Calculate GSFLOW CRT Fill Parameters

    Parameters
    ----------
    config_path : str
        Project configuration file (.ini) path.

    Returns
    -------
    None

    """
    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'crt_fill_parameters_log.txt'
    log_console = logging.FileHandler(
        filename=os.path.join(hru.log_ws, log_file_name), mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW CRT Fill Parameters')

    # Parameters
    exit_seg = 0

    # CRT Parameters
    try:
        use_crt_fill_flag = inputs_cfg.getboolean('INPUTS',
                                                  'use_crt_fill_flag')
    except ConfigParser.NoOptionError:
        use_crt_fill_flag = False
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'use_crt_fill_flag', use_crt_fill_flag))

    try:
        crt_hruflg = inputs_cfg.getint('INPUTS', 'crt_hruflg')
    except ConfigParser.NoOptionError:
        crt_hruflg = 0
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'crt_hruflg', crt_hruflg))
    try:
        crt_flowflg = inputs_cfg.getint('INPUTS', 'crt_flowflg')
    except ConfigParser.NoOptionError:
        crt_flowflg = 1
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'crt_flowflg', crt_flowflg))
    try:
        crt_dpit = inputs_cfg.getfloat('INPUTS', 'crt_dpit')
    except ConfigParser.NoOptionError:
        crt_dpit = 0.01
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'crt_dpit', crt_dpit))
    try:
        crt_outitmax = inputs_cfg.getint('INPUTS', 'crt_outitmax')
    except ConfigParser.NoOptionError:
        crt_outitmax = 100000
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'crt_outitmax', crt_outitmax))
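
    # DPIT is the incremental depth CRT uses when filling pits and
    #   OUTITMAX is the maximum number of fill iterations (per the CRT
    #   documentation)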

    # Intentionally not allowing user to change this value
    crt_iprn = 1

    # CRT Fill Parameters
    fill_ws_name = 'fill_work'
    fill_strmflg = 0
    fill_visflg = 0
    fill_ifill = 1

    # CRT Executable
    crt_exe_path = inputs_cfg.get('INPUTS', 'crt_exe_path')
    output_name = 'outputstat.txt'

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist\n'.format(
            hru.polygon_path))
        sys.exit()
    # Check that input fields exist and have data
    # Fields generated by hru_parameters
    for f in [hru.type_field, hru.row_field, hru.col_field]:
        if not arcpy.ListFields(hru.polygon_path, f):
            logging.error(
                '\nERROR: Input field {} is not present in fishnet'
                '\nERROR: Try re-running hru_parameters.py\n'.format(f))
            sys.exit()
        elif support.field_stat_func(hru.polygon_path, f, 'MAXIMUM') == 0:
            logging.error(
                '\nERROR: Input field {} contains only 0'
                '\nERROR: Try re-running hru_parameters.py\n'.format(f))
            sys.exit()
    # Fields generated by dem_2_streams
    for f in [
            hru.irunbound_field, hru.iseg_field, hru.flow_dir_field,
            hru.outflow_field, hru.subbasin_field
    ]:
        if not arcpy.ListFields(hru.polygon_path, f):
            logging.error(
                '\nERROR: Input field {} is not present in fishnet'
                '\nERROR: Try re-running dem_2_streams.py\n'.format(f))
            sys.exit()
        elif support.field_stat_func(hru.polygon_path, f, 'MAXIMUM') == 0:
            logging.error(
                '\nERROR: Input field {} contains only 0'
                '\nERROR: Try re-running dem_2_streams.py\n'.format(f))
            sys.exit()

    # Build output folder if necessary
    fill_ws = os.path.join(hru.param_ws, fill_ws_name)
    if not os.path.isdir(fill_ws):
        os.makedirs(fill_ws)

    # Copy CRT executable if necessary
    crt_exe_name = os.path.basename(crt_exe_path)
    if not os.path.isfile(os.path.join(fill_ws, crt_exe_name)):
        shutil.copy(crt_exe_path, fill_ws)
    if not os.path.isfile(os.path.join(fill_ws, crt_exe_name)):
        logging.error('\nERROR: CRT executable ({}) does not exist\n'.format(
            os.path.join(fill_ws, crt_exe_name)))
        sys.exit()

    # Fill files
    fill_hru_casc_path = os.path.join(fill_ws, 'HRU_CASC.DAT')
    fill_outflow_hru_path = os.path.join(fill_ws, 'OUTFLOW_HRU.DAT')
    fill_land_elev_path = os.path.join(fill_ws, 'LAND_ELEV.DAT')
    fill_xy_path = os.path.join(fill_ws, 'XY.DAT')

    # Output names
    # dem_adj_raster_name = 'dem_adj'
    # hru_type_raster_name = 'hru_type'
    # lakes_raster_name = 'lakes'
    # streams_raster_name = 'streams'
    # iseg_raster_name = 'iseg'
    # irunbound_raster_name = 'irunbound'

    # Output raster paths
    # dem_adj_raster = os.path.join(fill_ws, dem_adj_raster_name + '.img')
    # hru_type_raster = os.path.join(fill_ws, hru_type_raster_name + '.img')

    # Output ascii paths
    # a_fmt = '{}_ascii.txt'
    # dem_adj_ascii = os.path.join(fill_ws, a_fmt.format(dem_adj_raster_name))
    # hru_type_ascii = os.path.join(fill_ws, a_fmt.format(hru_type_raster_name))

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    # env.pyramid = 'PYRAMIDS -1'
    env.pyramid = 'PYRAMIDS 0'
    env.workspace = fill_ws
    env.scratchWorkspace = hru.scratch_ws

    # Add fields if necessary
    logging.info('\nAdding fields if necessary')
    support.add_field_func(hru.polygon_path, hru.krch_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.irch_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.jrch_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.iseg_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.reach_field, 'LONG')
    # add_field_func(hru.polygon_path, hru.rchlen_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.maxreach_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.outseg_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.irunbound_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.crt_elev_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.crt_fill_field, 'DOUBLE')

    # Calculate KRCH, IRCH, JRCH for stream segments
    logging.info('\nKRCH, IRCH, & JRCH for streams')
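    # KRCH/IRCH/JRCH are the MODFLOW SFR reach layer/row/column indices;
    #   all reaches are placed in layer 1 and non-stream cells are zeroed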
    fields = [
        hru.type_field, hru.iseg_field, hru.row_field, hru.col_field,
        hru.krch_field, hru.irch_field, hru.jrch_field
    ]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as update_c:
        for row in update_c:
            if (int(row[0]) in [1, 3] and int(row[1]) > 0):
                row[4], row[5], row[6] = 1, int(row[2]), int(row[3])
            else:
                row[4], row[5], row[6] = 0, 0, 0
            update_c.updateRow(row)

    # Get list of segments and downstream cell for each stream/lake cell
    # Downstream is calculated from flow direction
    # Use IRUNBOUND instead of ISEG, since ISEG will be zeroed for lakes
    # DEADBEEF - I don't think ISEG will be zero for lakes anymore
    logging.info('Cell out-flow dictionary')
    cell_dict = dict()
    fields = [
        hru.type_field, hru.krch_field, hru.lake_id_field, hru.iseg_field,
        hru.irunbound_field, hru.dem_adj_field, hru.flow_dir_field,
        hru.col_field, hru.row_field, hru.id_field
    ]
    for row in arcpy.da.SearchCursor(hru.polygon_path, fields):
        # Skip inactive cells
        if int(row[0]) == 0:
            continue
        # Skip non-stream and non-lake cells
        elif (int(row[1]) == 0 and int(row[2]) == 0):
            continue

        # COL / ROW
        cell = (int(row[7]), int(row[8]))

        # Read in parameters
        # HRU_ID, IRUNBOUND, support.next_row_col(FLOW_DIR, CELL), DEM_ADJ,
        #   then three placeholders for OUTSEG, IREACH, and MAXREACH
        cell_dict[cell] = [
            int(row[9]),
            int(row[4]),
            support.next_row_col(int(row[6]), cell),
            float(row[5]), 0, 0, 0
        ]

    # Build list of unique segments
    iseg_list = sorted(list(set([v[1] for v in cell_dict.values()])))
    logging.debug('  Segments: {}'.format(iseg_list))

    # Calculate IREACH and OUTSEG
    logging.info('Calculate {} and {}'.format(hru.reach_field,
                                              hru.outseg_field))
    outseg_dict = dict()
    for iseg in sorted(iseg_list):
        logging.debug('    Segment: {}'.format(iseg))

        # Subset of cell_dict for current iseg
        iseg_dict = dict([(k, v) for k, v in cell_dict.items()
                          if v[1] == iseg])

        # List of all cells in current iseg
        iseg_cells = iseg_dict.keys()

        # List of out_cells for all cells in current iseg
        out_cells = [value[2] for value in iseg_dict.values()]

        # Every iseg will (should?) have one out_cell
        out_cell = list(set(out_cells) - set(iseg_cells))

        # Process streams and lakes separately
        # Streams
        if iseg > 0:
            # If there is more than one out_cell
            #   there is a problem with the stream network
            if len(out_cell) != 1:
                logging.error(
                    '\nERROR: ISEG {} has more than one output cell'
                    '\n  Out cells: {}'
                    '\n  Check for streams exiting then re-entering a lake'
                    '\n  Lake cell elevations may not be constant\n'.format(
                        iseg, out_cell))
                sys.exit()

            # If there is no output cell, assume edge of domain
            try:
                outseg = cell_dict[out_cell[0]][1]
            except KeyError:
                outseg = exit_seg

            # Track sub-basin outseg
            outseg_dict[iseg] = outseg

            # Calculate reach number for each cell
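            # Walk downstream from the headwater cell (the one cell that
            #   no other cell in the segment flows into); e.g. cells
            #   A -> B -> C get reach numbers {A: 1, B: 2, C: 3}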
            reach_dict = dict()
            start_cell = list(set(iseg_cells) - set(out_cells))[0]
            for i in range(len(out_cells)):
                # logging.debug('    Reach: {}  Cell: {}'.format(i+1, start_cell))
                reach_dict[start_cell] = i + 1
                start_cell = iseg_dict[start_cell][2]
            # For each cell in iseg, save outseg, reach, & maxreach
            for iseg_cell in iseg_cells:
                cell_dict[iseg_cell][4:] = [
                    outseg, reach_dict[iseg_cell],
                    len(iseg_cells)
                ]
            del reach_dict, start_cell
        # Lakes
        else:
            # For lake cells, there can be multiple outlets if all of them
            #   are to inactive cells or out of the model
            # Otherwise, like streams, there should only be one outcell per iseg
            logging.debug('  Length: {}'.format(len(out_cells)))
            if len(out_cell) == 1:
                try:
                    outseg = cell_dict[out_cell[0]][1]
                except KeyError:
                    outseg = exit_seg
            elif (len(out_cell) != 1
                  and all(x not in cell_dict for x in out_cell)):
                outseg = exit_seg
                logging.debug('  All out cells are inactive, setting outseg '
                              'to exit_seg {}'.format(exit_seg))
            else:
                logging.error(
                    '\nERROR: ISEG {} has more than one output cell'
                    '\n  Out cells: {}'
                    '\n  Check for streams exiting then re-entering a lake'
                    '\n  Lake cell elevations may not be constant\n'.format(
                        iseg, out_cell))
                sys.exit()

            # Track sub-basin outseg
            outseg_dict[iseg] = outseg

            # For each lake segment cell, only save outseg
            # All lake cells are routed directly to the outseg
            for iseg_cell in iseg_cells:
                cell_dict[iseg_cell][4:] = [outseg, 0, 0]
            del outseg

        del iseg_dict, iseg_cells, iseg
        del out_cells, out_cell

    # Saving ireach and outseg
    logging.info('Save {} and {}'.format(hru.reach_field, hru.outseg_field))
    fields = [
        hru.type_field, hru.iseg_field, hru.col_field, hru.row_field,
        hru.outseg_field, hru.reach_field, hru.maxreach_field
    ]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as update_c:
        for row in update_c:
            # if (int(row[0]) > 0 and int(row[1]) > 0):
            # #DEADBEEF - I'm not sure why only iseg > 0 in above line
            # DEADBEEF - This should set outseg for streams and lakes
            if (int(row[0]) > 0 and int(row[1]) != 0):
                row[4:] = cell_dict[(int(row[2]), int(row[3]))][4:]
            else:
                row[4:] = [0, 0, 0]
            update_c.updateRow(row)

    # Set all lake iseg to 0
    logging.info('Lake {}'.format(hru.iseg_field))
    fields = [hru.type_field, hru.iseg_field]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as update_c:
        for row in update_c:
            if int(row[0]) != 2:
                continue
            if int(row[1]) < 0:
                row[1] = 0
                update_c.updateRow(row)

    # Set environment parameters
    env.extent = hru.extent
    env.cellsize = hru.cs
    env.outputCoordinateSystem = hru.sr

    # # Build rasters
    # logging.info('\nOutput model grid rasters')
    # arcpy.PolygonToRaster_conversion(
    #    hru.polygon_path, hru.type_field, hru_type_raster,
    #    'CELL_CENTER', '', hru.cs)
    # arcpy.PolygonToRaster_conversion(
    #    hru.polygon_path, hru.dem_adj_field, dem_adj_raster,
    #    'CELL_CENTER', '', hru.cs)
    #
    # # Build rasters
    # logging.info('Output model grid ascii')
    # arcpy.RasterToASCII_conversion(hru_type_raster, hru_type_ascii)
    # arcpy.RasterToASCII_conversion(dem_adj_raster, dem_adj_ascii)

    logging.debug('\nRemoving existing CRT fill files')
    if os.path.isfile(fill_outflow_hru_path):
        os.remove(fill_outflow_hru_path)
    if os.path.isfile(fill_hru_casc_path):
        os.remove(fill_hru_casc_path)
    if os.path.isfile(fill_land_elev_path):
        os.remove(fill_land_elev_path)
    if os.path.isfile(fill_xy_path):
        os.remove(fill_xy_path)

    # Input parameters files for Cascade Routing Tool (CRT)
    logging.info('\nBuilding output CRT fill files')

    # Generate OUTFLOW_HRU.DAT for CRT
    # Outflow cells exit the model to inactive cells or out of the domain
    #   Outflow field is set in dem_2_streams
    logging.info('  {}'.format(os.path.basename(fill_outflow_hru_path)))
    outflow_hru_list = []
    fields = [
        hru.type_field, hru.outflow_field, hru.subbasin_field, hru.row_field,
        hru.col_field
    ]
    for row in arcpy.da.SearchCursor(hru.polygon_path, fields):
        if int(row[0]) != 0 and int(row[1]) == 1:
            outflow_hru_list.append([int(row[3]), int(row[4])])
    if outflow_hru_list:
        with open(fill_outflow_hru_path, 'w+') as f:
            f.write('{}    NUMOUTFLOWHRU\n'.format(len(outflow_hru_list)))
            for i, outflow_hru in enumerate(outflow_hru_list):
                f.write('{} {} {}   OUTFLOW_ID ROW COL\n'.format(
                    i + 1, outflow_hru[0], outflow_hru[1]))
    else:
        logging.error('\nERROR: No OUTFLOWHRU points, exiting')
        sys.exit()
    del outflow_hru_list

    # # DEADBEEF - Old method for setting OUTFLOW_HRU.DAT
    # #   Only streams that flow to real gauges are used
    # # Generate OUTFLOW_HRU.DAT for CRT
    # logging.info('  {}'.format(
    #    os.path.basename(fill_outflow_hru_path)))
    # outflow_hru_list = []
    # fields = [
    #    hru.type_field, hru.iseg_field, hru.outseg_field, hru.reach_field,
    #    hru.maxreach_field, hru.col_field, hru.row_field]
    # for row in arcpy.da.SearchCursor(hru.polygon_path, fields):
    #    if int(row[0]) != 1 or int(row[1]) == 0:
    #        continue
    #    if int(row[2]) == 0 and int(row[3]) == int(row[4]):
    #        outflow_hru_list.append([int(row[6]), int(row[5])])
    # if outflow_hru_list:
    #    with open(fill_outflow_hru_path, 'w+') as f:
    #        f.write('{}    NUMOUTFLOWHRU\n'.format(
    #            len(outflow_hru_list)))
    #        for i, outflow_hru in enumerate(outflow_hru_list):
    #            f.write('{} {} {}   OUTFLOW_ID ROW COL\n'.format(
    #                i+1, outflow_hru[0], outflow_hru[1]))
    #    f.close()
    # del outflow_hru_list

    # Generate HRU_CASC.DAT for CRT from hru_polygon
    logging.info('  {}'.format(os.path.basename(fill_hru_casc_path)))
    hru_type_dict = defaultdict(dict)
    for row in sorted(
            arcpy.da.SearchCursor(hru.polygon_path, [
                hru.row_field, hru.col_field, hru.type_field, hru.dem_adj_field
            ])):
        # Calculate CRT fill for all non-lake and non-ocean (elev > 0) cells
        # if row[3] > 0 and row[2] == 0:
        #    hru_type_dict[int(row[0])][int(row[1])] = 1
        # else: hru_type_dict[int(row[0])][int(row[1])] = row[2]
        # Calculate CRT fill for all active cells
        hru_type_dict[int(row[0])][int(row[1])] = row[2]
    hru_casc_header = (
        '{} {} {} {} {} {} {} {}     '
        'HRUFLG STRMFLG FLOWFLG VISFLG IPRN IFILL DPIT OUTITMAX\n').format(
            crt_hruflg, fill_strmflg, crt_flowflg, fill_visflg, crt_iprn,
            fill_ifill, crt_dpit, crt_outitmax)
    with open(fill_hru_casc_path, 'w+') as f:
        f.write(hru_casc_header)
        for row, col_data in sorted(hru_type_dict.items()):
            f.write(' '.join([str(t)
                              for c, t in sorted(col_data.items())]) + '\n')
    del hru_casc_header, hru_type_dict
    # # Generate HRU_CASC.DATA for CRT from raster/ascii
    # with open(hru_type_ascii, 'r') as f: ascii_data = f.readlines()
    # f.close()
    # hru_casc_header = (
    #    '{} {} {} {} {} {} {} {}     '
    #    'HRUFLG STRMFLG FLOWFLG VISFLG '
    #    'IPRN IFILL DPIT OUTITMAX\n').format(
    #        crt_hruflg, fill_strmflg, crt_flowflg, fill_visflg,
    #        crt_iprn, fill_ifill, crt_dpit, crt_outitmax)
    # with open(fill_hru_casc_path, 'w+') as f:
    #    f.write(hru_casc_header)
    #    for ascii_line in ascii_data[6:]: f.write(ascii_line)
    # f.close()
    # del hru_casc_header, ascii_data

    # Generate LAND_ELEV.DAT for CRT from hru_polygon
    logging.info('  {}'.format(os.path.basename(fill_land_elev_path)))
    dem_adj_dict = defaultdict(dict)
    for row in sorted(
            arcpy.da.SearchCursor(
                hru.polygon_path,
                [hru.row_field, hru.col_field, hru.dem_adj_field])):
        dem_adj_dict[int(row[0])][int(row[1])] = row[2]
    with open(fill_land_elev_path, 'w+') as f:
        row_first = dem_adj_dict.keys()[0]
        f.write('{} {}       NROW NCOL\n'.format(len(dem_adj_dict.keys()),
                                                 len(dem_adj_dict[row_first])))
        for row, col_data in sorted(dem_adj_dict.items()):
            f.write(' '.join(
                ['{:10.6f}'.format(t)
                 for c, t in sorted(col_data.items())]) + '\n')
    del dem_adj_dict
    # # Generate LAND_ELEV.DAT for CRT from raster/ascii
    # logging.info('  {}'.format(os.path.basename(fill_land_elev_path)))
    # with open(dem_adj_ascii, 'r') as f: ascii_data = f.readlines()
    # f.close()
    # with open(fill_land_elev_path, 'w+') as f:
    #    f.write('{} {}       NROW NCOL\n'.format(
    #        ascii_data[1].split()[1], ascii_data[0].split()[1]))
    #    for ascii_line in ascii_data[6:]: f.write(ascii_line)
    # f.close()
    # del ascii_data

    # Generate XY.DAT for CRT
    logging.info('  {}'.format(os.path.basename(fill_xy_path)))
    xy_list = [
        map(int, row) for row in sorted(
            arcpy.da.SearchCursor(hru.polygon_path,
                                  [hru.id_field, hru.x_field, hru.y_field]))
    ]
    with open(fill_xy_path, 'w+') as f:
        for line in sorted(xy_list):
            f.write(' '.join(map(str, line)) + '\n')
    del xy_list

    # Run CRT
    logging.info('\nRunning CRT')
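    # check_output raises CalledProcessError if CRT exits with a
    #   non-zero return code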
    subprocess.check_output(crt_exe_name, cwd=fill_ws, shell=True)

    # Read in outputstat.txt and get filled DEM
    logging.info('\nReading CRT {}'.format(output_name))
    output_path = os.path.join(fill_ws, output_name)
    with open(output_path, 'r') as f:
        output_data = [l.strip() for l in f.readlines()]

    # Determine where filled data is in the file
    try:
        crt_dem_i = output_data.index(
            'CRT FILLED LAND SURFACE MODEL USED TO GENERATE CASCADES')
        crt_fill_i = output_data.index(
            'DIFFERENCES BETWEEN FILLED AND UNFILLED LAND SURFACE MODELS')
    except ValueError:
        logging.error('\nERROR: CRT did not run to completion\n'
                      '  Check the CRT outputstat.txt file\n')
        sys.exit()

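    # Each banner line is followed by hru.rows lines of whitespace
    #   delimited values (one value per model column)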
    logging.info('  Break indices: {}, {}'.format(crt_dem_i, crt_fill_i))
    crt_dem_data = [
        r.split() for r in output_data[crt_dem_i + 1:crt_dem_i + hru.rows + 1]
    ]
    crt_fill_data = [
        r.split()
        for r in output_data[crt_fill_i + 1:crt_fill_i + hru.rows + 1]
    ]
    logging.info('  ROWS/COLS: {}/{}'.format(len(crt_dem_data),
                                             len(crt_dem_data[0])))
    logging.info('  ROWS/COLS: {}/{}'.format(len(crt_fill_data),
                                             len(crt_fill_data[0])))

    # crt_type_i = crt_fill_i + (crt_fill_i - crt_dem_i)
    # crt_dem_data = [
    #     r.split() for r in output_data[crt_dem_i+1: crt_dem_i+hru.rows+1]]
    # crt_fill_data = [
    #     r.split() for r in output_data[crt_fill_i+1: crt_type_i-1]]

    # Build dictionaries of the CRT data
    crt_dem_dict = defaultdict(dict)
    crt_fill_dict = defaultdict(dict)
    for i, r in enumerate(crt_dem_data):
        crt_dem_dict[i + 1] = dict([(j + 1, c) for j, c in enumerate(r)])
    for i, r in enumerate(crt_fill_data):
        crt_fill_dict[i + 1] = dict([(j + 1, c) for j, c in enumerate(r)])

    # Write CRT values to hru_polygon
    logging.info('Writing CRT data to fishnet')
    logging.debug('  {:<4s} {:<4s} {:>7s}'.format('ROW', 'COL', 'FILL'))
    fields = [
        hru.row_field, hru.col_field, hru.crt_elev_field, hru.crt_fill_field,
        hru.dem_adj_field
    ]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as update_c:
        for row in update_c:
            # DEM values too wide for CRT's fixed-width output may be
            #   written as symbols and are skipped here
            if support.is_number(crt_dem_dict[int(row[0])][int(row[1])]):
                row[2] = float(crt_dem_dict[int(row[0])][int(row[1])])
                row[3] = float(crt_fill_dict[int(row[0])][int(row[1])])
                if float(row[3]) > 0:
                    logging.debug('  {:>4d} {:>4d} {:>7.2f}'.format(
                        row[0], row[1], float(row[3])))
                if use_crt_fill_flag and float(row[3]) > 0:
                    row[4] = row[2]
                update_c.updateRow(row)


def stream_parameters(config_path):
    """Calculate GSFLOW Stream Parameters

    Parameters
    ----------
    config_path : str
        Project configuration file (.ini) path.

    Returns
    -------
    None

    """
    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error(
            '\nERROR: Config file could not be read, '
            'is not an input file, or does not exist\n'
            '  config_file = {}\n'
            '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'stream_parameters_log.txt'
    log_console = logging.FileHandler(
        filename=os.path.join(hru.log_ws, log_file_name), mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW Stream Parameters')

    # CRT Parameters
    try:
        crt_hruflg = inputs_cfg.getint('INPUTS', 'crt_hruflg')
    except ConfigParser.NoOptionError:
        crt_hruflg = 0
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'crt_hruflg', crt_hruflg))
    try:
        crt_flowflg = inputs_cfg.getint('INPUTS', 'crt_flowflg')
    except ConfigParser.NoOptionError:
        crt_flowflg = 1
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'crt_flowflg', crt_flowflg))
    try:
        crt_dpit = inputs_cfg.getfloat('INPUTS', 'crt_dpit')
    except ConfigParser.NoOptionError:
        crt_dpit = 0.01
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'crt_dpit', crt_dpit))
    try:
        crt_outitmax = inputs_cfg.getint('INPUTS', 'crt_outitmax')
    except ConfigParser.NoOptionError:
        crt_outitmax = 100000
        logging.info(
            '  Missing INI parameter, setting {} = {}'.format(
                'crt_outitmax', crt_outitmax))
    # Intentionally not allowing user to change this value
    crt_iprn = 1

    # CRT streams/cascade parameters
    crt_ws = os.path.join(hru.param_ws, 'cascade_work')
    crt_strmflg = 1
    crt_visflg = 1
    crt_ifill = 1

    # CRT groundwater cascades
    gw_ws = os.path.join(hru.param_ws, 'cascade_gw_work')
    gw_strmflg = 1
    gw_visflg = 1
    gw_ifill = 1

    # CRT Executable
    crt_exe_path = inputs_cfg.get('INPUTS', 'crt_exe_path')
    output_name = 'outputstat.txt'

    # Override ascii and rasters flags to generate CRT inputs
    output_ascii_flag = True
    output_rasters_flag = True

    # Parameters
    exit_seg = 0

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error(
            '\nERROR: Fishnet ({}) does not exist\n'.format(
                hru.polygon_path))
        sys.exit()
    # Streams shapefile from dem_2_streams is needed to get the
    #   stream length in each cell
    flow_temp_ws = os.path.join(hru.param_ws, 'flow_rasters')
    if not os.path.isdir(flow_temp_ws):
        logging.error(
            '\nERROR: Flow_rasters folder does not exist'
            '\nERROR:   {}'
            '\nERROR: Try re-running dem_2_streams.py\n'.format(
                flow_temp_ws))
        sys.exit()
    streams_path = os.path.join(flow_temp_ws, 'streams.shp')
    if not os.path.isfile(streams_path):
        logging.error(
            '\nERROR: Streams shapefile does not exist'
            '\nERROR:   {}'
            '\nERROR: Try re-running dem_2_streams.py\n'.format(
                streams_path))
        sys.exit()
    # Check that input fields exist and have data
    # Fields generated by hru_parameters
    for f in [hru.type_field, hru.row_field, hru.col_field]:
        if not arcpy.ListFields(hru.polygon_path, f):
            logging.error(
                '\nERROR: Input field {} is not present in fishnet'
                '\nERROR: Try re-running hru_parameters.py\n'.format(f))
            sys.exit()
        elif support.field_stat_func(hru.polygon_path, f, 'MAXIMUM') == 0:
            logging.error(
                '\nERROR: Input field {} contains only 0'
                '\nERROR: Try re-running hru_parameters.py\n'.format(f))
            sys.exit()
    # Fields generated by dem_2_streams
    for f in [hru.irunbound_field, hru.iseg_field, hru.flow_dir_field,
              hru.outflow_field, hru.subbasin_field]:
        if not arcpy.ListFields(hru.polygon_path, f):
            logging.error(
                '\nERROR: Input field {} is not present in fishnet'
                '\nERROR: Try re-running dem_2_streams.py\n'.format(f))
            sys.exit()
        elif support.field_stat_func(hru.polygon_path, f, 'MAXIMUM') == 0:
            logging.error(
                '\nERROR: Input field {} contains only 0'
                '\nERROR: Try re-running dem_2_streams.py\n'.format(f))
            sys.exit()

    # Build output folder if necessary
    stream_temp_ws = os.path.join(hru.param_ws, 'stream_rasters')
    if not os.path.isdir(stream_temp_ws):
        os.mkdir(stream_temp_ws)
    if not os.path.isdir(crt_ws):
        os.mkdir(crt_ws)
    if not os.path.isdir(gw_ws):
        os.mkdir(gw_ws)

    # Copy CRT executable if necessary
    crt_exe_name = os.path.basename(crt_exe_path)
    if not os.path.isfile(os.path.join(crt_ws, crt_exe_name)):
        shutil.copy(crt_exe_path, crt_ws)
    if not os.path.isfile(os.path.join(gw_ws, crt_exe_name)):
        shutil.copy(crt_exe_path, gw_ws)
    if not os.path.isfile(os.path.join(crt_ws, crt_exe_name)):
        logging.error(
            '\nERROR: CRT executable ({}) does not exist\n'.format(
                os.path.join(crt_ws, crt_exe_name)))
        sys.exit()

    # Cascades files
    crt_hru_casc_path = os.path.join(crt_ws, 'HRU_CASC.DAT')
    crt_outflow_hru_path = os.path.join(crt_ws, 'OUTFLOW_HRU.DAT')
    crt_land_elev_path = os.path.join(crt_ws, 'LAND_ELEV.DAT')
    crt_stream_cells_path = os.path.join(crt_ws, 'STREAM_CELLS.DAT')
    crt_xy_path = os.path.join(crt_ws, 'XY.DAT')

    # Groundwater cascades files
    gw_hru_casc_path = os.path.join(gw_ws, 'HRU_CASC.DAT')
    gw_outflow_hru_path = os.path.join(gw_ws, 'OUTFLOW_HRU.DAT')
    gw_land_elev_path = os.path.join(gw_ws, 'LAND_ELEV.DAT')
    gw_stream_cells_path = os.path.join(gw_ws, 'STREAM_CELLS.DAT')
    gw_xy_path = os.path.join(gw_ws, 'XY.DAT')

    # Output names
    dem_adj_raster_name = 'dem_adj'
    hru_type_raster_name = 'hru_type'
    iseg_raster_name = 'iseg'
    irunbound_raster_name = 'irunbound'
    subbasin_raster_name = 'sub_basins'
    segbasin_raster_name = 'seg_basins'

    # Output raster paths
    dem_adj_raster = os.path.join(stream_temp_ws, dem_adj_raster_name + '.img')
    hru_type_raster = os.path.join(stream_temp_ws, hru_type_raster_name + '.img')
    iseg_raster = os.path.join(stream_temp_ws, iseg_raster_name + '.img')
    irunbound_raster = os.path.join(stream_temp_ws, irunbound_raster_name + '.img')
    subbasin_raster = os.path.join(stream_temp_ws, subbasin_raster_name + '.img')
    segbasin_raster = os.path.join(stream_temp_ws, segbasin_raster_name + '.img')

    # Output ascii paths
    a_fmt = '{}_ascii.txt'
    dem_adj_ascii = os.path.join(stream_temp_ws, a_fmt.format(dem_adj_raster_name))
    hru_type_ascii = os.path.join(stream_temp_ws, a_fmt.format(hru_type_raster_name))
    iseg_ascii = os.path.join(stream_temp_ws, a_fmt.format(iseg_raster_name))
    irunbound_ascii = os.path.join(stream_temp_ws, a_fmt.format(irunbound_raster_name))
    subbasin_ascii = os.path.join(stream_temp_ws, a_fmt.format(subbasin_raster_name))
    segbasin_ascii = os.path.join(stream_temp_ws, a_fmt.format(segbasin_raster_name))

    # Layers
    hru_polygon_lyr = 'hru_polygon_lyr'

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    # env.pyramid = 'PYRAMIDS -1'
    env.pyramid = 'PYRAMIDS 0'
    env.workspace = stream_temp_ws
    env.scratchWorkspace = hru.scratch_ws

    # Add fields if necessary
    logging.info('\nAdding fields if necessary')
    support.add_field_func(hru.polygon_path, hru.iseg_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.irunbound_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.flow_dir_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.krch_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.irch_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.jrch_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.reach_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.rchlen_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.maxreach_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.outseg_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.iupseg_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.subbasin_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.segbasin_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.strm_top_field, 'FLOAT')
    support.add_field_func(hru.polygon_path, hru.strm_slope_field, 'FLOAT')

    # Calculate KRCH, IRCH, JRCH for stream segments
    logging.info('\nKRCH, IRCH, & JRCH for streams')
    fields = [
        hru.type_field, hru.iseg_field, hru.row_field, hru.col_field,
        hru.krch_field, hru.irch_field, hru.jrch_field]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as update_c:
        for row in update_c:
            if (int(row[0]) in [1, 3] and int(row[1]) > 0):
                row[4], row[5], row[6] = 1, int(row[2]), int(row[3])
            else:
                row[4], row[5], row[6] = 0, 0, 0
            update_c.updateRow(row)

    # Get stream length for each cell
    logging.info('Stream length')
    arcpy.MakeFeatureLayer_management(hru.polygon_path, hru_polygon_lyr)
    arcpy.SelectLayerByAttribute_management(
        hru_polygon_lyr, 'NEW_SELECTION',
        '"{}" = 1 AND "{}" <> 0'.format(hru.type_field, hru.iseg_field))
    length_path = os.path.join('in_memory', 'length')
    arcpy.Intersect_analysis(
        [hru_polygon_lyr, streams_path],
        length_path, 'ALL', '', 'LINE')
    arcpy.Delete_management(hru_polygon_lyr)
    length_field = 'LENGTH'
    arcpy.AddField_management(length_path, length_field, 'LONG')
    arcpy.CalculateField_management(
        length_path, length_field, '!shape.length@meters!', 'PYTHON')
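    # Sum intersected stream lengths (meters, truncated to int) for each
    #   HRU cell; a cell crossed by multiple stream lines accumulates
    #   their total length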
    length_dict = defaultdict(int)
    # DEADBEEF - This probably needs a maximum limit
    for row in arcpy.da.SearchCursor(
            length_path, [hru.id_field, length_field]):
        length_dict[int(row[0])] += int(row[1])
    fields = [hru.type_field, hru.iseg_field, hru.rchlen_field, hru.id_field]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as update_c:
        for row in update_c:
            if (int(row[0]) == 1 and int(row[1]) != 0):
                row[2] = length_dict[int(row[3])]
            else:
                row[2] = 0
            update_c.updateRow(row)
    del length_dict, length_field, fields, hru_polygon_lyr

    # Get list of segments and downstream cell for each stream/lake cell
    # Downstream is calculated from flow direction
    # Use IRUNBOUND instead of ISEG, since ISEG will be zeroed for lakes
    # DEADBEEF - I don't think ISEG will be zero for lakes anymore
    logging.info('Cell out-flow dictionary')
    cell_dict = dict()
    fields = [
        hru.type_field, hru.krch_field, hru.lake_id_field, hru.iseg_field,
        hru.irunbound_field, hru.dem_adj_field, hru.flow_dir_field,
        hru.col_field, hru.row_field, hru.id_field]
    for row in arcpy.da.SearchCursor(hru.polygon_path, fields):
        # Skip inactive cells
        if int(row[0]) == 0:
            continue
        # Skip if not lake and not stream
        elif (int(row[1]) == 0 and int(row[2]) == 0):
            continue

        # COL / ROW
        cell = (int(row[7]), int(row[8]))

        # Read in parameters
        # HRU_ID, IRUNBOUND, support.next_row_col(FLOW_DIR, CELL), DEM_ADJ,
        #   then three placeholders for OUTSEG, IREACH, and MAXREACH
        cell_dict[cell] = [
            int(row[9]), int(row[4]), support.next_row_col(int(row[6]), cell),
            float(row[5]), 0, 0, 0]

    # Build list of unique segments
    iseg_list = sorted(list(set([v[1] for v in cell_dict.values()])))

    # Calculate IREACH and OUTSEG
    logging.info('Calculate {} and {}'.format(
        hru.reach_field, hru.outseg_field))
    outseg_dict = dict()
    for iseg in sorted(iseg_list):
        logging.debug('    Segment: {}'.format(iseg))

        # Subset of cell_dict for current iseg
        iseg_dict = dict([(k, v) for k, v in cell_dict.items() if v[1] == iseg])

        # List of all cells in current iseg
        iseg_cells = iseg_dict.keys()

        # List of out_cells for all cells in current iseg
        out_cells = [value[2] for value in iseg_dict.values()]

        # Every iseg will (should?) have one out_cell
        out_cell = list(set(out_cells) - set(iseg_cells))

        # Process streams and lakes separately
        # Streams
        if iseg > 0:
            # If there is more than one out_cell
            #   there is a problem with the stream network
            if len(out_cell) != 1:
                logging.error(
                    '\nERROR: ISEG {} has more than one output cell'
                    '\n  Out cells: {}'
                    '\n  Check for streams exiting then re-entering a lake'
                    '\n  Lake cell elevations may not be constant\n'.format(
                        iseg, out_cell))
                sys.exit()

            # If there is no output cell, assume edge of domain
            try:
                outseg = cell_dict[out_cell[0]][1]
            except KeyError:
                outseg = exit_seg

            # Track sub-basin outseg
            outseg_dict[iseg] = outseg

            # Calculate reach number for each cell
            reach_dict = dict()
            start_cell = list(set(iseg_cells) - set(out_cells))[0]
            for i in range(len(out_cells)):
                # logging.debug('    Reach: {}  Cell: {}'.format(i+1, start_cell))
                reach_dict[start_cell] = i + 1
                start_cell = iseg_dict[start_cell][2]

            # For each cell in iseg, save outseg, reach, & maxreach
            for iseg_cell in iseg_cells:
                cell_dict[iseg_cell][4:] = [
                    outseg, reach_dict[iseg_cell], len(iseg_cells)]
            del reach_dict, start_cell, outseg
        # Lakes
        else:
            # For lake cells, there can be multiple outlets if all of them
            #   are to inactive cells or out of the model
            # Otherwise, like streams, there should only be one outcell per iseg
            logging.debug('  Length: {}'.format(len(out_cells)))
            if len(out_cell) == 1:
                try:
                    outseg = cell_dict[out_cell[0]][1]
                except KeyError:
                    outseg = exit_seg
            elif (len(out_cell) != 1 and
                  all(x not in cell_dict for x in out_cell)):
                outseg = exit_seg
                logging.debug(
                    '  All out cells are inactive, setting outseg '
                    'to exit_seg {}'.format(exit_seg))
            else:
                logging.error(
                    '\nERROR: ISEG {} has more than one output cell'
                    '\n  Out cells: {}'
                    '\n  Check for streams exiting then re-entering a lake'
                    '\n  Lake cell elevations may not be constant\n'.format(
                        iseg, out_cell))
                sys.exit()

            # Track sub-basin outseg
            outseg_dict[iseg] = outseg

            # For each lake segment cell, only save outseg
            # All lake cells are routed directly to the outseg
            for iseg_cell in iseg_cells:
                cell_dict[iseg_cell][4:] = [outseg, 0, 0]
            del outseg

        del iseg_dict, iseg_cells, iseg
        del out_cells, out_cell

    # Calculate stream elevation
    logging.info('Stream elevation (DEM_ADJ - 1 for now)')
    fields = [
        hru.type_field, hru.iseg_field, hru.dem_adj_field,
        hru.strm_top_field]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as update_c:
        for row in update_c:
            if int(row[0]) == 1 and int(row[1]) != 0:
                row[3] = float(row[2]) - 1
            else:
                row[3] = 0
            update_c.updateRow(row)

    # Saving ireach and outseg
    logging.info('Save IREACH and OUTSEG')
    fields = [
        hru.type_field, hru.iseg_field, hru.col_field, hru.row_field,
        hru.outseg_field, hru.reach_field, hru.maxreach_field]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as update_c:
        for row in update_c:
            # if (int(row[0]) > 0 and int(row[1]) > 0):
            # DEADBEEF - I'm not sure why only iseg > 0 in above line
            # DEADBEEF - This should set outseg for streams and lakes
            if (int(row[0]) > 0 and int(row[1]) != 0):
                row[4:] = cell_dict[(int(row[2]), int(row[3]))][4:]
            else:
                row[4:] = [0, 0, 0]
            update_c.updateRow(row)

    # Calculate IUPSEG for all segments flowing out of lakes
    logging.info('IUPSEG for streams flowing out of lakes')
    upseg_dict = dict(
        [(v, k) for k, v in outseg_dict.iteritems() if k < 0])
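    # Negative keys in outseg_dict are lake segments; inverting the
    #   mapping links each stream segment that receives lake outflow
    #   back to its upstream lake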
    fields = [hru.type_field, hru.iseg_field, hru.iupseg_field]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as update_c:
        for row in update_c:
            if (int(row[0]) == 1 and int(row[1]) != 0 and
                    int(row[1]) in upseg_dict.keys()):
                row[2] = upseg_dict[int(row[1])]
            else:
                row[2] = 0
            update_c.updateRow(row)

    # Build dictionary of which segments flow into each segment
    # Used to calculate seg-basins (sub watersheds) for major streams
    # Also save list of all segments that pour to exit
    logging.info('Segment in/out-flow dictionary')
    inseg_dict = defaultdict(list)
    pourseg_dict = dict()
    pourseg_list = []
    for key, value in outseg_dict.iteritems():
        if key == exit_seg:
            continue
            # inseg_dict[key].append(key)
        elif value == exit_seg:
            pourseg_list.append(key)
            inseg_dict[key].append(key)
        else:
            inseg_dict[value].append(key)

    # Update pourseg for each segment, working up from initial pourseg
    # Pourseg is the final exit segment for each upstream segment
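    # Example: with outseg_dict = {1: 2, 2: 0, 3: 2}, segment 2 pours to
    #   the exit, so pourseg_dict becomes {1: 2, 2: 2, 3: 2}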
    for pourseg in pourseg_list:
        testseg_list = inseg_dict[pourseg]
        while testseg_list:
            testseg = testseg_list.pop()
            pourseg_dict[testseg] = pourseg
            if pourseg == testseg:
                continue
            testseg_list.extend(inseg_dict[testseg])
        del testseg_list

    # Calculate SEG_BASIN for all active cells
    # SEG_BASIN corresponds to the ISEG of the lowest segment
    logging.info('SEG_BASIN')
    fields = [hru.type_field, hru.irunbound_field, hru.segbasin_field]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as update_c:
        for row in update_c:
            if int(row[0]) > 0 and int(row[1]) != 0:
                row[2] = pourseg_dict[int(row[1])]
            else:
                row[2] = 0
            update_c.updateRow(row)

    # # Set all swale cells back to hru_type 2 (lake)
    # logging.info('Swale HRU_TYPE')
    # with arcpy.da.UpdateCursor(hru.polygon_path, [hru.type_field]) as update_c:
    #     for row in update_c:
    #         if int(row[0]) == 3:
    #             row[0] = 2
    #             update_c.updateRow(row)

    # Set all lake iseg to 0
    logging.info('Lake ISEG')
    fields = [hru.type_field, hru.iseg_field]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as update_c:
        for row in update_c:
            if int(row[0]) != 2:
                continue
            iseg = int(row[1])
            if iseg < 0:
                row[1] = 0
                update_c.updateRow(row)

    # Set environment parameters
    env.extent = hru.extent
    env.cellsize = hru.cs
    env.outputCoordinateSystem = hru.sr

    # Build rasters
    if output_rasters_flag:
        logging.info('\nOutput model grid rasters')
        arcpy.PolygonToRaster_conversion(
            hru.polygon_path, hru.type_field, hru_type_raster,
            'CELL_CENTER', '', hru.cs)
        arcpy.PolygonToRaster_conversion(
            hru.polygon_path, hru.dem_adj_field, dem_adj_raster,
            'CELL_CENTER', '', hru.cs)
        arcpy.PolygonToRaster_conversion(
            hru.polygon_path, hru.iseg_field, iseg_raster,
            'CELL_CENTER', '', hru.cs)
        arcpy.PolygonToRaster_conversion(
            hru.polygon_path, hru.irunbound_field, irunbound_raster,
            'CELL_CENTER', '', hru.cs)
        arcpy.PolygonToRaster_conversion(
            hru.polygon_path, hru.segbasin_field, segbasin_raster,
            'CELL_CENTER', '', hru.cs)
        arcpy.PolygonToRaster_conversion(
            hru.polygon_path, hru.subbasin_field, subbasin_raster,
            'CELL_CENTER', '', hru.cs)

    # Build rasters
    if output_ascii_flag:
        logging.info('Output model grid ascii')
        arcpy.RasterToASCII_conversion(hru_type_raster, hru_type_ascii)
        arcpy.RasterToASCII_conversion(dem_adj_raster, dem_adj_ascii)
        arcpy.RasterToASCII_conversion(iseg_raster, iseg_ascii)
        arcpy.RasterToASCII_conversion(irunbound_raster, irunbound_ascii)
        arcpy.RasterToASCII_conversion(segbasin_raster, segbasin_ascii)
        arcpy.RasterToASCII_conversion(subbasin_raster, subbasin_ascii)
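        # Pause briefly, presumably to let ArcGIS finish writing the
        #   ASCII files before they are read back in below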
        sleep(5)

    logging.debug('\nRemoving existing CRT cascade files')
    if os.path.isfile(crt_hru_casc_path):
        os.remove(crt_hru_casc_path)
    if os.path.isfile(crt_outflow_hru_path):
        os.remove(crt_outflow_hru_path)
    if os.path.isfile(crt_land_elev_path):
        os.remove(crt_land_elev_path)
    if os.path.isfile(crt_stream_cells_path):
        os.remove(crt_stream_cells_path)
    if os.path.isfile(crt_xy_path):
        os.remove(crt_xy_path)

    # Input parameters files for Cascade Routing Tool (CRT)
    logging.info('\nBuilding output CRT files')

    # Generate STREAM_CELLS.DAT file for CRT
    # Include non-lake SWALES in streams file
    logging.info('  {}'.format(
        os.path.basename(crt_stream_cells_path)))
    stream_cells_list = []
    fields = [
        hru.type_field, hru.iseg_field, hru.reach_field,
        hru.col_field, hru.row_field]
    for row in arcpy.da.SearchCursor(hru.polygon_path, fields):
        if int(row[0]) in [1, 3] and int(row[1]) > 0:
            stream_cells_list.append(
                [int(row[4]), int(row[3]), int(row[1]), int(row[2]), 1])
    if stream_cells_list:
        with open(crt_stream_cells_path, 'w+') as f:
            f.write('{}    NREACH\n'.format(len(stream_cells_list)))
            for stream_cells_l in sorted(stream_cells_list):
                f.write(' '.join(map(str, stream_cells_l)) + '\n')
    del stream_cells_list

    # Generate OUTFLOW_HRU.DAT for CRT
    # Outflow cells exit the model to inactive cells or out of the domain
    #   Outflow field is set in dem_2_streams
    logging.info('  {}'.format(
        os.path.basename(crt_outflow_hru_path)))
    outflow_hru_list = []
    fields = [
        hru.type_field, hru.outflow_field, hru.subbasin_field,
        hru.row_field, hru.col_field]
    for row in arcpy.da.SearchCursor(hru.polygon_path, fields):
        if int(row[0]) != 0 and int(row[1]) == 1:
            outflow_hru_list.append([int(row[3]), int(row[4])])
    if outflow_hru_list:
        with open(crt_outflow_hru_path, 'w+') as f:
            f.write('{}    NUMOUTFLOWHRU\n'.format(
                len(outflow_hru_list)))
            for i, outflow_hru in enumerate(outflow_hru_list):
                f.write('{} {} {}   OUTFLOW_ID ROW COL\n'.format(
                    i + 1, outflow_hru[0], outflow_hru[1]))
    else:
        logging.error('\nERROR: No OUTFLOWHRU points, exiting')
        sys.exit()
    del outflow_hru_list

    # # DEADBEEF - Old method for setting OUTFLOW_HRU.DAT
    # # Generate OUTFLOW_HRU.DAT for CRT
    # logging.info('  {}'.format(
    #    os.path.basename(crt_outflow_hru_path)))
    # outflow_hru_list = []
    # fields = [
    #    hru.type_field, hru.iseg_field, hru.outseg_field, hru.reach_field,
    #    hru.maxreach_field, hru.col_field, hru.row_field]
    # for row in arcpy.da.SearchCursor(hru.polygon_path, fields):
    #    if int(row[0]) != 1 or int(row[1]) == 0: continue
    #    if int(row[2]) == 0 and int(row[3]) == int(row[4]):
    #        outflow_hru_list.append([int(row[6]), int(row[5])])
    # if outflow_hru_list:
    #    with open(crt_outflow_hru_path, 'w+') as f:
    #        f.write('{}    NUMOUTFLOWHRU\n'.format(
    #            len(outflow_hru_list)))
    #        for i, outflow_hru in enumerate(outflow_hru_list):
    #            f.write('{} {} {}   OUTFLOW_ID ROW COL\n'.format(
    #                i+1, outflow_hru[0], outflow_hru[1]))
    #    f.close()
    # del outflow_hru_list

    # Generate HRU_CASC.DAT for CRT
    logging.info('  {}'.format(os.path.basename(crt_hru_casc_path)))
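    # An Esri ASCII grid has a 6 line header (ncols, nrows, xllcorner,
    #   yllcorner, cellsize, NODATA_value), so ascii_data[6:] is the data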
    with open(hru_type_ascii, 'r') as f:
        ascii_data = f.readlines()
    hru_casc_header = (
        '{} {} {} {} {} {} {} {}     '
        'HRUFLG STRMFLG FLOWFLG VISFLG IPRN IFILL DPIT OUTITMAX\n').format(
            crt_hruflg, crt_strmflg, crt_flowflg, crt_visflg,
            crt_iprn, crt_ifill, crt_dpit, crt_outitmax)
    with open(crt_hru_casc_path, 'w+') as f:
        f.write(hru_casc_header)
        for ascii_line in ascii_data[6:]:
            f.write(ascii_line)
    del hru_casc_header, ascii_data

    # Generate LAND_ELEV.DAT for CRT
    logging.info('  {}'.format(os.path.basename(crt_land_elev_path)))
    with open(dem_adj_ascii, 'r') as f:
        ascii_data = f.readlines()
    with open(crt_land_elev_path, 'w+') as f:
        f.write('{} {}       NROW NCOL\n'.format(
            ascii_data[1].split()[1], ascii_data[0].split()[1]))
        for ascii_line in ascii_data[6:]:
            f.write(ascii_line)
    del ascii_data

    # Generate XY.DAT for CRT
    logging.info('  {}'.format(os.path.basename(crt_xy_path)))
    xy_list = [
        map(int, row)
        for row in sorted(arcpy.da.SearchCursor(
            hru.polygon_path, [hru.id_field, hru.x_field, hru.y_field]))]
    with open(crt_xy_path, 'w+') as f:
        for line in sorted(xy_list):
            f.write(' '.join(map(str, line)) + '\n')

    # Run CRT
    logging.info('\nRunning CRT')
    subprocess.check_output(crt_exe_name, cwd=crt_ws, shell=True)

    # Read in outputstat.txt to check for errors
    logging.info('\nReading CRT {}'.format(output_name))
    output_path = os.path.join(crt_ws, output_name)
    with open(output_path, 'r') as f:
        output_data = [l.strip() for l in f.readlines()]

    # Check if there are errors
    if 'CRT FOUND UNDECLARED SWALE HRUS' in output_data:
        logging.error(
            '\nERROR: CRT found undeclared swale HRUs (sinks)\n'
            '  All sinks must be filled before generating cascades\n'
            '  Check the CRT outputstat.txt file\n')
        sys.exit()
    elif 'CRT EXECUTION COMPLETE' not in output_data:
        logging.error('\nERROR: CRT did not successfully complete\n')
        sys.exit()

    # Rerun CRT without lakes to build groundwater cascades
    # This is only needed if there are lakes in the model
    # For now the input files are copied from the cascade_work folder
    # (except HRU_CASC.DAT)
    logging.debug('\nRemoving existing groundwater cascade CRT files')
    if os.path.isfile(gw_hru_casc_path):
        os.remove(gw_hru_casc_path)
    if os.path.isfile(gw_outflow_hru_path):
        os.remove(gw_outflow_hru_path)
    if os.path.isfile(gw_land_elev_path):
        os.remove(gw_land_elev_path)
    if os.path.isfile(gw_stream_cells_path):
        os.remove(gw_stream_cells_path)
    if os.path.isfile(gw_xy_path):
        os.remove(gw_xy_path)

    logging.info('\nCopying cascade CRT files (except HRU_CASC.DAT)')
    shutil.copy(crt_outflow_hru_path, gw_outflow_hru_path)
    shutil.copy(crt_land_elev_path, gw_land_elev_path)
    shutil.copy(crt_stream_cells_path, gw_stream_cells_path)
    shutil.copy(crt_xy_path, gw_xy_path)

    # Input parameters files for Cascade Routing Tool (CRT)
    logging.info('\nBuilding groundwater cascade CRT files')

    # Generate HRU_CASC.DAT for CRT
    logging.info('  {}'.format(os.path.basename(gw_hru_casc_path)))
    with open(hru_type_ascii, 'r') as f:
        ascii_data = f.readlines()
    hru_casc_header = (
        '{} {} {} {} {} {} {} {}     '
        'HRUFLG STRMFLG FLOWFLG VISFLG IPRN IFILL DPIT OUTITMAX\n').format(
            crt_hruflg, crt_strmflg, crt_flowflg, crt_visflg,
            crt_iprn, crt_ifill, crt_dpit, crt_outitmax)
    with open(gw_hru_casc_path, 'w+') as f:
        f.write(hru_casc_header)
        for ascii_line in ascii_data[6:]:
            # Convert all lakes to active
            # Should swales (type 3) be converted also?
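            # NOTE: a bare character replace assumes single-digit type
            #   values; any other '2' in a line would also be rewritten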
            f.write(ascii_line.replace('2', '1'))
    del hru_casc_header, ascii_data

    # Run CRT
    logging.info('\nRunning CRT for groundwater cascades')
    subprocess.check_output(crt_exe_name, cwd=gw_ws, shell=True)

    # Read in outputstat.txt to check for errors
    logging.info('\nReading CRT {}'.format(output_name))
    output_path = os.path.join(gw_ws, output_name)
    with open(output_path, 'r') as f:
        output_data = [l.strip() for l in f.readlines()]

    # Check if there are errors
    if 'CRT FOUND UNDECLARED SWALE HRUS' in output_data:
        logging.error(
            '\nERROR: CRT found undeclared swale HRUs (sinks)\n'
            '  All sinks must be filled before generating cascades\n'
            '  Check the CRT outputstat.txt file\n')
        sys.exit()
    elif 'CRT EXECUTION COMPLETE' not in output_data:
        logging.error('\nERROR: CRT did not successfully complete\n')
        sys.exit()


def flow_parameters(config_path, overwrite_flag=False, debug_flag=False):
    """Calculate GSFLOW Flow Parameters

    Args:
        config_file (str): Project config file path
        ovewrite_flag (bool): if True, overwrite existing files
        debug_flag (bool): if True, enable debug level logging

    Returns:
        None
    """

    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'dem_2_stream_log.txt'
    log_console = logging.FileHandler(
        filename=os.path.join(hru.log_ws, log_file_name), mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW DEM To Streams')

    # Check whether lake parameters should be calculated
    try:
        set_lake_flag = inputs_cfg.getboolean('INPUTS', 'set_lake_flag')
    except ConfigParser.NoOptionError:
        set_lake_flag = False
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'set_lake_flag', set_lake_flag))

    # Model points
    model_inputs_path = inputs_cfg.get('INPUTS', 'model_points_path')
    try:
        model_points_zone_field = inputs_cfg.get('INPUTS',
                                                 'model_points_zone_field')
    except ConfigParser.NoOptionError:
        model_points_zone_field = 'FID'
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'model_points_zone_field', model_points_zone_field))
    try:
        model_points_type_field = inputs_cfg.get('INPUTS',
                                                 'model_points_type_field')
    except ConfigParser.NoOptionError:
        model_points_type_field = 'TYPE'
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'model_points_type_field', model_points_type_field))

    # Flow parameters
    flow_acc_threshold = inputs_cfg.getint('INPUTS', 'flow_acc_threshold')
    flow_length_threshold = inputs_cfg.getint('INPUTS',
                                              'flow_length_threshold')
    try:
        calc_flow_dir_points_flag = inputs_cfg.getboolean(
            'INPUTS', 'calc_flow_dir_points_flag')
    except ConfigParser.NoOptionError:
        calc_flow_dir_points_flag = False
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'calc_flow_dir_points_flag', calc_flow_dir_points_flag))
    try:
        lake_seg_offset = inputs_cfg.getint('INPUTS', 'lake_seg_offset')
    except ConfigParser.NoOptionError:
        lake_seg_offset = 0
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'lake_seg_offset', lake_seg_offset))
    if lake_seg_offset < 0:
        logging.error(
            '\nERROR: lake_seg_offset must be an integer greater than '
            'or equal to 0')
        sys.exit()

    # Check input paths
    dem_temp_ws = os.path.join(hru.param_ws, 'dem_rasters')
    dem_path = os.path.join(dem_temp_ws, 'dem.img')
    if not arcpy.Exists(dem_path):
        logging.error(
            '\nERROR: Projected/clipped DEM ({}) does not exist'
            '\nERROR: Try rerunning dem_parameters.py'.format(dem_path))
        sys.exit()
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()

    # Check model points
    if not os.path.isfile(model_inputs_path):
        logging.error('\nERROR: Model points shapefile does not exist'
                      '\nERROR:   {}'.format(model_inputs_path))
        sys.exit()
    # model_points_path must be a point shapefile
    elif arcpy.Describe(model_inputs_path).datasetType != 'FeatureClass':
        logging.error('\nERROR: model_points_path must be a point shapefile')
        sys.exit()

    # Build output folder if necessary
    flow_temp_ws = os.path.join(hru.param_ws, 'flow_rasters')
    if not os.path.isdir(flow_temp_ws):
        os.mkdir(flow_temp_ws)

    # Output paths
    hru_type_path = os.path.join(flow_temp_ws, 'hru_type.img')
    dem_adj_path = os.path.join(flow_temp_ws, 'dem_adj.img')
    lake_id_path = os.path.join(flow_temp_ws, 'lake_id.img')
    dem_sink_path = os.path.join(flow_temp_ws, 'dem_sink.img')
    dem_fill_path = os.path.join(flow_temp_ws, 'dem_fill.img')
    flow_dir_path = os.path.join(flow_temp_ws, 'flow_dir.img')
    flow_dir_points = os.path.join(flow_temp_ws, 'flow_dir_points.shp')
    flow_acc_full_path = os.path.join(flow_temp_ws, 'flow_acc_full.img')
    flow_acc_sub_path = os.path.join(flow_temp_ws, 'flow_acc_sub.img')
    flow_mask_path = os.path.join(flow_temp_ws, 'flow_mask.img')
    stream_link_path = os.path.join(flow_temp_ws, 'stream_link.img')
    stream_link_a_path = os.path.join(flow_temp_ws, 'stream_link_a.img')
    stream_link_b_path = os.path.join(flow_temp_ws, 'stream_link_b.img')
    stream_order_path = os.path.join(flow_temp_ws, 'stream_order.img')
    stream_length_path = os.path.join(flow_temp_ws, 'stream_length.img')
    watersheds_path = os.path.join(flow_temp_ws, 'watersheds.img')
    outlet_path = os.path.join(flow_temp_ws, 'outlet.img')
    swale_path = os.path.join(flow_temp_ws, 'swale.img')
    subbasin_path = os.path.join(flow_temp_ws, 'subbasin.img')
    basin_path = os.path.join(flow_temp_ws, 'basin.img')
    streams_path = os.path.join(flow_temp_ws, 'streams.shp')
    model_points_path = os.path.join(flow_temp_ws, 'model_points.shp')

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    # env.pyramid = 'PYRAMIDS -1'
    env.pyramid = 'PYRAMIDS 0'
    env.workspace = flow_temp_ws
    env.scratchWorkspace = hru.scratch_ws

    # Set environment parameters
    env.extent = hru.extent
    env.cellsize = hru.cs
    env.outputCoordinateSystem = hru.sr

    # Read in model points shapefile
    logging.info('\nChecking model points shapefile')
    model_points_desc = arcpy.Describe(model_inputs_path)
    model_points_sr = model_points_desc.spatialReference
    logging.debug('  Points: {}'.format(model_inputs_path))
    logging.debug('  Points spat. ref.:  {}'.format(model_points_sr.name))
    logging.debug('  Points GCS:         {}'.format(model_points_sr.GCS.name))

    # If model points spat_ref doesn't match hru_param spat_ref
    # Project model points to hru_param spat ref
    # Otherwise, read model points directly
    if hru.sr.name != model_points_sr.name:
        logging.info('  Model points projection does not match fishnet.\n'
                     '  Projecting model points.\n')
        # Set preferred transforms
        transform_str = support.transform_func(hru.sr, model_points_sr)
        logging.debug('    Transform: {}'.format(transform_str))
        arcpy.Project_management(model_inputs_path, model_points_path, hru.sr,
                                 transform_str, model_points_sr)
    else:
        arcpy.Copy_management(model_inputs_path, model_points_path)
    model_points_lyr = 'model_points_lyr'
    arcpy.MakeFeatureLayer_management(model_points_path, model_points_lyr)

    # Check model_points_zone_field
    if model_points_zone_field.upper() in ['', 'FID', 'NONE']:
        model_points_fid_field = arcpy.Describe(model_points_path).OIDFieldName
        logging.warning('  NOTE: Using {}+1 to set {}'.format(
            model_points_fid_field, hru.subbasin_field))
        model_points_zone_field = 'ZONE_VALUE'
        if not arcpy.ListFields(model_points_path, model_points_zone_field):
            arcpy.AddField_management(model_points_path,
                                      model_points_zone_field, 'LONG')
        arcpy.CalculateField_management(
            model_points_path, model_points_zone_field,
            '!{}! + 1'.format(model_points_fid_field), 'PYTHON')
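        # The +1 makes the zone values 1-based since the OID/FID starts at 0
        #   and the zone field must be positive and sequential (checked below)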
    elif not arcpy.ListFields(model_points_path, model_points_zone_field):
        logging.error(
            '\nERROR: model_points_zone_field {} does not exist\n'.format(
                model_points_zone_field))
        sys.exit()
    # Need to check that model_points_zone_field is an int type
    elif not [
            f.type for f in arcpy.Describe(model_points_path).fields
            if (f.name == model_points_zone_field
                and f.type in ['SmallInteger', 'Integer'])
    ]:
        logging.error(
            '\nERROR: model_points_zone_field {} must be an integer type\n'.
            format(model_points_zone_field))
        sys.exit()

    # Need to check that model_points_zone_field is all positive values
    if min([
            row[0] for row in arcpy.da.SearchCursor(model_points_path,
                                                    [model_points_zone_field])
    ]) <= 0:
        logging.error(
            '\nERROR: model_points_zone_field ({}) values must be '
            'positive\n'.format(model_points_zone_field))
        sys.exit()

    # Check that subbasin values increment from 1 to nsub
    logging.info('  Checking subbasin numbering')
    subbasin_id_list = sorted(
        list(
            set([
                row[0] for row in arcpy.da.SearchCursor(
                    model_points_path, [model_points_zone_field])
            ])))
    if subbasin_id_list != list(range(1, len(subbasin_id_list) + 1)):
        logging.error('\nERROR: SUB_BASINs must be sequential starting from 1'
                      '\nERROR:   {}'.format(subbasin_id_list))
        sys.exit()
    subbasin_input_count = len(subbasin_id_list)
    logging.debug('    {} subbasins'.format(subbasin_input_count))

    # Check model point types
    logging.info('  Checking model point types')
    model_point_types = [
        str(r[0]).upper() for r in arcpy.da.SearchCursor(
            model_points_path, [model_points_type_field])
    ]
    if not set(model_point_types).issubset(set(['OUTLET', 'SUBBASIN',
                                                'SWALE'])):
        logging.error(
            '\nERROR: Unsupported model point type(s) found, exiting')
        logging.error('\n  Model point types: {}\n'.format(model_point_types))
        sys.exit()
    # elif not set(model_point_types).issubset(set(['OUTLET', 'SWALE'])):
    #     logging.error(
    #         '\nERROR: At least one model point must be an OUTLET or SWALE, '
    #         'exiting\n')
    #     sys.exit()
    else:
        logging.debug('  {}'.format(', '.join(model_point_types)))

    # Check DEM field
    logging.info('\nAdding DEM fields if necessary')
    support.add_field_func(hru.polygon_path, hru.iseg_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.irunbound_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.flow_dir_field, 'LONG')
    support.add_field_func(hru.polygon_path, hru.dem_sink_field, 'DOUBLE')
    support.add_field_func(hru.polygon_path, hru.outflow_field, 'DOUBLE')

    if set_lake_flag:
        # Check lake cell elevations
        logging.info('\nChecking lake cell {}'.format(hru.dem_adj_field))
        lake_elev_dict = defaultdict(list)
        fields = [
            hru.type_field, hru.lake_id_field, hru.dem_adj_field, hru.id_field
        ]
        for row in arcpy.da.SearchCursor(hru.polygon_path, fields):
            if int(row[0]) != 2:
                continue
            lake_elev_dict[int(row[1])].append(float(row[2]))
        del fields
        logging.info('  {:>7} {:>12} {:>12} {:>12} {:>12}'.format(
            'Lake ID', 'Minimum', 'Mean', 'Maximum', 'Std. Dev.'))
        for lake_id, lake_elev_list in lake_elev_dict.items():
            lake_elev_array = np.array(lake_elev_list)
            logging.info('  {:7} {:12f} {:12f} {:12f} {:12f}'.format(
                lake_id, np.min(lake_elev_array), np.mean(lake_elev_array),
                np.max(lake_elev_array), np.std(lake_elev_array)))
            if np.std(lake_elev_array) > 1:
                logging.warning(
                    '  Please check the elevations for lake {}\n'
                    '  They may need to be manually adjusted'.format(lake_id))
                raw_input('  Press ENTER to continue')
            del lake_elev_array

        # Build Lake raster
        logging.debug('  LAKE_ID')
        arcpy.PolygonToRaster_conversion(hru.polygon_path, hru.lake_id_field,
                                         lake_id_path, 'CELL_CENTER', '',
                                         hru.cs)
        lake_id_obj = arcpy.sa.Raster(lake_id_path)

    logging.info('\nExporting HRU polygon parameters to raster')
    logging.debug('  HRU_TYPE')
    arcpy.PolygonToRaster_conversion(hru.polygon_path, hru.type_field,
                                     hru_type_path, 'CELL_CENTER', '', hru.cs)
    hru_type_obj = arcpy.sa.Raster(hru_type_path)

    # Convert DEM_ADJ to raster
    logging.debug('  DEM_ADJ')
    arcpy.PolygonToRaster_conversion(hru.polygon_path, hru.dem_adj_field,
                                     dem_adj_path, 'CELL_CENTER', '', hru.cs)
    dem_adj_obj = arcpy.sa.Raster(dem_adj_path)
    # dem_adj_obj = arcpy.sa.Float(arcpy.sa.Raster(dem_adj_path))

    hru_polygon_lyr = 'hru_polygon_lyr'
    arcpy.MakeFeatureLayer_management(hru.polygon_path, hru_polygon_lyr)
    arcpy.SelectLayerByAttribute_management(hru_polygon_lyr, 'CLEAR_SELECTION')
    arcpy.CalculateField_management(hru_polygon_lyr, hru.outflow_field, 0,
                                    'PYTHON')

    if 'OUTLET' in model_point_types:
        arcpy.SelectLayerByAttribute_management(model_points_lyr,
                                                'NEW_SELECTION',
                                                '"TYPE" = \'OUTLET\'')

        arcpy.SelectLayerByLocation_management(hru_polygon_lyr, 'INTERSECT',
                                               model_points_lyr)
        arcpy.CalculateField_management(hru_polygon_lyr, hru.outflow_field, 1,
                                        'PYTHON')

        # The point of all of this code is to determine the flow direction
        #   at the outlet points since it won't be computed.
        # It might be easier to compute fill and flow dir. on the full raster
        logging.info('  Computing OUTLET point flow direction')

        # Get HRU values at outlet points
        outlet_points = [(int(r[0]), int(r[1])) for r in arcpy.da.SearchCursor(
            hru_polygon_lyr, [hru.col_field, hru.row_field])]

        # Get elevations and type of neighboring cells
        # Multiplying the cellsize by 1.5 is needed to get all possible
        #   neighbors but it can return extra cells that will need to be skipped
        # It might be easier to use the Select tool directly
        arcpy.SelectLayerByLocation_management(hru_polygon_lyr,
                                               'WITHIN_A_DISTANCE',
                                               model_points_lyr, 1.5 * hru.cs)
        elev_dict = dict()
        hru_type_dict = dict()
        fields = [
            hru.col_field, hru.row_field, hru.dem_adj_field, hru.type_field
        ]
        for row in arcpy.da.SearchCursor(hru_polygon_lyr, fields):
            elev_dict[(int(row[0]), int(row[1]))] = float(row[2])
            hru_type_dict[(int(row[0]), int(row[1]))] = int(row[3])

        # For each outlet cell, cycle through the flow directions and find
        #   the steepest downhill slope to an inactive cell or off the grid.
        # Outlet cells should exit to an inactive cell or out of the grid.
        outlet_flowdir = {}
        for outlet_pt in outlet_points:
            logging.debug('    Outlet Point: {}'.format(outlet_pt))
            outlet_slopes = []
            # Search non-diagonals first.
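            # ArcGIS D8 codes: 1=E, 2=SE, 4=S, 8=SW, 16=W, 32=NW, 64=N, 128=NE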
            for fd in [1, 4, 16, 64, 2, 8, 32, 128]:
                if support.next_row_col(fd, outlet_pt) not in elev_dict.keys():
                    # Don't compute other slopes if next cell is outside the grid
                    outlet_slopes.append([-9999, fd])
                    break
                elif hru_type_dict[support.next_row_col(fd, outlet_pt)] != 0:
                    # Only compute slope to inactive cells
                    continue
                else:
                    # Compute slope to next cell
                    slope = (elev_dict[support.next_row_col(fd, outlet_pt)] -
                             elev_dict[outlet_pt])
                    if fd in [2, 8, 32, 128]:
                        # For diagonals, adjust slope
                        # I think Arc approximates root(2) to 1.5
                        slope /= 1.5
                    outlet_slopes.append([slope, fd])
                logging.debug('    {:>3d} {}'.format(fd, slope))

            if not outlet_slopes:
                logging.error('\nERROR: The OUTLET model point is not at the '
                              'edge of the study area or model grid.\n'
                              '  Col: {0} Row: {1}'.format(*outlet_pt))
                sys.exit()

            # Assign the flow direction with the steepest downhill slope
            #   (min() of the neighbor-minus-cell differences)
            outlet_slope, outlet_fd = min(outlet_slopes)
            outlet_flowdir[outlet_pt] = outlet_fd
            if outlet_slope > 0:
                logging.warning(
                    '\n  WARNING: The OUTLET model point flow direction may '
                    'be invalid')
            logging.debug('    Flow Direction: {}'.format(outlet_fd))

        logging.info('  Building OUTLET point raster')
        outlet_array = np.zeros((hru.rows, hru.cols)).astype(np.uint8)
        for outlet_pt in outlet_points:
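            # COL/ROW fields are 1-based; the numpy array index is 0-based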
            outlet_array[outlet_pt[1] - 1,
                         outlet_pt[0] - 1] = outlet_flowdir[outlet_pt]
        support.array_to_raster(
            outlet_array, outlet_path,
            arcpy.Point(hru.extent.XMin, hru.extent.YMin, 0), hru.cs,
            outlet_array)
        outlet_obj = arcpy.sa.Raster(outlet_path)

    if 'SWALE' in model_point_types:
        logging.info('  Building SWALE point raster')
        arcpy.SelectLayerByAttribute_management(model_points_lyr,
                                                'NEW_SELECTION',
                                                '"TYPE" = \'SWALE\'')

        # DEADBEEF - Should SWALE points be written to OUTFLOWHRU.TXT?
        arcpy.SelectLayerByLocation_management(hru_polygon_lyr, 'INTERSECT',
                                               model_points_lyr)
        arcpy.CalculateField_management(hru_polygon_lyr, hru.outflow_field, 1,
                                        'PYTHON')

        arcpy.PointToRaster_conversion(model_points_lyr,
                                       model_points_type_field, swale_path, "",
                                       "", hru.cs)
        swale_obj = arcpy.sa.Raster(swale_path)
        arcpy.SelectLayerByAttribute_management(model_points_lyr,
                                                'CLEAR_SELECTION')

    arcpy.Delete_management(hru_polygon_lyr)

    logging.info('\nCalculating flow direction')
    # This will force all active cells to flow to an outlet
    logging.debug('  Setting DEM_ADJ values to 20000 for inactive cells')
    dem_mod_obj = arcpy.sa.Con(hru_type_obj > 0, dem_adj_obj, 20000.0)
    if 'OUTLET' in model_point_types:
        logging.debug('  Setting DEM_ADJ values to NoData for OUTLET cells')
        dem_mod_obj = arcpy.sa.Con(arcpy.sa.IsNull(outlet_obj), dem_mod_obj)
    if 'SWALE' in model_point_types:
        logging.debug('  Setting DEM_ADJ values to NoData for SWALE cells')
        dem_mod_obj = arcpy.sa.Con(arcpy.sa.IsNull(swale_obj), dem_mod_obj)
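    # Con(IsNull(mask), dem) returns NoData at the mask cells, so Fill will
    #   not fill across the outlet/swale cells and upstream cells can drain
    #   to them as if they were edges of the grid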

    logging.info('  Filling DEM_ADJ (8-way)')
    dem_fill_obj = arcpy.sa.Fill(dem_mod_obj)
    del dem_mod_obj

    if 'OUTLET' in model_point_types:
        logging.debug('  Resetting OUTLET cell values')
        dem_fill_obj = arcpy.sa.Con(arcpy.sa.IsNull(outlet_obj), dem_fill_obj,
                                    dem_adj_obj)

    logging.info('  Calculating sinks (8-way)')
    # Threshold of 0.001 is needed to avoid noise from 32/64 bit conversion
    dem_sink_obj = arcpy.sa.Con(hru_type_obj > 0, dem_fill_obj - dem_adj_obj)
    dem_sink_obj = arcpy.sa.Con(dem_sink_obj > 0.001, dem_sink_obj)

    logging.info('  Calculating flow direction')
    flow_dir_obj = arcpy.sa.FlowDirection(dem_fill_obj, False)

    logging.debug('  Setting flow direction to NoData for inactive cells')
    flow_dir_obj = arcpy.sa.SetNull(hru_type_obj == 0, flow_dir_obj)

    if 'OUTLET' in model_point_types:
        logging.debug('  Resetting OUTLET cell flow direction')
        flow_dir_obj = arcpy.sa.Con(~arcpy.sa.IsNull(outlet_obj), outlet_obj,
                                    flow_dir_obj)
        del outlet_obj
    if 'SWALE' in model_point_types:
        logging.debug('  Resetting SWALE cell flow direction')
        flow_dir_obj = arcpy.sa.Con(~arcpy.sa.IsNull(swale_obj), 1,
                                    flow_dir_obj)
        del swale_obj

    logging.debug('  Resetting DEM_ADJ values for inactive cell')
    dem_fill_obj = arcpy.sa.Con(hru_type_obj == 0, dem_adj_obj, dem_fill_obj)

    flow_dir_obj.save(flow_dir_path)
    dem_fill_obj.save(dem_fill_path)
    dem_sink_obj.save(dem_sink_path)
    del dem_sink_obj

    # Save flow direction as points
    if calc_flow_dir_points_flag:
        logging.info('\nFlow direction points')
        # ArcGIS fails for raster_to_x conversions on a network path
        # You have to go through an in_memory file first
        flow_dir_temp = os.path.join('in_memory', 'flow_dir')
        arcpy.RasterToPoint_conversion(flow_dir_path, flow_dir_temp)
        try:
            arcpy.CopyFeatures_management(flow_dir_temp, flow_dir_points)
        except Exception:
            time.sleep(1)
            logging.warning('Copy feature failed')
        arcpy.Delete_management(flow_dir_temp)
        del flow_dir_temp

        # Reclassify flow directions to angles, assuming 1 is 0
        remap_cb = ('def Reclass(value):\n' + '    if value == 1: return 0\n' +
                    '    elif value == 2: return 45\n' +
                    '    elif value == 4: return 90\n' +
                    '    elif value == 8: return 135\n' +
                    '    elif value == 16: return 180\n' +
                    '    elif value == 32: return 225\n' +
                    '    elif value == 64: return 270\n' +
                    '    elif value == 128: return 315\n')
        arcpy.CalculateField_management(flow_dir_points, 'grid_code',
                                        'Reclass(!{}!)'.format('grid_code'),
                                        'PYTHON', remap_cb)
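        # i.e., azimuths increase clockwise from D8 code 1 in 45 degree
        #   steps, so code 16 (west) becomes 180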

    # Write flow direction to hru_polygon
    logging.debug('  Extracting flow direction at points')
    vt_list = [[flow_dir_path, hru.flow_dir_field]]
    mem_point_path = os.path.join('in_memory', 'hru_point')
    arcpy.CopyFeatures_management(hru.point_path, mem_point_path)
    arcpy.sa.ExtractMultiValuesToPoints(mem_point_path, vt_list, 'NONE')
    logging.debug('  Reading flow direction values at point')
    data_dict = defaultdict(dict)
    fields = [hru.flow_dir_field, hru.fid_field]
    with arcpy.da.SearchCursor(mem_point_path, fields) as s_cursor:
        for row in s_cursor:
            # Set nodata cells to 0
            if row[0] is not None and row[1] is not None:
                data_dict[int(row[1])][hru.flow_dir_field] = int(row[0])
            del row
    logging.debug('  Writing flow direction values to polygon')
    fields = [hru.flow_dir_field, hru.fid_field]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
        for row in u_cursor:
            row_dict = data_dict.get(int(row[-1]), None)
            for i, field in enumerate(fields[:-1]):
                if row_dict:
                    row[i] = row_dict[field]
                else:
                    row[i] = 0
            u_cursor.updateRow(row)
            del row_dict, row

    # DEADBEEF - This whole section seems to only be needed if the outflows
    #   are not specified by the user.
    # # Subbasins
    # # Select the HRU cells that intersect the subbasin point cells
    # logging.debug('  Reading input subbasin points')
    # hru_polygon_lyr = 'hru_polygon_lyr'
    # arcpy.MakeFeatureLayer_management(hru.polygon_path, hru_polygon_lyr)
    # arcpy.SelectLayerByLocation_management(
    #     hru_polygon_lyr, 'intersect', model_points_path)
    # input_xy_dict = dict()
    # fields = [hru.col_field, hru.row_field, hru.x_field, hru.y_field]
    # for row in arcpy.da.SearchCursor(hru_polygon_lyr, fields):
    #     input_xy_dict[(int(row[0]), int(row[1]))] = (int(row[2]), int(row[3]))
    # arcpy.Delete_management(hru_polygon_lyr)
    # del hru_polygon_lyr
    # # for k,v in input_xy_dict.items():
    # #    logging.debug('    {} {}'.format(k,v))

    # logging.info('\nBuilding all subbasin points')
    # # First calculate downstream cell for all cells
    # logging.debug('  Calculating downstream cells')
    # out_cell_dict = dict()
    # hru_type_dict = dict()
    # cell_xy_dict = dict()
    # fields = [
    #     hru.type_field, hru.flow_dir_field, hru.id_field,
    #     hru.col_field, hru.row_field, hru.x_field, hru.y_field]
    # for row in arcpy.da.SearchCursor(hru.polygon_path, fields):
    #     cell = (int(row[3]), int(row[4]))
    #     out_cell_dict[cell] = support.next_row_col(int(row[1]), cell)
    #     hru_type_dict[cell] = int(row[0])
    #     cell_xy_dict[cell] = (int(row[5]), int(row[6]))

    # # Identify all active/lake cells that exit the model
    # #   or flow to an inactive cell
    # logging.debug('  Identifying active cells that exit the model')
    # out_cell_xy_list = []
    # for cell, cell_xy in sorted(cell_xy_dict.items()):
    #     #  DEADBEEF - This is finding exit cells that aren't already gauges
    #     # if cell in input_xy_dict.keys():
    #     #    continue
    #     # elif cell not in hru_type_dict.keys():
    #     if cell not in hru_type_dict.keys():
    #         continue
    #     elif hru_type_dict[cell] not in [1, 2]:
    #         continue
    #     elif cell not in out_cell_dict.keys():
    #         continue
    #     elif out_cell_dict[cell] not in hru_type_dict.keys():
    #         out_cell_xy_list.append(cell_xy)
    #     elif (out_cell_dict[cell] in hru_type_dict.keys() and
    #           hru_type_dict[out_cell_dict[cell]] not in [1, 2]):
    #         out_cell_xy_list.append(cell_xy)

    # # Outflow cells exit the model to inactive cells or out of the domain
    # # These cells will be used to set the OUTFLOW_HRU.DAT for CRT
    # #   in crt_fill_parameters and stream_parameters
    # logging.info('  Flag outflow cells')
    # fields = [hru.type_field, hru.x_field, hru.y_field, hru.outflow_field]
    # with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
    #     for row in u_cursor:
    #         cell_xy = (row[1], row[2])
    #         # Inactive cells can't be outflow cells
    #         if int(row[0]) == 0:
    #             continue
    #         elif out_cell_xy_list and cell_xy in out_cell_xy_list:
    #             row[3] = 1
    #         else:
    #             row[3] = 0
    #         u_cursor.updateRow(row)
    # del out_cell_dict, hru_type_dict, cell_xy_dict

    # DEADBEEF - This was added for sinks or ocean so that there would be
    #   subbasin points along the edge?
    # fields = ['SHAPE@XY', model_points_zone_field]
    # with arcpy.da.InsertCursor(model_points_path, fields) as insert_c:
    #     for out_cell_xy in sorted(out_cell_xy_list):
    #         insert_c.insertRow([out_cell_xy, subbasin_input_count + 1])
    # del fields
    # del out_cell_xy_list

    # Flow Accumulation
    logging.info('\nCalculating initial flow accumulation')
    flow_acc_full_obj = arcpy.sa.FlowAccumulation(flow_dir_obj)
    logging.info('  Only keeping flow_acc >= {}'.format(flow_acc_threshold))
    flow_acc_full_obj = arcpy.sa.Con(flow_acc_full_obj >= flow_acc_threshold,
                                     flow_acc_full_obj)
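    # Cells below the threshold become NoData, so only cells draining at
    #   least flow_acc_threshold upstream cells can seed stream links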
    flow_acc_full_obj.save(flow_acc_full_path)

    # Flow accumulation and stream link with lakes
    logging.info('\nCalculating flow accumulation & stream link (w/ lakes)')
    flow_acc_obj = arcpy.sa.Con((hru_type_obj >= 1) & (hru_type_obj <= 3),
                                flow_acc_full_obj)
    stream_link_obj = arcpy.sa.StreamLink(flow_acc_obj, flow_dir_obj)
    stream_link_obj.save(stream_link_a_path)
    del flow_acc_obj, stream_link_obj

    # Flow accumulation and stream link without lakes
    logging.info('Calculating flow accumulation & stream link (w/o lakes)')
    flow_acc_obj = arcpy.sa.Con((hru_type_obj == 1) | (hru_type_obj == 3),
                                flow_acc_full_obj)
    # flow_acc_obj.save(flow_acc_sub_path)
    stream_link_obj = arcpy.sa.StreamLink(flow_acc_obj, flow_dir_obj)
    stream_link_obj.save(stream_link_b_path)
    del flow_acc_obj, stream_link_obj

    # Initial Stream Link
    # logging.info('\nCalculating initial stream link')
    # stream_link_obj = StreamLink(flow_acc_obj, flow_dir_obj)
    # stream_link_obj.save(stream_link_path)
    # Calculate stream link with and without lakes
    # Initial Stream Order (w/ lakes)
    logging.info('Calculating stream order (w/ lakes)')
    logging.debug('  Using SHREVE ordering so after 1st order are removed, ' +
                  '2nd order will only be dangles')
    stream_order_obj = arcpy.sa.StreamOrder(stream_link_a_path, flow_dir_obj,
                                            'SHREVE')
    stream_order_obj.save(stream_order_path)

    # Stream Length (cell count w/o lakes)
    logging.info('Calculating stream length (cell count w/o lakes)')
    stream_length_obj = arcpy.sa.Lookup(stream_link_b_path, 'Count')
    stream_length_obj.save(stream_length_path)

    # Filter 1st order segments
    logging.info(
        ('\nFilter all 1st order streams with length < {}'
         '\nKeep all higher order streams').format(flow_length_threshold))
    # Stream length is nodata for lakes, so put lakes back in
    # This removes short 1st order streams off of lakes
    flow_mask_obj = ((hru_type_obj == 3) | (hru_type_obj == 2) |
                     (stream_order_obj >= 2) |
                     ((stream_order_obj == 1) &
                      (stream_length_obj >= flow_length_threshold)))
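    # The boolean map algebra above yields a 1/0 raster; Con() treats the
    #   1 cells as true and passes their accumulation values through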
    flow_mask_obj.save(flow_mask_path)
    flow_acc_sub_obj = arcpy.sa.Con(flow_mask_obj, flow_acc_full_obj)
    flow_acc_sub_obj.save(flow_acc_sub_path)
    del flow_mask_obj, stream_order_obj, stream_length_obj

    # Final Stream Link
    logging.info('\nCalculating final stream link')
    stream_link_obj = arcpy.sa.StreamLink(flow_acc_sub_obj, flow_dir_obj)
    # Get count of streams for automatically setting lake_seg_offset
    if not lake_seg_offset:
        lake_seg_count = int(
            arcpy.GetCount_management(stream_link_obj).getOutput(0))
        n = 10**math.floor(math.log10(lake_seg_count))
        lake_seg_offset = int(math.ceil((lake_seg_count + 1) / n)) * int(n)
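        # e.g., 847 stream segments -> n = 100 ->
        #   lake_seg_offset = ceil(848 / 100) * 100 = 900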
        logging.info('  lake_seg_offset was not set in the input file\n' +
                     '  Using automatic lake segment offset: {}'.format(
                         lake_seg_offset))
    elif set_lake_flag:
        logging.info(
            '  Using manual lake segment offset: {}'.format(lake_seg_offset))

    # Include lake cells into 'stream_link' before calculating watersheds
    # Watershed function doesn't work for negative values
    # Convert lakes to large positive numbers for Watershed
    # ISEG needs to be negative values though
    if set_lake_flag:
        logging.info(
            '  Including lakes as {0} + {1}\n'
            '  This will allow for a watershed/subbasin for the lakes\n'
            '  {2} will be saved as the negative of {0} though'.format(
                hru.lake_id_field, lake_seg_offset, hru.iseg_field))
        stream_link_obj = arcpy.sa.Con((hru_type_obj == 2),
                                       (lake_id_obj + lake_seg_offset),
                                       stream_link_obj)
    stream_link_obj.save(stream_link_path)

    # Watersheds
    logging.info('Calculating watersheds')
    watersheds_obj = arcpy.sa.Watershed(flow_dir_obj, stream_link_obj)
    watersheds_obj.save(watersheds_path)
    del stream_link_obj, watersheds_obj

    # Subbasins
    logging.info('Calculating subbasins')
    subbasin_obj = arcpy.sa.Watershed(flow_dir_obj, model_points_path,
                                      model_points_zone_field)
    subbasin_obj.save(subbasin_path)
    del subbasin_obj

    # Basins
    logging.info('Calculating basins')
    basin_obj = arcpy.sa.Basin(flow_dir_obj)
    basin_obj.save(basin_path)
    del basin_obj

    # Clear subbasin value if HRU_TYPE is 0
    logging.info('Clearing subbasin ID for inactive cells')
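    # SetNull(conditional, false_raster, where_clause) returns NoData where
    #   the clause is true on the conditional raster (here HRU_TYPE == 0)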
    subbasin_obj = arcpy.sa.SetNull(hru_type_obj,
                                    arcpy.sa.Raster(subbasin_path), 'VALUE=0')
    subbasin_obj.save(subbasin_path)
    del subbasin_obj
    del hru_type_obj

    # Stream polylines
    logging.info('Calculating stream polylines')
    # ArcGIS fails for raster_to_x conversions on a network path
    # You have to go through an in_memory file first
    streams_temp = os.path.join('in_memory', 'streams')
    arcpy.sa.StreamToFeature(stream_link_path, flow_dir_obj, streams_temp,
                             'NO_SIMPLIFY')
    arcpy.CopyFeatures_management(streams_temp, streams_path)
    arcpy.Delete_management(streams_temp)
    del streams_temp

    # Write values to hru_polygon
    logging.info('\nExtracting stream parameters')
    vt_list = [
        [watersheds_path, hru.irunbound_field],
        [stream_link_path, hru.iseg_field],
        # [flow_dir_path, hru.flow_dir_field],
        [subbasin_path, hru.subbasin_field],
        [hru_type_path, hru.type_field]
    ]
    mem_point_path = os.path.join('in_memory', 'hru_point')
    arcpy.CopyFeatures_management(hru.point_path, mem_point_path)
    arcpy.sa.ExtractMultiValuesToPoints(mem_point_path, vt_list, 'NONE')
    del vt_list

    # Read values from points
    logging.info('  Reading cell values')
    data_dict = defaultdict(dict)
    fields = [
        hru.irunbound_field, hru.iseg_field, hru.subbasin_field,
        hru.type_field, hru.fid_field
    ]
    # fields = [
    #    hru.irunbound_field, hru.iseg_field, hru.flow_dir_field,
    #    hru.subbasin_field, hru.type_field, hru.fid_field]
    with arcpy.da.SearchCursor(mem_point_path, fields) as s_cursor:
        for row in s_cursor:
            for i, field in enumerate(fields[:-1]):
                # Set nodata or inactive cells to 0
                if row[i] is None or (int(row[-2]) == 0):
                    data_dict[int(row[-1])][field] = 0
                else:
                    data_dict[int(row[-1])][field] = int(row[i])
            del row
    del fields

    # ISEG for lake cells must be -1 * LAKE_ID, not LAKE_ID + OFFSET
    for k in data_dict.keys():
        irunbound = data_dict[k][hru.irunbound_field]
        iseg = data_dict[k][hru.iseg_field]
        if irunbound > lake_seg_offset:
            data_dict[k][hru.irunbound_field] = lake_seg_offset - irunbound
        if iseg > lake_seg_offset:
            data_dict[k][hru.iseg_field] = lake_seg_offset - iseg
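        # e.g., lake 3 with offset 900 is stored as 903 in the rasters and
        #   becomes 900 - 903 = -3 here, i.e., ISEG/IRUNBOUND = -LAKE_ID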

    # data_dict = dict([(k,v) for k,v in data_dict.items()])
    # Write values to polygon
    logging.info('  Writing values to polygons')
    fields = [
        hru.irunbound_field, hru.iseg_field, hru.subbasin_field,
        hru.type_field, hru.fid_field
    ]
    # fields = [
    #    hru.irunbound_field, hru.iseg_field, hru.flow_dir_field,
    #    hru.subbasin_field, hru.type_field, hru.fid_field]
    with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
        for row in u_cursor:
            row_dict = data_dict.get(int(row[-1]), None)
            for i, field in enumerate(fields[:-1]):
                if row_dict:
                    row[i] = row_dict[field]
                else:
                    row[i] = 0
            u_cursor.updateRow(row)
            del row_dict, row
    del fields

    # Write sink values to hru_polygon
    vt_list = []
    if arcpy.Exists(dem_sink_path):
        vt_list.append([dem_sink_path, hru.dem_sink_field])
    if vt_list:
        logging.info('\nExtracting sink values')
        for vt_item in vt_list:
            logging.debug('  {}: {}'.format(vt_item[1],
                                            os.path.basename(vt_item[0])))
        mem_point_path = os.path.join('in_memory', 'hru_point')
        arcpy.CopyFeatures_management(hru.point_path, mem_point_path)
        arcpy.sa.ExtractMultiValuesToPoints(mem_point_path, vt_list, 'NONE')

        # Read sink values from points
        logging.info('  Reading sink values')
        data_dict = defaultdict(dict)
        fields = [field for path, field in vt_list] + [hru.fid_field]
        with arcpy.da.SearchCursor(mem_point_path, fields) as s_cursor:
            for row in s_cursor:
                for i, field in enumerate(fields[:-1]):
                    # Set nodata or inactive cells to 0
                    if row[i] is None:
                        data_dict[int(row[-1])][field] = 0
                    else:
                        data_dict[int(row[-1])][field] = float(row[i])
                del row

        # Write sink values to polygon
        logging.info('  Writing sink values to polygons')
        fields = [field for path, field in vt_list] + [hru.fid_field]
        with arcpy.da.UpdateCursor(hru.polygon_path, fields) as u_cursor:
            for row in u_cursor:
                row_dict = data_dict.get(int(row[-1]), None)
                for i, field in enumerate(fields[:-1]):
                    if row_dict:
                        row[i] = row_dict[field]
                    else:
                        row[i] = 0
                u_cursor.updateRow(row)
                del row_dict, row

    # Cleanup
    arcpy.Delete_management(mem_point_path)
    del mem_point_path, vt_list, data_dict, field

    # Re-Calculate HRU_ELEV
    # logging.info('Calculating HRU_ELEV from DEM_ADJ')
    # logging.info('  Converting from meters to feet')
    # arcpy.CalculateField_management(
    #    hru.polygon_path, hru_elev_field,
    #    # Convert meters to feet
    #    '!{}! * 3.28084'.format(dem_adj_field), 'PYTHON')

    # Cleanup
    del dem_fill_obj
    if set_lake_flag:
        del lake_id_obj
    del flow_dir_obj
    del flow_acc_full_obj
    del flow_acc_sub_obj


def prms_template_fill(config_path):
    """Fill PRMS Parameter Template File

    Parameters
    ----------
    config_path : str
        Project configuration file (.ini) path.

    Returns
    -------
    None

    """
    param_formats = {1: '{:d}', 2: '{:f}', 3: '{:f}', 4: '{}'}

    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'prms_template_log.txt'
    log_console = logging.FileHandler(filename=os.path.join(
        hru.log_ws, log_file_name),
                                      mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nFilling PRMS Parameter File Template')

    # Read parameters from config file
    hru.polygon_path = inputs_cfg.get('INPUTS', 'hru_fishnet_path')
    hru.fid_field = inputs_cfg.get('INPUTS', 'orig_fid_field')
    parameter_ws = inputs_cfg.get('INPUTS', 'parameter_folder')
    try:
        prms_parameter_ws = inputs_cfg.get('INPUTS', 'prms_parameter_folder')
    except ConfigParser.NoOptionError:
        prms_parameter_ws = inputs_cfg.get('INPUTS', 'parameter_folder')
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'prms_parameter_ws', prms_parameter_ws))
    prms_dimen_csv_path = inputs_cfg.get('INPUTS', 'prms_dimen_csv_path')
    prms_param_csv_path = inputs_cfg.get('INPUTS', 'prms_param_csv_path')

    # Get input DEM units and desired output HRU_ELEV units
    dem_units = inputs_cfg.get('INPUTS', 'dem_units').lower()
    dem_unit_types = {
        'meters': 'meter',
        'm': 'meter',
        'meter': 'meter',
        'feet': 'feet',
        'ft': 'feet',
        'foot': 'feet',
    }
    try:
        dem_units = dem_unit_types[dem_units]
    except KeyError:
        logging.error(
            '\nERROR: DEM unit "{}" is not supported\n'.format(dem_units))
        sys.exit()
    elev_units = inputs_cfg.getint('INPUTS', 'elev_units')
    elev_unit_types = {0: 'feet', 1: 'meter'}
    try:
        elev_units = elev_unit_types[elev_units]
    except KeyError:
        logging.error(
            '\nERROR: elev_units "{}" is not supported\n'.format(elev_units))
        sys.exit()
    if dem_units == 'feet' and elev_units == 'meter':
        elev_unit_scalar = 0.3048
    elif dem_units == 'meter' and elev_units == 'feet':
        elev_unit_scalar = (1.0 / 0.3048)
    else:
        elev_unit_scalar = 1.0
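    # e.g., a 1500 m DEM value written with elev_units = feet becomes
    #   1500 * (1.0 / 0.3048) ~= 4921.26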

    # Temperature calculation method
    try:
        temp_calc_method = inputs_cfg.get('INPUTS',
                                          'temperature_calc_method').upper()
    except ConfigParser.NoOptionError:
        temp_calc_method = '1STA'
        logging.info('  Defaulting temperature_calc_method = {}'.format(
            temp_calc_method))
    temp_calc_options = ['ZONES', 'LAPSE', '1STA']
    if temp_calc_method not in temp_calc_options:
        logging.error(
            '\nERROR: Invalid temperature calculation method ({})\n  '
            'Valid methods are: {}'.format(temp_calc_method,
                                           ', '.join(temp_calc_options)))
        sys.exit()

    # Write parameter/dimensions to separate files based on "PARAM_FILE"
    #   value in prms_parameters.csv and prms_dimensions.csv
    try:
        single_param_file_flag = inputs_cfg.getboolean(
            'INPUTS', 'single_param_file_flag')
    except ConfigParser.NoOptionError:
        single_param_file_flag = False
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'single_param_file_flag', single_param_file_flag))
    if single_param_file_flag:
        try:
            single_param_file_name = inputs_cfg.get('INPUTS',
                                                    'single_param_file_name')
        except ConfigParser.NoOptionError:
            single_param_file_name = 'prms_inputs.param'
            logging.info('  Missing INI parameter, setting {} = {}'.format(
                'single_param_file_name', single_param_file_name))

    # Write nhru gridded parameters as single column or array
    try:
        param_column_flag = inputs_cfg.getboolean('INPUTS',
                                                  'param_column_flag')
    except ConfigParser.NoOptionError:
        param_column_flag = False
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'param_column_flag', param_column_flag))

    # Scratch workspace
    try:
        scratch_name = inputs_cfg.get('INPUTS', 'scratch_name')
    except ConfigParser.NoOptionError:
        scratch_name = 'in_memory'
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'scratch_name', scratch_name))

    # Cascades
    crt_ws = os.path.join(parameter_ws, 'cascade_work')
    gw_ws = os.path.join(parameter_ws, 'cascade_gw_work')
    crt_dimension_path = os.path.join(crt_ws, 'parameter_dimensions.txt')
    crt_parameter_path = os.path.join(crt_ws, 'cascade.param')
    crt_gw_dimension_path = os.path.join(gw_ws, 'parameter_dimensions.txt')
    crt_gw_parameter_path = os.path.join(gw_ws, 'groundwater_cascade.param')

    # Strings to search PRMS parameter file for
    # Newline character is required after title
    file_header_str = 'PRMS parameter file generated with gsflow-arcpy-tools version X\n'
    # file_header_str = 'Default file generated by model\nVersion: 1.7'
    dimen_header_str = '** Dimensions **'
    param_header_str = '** Parameters **'
    break_str = '####'
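    # In a PRMS parameter file, each record follows a '####' break line:
    #   name, dimension count, dimension name(s), value count, type, values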

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: The fishnet does not exist\n  {}'.format(
            hru.polygon_path))
        sys.exit()
    # if not os.path.isfile(prms_template_path):
    #    logging.error('\nERROR: The template parameter file does not exist\n')
    #    sys.exit()
    if not os.path.isfile(prms_dimen_csv_path):
        logging.error(
            '\nERROR: The dimensions CSV file does not exist\n  {}'.format(
                prms_dimen_csv_path))
        sys.exit()
    if not os.path.isfile(prms_param_csv_path):
        logging.error(
            '\nERROR: The parameters CSV file does not exist\n  {}'.format(
                prms_param_csv_path))
        sys.exit()

    if not os.path.isdir(crt_ws):
        logging.error(
            '\nERROR: Cascades folder does not exist'
            '\nERROR:   {}'
            '\nERROR: Try re-running CRT using stream_parameters.py\n'.format(
                crt_ws))
        sys.exit()
    elif not os.path.isfile(crt_dimension_path):
        logging.error(
            '\nERROR: Cascades dimension file does not exist'
            '\nERROR:   {}'
            '\nERROR: Try re-running CRT using stream_parameters.py\n'.format(
                crt_dimension_path))
        sys.exit()
    elif not os.path.isfile(crt_parameter_path):
        logging.error(
            '\nERROR: Cascades parameter file does not exist'
            '\nERROR:   {}'
            '\nERROR: Try re-running CRT using stream_parameters.py\n'.format(
                crt_parameter_path))
        sys.exit()

    if not os.path.isdir(gw_ws):
        logging.error(
            '\nERROR: Groundwater cascades folder does not exist'
            '\nERROR:   {}'
            '\nERROR: Try re-running CRT using stream_parameters.py\n'.format(
                gw_ws))
        sys.exit()
    elif not os.path.isfile(crt_gw_dimension_path):
        logging.error(
            '\nERROR: Groundwater cascades dimension file does not exist'
            '\nERROR:   {}'
            '\nERROR: Try re-running CRT using stream_parameters.py\n'.format(
                crt_gw_dimension_path))
        sys.exit()
    elif not os.path.isfile(crt_gw_parameter_path):
        logging.error(
            '\nERROR: Groundwater cascades parameter file does not exist'
            '\nERROR:   {}'
            '\nERROR: Try re-running CRT using stream_parameters\n'.format(
                crt_gw_parameter_path))
        sys.exit()

    # Get number of cells in fishnet
    fishnet_count = int(
        arcpy.GetCount_management(hru.polygon_path).getOutput(0))
    logging.info('  Fishnet cells: {}'.format(fishnet_count))

    # Read in dimensions from CSV
    logging.info('\nReading dimensions CSV')
    dimen_names = dict()
    dimen_files = dict()
    dimen_sizes = dict()
    with open(prms_dimen_csv_path, 'r') as input_f:
        dimen_lines = input_f.readlines()
    # Dimensions can be set to a value, a field, or not set
    dimen_lines = [l.strip().split(',') for l in dimen_lines]
    header = dimen_lines[0]
    for line in dimen_lines[1:]:
        dimen_name = line[header.index('NAME')]
        dimen_names[dimen_name] = dimen_name
        logging.debug('  {}'.format(dimen_name))

        # What should the default parameter file name be if not set?
        if single_param_file_flag:
            dimen_file = os.path.join(prms_parameter_ws,
                                      single_param_file_name)
        elif 'PARAM_FILE' not in header:
            dimen_file = os.path.join(prms_parameter_ws, 'prms_inputs.param')
            logging.info('  PARAM_FILE field not in dimensions CSV\n'
                         '  Defaulting to {}'.format(dimen_file))
        elif line[header.index('PARAM_FILE')] == '':
            dimen_file = os.path.join(prms_parameter_ws, 'prms_inputs.param')
            logging.info('  PARAM_FILE value not set for dimension: {}\n'
                         '  Defaulting to {}'.format(dimen_name, dimen_file))
        else:
            dimen_file = os.path.join(
                prms_parameter_ws, line[header.index('PARAM_FILE')] + '.param')
        dimen_files[dimen_name] = dimen_file

        dimen_size = line[header.index('SIZE')]
        if dimen_size.lower() in ['calculated', 'config_file']:
            dimen_sizes[dimen_name] = dimen_size
        elif not dimen_size:
            dimen_sizes[dimen_name] = ''
        else:
            # Don't force to integer type unless necessary since values are
            # written back out as strings
            dimen_sizes[dimen_name] = dimen_size
            # dimen_sizes[dimen_name] = int(dimen_size)
        del dimen_size

    # Set CALCULATED dimension values
    # These parameters equal the fishnet cell count
    for dimen_name in ['ngw', 'ngwcell', 'nhru', 'nhrucell', 'nssr']:
        if dimen_sizes[dimen_name].lower() == 'calculated':
            dimen_sizes[dimen_name] = fishnet_count
            logging.info('  {} = {}'.format(dimen_name,
                                            dimen_sizes[dimen_name]))

    # Getting number of lakes
    if dimen_sizes['nlake'].lower() == 'calculated':
        logging.info('\nCalculating number of lakes')
        #logging.info('  Lake cells are {} >= 0'.format(hru.lake_id_field))
        value_fields = (hru.id_field, hru.lake_id_field)
        with arcpy.da.SearchCursor(hru.polygon_path, value_fields) as s_cursor:
            dimen_sizes['nlake'] = max(
                list([int(row[1]) for row in s_cursor if int(row[1]) >= 0]))
        logging.info('  nlakes = {}'.format(dimen_sizes['nlake']))

    # Getting number of lake cells
    if dimen_sizes['nlake_hrus'].lower() == 'calculated':
        logging.info('\nCalculating number of lake cells')
        logging.info('  Lake cells are {} >= 0'.format(hru.lake_id_field))
        value_fields = (hru.id_field, hru.lake_id_field)
        with arcpy.da.SearchCursor(hru.polygon_path, value_fields) as s_cursor:
            dimen_sizes['nlake_hrus'] = len(
                list([int(row[1]) for row in s_cursor if int(row[1]) >= 0]))
        logging.info('  nlake cells = {}'.format(dimen_sizes['nlake_hrus']))

    # Getting number of stream cells
    if dimen_sizes['nreach'].lower() == 'calculated':
        logging.info('Calculating number of stream cells')
        logging.info('  Stream cells are {} >= 0'.format(hru.krch_field))
        value_fields = (hru.id_field, hru.krch_field)
        with arcpy.da.SearchCursor(hru.polygon_path, value_fields) as s_cursor:
            dimen_sizes['nreach'] = len(
                list([int(row[1]) for row in s_cursor if int(row[1]) > 0]))
        logging.info('  nreach = {}'.format(dimen_sizes['nreach']))

    # Getting number of stream segments
    if dimen_sizes['nsegment'].lower() == 'calculated':
        logging.info('Calculating number of unique stream segments')
        logging.info('  Stream segments are {} >= 0'.format(hru.iseg_field))
        value_fields = (hru.id_field, hru.iseg_field)
        with arcpy.da.SearchCursor(hru.polygon_path, value_fields) as s_cursor:
            dimen_sizes['nsegment'] = len(
                list(set([int(row[1]) for row in s_cursor
                          if int(row[1]) > 0])))
        logging.info('  nsegment = {}'.format(dimen_sizes['nsegment']))

    # Getting number of subbasins
    if dimen_sizes['nsub'].lower() == 'calculated':
        logging.info('Calculating number of unique subbasins')
        logging.info('  Subbasins are {} >= 0'.format(hru.subbasin_field))
        value_fields = (hru.id_field, hru.subbasin_field)
        with arcpy.da.SearchCursor(hru.polygon_path, value_fields) as s_cursor:
            dimen_sizes['nsub'] = len(
                list(set([int(row[1]) for row in s_cursor
                          if int(row[1]) > 0])))
        logging.info('  nsub = {}'.format(dimen_sizes['nsub']))

    # Read in CRT cascade dimensions
    if dimen_sizes['ncascade'].lower() == 'calculated':
        logging.info('\nReading CRT dimensions')
        logging.debug('  {}'.format(crt_dimension_path))
        with open(crt_dimension_path, 'r') as input_f:
            crt_dimen_lines = [line.strip() for line in input_f.readlines()]
        if not crt_dimen_lines:
            logging.error('\nERROR: The CRT dimensions file is empty\n')
            sys.exit()
        crt_dimen_break_i_list = [
            i for i, x in enumerate(crt_dimen_lines) if x == break_str
        ]
        for i in crt_dimen_break_i_list:
            if crt_dimen_lines[i + 1] not in ['ncascade']:
                continue
            logging.info('  {} = {}'.format(crt_dimen_lines[i + 1],
                                            crt_dimen_lines[i + 2]))
            dimen_sizes[crt_dimen_lines[i + 1]] = int(crt_dimen_lines[i + 2])
        del crt_dimen_lines, crt_dimen_break_i_list

    # Read in CRT groundwater cascade dimensions
    if dimen_sizes['ncascdgw'].lower() == 'calculated':
        logging.info('\nReading CRT groundwater cascade dimensions')
        logging.debug('  {}'.format(crt_gw_dimension_path))
        with open(crt_gw_dimension_path, 'r') as input_f:
            crt_dimen_lines = [line.strip() for line in input_f.readlines()]
        if not crt_dimen_lines:
            logging.error(
                '\nERROR: The CRT groundwater dimensions file is empty\n')
            sys.exit()
        crt_dimen_break_i_list = [
            i for i, x in enumerate(crt_dimen_lines) if x == break_str
        ]
        for i in crt_dimen_break_i_list:
            if crt_dimen_lines[i + 1] not in ['ncascdgw']:
                continue
            logging.info('  {} = {}'.format(crt_dimen_lines[i + 1],
                                            crt_dimen_lines[i + 2]))
            dimen_sizes[crt_dimen_lines[i + 1]] = int(crt_dimen_lines[i + 2])
        del crt_dimen_lines, crt_dimen_break_i_list

    # Set CONFIG file dimension values
    config_file_dimensions = [
        d_name for d_name, d_size in sorted(dimen_sizes.items())
        if type(d_size) is str and d_size.lower() == 'config_file'
    ]
    if config_file_dimensions:
        logging.info('Reading configuration file dimensions')
        for dimen_name in config_file_dimensions:
            logging.info('  {}'.format(dimen_name))
            try:
                dimen_sizes[dimen_name] = inputs_cfg.getint(
                    'INPUTS', dimen_name)
            except ConfigParser.NoOptionError:
                logging.error(
                    '  Dimension set to "config_file" in {} but not found in '
                    'config file, exiting'.format(
                        os.path.basename(prms_dimen_csv_path)))
                sys.exit()

    # Link HRU fishnet field names to parameter names in '.param'
    param_names = dict()
    param_files = dict()
    param_dimen_counts = dict()
    param_dimen_names = dict()
    param_value_counts = dict()
    param_types = dict()
    param_defaults = dict()
    param_values = defaultdict(dict)

    # Read in parameters from CSV
    logging.info('\nReading parameters CSV')
    with open(prms_param_csv_path, 'r') as input_f:
        param_lines = input_f.readlines()
    param_lines = [l.strip().split(',') for l in param_lines]
    header = param_lines[0]
    for line in param_lines[1:]:
        # Get parameters from CSV line
        param_name = line[header.index('NAME')]
        logging.debug('  {}'.format(param_name))
        # This assumes multiple dimensions are separated by semicolon
        dimen_names = line[header.index('DIMENSION_NAMES')].split(';')

        # What should the default parameter file name be if not set?
        if single_param_file_flag:
            param_file = os.path.join(prms_parameter_ws,
                                      single_param_file_name)
        elif 'PARAM_FILE' not in header:
            param_file = os.path.join(prms_parameter_ws, 'prms_inputs.param')
            logging.info('  PARAM_FILE field not in parameters CSV\n'
                         '  Defaulting to {}'.format(param_file))
        elif line[header.index('PARAM_FILE')] == '':
            param_file = os.path.join(prms_parameter_ws, 'prms_inputs.param')
            logging.info('  PARAM_FILE value not set for parameter: {}\n'
                         '  Defaulting to {}'.format(param_name, param_file))
        else:
            param_file = os.path.join(
                prms_parameter_ws, line[header.index('PARAM_FILE')] + '.param')

        # Check that parameter type is 1, 2, 3, or 4
        param_type = int(line[header.index('TYPE')])
        if param_type not in [1, 2, 3, 4]:
            logging.error('\nERROR: Parameter type {} is invalid'
                          '\nERROR: {}'.format(param_type, line))
            sys.exit()

        # This will initially read defaults in as a list
        param_default = line[header.index('DEFAULT_VALUE'):]

        # Removing empty strings avoids checking ints/floats
        param_default = [l for l in param_default if l]

        # For empty lists, set to none
        if not param_default:
            param_default = None
        # For single value lists, get first value
        # Check that param_default is a number or field name
        elif len(param_default) == 1:
            param_default = param_default[0]
            if isfloat(param_default) and param_type == 1:
                param_default = int(param_default)
            elif isfloat(param_default) and param_type in [2, 3]:
                param_default = float(param_default)
            elif param_default.lower() in [
                    'calculated', 'config_file', 'crt_file'
            ]:
                pass
            elif arcpy.ListFields(hru.polygon_path, param_default):
                pass
            else:
                logging.error('\nERROR: Default value {} was not parsed'
                              '\nERROR: {}'.format(param_default, line))
                sys.exit()
        # For multi-value lists, convert values to int/float
        elif len(param_default) >= 2:
            if param_type == 1:
                param_default = map(int, param_default)
            elif param_type in [2, 3]:
                param_default = map(float, param_default)
            else:
                logging.error('\nERROR: Default value {} was not parsed'
                              '\nERROR: {}'.format(param_default, line))
                sys.exit()

        # Check that dimension names are valid
        for dimen_name in dimen_names:
            if dimen_name not in dimen_sizes:
                logging.error('\nERROR: The dimension {} is not set in the '
                              'dimension CSV file'.format(dimen_name))
                sys.exit()

        # Calculate number of dimensions
        dimen_count = str(len(dimen_names))

        # Calculate number of values
        values_count = prod(
            [int(dimen_sizes[dn]) for dn in dimen_names if dimen_sizes[dn]])
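        # e.g., a parameter dimensioned ['nhru', 'nmonths'] on a 1000 cell
        #   fishnet would have 1000 * 12 = 12000 values (illustrative sizes)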

        # Write parameter to dictionaries
        param_names[param_name] = param_name
        param_files[param_name] = param_file
        param_dimen_counts[param_name] = dimen_count
        param_dimen_names[param_name] = dimen_names
        param_value_counts[param_name] = values_count
        param_types[param_name] = param_type
        param_defaults[param_name] = param_default

    # Apply default values to full dimension
    logging.info('\nSetting static parameters from defaults')
    for param_name, param_default in param_defaults.items():
        param_value_count = param_value_counts[param_name]
        # Skip if not set
        if param_default is None:
            continue
        # Skip if still a string (i.e., a field name or special keyword)
        elif isinstance(param_default, str):
            continue
        # For a single float/int, apply the default across the dimension size
        elif isinstance(param_default, (float, int)):
            for i in range(param_value_count):
                param_values[param_name][i] = param_default
        # For lists of floats, match up one-to-one for now
        elif len(param_default) == param_value_count:
            for i in range(param_value_count):
                param_values[param_name][i] = param_default[i]
        else:
            logging.error('\nERROR: The default value(s) ({0}) could not be '
                          'broadcast to the dimension length ({1})'.format(
                              param_default, param_value_count))
            sys.exit()
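    # A scalar default is repeated for every index in the dimension, while a
    #   list default must already match the dimension length exactly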

    # Set CONFIG file parameter values
    config_file_parameters = [
        p_name for p_name, p_value in sorted(param_defaults.items())
        if type(p_value) is str and p_value.lower() == 'config_file'
    ]
    if config_file_parameters:
        logging.info('Reading configuration file parameters')
        for param_name in config_file_parameters:
            logging.info('  {}'.format(param_name))
            try:
                values = inputs_cfg.get('INPUTS', param_name)
            except ConfigParser.NoOptionError:
                logging.error(
                    '  Parameter set to "config_file" in {} but not found '
                    'in the config file, exiting'.format(
                        os.path.basename(prms_param_csv_path)))
                sys.exit()

            # Split the comma-separated string into an {index: value} dict
            param_values[param_name] = {
                i: v
                for i, v in enumerate(values.split(','))
            }

            # Convert the strings to the appropriate type
            if param_types[param_name] == 1:
                param_values[param_name] = {
                    k: int(v)
                    for k, v in param_values[param_name].items()
                }
            elif param_types[param_name] in [2, 3]:
                param_values[param_name] = {
                    k: float(v)
                    for k, v in param_values[param_name].items()
                }

            # Try to honor the expected value count from the CSV dimensions
            # Repeat the value if the actual count doesn't match the
            #   expected count
            # For now, only do this for INI parameters with a single value
            #   and an expected count greater than 1
            param_value_count = param_value_counts[param_name]
            if ((len(param_values[param_name]) != param_value_count)
                    and (len(param_values[param_name]) == 1)
                    and (param_value_count > 1)):
                value = param_values[param_name].copy()
                param_values[param_name] = {}
                for i in range(param_value_count):
                    param_values[param_name][i] = value[0]
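            # e.g., a single INI value of 2.0 for a parameter dimensioned by
            #   'nmonths' would be repeated for all 12 months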

    # Read in HRU parameter data from fishnet polygon
    logging.info('\nReading in variable parameters from fishnet')
    param_fields = {
        k: v
        for k, v in param_defaults.items()
        if (type(v) is str
            and v.lower() not in ['calculated', 'config_file', 'crt_file'])
    }
    # Copy to a list so HRU_ID can be appended below
    value_fields = list(param_fields.values())

    # Use HRU_ID to uniquely identify each cell
    if hru.id_field not in value_fields:
        value_fields.append(hru.id_field)
    hru_id_i = value_fields.index(hru.id_field)

    # Read in each cell parameter value
    # items() and values() iterate in the same order (for an unmodified
    #   dict), so field_i lines up with the cursor field order
    with arcpy.da.SearchCursor(hru.polygon_path, value_fields) as s_cursor:
        for row in s_cursor:
            for field_i, (param, field) in enumerate(param_fields.items()):
                if param_types[param] == 1:
                    param_values[param][row[hru_id_i]] = int(row[field_i])
                elif param_types[param] in [2, 3]:
                    param_values[param][row[hru_id_i]] = float(row[field_i])
                elif param_types[param] == 4:
                    param_values[param][row[hru_id_i]] = row[field_i]
                # param_values[param][row[hru_id_i]] = row[field_i]

    # Calculate the number of fishnet columns
    with arcpy.da.SearchCursor(hru.polygon_path,
                               (hru.id_field, hru.col_field)) as s_cursor:
        ncol = len(set(int(row[1]) for row in s_cursor))
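    # ncol is used when writing the parameter file(s) below to wrap 'nhru'
    #   parameter values into one line per fishnet row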

    # # DEADBEEF - Per Rich this is not needed anymore
    # # The following will override the parameter CSV values
    # # Calculate basin_area from active cells (land and lake)
    # logging.info('\nCalculating basin area')
    # param_names['basin_area'] = 'basin_area'
    # param_dimen_counts['basin_area'] = 1
    # param_dimen_names['basin_area'] = ['one']
    # param_value_counts['basin_area'] = dimen_sizes['one']
    # param_types['basin_area'] = 2
    # value_fields = (hru.id_field, hru.type_field, hru.area_field)
    # with arcpy.da.SearchCursor(hru.polygon_path, value_fields) as s_cursor:
    #     param_values['basin_area'][0] = sum(
    #         [float(row[2]) for row in s_cursor if int(row[1]) >= 1])
    # logging.info('  basin_area = {} acres'.format(
    #     param_values['basin_area'][0]))

    # Convert DEM_ADJ units (if necessary)
    if elev_unit_scalar != 1.0:
        logging.info('\nScaling DEM_ADJ units')
        logging.info('  DEM Units:  {}'.format(dem_units))
        logging.info('  Elev Units: {}'.format(elev_units))
        logging.info('  Multiplier: {}'.format(elev_unit_scalar))
        param_values['hru_elev'] = {
            k: v * elev_unit_scalar
            for k, v in param_values['hru_elev'].items()
        }

    # Calculate mean monthly maximum temperature for all active cells
    logging.info('\nCalculating tmax_index')
    logging.info('  Converting Celsius to Fahrenheit')
    param_names['tmax_index'] = 'tmax_index'
    param_dimen_counts['tmax_index'] = 1
    param_dimen_names['tmax_index'] = ['nmonths']
    param_value_counts['tmax_index'] = int(dimen_sizes['nmonths'])
    param_types['tmax_index'] = 2
    tmax_field_list = ['TMAX_{:02d}'.format(m) for m in range(1, 13)]
    for i, tmax_field in enumerate(tmax_field_list):
        tmax_values = [
            row[1] for row in arcpy.da.SearchCursor(
                hru.polygon_path, (hru.type_field, tmax_field),
                where_clause='"{}" >= 1'.format(hru.type_field))
        ]
        tmax_c = sum(tmax_values) / len(tmax_values)
        tmax_f = 1.8 * tmax_c + 32
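        # e.g., a mean monthly value of 10 C becomes 1.8 * 10 + 32 = 50 F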
        param_values['tmax_index'][i] = tmax_f
        logging.info('  {} = {}'.format(tmax_field,
                                        param_values['tmax_index'][i]))
        del tmax_values

    logging.info('\nCalculating tmax_adj/tmin_adj')
    param_names['tmax_adj'] = 'tmax_adj'
    param_names['tmin_adj'] = 'tmin_adj'
    param_types['tmax_adj'] = 2
    param_types['tmin_adj'] = 2
    if temp_calc_method in ['ZONES']:
        param_dimen_counts['tmax_adj'] = 2
        param_dimen_counts['tmin_adj'] = 2
        param_dimen_names['tmax_adj'] = ['nhru', 'nmonths']
        param_dimen_names['tmin_adj'] = ['nhru', 'nmonths']
        param_value_counts['tmax_adj'] = 12 * fishnet_count
        param_value_counts['tmin_adj'] = 12 * fishnet_count

        # Read the Tmax/Tmin adjust values from the shapefile
        # This could probably be simplified to a single search cursor pass
        tmax_adj_values = []
        tmin_adj_values = []
        tmax_adj_field_list = [
            'TMX_ADJ_{:02d}'.format(m) for m in range(1, 13)
        ]
        tmin_adj_field_list = [
            'TMN_ADJ_{:02d}'.format(m) for m in range(1, 13)
        ]
        for tmax_adj_field in tmax_adj_field_list:
            tmax_adj_values.extend([
                float(row[1]) for row in sorted(
                    arcpy.da.SearchCursor(hru.polygon_path, (hru.id_field,
                                                             tmax_adj_field)))
            ])
        for tmin_adj_field in tmin_adj_field_list:
            tmin_adj_values.extend([
                float(row[1]) for row in sorted(
                    arcpy.da.SearchCursor(hru.polygon_path, (hru.id_field,
                                                             tmin_adj_field)))
            ])
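        # The extend() calls above flatten the values month-major, i.e.
        #   index = month_i * fishnet_count + hru_i, so the HRU index varies
        #   fastest, matching the ['nhru', 'nmonths'] dimension order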
        for i, value in enumerate(tmax_adj_values):
            param_values['tmax_adj'][i] = value
        for i, value in enumerate(tmin_adj_values):
            param_values['tmin_adj'][i] = value
        del tmax_adj_values, tmin_adj_values

        # # This needs to be tested/compared with values from the above approach
        # # Process the tmax/tmin values in one pass of the search cursor
        # fields = [hru.id_field] + tmax_adj_field_list + tmin_adj_field_list
        # with arcpy.da.SearchCursor(hru.polygon_path, fields) as search_c:
        #     for r_i, row in enumerate(sorted(search_c)):
        #         for f_i in range(12):
        #             param_values['tmax_adj'][f_i * fishnet_count + r_i] = \
        #                 float(row[f_i + 1])
        #             param_values['tmin_adj'][f_i * fishnet_count + r_i] = \
        #                 float(row[f_i + 13])

        # Set/override hru_tsta using HRU_TSTA field
        param_names['hru_tsta'] = 'hru_tsta'
        param_dimen_counts['hru_tsta'] = 1
        param_dimen_names['hru_tsta'] = ['nhru']
        param_value_counts['hru_tsta'] = fishnet_count
        param_types['hru_tsta'] = 1
        fields = (hru.id_field, 'HRU_TSTA')
        with arcpy.da.SearchCursor(hru.polygon_path, fields) as search_c:
            for row_i, row in enumerate(sorted(search_c)):
                param_values['hru_tsta'][row_i] = int(row[1])

        # DEADBEEF - Do these parameters need to be set or overridden
        # ntemp, elev_units, basin_tsta, hru_tlaps, tsta_elev

    elif temp_calc_method in ['1STA', 'LAPSE']:
        # Set the tmax_adj/tmin_adj dimensions
        param_dimen_counts['tmax_adj'] = 1
        param_dimen_counts['tmin_adj'] = 1
        param_dimen_names['tmax_adj'] = ['nhru']
        param_dimen_names['tmin_adj'] = ['nhru']
        param_value_counts['tmax_adj'] = fishnet_count
        param_value_counts['tmin_adj'] = fishnet_count

        # Read the tmax_adj/tmin_adj parameter values from the shapefile
        fields = (hru.id_field, 'TMAX_ADJ', 'TMIN_ADJ')
        with arcpy.da.SearchCursor(hru.polygon_path, fields) as search_c:
            for row_i, row in enumerate(sorted(search_c)):
                param_values['tmax_adj'][row_i] = float(row[1])
                param_values['tmin_adj'][row_i] = float(row[2])

    logging.info('\nCalculating rain_adj/snow_adj')
    ratio_field_list = ['PPT_RT_{:02d}'.format(m) for m in range(1, 13)]
    param_names['rain_adj'] = 'rain_adj'
    param_dimen_counts['rain_adj'] = 2
    param_dimen_names['rain_adj'] = ['nhru', 'nmonths']
    param_value_counts['rain_adj'] = 12 * fishnet_count
    param_types['rain_adj'] = 2

    param_names['snow_adj'] = 'snow_adj'
    param_dimen_counts['snow_adj'] = 2
    param_dimen_names['snow_adj'] = ['nhru', 'nmonths']
    param_value_counts['snow_adj'] = 12 * fishnet_count
    param_types['snow_adj'] = 2

    ratio_values = []
    for ratio_field in ratio_field_list:
        ratio_values.extend([
            float(row[1]) for row in sorted(
                arcpy.da.SearchCursor(hru.polygon_path, (hru.id_field,
                                                         ratio_field)))
        ])
    for i, value in enumerate(ratio_values):
        param_values['rain_adj'][i] = value
        param_values['snow_adj'][i] = value
    del ratio_values

    logging.info('\nCalculating subbasin_down')
    param_names['subbasin_down'] = 'subbasin_down'
    param_dimen_counts['subbasin_down'] = 1
    param_dimen_names['subbasin_down'] = ['nsub']
    param_value_counts['subbasin_down'] = dimen_sizes['nsub']
    param_types['subbasin_down'] = 1
    # Get list of subbasins and downstream cell for each stream/lake cell
    # Downstream is calculated from flow direction
    # logging.info('Cell out-flow dictionary')
    cell_dict = dict()
    fields = [
        hru.type_field, hru.krch_field, hru.lake_id_field, hru.subbasin_field,
        hru.flow_dir_field, hru.col_field, hru.row_field, hru.id_field
    ]
    for row in arcpy.da.SearchCursor(hru.polygon_path, fields):
        # Skip inactive cells
        if int(row[0]) == 0:
            continue
        # Skip non-lake and non-stream cells
        elif (int(row[1]) == 0 and int(row[2]) == 0):
            continue
        # Read in parameters
        cell = (int(row[5]), int(row[6]))
        # cell_dict value: [HRU_ID, SUBBASIN, NEXT_CELL], where NEXT_CELL
        #   is computed from the flow direction by support.next_row_col()
        cell_dict[cell] = [
            int(row[7]),
            int(row[3]),
            support.next_row_col(int(row[4]), cell)
        ]
        del cell

    # Get subset of cells if subbasin != next_subbasin
    subbasin_list = []
    # cell: (HRU_ID, SUBBASIN, NEXT_CELL)
    for cell, (hru_id, subbasin, next_cell) in cell_dict.items():
        # Skip cells that are already subbasin 0 (inactive?)
        # If next cell isn't in list, assume next cell is out of the model
        #   and set exit gauge subbasin to 0
        # If the subbasin of the current cell doesn't match the subbasin
        #   of the next cell, save the down subbasin
        if subbasin == 0:
            continue
        elif next_cell not in cell_dict:
            if [subbasin, 0] not in subbasin_list:
                subbasin_list.append([subbasin, 0])
        elif subbasin != cell_dict[next_cell][1]:
            if [subbasin, cell_dict[next_cell][1]] not in subbasin_list:
                subbasin_list.append([subbasin, cell_dict[next_cell][1]])
    for i, (subbasin, subbasin_down) in enumerate(sorted(subbasin_list)):
        param_values['subbasin_down'][i] = subbasin_down
        logging.debug('  {}'.format(param_values['subbasin_down'][i]))
    del subbasin_list
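    # e.g., if subbasin 1 drains into subbasin 3 and subbasin 3 drains out
    #   of the model, subbasin_list would be [[1, 3], [3, 0]] and
    #   subbasin_down would be written as 3, 0 (illustrative values)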

    # Switch SWALE points back to hru_type 1 or 2
    logging.info('\nResetting SWALE point HRU_TYPE')
    fields = [hru.type_field, hru.id_field, hru.lake_id_field]
    for row in arcpy.da.SearchCursor(hru.polygon_path, fields):
        # Skip non-swale cells
        if int(row[0]) != 3:
            continue
        elif int(row[2]) > 0:
            param_values['hru_type'][row[1]] = 2
        else:
            param_values['hru_type'][row[1]] = 1

    # # DEADBEEF - lake_hru is not used in PRMS 3.0.X or gsflow
    # #   It is used in PRMS 4.0 though
    # # lake_hru parameter
    # logging.info('\nCalculating LAKE_HRU from HRU_ID for all lake HRU\'s')
    # param_names['lake_hru'] = 'lake_hru'
    # param_dimen_counts['lake_hru'] = 1
    # param_dimen_names['lake_hru'] = ['nlake']
    # param_value_counts['lake_hru'] = dimen_sizes['nlake']
    # param_types['lake_hru'] = 1
    # lake_hru_id_list = [
    #    row[1] for row in arcpy.da.SearchCursor(
    #        hru.polygon_path, (hru.type_field, hru.id_field))
    #    if int(row[0]) == 2]
    # for i,lake_hru_id in enumerate(sorted(lake_hru_id_list)):
    #    # logging.debug('  {} {}'.format(i, lake_hru_id))
    #    param_values['lake_hru'][i] = lake_hru_id

    # Read in CRT parameters
    logging.info('\nReading CRT parameters')
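    # Each CRT record is expected to follow the PRMS parameter file layout,
    #   assuming break_str is the PRMS '####' delimiter: break line,
    #   parameter name, number of dimensions, one line per dimension name,
    #   number of values, type code (1=int, 2=float, 3=double, 4=string),
    #   then one value per line.  An illustrative record (hypothetical
    #   values, not from an actual CRT run):
    #   ####
    #   hru_strmseg_down_id
    #   1
    #   nhru
    #   4
    #   1
    #   7
    #   7
    #   8
    #   0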
    with open(crt_parameter_path, 'r') as input_f:
        crt_param_lines = [line.strip() for line in input_f.readlines()]
    # Use an enumerate iterator so lines can be consumed with .next()
    crt_param_enumerate = enumerate(crt_param_lines)
    for crt_param_line in crt_param_enumerate:
        if crt_param_line[1] == break_str:
            # Skip break string
            crt_param_line = crt_param_enumerate.next()
            # Read parameter name and get next line
            param_name = crt_param_line[1]
            param_names[param_name] = param_name
            crt_param_line = crt_param_enumerate.next()
            # Read dimension count and get next line
            param_dimen_counts[param_name] = int(crt_param_line[1])
            crt_param_line = crt_param_enumerate.next()
            # For each dimen (based on count) read in dimension name
            param_dimen_names[param_name] = []
            for dimen_i in range(param_dimen_counts[param_name]):
                param_dimen_names[param_name].append(crt_param_line[1])
                crt_param_line = crt_param_enumerate.next()
            # Read in number of parameter values
            param_value_counts[param_name] = int(crt_param_line[1])
            crt_param_line = crt_param_enumerate.next()
            # Read in parameter type
            param_types[param_name] = int(crt_param_line[1])
            # Read in parameter values
            # Calling .next() at the top of the loop is intentional;
            #   calling it after reading the value would skip the next break
            for i in range(param_value_counts[param_name]):
                crt_param_line = crt_param_enumerate.next()
                if param_types[param_name] == 1:
                    param_values[param_name][i] = int(crt_param_line[1])
                elif param_types[param_name] in [2, 3]:
                    param_values[param_name][i] = float(crt_param_line[1])
                elif param_types[param_name] == 4:
                    param_values[param_name][i] = crt_param_line[1]

    # Read in CRT groundwater parameters
    logging.info('Reading CRT groundwater parameters')
    with open(crt_gw_parameter_path, 'r') as input_f:
        crt_param_lines = [line.strip() for line in input_f.readlines()]
    # Use an enumerate iterator so lines can be consumed with .next()
    crt_param_enumerate = enumerate(crt_param_lines)
    for crt_param_line in crt_param_enumerate:
        if crt_param_line[1] == break_str:
            # Skip break string
            crt_param_line = crt_param_enumerate.next()
            # Read parameter name and get next line
            param_name = crt_param_line[1]
            param_names[param_name] = param_name
            crt_param_line = crt_param_enumerate.next()
            # Read dimension count and get next line
            param_dimen_counts[param_name] = int(crt_param_line[1])
            crt_param_line = crt_param_enumerate.next()
            # For each dimen (based on count) read in dimension name
            param_dimen_names[param_name] = []
            for dimen_i in range(param_dimen_counts[param_name]):
                param_dimen_names[param_name].append(crt_param_line[1])
                crt_param_line = crt_param_enumerate.next()
            # Read in number of parameter values
            param_value_counts[param_name] = int(crt_param_line[1])
            crt_param_line = crt_param_enumerate.next()
            # Read in parameter type
            param_types[param_name] = int(crt_param_line[1])
            # Read in parameter values
            # Calling .next() at the top of the loop is intentional;
            #   calling it after reading the value would skip the next break
            for i in range(param_value_counts[param_name]):
                crt_param_line = crt_param_enumerate.next()
                if param_types[param_name] == 1:
                    param_values[param_name][i] = int(crt_param_line[1])
                elif param_types[param_name] in [2, 3]:
                    param_values[param_name][i] = float(crt_param_line[1])
                elif param_types[param_name] == 4:
                    param_values[param_name][i] = crt_param_line[1]
    del crt_param_enumerate, crt_param_lines

    # # Add lake HRU's to groundwater cascades
    # logging.info('Modifying CRT groundwater parameters for all lake HRU\'s')
    # logging.info('  gw_up_id = HRU_ID (lake)')
    # logging.info('  gw_down_id = 0')
    # # logging.info('  gw_strmseg_down_id = OUTSEG')
    # logging.info('  gw_strmseg_down_id = 2')
    # logging.info('  gw_pct_up = 1')
    # field_list = [hru.type_field, hru.id_field, hru.outseg_field,
    #              hru.outflow_field]
    # lake_hru_id_dict = dict([
    #    (row[1], row[2])
    #    for row in arcpy.da.SearchCursor(hru.polygon_path, field_list)
    #    if int(row[0]) == 2 and int(row[3]) == 0])
    # for lake_hru_id, outseg in sorted(lake_hru_id_dict.items()):
    #    # if lake_hru_id == 9128:
    #        # print lake_hru_id, outseg
    #    # raw_input('ENTER')
    #    i = dimen_sizes['ncascdgw']
    #    dimen_sizes['ncascdgw'] += 1
    #    param_values['gw_up_id'][i] = lake_hru_id
    #    param_values['gw_down_id'][i] = 0
    #    # DEADBEEF - PRMS didn't like when set to OUTSEG, but 2 worked?
    #    # param_values['gw_strmseg_down_id'][i] = outseg
    #    param_values['gw_strmseg_down_id'][i] = 2
    #    # DEADBEEF - Trying 0
    #    # param_values['gw_strmseg_down_id'][i] = 0
    #    param_values['gw_pct_up'][i] = 1.00
    #    # print param_values['gw_up_id'][i]
    #    # print param_values['gw_down_id'][i]
    #    # print param_values['gw_strmseg_down_id'][i]
    #    # print param_values['gw_pct_up'][i]
    # param_value_counts['gw_up_id'] = int(dimen_sizes['ncascdgw'])
    # param_value_counts['gw_down_id'] = int(dimen_sizes['ncascdgw'])
    # param_value_counts['gw_strmseg_down_id'] = int(dimen_sizes['ncascdgw'])
    # param_value_counts['gw_pct_up'] = int(dimen_sizes['ncascdgw'])
    # logging.info('  ncascade = {}'.format(dimen_sizes['ncascade']))
    # logging.info('  ncascdgw = {}'.format(dimen_sizes['ncascdgw']))
    # # raw_input('ENTER')

    # DEADBEEF
    # Override -999 values
    # logging.info('\nChanging SOIL_MOIST_MAX nodata (-999) to 2')
    # for i,v in param_values['soil_moist_max'].items():
    #    if v == -999: param_values['soil_moist_max'][i] = 2
    # logging.info('Changing SOIL_RECHR_MAX nodata (-999) to 1')
    # for i,v in param_values['soil_rechr_max'].items():
    #    if v == -999: param_values['soil_rechr_max'][i] = 1
    # logging.info('Changing SAT_THRESHOLD nodata (-999) to 4')
    # for i,v in param_values['sat_threshold'].items():
    #    if v == -999: param_values['sat_threshold'][i] = 4

    # Override negative values
    # logging.info('Changing negative SSR2GW_RATE (< 0) to 0.1 (PRMS default)')
    # for i,v in param_values['ssr2gw_rate'].items():
    #    if v < 0: param_values['ssr2gw_rate'][i] = 0.1
    # raw_input('ENTER')

    # Write dimensions/parameters to PRMS param file
    logging.info('\nWriting parameter file(s)')
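    # Each file is written in the PRMS parameter file layout: the file
    #   header, a dimensions section of break-delimited name/size records,
    #   then a parameters section of break-delimited records (name,
    #   dimension count, dimension names, value count, type code, values)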
    prms_parameter_paths = sorted(
        set(param_files.values() + dimen_files.values()))

    for prms_parameter_path in prms_parameter_paths:
        logging.info('{}'.format(prms_parameter_path))
        if os.path.isfile(prms_parameter_path):
            logging.debug('  Removing existing file')
            os.remove(prms_parameter_path)
        # Get parameters and dimensions for each file
        param_name_list = sorted([
            p_name for p_name, p_file in param_files.items()
            if p_file == prms_parameter_path
        ])
        dimen_name_list = sorted([
            d_name for d_name, d_file in dimen_files.items()
            if d_file == prms_parameter_path
        ])

        with open(prms_parameter_path, 'w') as output_f:
            output_f.write(file_header_str + '\n')

            # Write dimensions
            if dimen_name_list:
                output_f.write(dimen_header_str + '\n')
                logging.debug('  Set dimensions')
            for dimen_name in dimen_name_list:
                try:
                    dimen_size = dimen_sizes[dimen_name]
                except KeyError:
                    continue
                if (isinstance(dimen_size, str)
                        and dimen_size.lower() == 'calculated'):
                    logging.debug(
                        '    Dimension {} not calculated'.format(dimen_name))
                    continue
                logging.debug('    {}'.format(dimen_name))
                output_f.write(break_str + '\n')
                output_f.write(dimen_name + '\n')
                output_f.write(str(dimen_size) + '\n')

            # Then write set parameters
            if param_name_list:
                output_f.write(param_header_str + '\n')
                logging.debug('  Set parameters')
            for param_name in param_name_list:
                if param_name not in param_values:
                    continue
                logging.debug('    {}'.format(param_name))

                output_f.write(break_str + '\n')
                output_f.write('{}\n'.format(param_name))
                output_f.write('{}\n'.format(param_dimen_counts[param_name]))
                for dimen_name in param_dimen_names[param_name]:
                    output_f.write(dimen_name + '\n')
                output_f.write(str(param_value_counts[param_name]) + '\n')
                param_type = param_types[param_name]
                output_f.write(str(param_type) + '\n')

                # Get the values sorted by index
                sorted_param_values = [
                    v for i, v in sorted(param_values[param_name].items())
                ]

                # If dimension is "nhru", write values as an array.
                # Write blocks of values for each row
                if ('nhru' in param_dimen_names[param_name]
                        and not param_column_flag):
                    n = ncol
                else:
                    n = 1
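                # e.g., on a 4 column fishnet an 'nhru' parameter (with
                #   param_column_flag unset) is written 4 values per line,
                #   one line per fishnet row; all other parameters are
                #   written one value per line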

                for i in range(0, len(sorted_param_values), n):
                    values_str = ' '.join([
                        param_formats[param_type].format(v)
                        for v in sorted_param_values[i:i + n]
                    ])
                    output_f.write(values_str + '\n')
