Example #1
import os

import numpy as np

# HydroTable, get_nodes, identify_outlets, upstream_trace, map_paths,
# collapse_array, dict_to_array, and write_outfile are assumed to be defined
# elsewhere in this module.


def main():
    from Preprocessing.utilities import nhd_states

    # Set parameters
    overwrite = True

    # Set initial paths
    nhd_path = os.path.join("..", "bin", "Preprocessed", "CondensedNHD")
    output_path = os.path.join("..", "bin", "Preprocessed", "Navigators",
                               "region_{}.npz")

    # Loop through regions
    for region in nhd_states.keys():

        print(region)

        # Read the condensed NHD table for this region
        nhd_table = HydroTable(region, nhd_path)

        # Build the node network, travel times, and COMID alias dictionary
        nodes, times, conversion_dict = get_nodes(nhd_table)
        attributes = np.array([times])

        # Identify the outlet reaches for the region
        outlets = identify_outlets(nhd_table, conversion_dict)

        # Trace upstream from the outlets to build path and attribute arrays
        paths, attribute_matrix = upstream_trace(nodes, outlets, attributes)

        # Index the paths for lookup
        path_map = map_paths(paths)

        # Collapse the path and attribute arrays
        paths, attribute_matrix, start_cols = collapse_array(
            paths, attribute_matrix)

        # Convert the COMID alias dictionary into an array for storage
        conversion_array = dict_to_array(conversion_dict)

        # Write the navigator components to a compressed .npz file
        write_outfile(output_path.format(region), paths, path_map,
                      conversion_array, attribute_matrix[0])
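
The write_outfile helper is not shown in this snippet. Below is a minimal
sketch of what it might look like, assuming the navigator components are
simply bundled into the compressed .npz archive named by output_path; the
keyword names used in the archive are illustrative, not taken from the
repository.

import numpy as np


def write_outfile(outfile_path, paths, path_map, conversion_array, times):
    # Bundle the navigator components into a single compressed archive.
    # The array names (paths, path_map, alias_index, time) are assumptions.
    np.savez_compressed(outfile_path, paths=paths, path_map=path_map,
                        alias_index=conversion_array, time=times)
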
Example #2
import os

# HydroTable, Navigator, identify_outlets, get_residence_times,
# make_lake_bins, and save_table are assumed to be defined elsewhere in this
# module.


def main():
    from Preprocessing.utilities import nhd_states

    # Set initial paths
    nhd_path = os.path.join("..", "bin", "Preprocessed", "CondensedNHD")
    nav_path = os.path.join("..", "bin", "Preprocessed", "Navigators")
    volume_path = os.path.join(r"T:\NationalData\LakeMorphometry",
                               "region_{}.dbf")
    output_path = os.path.join("..", "bin", "Preprocessed", "LakeFiles",
                               "region_{}.npz")

    # Loop through regions
    for region in nhd_states.keys():
        # Read tables and set output path
        nhd_table = HydroTable(region, nhd_path)
        nav = Navigator(region, nav_path)
        volume_table_path = volume_path.format(region)
        outfile_path = output_path.format(region)

        # Get a table of all lentic reaches with the reach and waterbody
        # COMIDs, the hydrologic sequence number, and flow
        reservoir_table = nhd_table[["comid", "wb_comid", "hydroseq",
                                     "qma"]].rename(columns={'qma': 'flow'})

        # Get the outlets for each reservoir
        reservoir_table = identify_outlets(reservoir_table)

        # Get residence times
        reservoir_table = get_residence_times(reservoir_table,
                                              volume_table_path)

        # Count number of reservoirs upstream of each reservoir
        reservoir_table = make_lake_bins(nav, reservoir_table)

        # Save table
        save_table(reservoir_table, outfile_path)
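
The get_residence_times helper is defined elsewhere. The sketch below shows
one plausible shape for it, assuming the LakeMorphometry .dbf can be read
with the third-party dbfread package and that residence time is computed as
waterbody volume divided by outlet flow; the field names (COMID, Volume) and
their units are assumptions.

import pandas as pd
from dbfread import DBF  # third-party .dbf reader


def get_residence_times(reservoir_table, volume_table_path):
    # Read the lake morphometry attribute table into a DataFrame
    volumes = pd.DataFrame(iter(DBF(volume_table_path)))
    volumes = volumes[["COMID", "Volume"]].rename(
        columns={"COMID": "wb_comid", "Volume": "volume"})

    # Join volumes to the reservoir outlets and compute residence time
    merged = reservoir_table.merge(volumes, on="wb_comid", how="left")
    merged["residence_time"] = merged["volume"] / merged["flow"]
    return merged
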
Example #3
import os

from Preprocessing.utilities import nhd_states

# HydroTable and extract_flow_data are assumed to be defined elsewhere in
# this module.


def main():
    condensed_nhd_path = os.path.join("..", "bin", "Preprocessed", "CondensedNHD")
    output_path = os.path.join("..", "bin", "Preprocessed", "FlowFiles")

    for region in nhd_states.keys():
        nhd_table = HydroTable(region, condensed_nhd_path)
        out_table = os.path.join(output_path, "region_{}.npz".format(region))
        extract_flow_data(nhd_table, out_table)
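
The extract_flow_data helper is not included in the snippet. A rough sketch
under two assumptions: the HydroTable supports DataFrame-style column
selection (as in Examples #1 and #2), and the output bundles reach IDs with
flow and velocity columns ('qma' appears in Example #2; 'vma' is assumed).

import numpy as np


def extract_flow_data(nhd_table, out_table):
    # Pull reach identifiers and flow attributes from the condensed NHD
    # table and store them in a compressed archive
    flow_table = nhd_table[["comid", "qma", "vma"]]
    np.savez_compressed(out_table,
                        comids=flow_table["comid"].values,
                        flow=flow_table["qma"].values,
                        velocity=flow_table["vma"].values)
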
Example #4
import os
from collections import OrderedDict

from Preprocessing.utilities import nhd_states


def get_nhd(nhd_dir=r"T:\NationalData\NHDPlusV2"):
    """Map each NHD region code to its NHDPlus directory, sorted by region."""
    all_paths = {}
    regions = set(nhd_states.keys())
    region_dirs = {"NHDPlus{}".format(region) for region in regions}
    for root_dir, sub_dirs, _ in os.walk(nhd_dir):
        if set(sub_dirs) & region_dirs:
            for sub_dir in sub_dirs:
                # Remove the "NHDPlus" prefix to recover the region code
                # (str.lstrip would strip a character set, not the prefix)
                region = sub_dir[len("NHDPlus"):]
                if region in regions:
                    all_paths[region] = os.path.join(root_dir, sub_dir)
    return OrderedDict(sorted(all_paths.items()))
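
A minimal usage sketch for get_nhd, assuming the default network directory
is reachable; it simply iterates the returned OrderedDict of region codes
and NHDPlus directories.

if __name__ == "__main__":
    nhd_paths = get_nhd()
    for region, region_dir in nhd_paths.items():
        print("Region {}: {}".format(region, region_dir))
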
Example #5
import os

import pandas as pd

from Preprocessing.utilities import nhd_states

# get_scenario_index, assemble_files, compile_scenario_index, inventory_files,
# and generate_recipe_map are assumed to be defined elsewhere in this module.


def main():
    nhd_dir = os.path.join("..", "bin", "Preprocessed", "CondensedNHD")
    scenario_dir = os.path.join("..", "bin", "Preprocessed",
                                "ScenarioMatrices")
    scenario_format = r"([A-Z]{2})_scenarios_agg_(\d{6}).txt"
    recipe_path = os.path.join("..", "bin", "Preprocessed", "Recipes")
    recipe_format = r"recipes_(\d{8})cdl_(\d{4}).txt"
    output_dir = os.path.join("..", "bin", "Preprocessed", "RecipeMaps")
    region_filter = ('07',)  # set to 'all' to process every region

    # Get an index of all scenario names for each state and date
    scenario_files = get_scenario_index(scenario_dir, scenario_format)

    # Identify all recipe files and index by HUC-2 and year
    recipe_files = assemble_files(recipe_path, recipe_format,
                                  nhd_states.keys())

    for region, states in nhd_states.items():

        regional_recipes = recipe_files.get(region)

        out_file = os.path.join(output_dir, "region_" + region)
        if os.path.exists(out_file):
            os.remove(out_file)

        if regional_recipes is not None:

            if region in region_filter or region_filter == 'all':
                # Get a list of all recipes and scenarios in the region for
                # aliasing and structuring output. The full indexing step is
                # commented out here; a previously cached copy is read back
                # from disk instead.
                # scenario_index = compile_scenario_index(states, scenario_files)
                # scenario_index.to_csv(r"..\bin\temp_index.txt")
                print("Reading scenario index...")
                # pd.Series.from_csv was removed from pandas; read the cached
                # index back and squeeze the single column into a Series
                scenario_index = pd.read_csv(
                    r"..\bin\temp_index.txt", index_col=0,
                    header=None).squeeze("columns")

                # Get all the reach IDs in the region (also cached to disk)
                # inventory, n_lines = inventory_files(regional_recipes)
                # inventory.to_csv(r"..\bin\temp_inventory.txt")
                # print(n_lines)
                print("Reading inventory file...")
                inventory = pd.read_csv(r"..\bin\temp_inventory.txt")
                n_lines = 96637799  # line count from the commented step above

                # Unpack recipe files into matrix
                print("Generating recipe map...")
                generate_recipe_map(scenario_index, inventory, n_lines,
                                    out_file)
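
The get_scenario_index helper is defined elsewhere. The sketch below shows
one way it could work, given that scenario_format captures a two-letter
state code and a six-digit date from each file name; returning a dictionary
of file paths keyed by (state, date) is an assumption.

import os
import re


def get_scenario_index(scenario_dir, scenario_format):
    # Match scenario matrix file names against the supplied pattern and
    # index the file paths by the captured (state, date) groups
    pattern = re.compile(scenario_format)
    index = {}
    for filename in os.listdir(scenario_dir):
        match = pattern.match(filename)
        if match:
            state, date = match.groups()
            index[(state, date)] = os.path.join(scenario_dir, filename)
    return index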