def advanced_shallow_lake_filtering_driver(input_unfilled_orography_file,
                                           input_unfilled_orography_fieldname,
                                           input_filled_orography_file,
                                           input_filled_orography_fieldname,
                                           output_unfilled_orography_file,
                                           output_unfilled_orography_fieldname,
                                           minimum_depth_threshold):
    input_unfilled_orography = \
        iodriver.advanced_field_loader(input_unfilled_orography_file,
                                       field_type='Orography',
                                       fieldname=input_unfilled_orography_fieldname)
    input_filled_orography = \
        iodriver.advanced_field_loader(input_filled_orography_file,
                                       field_type='Orography',
                                       fieldname=input_filled_orography_fieldname)
    output_unfilled_orography = \
        field.Field(np.ascontiguousarray(input_unfilled_orography.get_data(),
                                         dtype=np.float64),
                    grid=input_unfilled_orography.get_grid())
    lake_operators_wrapper.filter_out_shallow_lakes(
        output_unfilled_orography.get_data(),
        np.ascontiguousarray(input_filled_orography.get_data(),
                             dtype=np.float64),
        minimum_depth_threshold)
    iodriver.advanced_field_writer(output_unfilled_orography_file,
                                   output_unfilled_orography,
                                   fieldname=output_unfilled_orography_fieldname)
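# Hypothetical usage sketch (illustration only): the file paths, fieldnames and
# the 2.0 m depth threshold below are made-up placeholders, not values taken
# from any real configuration.
#
# advanced_shallow_lake_filtering_driver(
#     input_unfilled_orography_file="/path/to/unfilled_orog.nc",
#     input_unfilled_orography_fieldname="z",
#     input_filled_orography_file="/path/to/filled_orog.nc",
#     input_filled_orography_fieldname="z",
#     output_unfilled_orography_file="/path/to/filtered_orog.nc",
#     output_unfilled_orography_fieldname="z",
#     minimum_depth_threshold=2.0)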
def main(rdirs_filename, output_filename, grid_type, **grid_kwargs):
    """Top level function for cumulative flow to cell flow map generation

    Inputs:
    rdirs_filename: string; full path to the file containing the input river
        direction field
    output_filename: string; full path of the target file to write the
        generated cumulative flow to cell field to
    grid_type: string; a keyword specifying the grid type of the input and
        output fields
    **grid_kwargs (optional): keyword dictionary; any parameters of the input
        and output grid that are required
    Returns: Nothing
    """

    rdirs = iodriver.load_field(rdirs_filename,
                                iodriver.get_file_extension(rdirs_filename),
                                "Generic",
                                grid_type=grid_type,
                                **grid_kwargs)
    nlat, nlong = rdirs.get_grid().get_grid_dimensions()
    paths_map = field.Field(create_hypothetical_river_paths_map(
                                riv_dirs=rdirs.get_data(),
                                lsmask=None,
                                use_f2py_func=True,
                                use_f2py_sparse_iterator=True,
                                nlat=nlat,
                                nlong=nlong),
                            grid=grid_type,
                            **grid_kwargs)
    iodriver.write_field(output_filename, paths_map,
                         iodriver.get_file_extension(output_filename))
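# Hypothetical call sketch: the paths and the "LatLong10min" grid keyword are
# placeholders chosen for illustration; any extra grid parameters the chosen
# grid type needs could be passed through **grid_kwargs.
#
# main(rdirs_filename="/path/to/rdirs.nc",
#      output_filename="/path/to/cumulative_flow.nc",
#      grid_type="LatLong10min")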
def advanced_water_redistribution_driver(input_lake_numbers_file,
                                         input_lake_numbers_fieldname,
                                         input_lake_centers_file,
                                         input_lake_centers_fieldname,
                                         input_water_to_redistribute_file,
                                         input_water_to_redistribute_fieldname,
                                         output_water_redistributed_to_lakes_file,
                                         output_water_redistributed_to_lakes_fieldname,
                                         output_water_redistributed_to_rivers_file,
                                         output_water_redistributed_to_rivers_fieldname,
                                         coarse_grid_type,
                                         **coarse_grid_kwargs):
    lake_numbers = iodriver.advanced_field_loader(
        input_lake_numbers_file,
        field_type='Generic',
        fieldname=input_lake_numbers_fieldname)
    lake_centers = iodriver.advanced_field_loader(
        input_lake_centers_file,
        field_type='Generic',
        fieldname=input_lake_centers_fieldname)
    water_to_redistribute = \
        iodriver.advanced_field_loader(input_water_to_redistribute_file,
                                       field_type='Generic',
                                       fieldname=input_water_to_redistribute_fieldname)
    fine_grid = lake_numbers.get_grid()
    fine_shape = lake_numbers.get_data().shape
    coarse_grid = grid.makeGrid(coarse_grid_type, **coarse_grid_kwargs)
    water_redistributed_to_lakes = field.Field(
        np.zeros(fine_shape, dtype=np.float64, order='C'), fine_grid)
    water_redistributed_to_rivers = field.Field(
        coarse_grid.create_empty_field(np.float64), coarse_grid)
    lake_operators_wrapper.redistribute_water(
        np.ascontiguousarray(lake_numbers.get_data(), dtype=np.int32),
        np.ascontiguousarray(lake_centers.get_data(), dtype=np.int32),
        np.ascontiguousarray(water_to_redistribute.get_data(),
                             dtype=np.float64),
        water_redistributed_to_lakes.get_data(),
        water_redistributed_to_rivers.get_data())
    iodriver.advanced_field_writer(
        output_water_redistributed_to_lakes_file,
        water_redistributed_to_lakes,
        fieldname=output_water_redistributed_to_lakes_fieldname)
    iodriver.advanced_field_writer(
        output_water_redistributed_to_rivers_file,
        water_redistributed_to_rivers,
        fieldname=output_water_redistributed_to_rivers_fieldname)
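# Note on the calling convention above (inferred from this driver, not from
# the wrapper's own documentation): the three input fields are passed as
# contiguous copies, while the two pre-allocated "water_redistributed_*"
# arrays are filled in place by lake_operators_wrapper before being written
# out. A minimal hypothetical call, with placeholder paths and fieldnames:
#
# advanced_water_redistribution_driver(
#     "/path/to/lake_numbers.nc", "lake_numbers",
#     "/path/to/lake_centers.nc", "lake_centers",
#     "/path/to/water_to_redistribute.nc", "water",
#     "/path/to/water_to_lakes.nc", "water_to_lakes",
#     "/path/to/water_to_rivers.nc", "water_to_rivers",
#     coarse_grid_type="HD")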
def advanced_main(rdirs_filename, output_filename, rdirs_fieldname,
                  output_fieldname):
    rdirs = iodriver.advanced_field_loader(rdirs_filename,
                                           field_type="Generic",
                                           fieldname=rdirs_fieldname)
    nlat, nlong = rdirs.get_grid().get_grid_dimensions()
    paths_map = field.Field(create_hypothetical_river_paths_map(
                                riv_dirs=rdirs.get_data(),
                                lsmask=None,
                                use_f2py_func=True,
                                use_f2py_sparse_iterator=True,
                                nlat=nlat,
                                nlong=nlong),
                            grid=rdirs.get_grid())
    iodriver.advanced_field_writer(target_filename=output_filename,
                                   field=paths_map,
                                   fieldname=output_fieldname)
def main(filename, output_filename, loop_logfile, use_cpp_code=True,
         grid_type='HD', **grid_kwargs):
    """Generates a file with numbered catchments from a given river flow direction file

    Inputs:
    filename: string; the input file of river directions
    output_filename: string; the target file for the output numbered catchments
    loop_logfile: string; an input file of catchments with loops to be updated
    use_cpp_code: bool; use the C++ code if True, otherwise use the Fortran code
    grid_type: string; a keyword giving the type of grid being used
    **grid_kwargs (optional): keyword dictionary; the parameters of the grid to
        be used (if required)
    Returns: Nothing

    Produces the numbered catchments, where the numbering is in descending
    order of size. Also updates the loop log file to reflect the relabelling
    of catchments and, when the Fortran code is used, runs a check on the type
    of catchments generated (the results are placed in a log file with the same
    basename as the output catchments but with the extension '.log').
    """

    rdirs = iodriver.load_field(filename,
                                file_type=iodriver.get_file_extension(filename),
                                field_type='Generic',
                                grid_type=grid_type,
                                **grid_kwargs)
    if use_cpp_code:
        catchments = compute_catchments_cpp(rdirs.get_data(), loop_logfile)
    else:
        catchment_types, catchments = compute_catchments(rdirs.get_data(),
                                                         loop_logfile)
        check_catchment_types(catchment_types,
                              logfile=path.splitext(output_filename)[0] + ".log")
    numbered_catchments = field.Field(renumber_catchments_by_size(catchments,
                                                                  loop_logfile),
                                      grid=grid_type,
                                      **grid_kwargs)
    iodriver.write_field(filename=output_filename,
                         field=numbered_catchments,
                         file_type=iodriver.get_file_extension(output_filename))
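# Hypothetical invocation (paths are placeholders). With use_cpp_code=True the
# catchment-type check is skipped, since the C++ routine only returns the
# catchment field itself.
#
# main(filename="/path/to/rdirs.nc",
#      output_filename="/path/to/catchments.nc",
#      loop_logfile="/path/to/loops.log",
#      use_cpp_code=True,
#      grid_type="HD")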
def reduce_connected_areas_to_points(input_minima_file,
                                     input_minima_fieldname,
                                     output_minima_file,
                                     output_minima_fieldname,
                                     use_diagonals=True):
    input_minima = iodriver.advanced_field_loader(
        input_minima_file,
        field_type='Generic',
        fieldname=input_minima_fieldname)
    minima_array = np.ascontiguousarray(input_minima.get_data(),
                                        dtype=np.int32)
    lake_operators_wrapper.reduce_connected_areas_to_points(minima_array,
                                                            use_diagonals)
    output_minima = field.Field(minima_array, grid=input_minima.get_grid())
    iodriver.advanced_field_writer(output_minima_file, output_minima,
                                   fieldname=output_minima_fieldname)
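# Illustrative sketch only: a rough NumPy/SciPy picture of what "reduce
# connected areas to points" means conceptually (keep one representative cell
# per connected region of True values). The real work above is done by the
# compiled lake_operators_wrapper routine; this helper name and implementation
# are invented here purely for illustration and are not that routine.

# import numpy as np
# from scipy import ndimage
#
# def reduce_areas_to_points_sketch(mask, use_diagonals=True):
#     # A 3x3 structuring element includes diagonal neighbours; the default
#     # cross-shaped element does not.
#     structure = np.ones((3, 3), dtype=bool) if use_diagonals else None
#     labels, count = ndimage.label(mask, structure=structure)
#     reduced = np.zeros_like(mask, dtype=bool)
#     for region in range(1, count + 1):
#         # Keep a single representative cell (here simply the first found).
#         reduced[tuple(np.argwhere(labels == region)[0])] = True
#     return reduced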
def extract_lake_volumes(flood_volume_thresholds,
                         basin_catchment_numbers,
                         merge_types):
    lake_mask = basin_catchment_numbers.greater_than_value(0)
    lake_volumes = field.makeEmptyField("Generic", np.float64,
                                        grid_type=lake_mask.get_grid())
    lake_mask_reduced = lake_mask.copy()
    lake_mask_reduced.change_dtype(np.int32)
    lake_operators_wrapper.reduce_connected_areas_to_points(
        lake_mask_reduced.get_data(), True)
    lake_mask_inv = lake_mask.copy()
    lake_mask_inv.invert_data()
    orography = field.makeEmptyField("Generic", np.float64,
                                     grid_type=lake_mask.get_grid())
    orography.get_data()[lake_mask_inv.get_data()] = 3.0
    orography.get_data()[lake_mask.get_data()] = 1.0
    orography.get_data()[lake_mask_reduced.get_data().astype(bool)] = 0.0
    lake_mask_inv.change_dtype(np.int32)
    rdirs = determine_river_directions.determine_river_directions(
        orography, lake_mask_reduced,
        truesinks=None,
        always_flow_to_sea=True,
        use_diagonal_nbrs=True,
        mark_pits_as_true_sinks=False)
    rdirs.get_data()[lake_mask_inv.get_data()] = -1
    simplified_basin_catchment_numbers = None
    temporary_file = tempfile.NamedTemporaryFile(delete=False)
    try:
        temporary_file.close()
        dummy_loop_log_filepath = temporary_file.name
        simplified_basin_catchment_numbers = \
            field.Field(cc.compute_catchments_cpp(rdirs.get_data(),
                                                  loop_logfile=dummy_loop_log_filepath),
                        grid=lake_mask.get_grid())
    finally:
        os.remove(temporary_file.name)
    for i in range(1, simplified_basin_catchment_numbers.find_maximum() + 1):
        if i in simplified_basin_catchment_numbers.get_data():
            single_lake_mask = lake_mask.copy()
            single_lake_mask.get_data()[
                simplified_basin_catchment_numbers.get_data() != i] = False
            single_lake_flood_volume_thresholds = flood_volume_thresholds.copy()
            single_lake_flood_volume_thresholds.get_data()[
                np.logical_not(single_lake_mask.get_data())] = 0.0
            single_lake_flood_volume_thresholds.get_data()[
                np.logical_and(merge_types.get_data() != 10,
                               merge_types.get_data() != 11)] = 0.0
            lake_volumes.get_data()[single_lake_mask.get_data()] = \
                np.sum(single_lake_flood_volume_thresholds.get_data())
    return lake_volumes
def advanced_burn_carved_rivers_driver(input_orography_file,
                                       input_orography_fieldname,
                                       input_rdirs_file,
                                       input_rdirs_fieldname,
                                       input_minima_file,
                                       input_minima_fieldname,
                                       input_lakemask_file,
                                       input_lakemask_fieldname,
                                       output_orography_file,
                                       output_orography_fieldname,
                                       add_slope=False,
                                       max_exploration_range=0,
                                       minimum_height_change_threshold=0.0,
                                       short_path_threshold=0,
                                       short_minimum_height_change_threshold=0.0):
    input_orography = iodriver.advanced_field_loader(
        input_orography_file,
        field_type='Orography',
        fieldname=input_orography_fieldname)
    input_rdirs = iodriver.advanced_field_loader(
        input_rdirs_file,
        field_type='Orography',
        fieldname=input_rdirs_fieldname)
    input_minima = iodriver.advanced_field_loader(
        input_minima_file,
        field_type='Orography',
        fieldname=input_minima_fieldname)
    input_lakemask = iodriver.advanced_field_loader(
        input_lakemask_file,
        field_type='Generic',
        fieldname=input_lakemask_fieldname)
    output_orography = field.Field(
        np.ascontiguousarray(input_orography.get_data(), dtype=np.float64),
        grid=input_orography.get_grid())
    lake_operators_wrapper.burn_carved_rivers(
        output_orography.get_data(),
        np.ascontiguousarray(input_rdirs.get_data(), dtype=np.float64),
        np.ascontiguousarray(input_minima.get_data(), dtype=np.int32),
        np.ascontiguousarray(input_lakemask.get_data(), dtype=np.int32),
        add_slope,
        max_exploration_range,
        minimum_height_change_threshold,
        short_path_threshold,
        short_minimum_height_change_threshold)
    iodriver.advanced_field_writer(output_orography_file, output_orography,
                                   fieldname=output_orography_fieldname)
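# Hypothetical example call (illustrative placeholders only). The tuning
# values shown are arbitrary, chosen just to demonstrate the optional
# arguments, not recommended settings.
#
# advanced_burn_carved_rivers_driver(
#     "/path/to/orography.nc", "z",
#     "/path/to/rdirs.nc", "rdirs",
#     "/path/to/minima.nc", "minima",
#     "/path/to/lakemask.nc", "lakemask",
#     "/path/to/orography_with_carved_rivers.nc", "z",
#     add_slope=True,
#     max_exploration_range=10,
#     minimum_height_change_threshold=0.1)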
def advanced_main(filename, fieldname, output_filename, output_fieldname,
                  loop_logfile, use_cpp_alg=True):
    rdirs = iodriver.advanced_field_loader(filename,
                                           field_type='Generic',
                                           fieldname=fieldname)
    nlat, nlon = rdirs.get_grid_dimensions()
    if use_cpp_alg:
        catchments = compute_catchments_cpp(rdirs.get_data(), loop_logfile)
    else:
        catchment_types, catchments = compute_catchments(rdirs.get_data(),
                                                         loop_logfile)
        check_catchment_types(catchment_types,
                              logfile=path.splitext(output_filename)[0] + ".log")
    numbered_catchments = field.Field(renumber_catchments_by_size(catchments,
                                                                  loop_logfile),
                                      grid=rdirs.get_grid())
    iodriver.advanced_field_writer(target_filename=output_filename,
                                   field=numbered_catchments,
                                   fieldname=output_fieldname)
def advanced_basin_evaluation_driver( input_minima_file, input_minima_fieldname, input_raw_orography_file, input_raw_orography_fieldname, input_corrected_orography_file, input_corrected_orography_fieldname, input_cell_areas_file, input_cell_areas_fieldname, input_prior_fine_rdirs_file, input_prior_fine_rdirs_fieldname, input_prior_fine_catchments_file, input_prior_fine_catchments_fieldname, input_coarse_catchment_nums_file, input_coarse_catchment_nums_fieldname, input_coarse_rdirs_file, input_coarse_rdirs_fieldname, combined_output_filename, output_filepath, output_filelabel, output_basin_catchment_nums_filepath=None): input_minima = iodriver.advanced_field_loader( input_minima_file, field_type='Generic', fieldname=input_minima_fieldname) input_raw_orography = iodriver.advanced_field_loader( input_raw_orography_file, field_type='Orography', fieldname=input_raw_orography_fieldname) input_corrected_orography = iodriver.advanced_field_loader( input_corrected_orography_file, field_type='Orography', fieldname=input_corrected_orography_fieldname) input_cell_areas = iodriver.advanced_field_loader( input_cell_areas_file, field_type='Generic', fieldname=input_cell_areas_fieldname) input_prior_fine_rdirs = iodriver.advanced_field_loader( input_prior_fine_rdirs_file, field_type='RiverDirections', fieldname=input_prior_fine_rdirs_fieldname) input_prior_fine_catchments = iodriver.advanced_field_loader( input_prior_fine_catchments_file, field_type='Generic', fieldname=input_prior_fine_catchments_fieldname) input_coarse_catchment_nums = iodriver.advanced_field_loader( input_coarse_catchment_nums_file, field_type='Generic', fieldname=input_coarse_catchment_nums_fieldname) input_coarse_rdirs = iodriver.advanced_field_loader( input_coarse_rdirs_file, field_type='Generic', fieldname=input_coarse_rdirs_fieldname) fine_grid = input_raw_orography.get_grid() fine_shape = input_raw_orography.get_data().shape connection_volume_thresholds = field.Field( np.zeros(fine_shape, dtype=np.float64, order='C'), fine_grid) flood_volume_thresholds = field.Field( np.zeros(fine_shape, dtype=np.float64, order='C'), fine_grid) flood_next_cell_lat_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) flood_next_cell_lon_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) connect_next_cell_lat_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) connect_next_cell_lon_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) flood_force_merge_lat_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) flood_force_merge_lon_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) connect_force_merge_lat_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) connect_force_merge_lon_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) flood_redirect_lat_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) flood_redirect_lon_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) connect_redirect_lat_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) connect_redirect_lon_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) additional_flood_redirect_lat_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) additional_flood_redirect_lon_index = field.Field( np.zeros(fine_shape, dtype=np.int32, 
order='C'), fine_grid) additional_connect_redirect_lat_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) additional_connect_redirect_lon_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) flood_local_redirect = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) connect_local_redirect = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) additional_flood_local_redirect = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) additional_connect_local_redirect = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) merge_points = field.Field(np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) if output_basin_catchment_nums_filepath is not None: basin_catchment_numbers = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) else: basin_catchment_numbers = None evaluate_basins_wrapper.evaluate_basins( minima_in_int=np.ascontiguousarray(input_minima.get_data(), dtype=np.int32), raw_orography_in=np.ascontiguousarray(input_raw_orography.get_data(), dtype=np.float64), corrected_orography_in=np.ascontiguousarray( input_corrected_orography.get_data(), dtype=np.float64), cell_areas_in=np.ascontiguousarray(input_cell_areas.get_data(), dtype=np.float64), connection_volume_thresholds_in=connection_volume_thresholds.get_data( ), flood_volume_thresholds_in=flood_volume_thresholds.get_data(), prior_fine_rdirs_in=np.ascontiguousarray( input_prior_fine_rdirs.get_data(), dtype=np.float64), prior_coarse_rdirs_in=np.ascontiguousarray( input_coarse_rdirs.get_data(), dtype=np.float64), prior_fine_catchments_in=np.ascontiguousarray( input_prior_fine_catchments.get_data(), dtype=np.int32), coarse_catchment_nums_in=np.ascontiguousarray( input_coarse_catchment_nums.get_data(), dtype=np.int32), flood_next_cell_lat_index_in=flood_next_cell_lat_index.get_data(), flood_next_cell_lon_index_in=flood_next_cell_lon_index.get_data(), connect_next_cell_lat_index_in=connect_next_cell_lat_index.get_data(), connect_next_cell_lon_index_in=connect_next_cell_lon_index.get_data(), flood_force_merge_lat_index_in=flood_force_merge_lat_index.get_data(), flood_force_merge_lon_index_in=flood_force_merge_lon_index.get_data(), connect_force_merge_lat_index_in=connect_force_merge_lat_index. get_data(), connect_force_merge_lon_index_in=connect_force_merge_lon_index. 
get_data(), flood_redirect_lat_index_in=flood_redirect_lat_index.get_data(), flood_redirect_lon_index_in=flood_redirect_lon_index.get_data(), connect_redirect_lat_index_in=connect_redirect_lat_index.get_data(), connect_redirect_lon_index_in=connect_redirect_lon_index.get_data(), additional_flood_redirect_lat_index_in= additional_flood_redirect_lat_index.get_data(), additional_flood_redirect_lon_index_in= additional_flood_redirect_lon_index.get_data(), additional_connect_redirect_lat_index_in= additional_connect_redirect_lat_index.get_data(), additional_connect_redirect_lon_index_in= additional_connect_redirect_lon_index.get_data(), flood_local_redirect_out_int=flood_local_redirect.get_data(), connect_local_redirect_out_int=connect_local_redirect.get_data(), additional_flood_local_redirect_out_int=additional_flood_local_redirect .get_data(), additional_connect_local_redirect_out_int= additional_connect_local_redirect.get_data(), merge_points_out_int=merge_points.get_data(), basin_catchment_numbers_in=basin_catchment_numbers.get_data()) connection_volume_thresholds_filename = path.join( output_filepath, "connect_vts_" + output_filelabel + ".nc") iodriver.advanced_field_writer(connection_volume_thresholds_filename, connection_volume_thresholds, fieldname='connection_volume_thresholds') flood_volume_thresholds_filename = path.join( output_filepath, "flood_vts_" + output_filelabel + ".nc") iodriver.advanced_field_writer(flood_volume_thresholds_filename, flood_volume_thresholds, fieldname='flood_volume_thresholds') flood_next_cell_lat_index_filename = path.join( output_filepath, "flood_nci_lat_" + output_filelabel + ".nc") iodriver.advanced_field_writer(flood_next_cell_lat_index_filename, flood_next_cell_lat_index, fieldname='flood_next_cell_lat_index') flood_next_cell_lon_index_filename = path.join( output_filepath, "flood_nci_lon_" + output_filelabel + ".nc") iodriver.advanced_field_writer(flood_next_cell_lon_index_filename, flood_next_cell_lon_index, fieldname='flood_next_cell_lon_index') connect_next_cell_lat_index_filename = path.join( output_filepath, "connect_nci_lat_" + output_filelabel + ".nc") iodriver.advanced_field_writer(connect_next_cell_lat_index_filename, connect_next_cell_lat_index, fieldname='connect_next_cell_lat_index') connect_next_cell_lon_index_filename = path.join( output_filepath, "connect_nci_lon_" + output_filelabel + ".nc") iodriver.advanced_field_writer(connect_next_cell_lon_index_filename, connect_next_cell_lon_index, fieldname='connect_next_cell_lon_index') flood_force_merge_lat_index_filename = path.join( output_filepath, "flood_fmi_lat_" + output_filelabel + ".nc") iodriver.advanced_field_writer(flood_force_merge_lat_index_filename, flood_force_merge_lat_index, fieldname='flood_force_merge_lat_index') flood_force_merge_lon_index_filename = path.join( output_filepath, "flood_fmi_lon_" + output_filelabel + ".nc") iodriver.advanced_field_writer(flood_force_merge_lon_index_filename, flood_force_merge_lon_index, fieldname='flood_force_merge_lon_index') connect_force_merge_lat_index_filename = path.join( output_filepath, "connect_fmi_lat_" + output_filelabel + ".nc") iodriver.advanced_field_writer(connect_force_merge_lat_index_filename, connect_force_merge_lat_index, fieldname='connect_force_merge_lat_index') connect_force_merge_lon_index_filename = path.join( output_filepath, "connect_fmi_lon_" + output_filelabel + ".nc") iodriver.advanced_field_writer(connect_force_merge_lon_index_filename, connect_force_merge_lon_index, fieldname='connect_force_merge_lon_index') 
flood_redirect_lat_index_filename = path.join( output_filepath, "flood_ri_lat_" + output_filelabel + ".nc") iodriver.advanced_field_writer(flood_redirect_lat_index_filename, flood_redirect_lat_index, fieldname='flood_redirect_lat_index') flood_redirect_lon_index_filename = path.join( output_filepath, "flood_ri_lon_" + output_filelabel + ".nc") iodriver.advanced_field_writer(flood_redirect_lon_index_filename, flood_redirect_lon_index, fieldname='flood_redirect_lon_index') connect_redirect_lat_index_filename = path.join( output_filepath, "connect_ri_lat_" + output_filelabel + ".nc") iodriver.advanced_field_writer(connect_redirect_lat_index_filename, connect_redirect_lat_index, fieldname='connect_redirect_lat_index') connect_redirect_lon_index_filename = path.join( output_filepath, "connect_ri_lon_" + output_filelabel + ".nc") iodriver.advanced_field_writer(connect_redirect_lon_index_filename, connect_redirect_lon_index, fieldname='connect_redirect_lon_index') additional_flood_redirect_lat_index_filename = path.join( output_filepath, "additional_flood_ri_lat_" + output_filelabel + ".nc") iodriver.advanced_field_writer( additional_flood_redirect_lat_index_filename, additional_flood_redirect_lat_index, fieldname='additional_flood_redirect_lat_index') additional_flood_redirect_lon_index_filename = path.join( output_filepath, "additional_flood_ri_lon_" + output_filelabel + ".nc") iodriver.advanced_field_writer( additional_flood_redirect_lon_index_filename, additional_flood_redirect_lon_index, fieldname='additional_flood_redirect_lon_index') additional_connect_redirect_lat_index_filename = path.join( output_filepath, "additional_connect_ri_lat_" + output_filelabel + ".nc") iodriver.advanced_field_writer( additional_connect_redirect_lat_index_filename, additional_connect_redirect_lat_index, fieldname='additional_connect_redirect_lat_index') additional_connect_redirect_lon_index_filename = path.join( output_filepath, "additional_connect_ri_lon_" + output_filelabel + ".nc") iodriver.advanced_field_writer( additional_connect_redirect_lon_index_filename, additional_connect_redirect_lon_index, fieldname='additional_connect_redirect_lon_index') flood_local_redirect_filename = path.join( output_filepath, "flood_local_r_" + output_filelabel + ".nc") iodriver.advanced_field_writer(flood_local_redirect_filename, flood_local_redirect, fieldname='flood_local_redirect') connect_local_redirect_filename = path.join( output_filepath, "connect_local_r_" + output_filelabel + ".nc") iodriver.advanced_field_writer(connect_local_redirect_filename, connect_local_redirect, fieldname='connect_local_redirect') additional_flood_local_redirect_filename = path.join( output_filepath, "additional_flood_local_r_" + output_filelabel + ".nc") iodriver.advanced_field_writer(additional_flood_local_redirect_filename, additional_flood_local_redirect, fieldname='additional_flood_local_redirect') additional_connect_local_redirect_filename = path.join( output_filepath, "additional_connect_local_r_" + output_filelabel + ".nc") iodriver.advanced_field_writer( additional_connect_local_redirect_filename, additional_connect_local_redirect, fieldname='additional_connect_local_redirect') merge_points_filename = path.join( output_filepath, "merge_points_" + output_filelabel + ".nc") iodriver.advanced_field_writer(merge_points_filename, merge_points, fieldname='merge_points') lake_centers_filename = path.join( output_filepath, "lake_centers_" + output_filelabel + ".nc") iodriver.advanced_field_writer(lake_centers_filename, input_minima, 
fieldname="lake_centers") individual_field_filenames = [ connection_volume_thresholds_filename, flood_volume_thresholds_filename, flood_next_cell_lat_index_filename, flood_next_cell_lon_index_filename, connect_next_cell_lat_index_filename, connect_next_cell_lon_index_filename, flood_force_merge_lat_index_filename, flood_force_merge_lon_index_filename, connect_force_merge_lat_index_filename, connect_force_merge_lon_index_filename, flood_redirect_lat_index_filename, flood_redirect_lon_index_filename, connect_redirect_lat_index_filename, connect_redirect_lon_index_filename, additional_flood_redirect_lat_index_filename, additional_flood_redirect_lon_index_filename, additional_connect_redirect_lat_index_filename, additional_connect_redirect_lon_index_filename, flood_local_redirect_filename, connect_local_redirect_filename, additional_flood_local_redirect_filename, additional_connect_local_redirect_filename, merge_points_filename, lake_centers_filename ] cdo_inst = cdo.Cdo() cdo_inst.merge(input=" ".join(individual_field_filenames), output=combined_output_filename) for individual_field_filename in individual_field_filenames: os.remove(individual_field_filename) if output_basin_catchment_nums_filepath is not None: iodriver.advanced_field_writer(output_basin_catchment_nums_filepath, basin_catchment_numbers, fieldname="basin_catchment_numbers")
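# Hypothetical driver invocation sketch: every path, label and fieldname below
# is a placeholder. As the code above shows, the individual per-field NetCDF
# files are merged into combined_output_filename with CDO and then deleted, so
# only the combined file (plus, optionally, the basin catchment numbers file)
# remains on disk.
#
# advanced_basin_evaluation_driver(
#     input_minima_file="/path/to/minima.nc",
#     input_minima_fieldname="minima",
#     input_raw_orography_file="/path/to/raw_orog.nc",
#     input_raw_orography_fieldname="z",
#     input_corrected_orography_file="/path/to/corrected_orog.nc",
#     input_corrected_orography_fieldname="z",
#     input_cell_areas_file="/path/to/cell_areas.nc",
#     input_cell_areas_fieldname="cell_areas",
#     input_prior_fine_rdirs_file="/path/to/fine_rdirs.nc",
#     input_prior_fine_rdirs_fieldname="rdirs",
#     input_prior_fine_catchments_file="/path/to/fine_catchments.nc",
#     input_prior_fine_catchments_fieldname="catchments",
#     input_coarse_catchment_nums_file="/path/to/coarse_catchments.nc",
#     input_coarse_catchment_nums_fieldname="catchments",
#     input_coarse_rdirs_file="/path/to/coarse_rdirs.nc",
#     input_coarse_rdirs_fieldname="rdirs",
#     combined_output_filename="/path/to/basin_parameters.nc",
#     output_filepath="/path/to/workdir",
#     output_filelabel="example",
#     output_basin_catchment_nums_filepath="/path/to/basin_catchment_nums.nc")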
def no_intermediaries_combine_hydrosheds_plus_rdirs_from_corrected_orog_driver(
        self):
    """Combines Hydrosheds river directions with those from a corrected
    orography and possibly removes sinks"""

    config = self._read_and_validate_config()
    final_loops_log_filename = path.splitext(
        self.output_catchments_filename)[0] + '_loops.log'
    if self.rerun_post_processing_only is not None:
        final_rdirs = \
            iodriver.advanced_field_loader(self.rerun_post_processing_only,
                                           field_type="RiverDirections",
                                           fieldname=config.get(
                                               "input_fieldname_options",
                                               "river_directions_to_reprocess_fieldname"))
    else:
        orography_filename = config.get(
            "input_options", "ten_minute_corrected_orography_filename")
        orography = iodriver.advanced_field_loader(
            orography_filename,
            field_type='Orography',
            fieldname=config.get("input_fieldname_options",
                                 "ten_minute_corrected_orography_fieldname"))
        hydrosheds_rdirs_au_af_sa_10min_filename = \
            config.get("input_options",
                       "ten_minute_hydrosheds_au_auf_sa_river_directions_filename")
        hydrosheds_rdirs = \
            iodriver.advanced_field_loader(
                hydrosheds_rdirs_au_af_sa_10min_filename,
                field_type="RiverDirections",
                fieldname=config.get(
                    "input_fieldname_options",
                    "ten_minute_hydrosheds_au_auf_sa_river_directions_fieldname"))
        second_intermediate_loops_log_filename = tempfile.mkstemp(
            suffix=".txt", prefix="loops_log_second_int", dir="")[1]
        truesinks_dummy = field.makeEmptyField("Generic", np.bool_,
                                               grid_type="LatLong10min")
        truesinks_dummy.set_all(False)
        if self.true_sinks_filename is not None:
            use_true_sinks = True
            truesinks = iodriver.advanced_field_loader(
                self.true_sinks_filename,
                field_type="Generic",
                fieldname=config.get("input_fieldname_options",
                                     "input_truesinks_fieldname"))
        else:
            use_true_sinks = False
            if config.getboolean(
                    "general_options",
                    "replace_internal_basins_with_rdirs_with_truesinks"):
                warnings.warn(
                    "Option replace_internal_basins_with_rdirs_with_truesinks "
                    "ignored when no true sinks file is specified")
        first_intermediate_rdirs = field.makeEmptyField(
            "RiverDirections", np.float64, grid_type="LatLong10min")
        if use_true_sinks:
            first_intermediate_rdirs_no_sinks = field.makeEmptyField(
                "RiverDirections", np.float64, grid_type="LatLong10min")
        ls_mask = iodriver.advanced_field_loader(
            self.ls_mask_filename,
            field_type="Generic",
            fieldname=config.get("input_fieldname_options",
                                 "input_landsea_mask_fieldname"))
        ls_mask.set_data(
            np.ascontiguousarray(ls_mask.get_data(), dtype=np.int32))
        next_cell_lat_index_in_dummy = np.zeros(ls_mask.get_data().shape,
                                                dtype=np.int32, order='C')
        next_cell_lon_index_in_dummy = np.zeros(ls_mask.get_data().shape,
                                                dtype=np.int32, order='C')
        catchment_nums_dummy = np.zeros(ls_mask.get_data().shape,
                                        dtype=np.int32, order='C')
        fill_sinks_wrapper.fill_sinks_cpp_func(
            orography_array=np.ascontiguousarray(orography.get_data(), #@UndefinedVariable
                                                 dtype=np.float64),
            method=4,
            use_ls_mask=True,
            landsea_in=np.ascontiguousarray(ls_mask.get_data(),
                                            dtype=np.int32),
            set_ls_as_no_data_flag=False,
            use_true_sinks=False,
            true_sinks_in=np.ascontiguousarray(truesinks_dummy.get_data(),
                                               dtype=np.int32),
            next_cell_lat_index_in=next_cell_lat_index_in_dummy,
            next_cell_lon_index_in=next_cell_lon_index_in_dummy,
            rdirs_in=first_intermediate_rdirs.get_data()
                     if not use_true_sinks else
                     first_intermediate_rdirs_no_sinks.get_data(),
            catchment_nums_in=catchment_nums_dummy,
            prefer_non_diagonal_initial_dirs=False)
        if use_true_sinks:
            fill_sinks_wrapper.fill_sinks_cpp_func(
                orography_array=np.ascontiguousarray(orography.get_data(), #@UndefinedVariable
                                                     dtype=np.float64),
                method=4,
                use_ls_mask=True,
                landsea_in=np.ascontiguousarray(ls_mask.get_data(),
                                                dtype=np.int32),
                set_ls_as_no_data_flag=False,
                use_true_sinks=True,
                true_sinks_in=np.ascontiguousarray(truesinks.get_data(),
                                                   dtype=np.int32),
                next_cell_lat_index_in=next_cell_lat_index_in_dummy,
                next_cell_lon_index_in=next_cell_lon_index_in_dummy,
                rdirs_in=first_intermediate_rdirs.get_data(),
                catchment_nums_in=catchment_nums_dummy,
                prefer_non_diagonal_initial_dirs=False)
        second_intermediate_rdirs = utilities.splice_rdirs(
            rdirs_matching_ls_mask=first_intermediate_rdirs,
            ls_mask=ls_mask,
            other_rdirs=hydrosheds_rdirs)
        second_intermediate_catchments = comp_catchs.compute_catchments_cpp(
            second_intermediate_rdirs.get_data(),
            second_intermediate_loops_log_filename)
        second_intermediate_catchments = field.Field(
            comp_catchs.renumber_catchments_by_size(
                second_intermediate_catchments,
                second_intermediate_loops_log_filename),
            grid="LatLong10min")
        if config.getboolean("general_options", "keep_all_internal_basins"):
            third_intermediate_rdirs = second_intermediate_rdirs
        else:
            third_intermediate_rdirs = \
                utilities.remove_endorheic_basins(
                    rdirs=second_intermediate_rdirs,
                    catchments=second_intermediate_catchments,
                    rdirs_without_endorheic_basins=
                    first_intermediate_rdirs_no_sinks
                    if (use_true_sinks and
                        not config.getboolean(
                            'general_options',
                            'replace_internal_basins_with_rdirs_with_truesinks'))
                    else first_intermediate_rdirs,
                    replace_only_catchments=(
                        [int(value) for value in
                         config.get('general_options',
                                    'replace_only_catchments').split(",")]
                        if config.get('general_options',
                                      'replace_only_catchments') else []),
                    exclude_catchments=(
                        [int(value) for value in
                         config.get('general_options',
                                    'exclude_catchments').split(",")]
                        if config.get('general_options',
                                      'exclude_catchments') else []))
        third_intermediate_flowtocell = field.CumulativeFlow(
            create_hypothetical_river_paths_map(
                riv_dirs=third_intermediate_rdirs.get_data(),
                lsmask=None,
                use_f2py_func=True,
                use_f2py_sparse_iterator=True,
                nlat=1080, nlong=2160),
            grid='LatLong10min')
        third_intermediate_rdirs.make_contiguous()
        third_intermediate_flowtocell.make_contiguous()
        first_intermediate_rdirs.make_contiguous()
        if use_true_sinks:
            first_intermediate_rdirs_no_sinks.make_contiguous()
        final_rdirs = utilities.replace_streams_downstream_from_loop(
            third_intermediate_rdirs,
            cumulative_flow=third_intermediate_flowtocell,
            other_rdirs=first_intermediate_rdirs if not use_true_sinks
            else first_intermediate_rdirs_no_sinks)
    final_catchments = comp_catchs.compute_catchments_cpp(
        final_rdirs.get_data(), final_loops_log_filename)
    final_catchments = field.Field(
        comp_catchs.renumber_catchments_by_size(final_catchments,
                                                final_loops_log_filename),
        grid="LatLong10min")
    final_flowtocell = field.CumulativeFlow(
        create_hypothetical_river_paths_map(
            riv_dirs=final_rdirs.get_data(),
            lsmask=None,
            use_f2py_func=True,
            use_f2py_sparse_iterator=True,
            nlat=1080, nlong=2160),
        grid='LatLong10min')
    if self.rerun_post_processing_only is None:
        iodriver.advanced_field_writer(
            self.output_rdirs_filename, final_rdirs,
            fieldname=config.get("output_fieldname_options",
                                 "output_river_directions_fieldname"))
    iodriver.advanced_field_writer(self.output_catchments_filename,
                                   final_catchments,
                                   fieldname=config.get(
                                       "output_fieldname_options",
                                       "output_catchments_fieldname"))
    iodriver.advanced_field_writer(self.output_cumulative_flow_filename,
                                   final_flowtocell,
                                   fieldname=config.get(
                                       "output_fieldname_options",
                                       "output_cumulative_flow_fieldname"))
    if self.rerun_post_processing_only is None:
        os.remove(second_intermediate_loops_log_filename)
def generate_test_data(self): coarse_grid = grid.makeGrid(grid_type="LatLong", nlat=4, nlong=4) fine_grid = grid.makeGrid(grid_type="LatLong", nlat=20, nlong=20) fine_shape = (20, 20) input_minima = field.Field( np.array([[ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, True, False, False, False, False, False, False, False, False, True, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ]]), fine_grid) input_raw_orography = field.Field( np.array([[ 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0 ], [ 9.0, 9.0, 9.0, 3.0, 3.0, 3.0, 9.0, 1.0, 1.0, 9.0, 1.0, 1.0, 1.0, 1.0, 9.0, 9.0, 2.0, 2.0, 2.0, 9.0 ], [ 9.0, 1.0, 9.0, 3.0, 3.0, 3.0, 6.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 7.0, 7.0, 2.0, 2.0, 2.0, 9.0 ], [ 9.0, 1.0, 9.0, 3.0, 3.0, 3.0, 9.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 9.0, 9.0, 2.0, 2.0, 2.0, 9.0 ], [ 9.0, 1.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 1.0, 
1.0, 1.0, 9.0, 9.0, 2.0, 2.0, 2.0, 9.0 ], [ 9.0, 1.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 1.0, 1.0, 1.0, 9.0, 9.0, 9.0, 6.0, 9.0, 9.0 ], [ 9.0, 1.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 5.0, 9.0, 9.0, 9.0, 3.0, 3.0, 3.0, 9.0 ], [ 9.0, 1.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 4.0, 4.0, 4.0, 9.0, 9.0, 3.0, 3.0, 3.0, 9.0 ], [ 9.0, 1.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 4.0, 4.0, 4.0, 9.0, 9.0, 3.0, 3.0, 3.0, 9.0 ], [ 9.0, 1.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 4.0, 4.0, 4.0, 9.0, 9.0, 3.0, 3.0, 3.0, 9.0 ], [ 9.0, 1.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 4.0, 4.0, 4.0, 9.0, 9.0, 3.0, 3.0, 3.0, 9.0 ], [ 9.0, 1.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 4.0, 4.0, 4.0, 9.0, 9.0, 3.0, 3.0, 3.0, 9.0 ], [ 9.0, 1.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 4.0, 4.0, 4.0, 9.0, 9.0, 3.0, 3.0, 3.0, 9.0 ], [ 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 8.0, 9.0 ], [ 9.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 9.0, 9.0, 9.0, 9.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0 ], [ 9.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 9.0, 7.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 9.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 9.0, 7.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 9.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 8.0, 7.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 7.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 7.0, 0.0, 0.0, 0.0, 0.0, 0.0 ]]), fine_grid) input_corrected_orography = field.Field( np.array([[ 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0 ], [ 9.0, 9.0, 9.0, 3.0, 3.0, 3.0, 9.0, 1.0, 1.0, 9.0, 1.0, 1.0, 1.0, 1.0, 9.0, 9.0, 2.0, 2.0, 2.0, 9.0 ], [ 9.0, 1.0, 9.0, 3.0, 3.0, 3.0, 6.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 7.0, 7.0, 2.0, 2.0, 2.0, 9.0 ], [ 9.0, 1.0, 9.0, 3.0, 3.0, 3.0, 9.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 9.0, 9.0, 2.0, 2.0, 2.0, 9.0 ], [ 9.0, 1.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 1.0, 1.0, 1.0, 9.0, 9.0, 2.0, 2.0, 2.0, 9.0 ], [ 9.0, 1.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 1.0, 1.0, 1.0, 9.0, 9.0, 9.0, 6.0, 9.0, 9.0 ], [ 9.0, 1.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 5.0, 9.0, 9.0, 9.0, 3.0, 3.0, 3.0, 9.0 ], [ 9.0, 1.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 4.0, 4.0, 4.0, 9.0, 9.0, 3.0, 3.0, 3.0, 9.0 ], [ 9.0, 1.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 4.0, 4.0, 4.0, 9.0, 9.0, 3.0, 3.0, 3.0, 9.0 ], [ 9.0, 1.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 4.0, 4.0, 4.0, 9.0, 9.0, 3.0, 3.0, 3.0, 9.0 ], [ 9.0, 1.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 4.0, 4.0, 4.0, 9.0, 9.0, 3.0, 3.0, 3.0, 9.0 ], [ 9.0, 1.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 4.0, 4.0, 4.0, 9.0, 9.0, 3.0, 3.0, 3.0, 9.0 ], [ 9.0, 1.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 4.0, 4.0, 4.0, 9.0, 9.0, 3.0, 3.0, 3.0, 9.0 ], [ 9.0, 7.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 8.0, 9.0 ], [ 9.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 9.0, 9.0, 9.0, 9.0, 7.0, 7.0, 7.0, 7.0, 7.0, 7.0 ], [ 9.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 9.0, 7.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 9.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 9.0, 7.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 9.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 6.0, 8.0, 7.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 9.0, 9.0, 9.0, 
9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 7.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], [ 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 7.0, 0.0, 0.0, 0.0, 0.0, 0.0 ]]), fine_grid) input_cell_areas = field.Field( np.ones(fine_shape, dtype=np.float64, order='C'), fine_grid) input_lsmask = field.Field( np.array([[ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ]]), fine_grid) input_truesinks = field.Field( np.array([[ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False 
], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ], [ False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False ]]), fine_grid) input_prior_fine_rdirs = determine_river_directions( input_corrected_orography, input_lsmask, input_truesinks, always_flow_to_sea=True, use_diagonal_nbrs=True, mark_pits_as_true_sinks=True) input_prior_fine_catchments = \ field.Field(compute_catchments_cpp(input_prior_fine_rdirs.get_data(), "/Users/thomasriddick/Documents/" "data/temp/loop_from_test_data_gen.log"), fine_grid) input_coarse_catchment_nums = field.Field( np.array([[4, 1, 2, 3], [4, 4, 6, 5], [4, 4, 6, 5], [7, 7, 7, 8]]), coarse_grid) input_coarse_rdirs = field.Field( np.array([[5, 5, 5, 5], [2, 2, 5, 5], [5, 4, 8, 8], [8, 7, 4, 0]]), coarse_grid) connection_volume_thresholds = field.Field( np.zeros(fine_shape, dtype=np.float64, order='C'), fine_grid) flood_volume_thresholds = field.Field( np.zeros(fine_shape, dtype=np.float64, order='C'), fine_grid) flood_next_cell_lat_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) 
flood_next_cell_lon_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) connect_next_cell_lat_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) connect_next_cell_lon_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) flood_force_merge_lat_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) flood_force_merge_lon_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) connect_force_merge_lat_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) connect_force_merge_lon_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) flood_redirect_lat_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) flood_redirect_lon_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) connect_redirect_lat_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) connect_redirect_lon_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) additional_flood_redirect_lat_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) additional_flood_redirect_lon_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) additional_connect_redirect_lat_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) additional_connect_redirect_lon_index = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) flood_local_redirect = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) connect_local_redirect = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) additional_flood_local_redirect = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) additional_connect_local_redirect = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) merge_points = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) basin_catchment_numbers = field.Field( np.zeros(fine_shape, dtype=np.int32, order='C'), fine_grid) evaluate_basins_wrapper.evaluate_basins( minima_in_int=np.ascontiguousarray(input_minima.get_data(), dtype=np.int32), raw_orography_in=np.ascontiguousarray( input_raw_orography.get_data(), dtype=np.float64), corrected_orography_in=np.ascontiguousarray( input_corrected_orography.get_data(), dtype=np.float64), cell_areas_in=np.ascontiguousarray(input_cell_areas.get_data(), dtype=np.float64), connection_volume_thresholds_in=connection_volume_thresholds. get_data(), flood_volume_thresholds_in=flood_volume_thresholds.get_data(), prior_fine_rdirs_in=np.ascontiguousarray( input_prior_fine_rdirs.get_data(), dtype=np.float64), prior_coarse_rdirs_in=np.ascontiguousarray( input_coarse_rdirs.get_data(), dtype=np.float64), prior_fine_catchments_in=np.ascontiguousarray( input_prior_fine_catchments.get_data(), dtype=np.int32), coarse_catchment_nums_in=np.ascontiguousarray( input_coarse_catchment_nums.get_data(), dtype=np.int32), flood_next_cell_lat_index_in=flood_next_cell_lat_index.get_data(), flood_next_cell_lon_index_in=flood_next_cell_lon_index.get_data(), connect_next_cell_lat_index_in=connect_next_cell_lat_index. get_data(), connect_next_cell_lon_index_in=connect_next_cell_lon_index. get_data(), flood_force_merge_lat_index_in=flood_force_merge_lat_index. get_data(), flood_force_merge_lon_index_in=flood_force_merge_lon_index. 
get_data(), connect_force_merge_lat_index_in=connect_force_merge_lat_index. get_data(), connect_force_merge_lon_index_in=connect_force_merge_lon_index. get_data(), flood_redirect_lat_index_in=flood_redirect_lat_index.get_data(), flood_redirect_lon_index_in=flood_redirect_lon_index.get_data(), connect_redirect_lat_index_in=connect_redirect_lat_index.get_data( ), connect_redirect_lon_index_in=connect_redirect_lon_index.get_data( ), additional_flood_redirect_lat_index_in= additional_flood_redirect_lat_index.get_data(), additional_flood_redirect_lon_index_in= additional_flood_redirect_lon_index.get_data(), additional_connect_redirect_lat_index_in= additional_connect_redirect_lat_index.get_data(), additional_connect_redirect_lon_index_in= additional_connect_redirect_lon_index.get_data(), flood_local_redirect_out_int=flood_local_redirect.get_data(), connect_local_redirect_out_int=connect_local_redirect.get_data(), additional_flood_local_redirect_out_int= additional_flood_local_redirect.get_data(), additional_connect_local_redirect_out_int= additional_connect_local_redirect.get_data(), merge_points_out_int=merge_points.get_data(), basin_catchment_numbers_in=basin_catchment_numbers.get_data()) self.print_field_for_julia(connection_volume_thresholds, "connection_volume_thresholds", "Float64", grid_in="lake_grid") self.print_field_for_julia(flood_volume_thresholds, "flood_volume_thresholds", "Float64", grid_in="lake_grid") self.print_field_for_julia(flood_next_cell_lat_index, "flood_next_cell_lat_index", "Int64", grid_in="lake_grid") self.print_field_for_julia(flood_next_cell_lon_index, "flood_next_cell_lon_index", "Int64", grid_in="lake_grid") self.print_field_for_julia(connect_next_cell_lat_index, "connect_next_cell_lat_index", "Int64", grid_in="lake_grid") self.print_field_for_julia(connect_next_cell_lon_index, "connect_next_cell_lon_index", "Int64", grid_in="lake_grid") self.print_field_for_julia(flood_force_merge_lat_index, "flood_force_merge_lat_index", "Int64", grid_in="lake_grid") self.print_field_for_julia(flood_force_merge_lon_index, "flood_force_merge_lon_index", "Int64", grid_in="lake_grid") self.print_field_for_julia(connect_force_merge_lat_index, "connect_force_merge_lat_index", "Int64", grid_in="lake_grid") self.print_field_for_julia(connect_force_merge_lon_index, "connect_force_merge_lon_index", "Int64", grid_in="lake_grid") self.print_field_for_julia(flood_redirect_lat_index, "flood_redirect_lat_index", "Int64", grid_in="lake_grid") self.print_field_for_julia(flood_redirect_lon_index, "flood_redirect_lon_index", "Int64", grid_in="lake_grid") self.print_field_for_julia(connect_redirect_lat_index, "connect_redirect_lat_index", "Int64", grid_in="lake_grid") self.print_field_for_julia(connect_redirect_lon_index, "connect_redirect_lon_index", "Int64", grid_in="lake_grid") self.print_field_for_julia(additional_flood_redirect_lat_index, "additional_flood_redirect_lat_index", "Int64", grid_in="lake_grid") self.print_field_for_julia(additional_flood_redirect_lon_index, "additional_flood_redirect_lon_index", "Int64", grid_in="lake_grid") self.print_field_for_julia(additional_connect_redirect_lat_index, "additional_connect_redirect_lat_index", "Int64", grid_in="lake_grid") self.print_field_for_julia(additional_connect_redirect_lon_index, "additional_connect_redirect_lon_index", "Int64", grid_in="lake_grid") self.print_field_for_julia(flood_local_redirect, "flood_local_redirect", "Bool", grid_in="lake_grid") self.print_field_for_julia(connect_local_redirect, "connect_local_redirect", "Bool", 
grid_in="lake_grid") self.print_field_for_julia(additional_flood_local_redirect, "additional_flood_local_redirect", "Bool", grid_in="lake_grid") self.print_field_for_julia(additional_connect_local_redirect, "additional_connect_local_redirect", "Bool", grid_in="lake_grid") self.print_field_for_julia(merge_points, "merge_points", "Int64", grid_in="lake_grid") self.print_field_for_julia(input_minima, "input_minima", "Bool", grid_in="lake_grid")
def connect_coarse_lake_catchments(coarse_catchments, lake_centers,
                                   basin_catchment_numbers,
                                   flood_next_cell_index_lat,
                                   flood_next_cell_index_lon,
                                   flood_redirect_lat, flood_redirect_lon,
                                   additional_flood_redirect_lat,
                                   additional_flood_redirect_lon,
                                   local_redirect, additional_local_redirect,
                                   merge_types, river_directions,
                                   scale_factor=3,
                                   correct_cumulative_flow=False,
                                   cumulative_flow=None):
    if correct_cumulative_flow:
        if cumulative_flow is None or river_directions is None:
            raise RuntimeError("Required input files for cumulative flow "
                               "correction not provided")
        old_coarse_catchments = coarse_catchments.copy()
    lake_centers_array = np.argwhere(lake_centers.get_data())
    lake_centers_list = [lake_centers_array[i, :].tolist()
                         for i in range(lake_centers_array.shape[0])]
    overflow_catchments = field.makeEmptyField(field_type='Generic',
                                               dtype=np.int64,
                                               grid_type=lake_centers.get_grid())
    overflow_coords_lats = field.makeEmptyField(field_type='Generic',
                                                dtype=np.int64,
                                                grid_type=lake_centers.get_grid())
    overflow_coords_lons = field.makeEmptyField(field_type='Generic',
                                                dtype=np.int64,
                                                grid_type=lake_centers.get_grid())
    for lake_center_coords in lake_centers_list:
        basin_number = basin_catchment_numbers.get_data()[tuple(lake_center_coords)]
        while True:
            secondary_merge_coords = np.argwhere(
                np.logical_and(
                    np.logical_or(merge_types.get_data() == 10,
                                  merge_types.get_data() == 11),
                    basin_catchment_numbers.get_data() == basin_number))[0, :].tolist()
            double_merge = np.any(
                np.logical_and(merge_types.get_data() == 11,
                               basin_catchment_numbers.get_data() == basin_number))
            basin_number = basin_catchment_numbers.get_data()[
                flood_next_cell_index_lat.get_data()[tuple(secondary_merge_coords)],
                flood_next_cell_index_lon.get_data()[tuple(secondary_merge_coords)]]
            if basin_number == 0:
                is_local_redirect = \
                    ((local_redirect.get_data()[tuple(secondary_merge_coords)])
                     if not double_merge else
                     (additional_local_redirect.get_data()[tuple(secondary_merge_coords)]))
                if is_local_redirect:
                    if double_merge:
                        basin_number = basin_catchment_numbers.get_data()[
                            additional_flood_redirect_lat.get_data()[tuple(secondary_merge_coords)],
                            additional_flood_redirect_lon.get_data()[tuple(secondary_merge_coords)]]
                    else:
                        basin_number = basin_catchment_numbers.get_data()[
                            flood_redirect_lat.get_data()[tuple(secondary_merge_coords)],
                            flood_redirect_lon.get_data()[tuple(secondary_merge_coords)]]
                else:
                    if double_merge:
                        overflow_catchment = coarse_catchments.get_data()[
                            additional_flood_redirect_lat.get_data()[tuple(secondary_merge_coords)],
                            additional_flood_redirect_lon.get_data()[tuple(secondary_merge_coords)]]
                        overflow_coords = (
                            additional_flood_redirect_lat.get_data()[tuple(secondary_merge_coords)],
                            additional_flood_redirect_lon.get_data()[tuple(secondary_merge_coords)])
                    else:
                        overflow_catchment = coarse_catchments.get_data()[
                            flood_redirect_lat.get_data()[tuple(secondary_merge_coords)],
                            flood_redirect_lon.get_data()[tuple(secondary_merge_coords)]]
                        overflow_coords = (
                            flood_redirect_lat.get_data()[tuple(secondary_merge_coords)],
                            flood_redirect_lon.get_data()[tuple(secondary_merge_coords)])
                    break
        overflow_catchments.get_data()[tuple(lake_center_coords)] = overflow_catchment
        overflow_coords_lats.get_data()[tuple(lake_center_coords)] = overflow_coords[0]
        overflow_coords_lons.get_data()[tuple(lake_center_coords)] = overflow_coords[1]
    #specific to latlon grid
    sink_points_array = np.argwhere(np.logical_or(river_directions.get_data() == 5,
                                                  river_directions.get_data() == -2))
    sink_points_list = [sink_points_array[i, :].tolist()
                        for i in range(sink_points_array.shape[0])]
    catchment_trees = CatchmentTrees()
    sink_point_cumulative_flow_redirect_lat = field.makeEmptyField(
        field_type='Generic', dtype=np.int64,
        grid_type=coarse_catchments.get_grid())
    sink_point_cumulative_flow_redirect_lon = field.makeEmptyField(
        field_type='Generic', dtype=np.int64,
        grid_type=coarse_catchments.get_grid())
    for sink_point in sink_points_list:
        sink_point_coarse_catchment = coarse_catchments.get_data()[tuple(sink_point)]
        #Specific to lat-lon grids
        overflow_catch_fine_cells_in_coarse_cell = overflow_catchments.get_data()[
            sink_point[0]*scale_factor:(sink_point[0]+1)*scale_factor,
            sink_point[1]*scale_factor:(sink_point[1]+1)*scale_factor]
        overflow_coords_lat_fine_cells_in_coarse_cell = overflow_coords_lats.get_data()[
            sink_point[0]*scale_factor:(sink_point[0]+1)*scale_factor,
            sink_point[1]*scale_factor:(sink_point[1]+1)*scale_factor]
        overflow_coords_lon_fine_cells_in_coarse_cell = overflow_coords_lons.get_data()[
            sink_point[0]*scale_factor:(sink_point[0]+1)*scale_factor,
            sink_point[1]*scale_factor:(sink_point[1]+1)*scale_factor]
        overflow_catchment_list = overflow_catch_fine_cells_in_coarse_cell[
            overflow_catch_fine_cells_in_coarse_cell != 0].tolist()
        if not overflow_catchment_list:
            continue
        overflow_catchment_counters = Counter(overflow_catchment_list)
        highest_count = max(overflow_catchment_counters.values())
        overflow_catchment = [value for value, count in
                              overflow_catchment_counters.items()
                              if count == highest_count][0]
        if sink_point_coarse_catchment == overflow_catchment:
            continue
        overflow_catchment_fine_coords_within_coarse_cell = \
            tuple(np.argwhere(overflow_catch_fine_cells_in_coarse_cell ==
                              overflow_catchment)[0, :].tolist())
        sink_point_cumulative_flow_redirect_lat.get_data()[tuple(sink_point)] = \
            overflow_coords_lat_fine_cells_in_coarse_cell[
                overflow_catchment_fine_coords_within_coarse_cell]
        sink_point_cumulative_flow_redirect_lon.get_data()[tuple(sink_point)] = \
            overflow_coords_lon_fine_cells_in_coarse_cell[
                overflow_catchment_fine_coords_within_coarse_cell]
        catchment_trees.add_link(sink_point_coarse_catchment, overflow_catchment)
    for supercatchment_number, tree in catchment_trees.primary_catchments.items():
        for subcatchments_num in tree.get_all_subcatchment_nums():
            coarse_catchments.get_data()[subcatchments_num ==
                                         coarse_catchments.get_data()] = \
                supercatchment_number
    if correct_cumulative_flow:
        while catchment_trees.all_catchments:
            upstream_catchments = catchment_trees.pop_leaves()
            for upstream_catchment in upstream_catchments:
                if np.any(np.logical_and(
                        np.logical_or(river_directions.get_data() == 5,
                                      river_directions.get_data() == -2),
                        old_coarse_catchments.get_data() == upstream_catchment)):
                    upstream_catchment_center = \
                        tuple(np.argwhere(np.logical_and(
                            np.logical_or(river_directions.get_data() == 5,
                                          river_directions.get_data() == -2),
                            old_coarse_catchments.get_data() ==
                            upstream_catchment))[0, :].tolist())
                    update_cumulative_flow(
                        upstream_catchment_center,
                        (sink_point_cumulative_flow_redirect_lat.get_data()[upstream_catchment_center],
                         sink_point_cumulative_flow_redirect_lon.get_data()[upstream_catchment_center]),
                        cumulative_flow, river_directions)
        return field.Field(cc.renumber_catchments_by_size(coarse_catchments.get_data()),
                           type="Generic",
                           grid=coarse_catchments.get_grid()), cumulative_flow
    return field.Field(cc.renumber_catchments_by_size(coarse_catchments.get_data()),
                       type="Generic",
                       grid=coarse_catchments.get_grid()), None
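# Return convention note (restating what the code above does): the function
# always returns a two-element tuple of the renumbered coarse catchments field
# and a cumulative flow field. The second element is only meaningful when
# correct_cumulative_flow=True; otherwise it is None. A caller might unpack it
# along these lines (hypothetical variable names, illustration only):
#
# connected_catchments, corrected_cumulative_flow = \
#     connect_coarse_lake_catchments(coarse_catchments, lake_centers,
#                                    basin_catchment_numbers,
#                                    flood_next_cell_index_lat,
#                                    flood_next_cell_index_lon,
#                                    flood_redirect_lat, flood_redirect_lon,
#                                    additional_flood_redirect_lat,
#                                    additional_flood_redirect_lon,
#                                    local_redirect, additional_local_redirect,
#                                    merge_types, river_directions,
#                                    scale_factor=3)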