def testLoadingFieldWithRXGridDesc(self):
    """Test loading from a file using a rDXxDY grid description"""
    catchment_file = join(data_dir, "HDdata", "catchmentmaps", "upscaled",
                          "catchmentmap_unsorted_ICE5G_data_ALG4_sinkless_0k_"
                          "20160714_121938.nc")
    # Load using an inline CDO-style r-grid description and let the driver
    # reorient the data.
    loaded_field = iodriver.advanced_field_loader(filename=catchment_file,
                                                  grid_desc="r0.5x0.5",
                                                  adjust_orientation=True)
    self.assertEqual(loaded_field.data[62][517], 14149.0,
                     "Field data has not been loaded and oriented properly")
    # The first longitude/latitude points pin down the orientation of the grid.
    self.assertAlmostEqual(loaded_field.grid.lon_points[0], -179.75,
                           msg="Field data grid is not correctly oriented")
    self.assertAlmostEqual(loaded_field.grid.lat_points[0], 89.75,
                           msg="Field data grid is not correctly oriented")
def testLoadingFieldWithGridFileDesc(self):
    """Test loading from a file using a grid description file"""
    # Write a half-degree lon-lat CDO grid description to the scratch path.
    grid_desc_lines = """
gridtype = lonlat
gridsize = 259200
xsize = 720
ysize = 360
xfirst = -179.75
xinc = 0.5
yfirst = 89.75
yinc = -0.5
"""
    with open(self.half_degree_grid_desc, 'w') as grid_desc_file:
        grid_desc_file.write(textwrap.dedent(grid_desc_lines))
    catchment_file = join(data_dir, "HDdata", "catchmentmaps", "upscaled",
                          "catchmentmap_unsorted_ICE5G_data_ALG4_sinkless_0k_"
                          "20160714_121938.nc")
    loaded_field = iodriver.advanced_field_loader(
        filename=catchment_file,
        grid_desc_file=self.half_degree_grid_desc,
        adjust_orientation=True)
    self.assertEqual(loaded_field.data[62][157], 14149.0,
                     "Field data has not been loaded and oriented properly")
    # The first longitude/latitude points pin down the orientation of the grid.
    self.assertAlmostEqual(loaded_field.grid.lon_points[0], -179.75,
                           msg="Field data grid is not correctly oriented")
    self.assertAlmostEqual(loaded_field.grid.lat_points[0], 89.75,
                           msg="Field data grid is not correctly oriented")
def advanced_main(filename, fieldname, output_filename, output_fieldname,
                  loop_logfile, use_cpp_alg=True):
    """Compute size-renumbered catchments from a river directions file.

    Arguments:
    filename: str; path of the input river directions netCDF file
    fieldname: str; name of the river directions field in the input file
    output_filename: str; path of the catchment output netCDF file
    output_fieldname: str; name to give the catchment field in the output
    loop_logfile: str; path of the logfile used to record loops found
    use_cpp_alg: bool; use the C++ catchment algorithm (True) or the
        Python one (False)
    Returns: nothing

    Writes the numbered catchment field to output_filename; when the Python
    algorithm is used the catchment types are also checked and logged.
    """
    rdirs = iodriver.advanced_field_loader(filename,
                                           field_type='Generic',
                                           fieldname=fieldname)
    # (Removed an unused nlat, nlon = rdirs.get_grid_dimensions() unpack -
    # the dimensions were never referenced.)
    if use_cpp_alg:
        catchments = compute_catchments_cpp(rdirs.get_data(), loop_logfile)
    else:
        catchment_types, catchments = compute_catchments(
            rdirs.get_data(), loop_logfile)
        # Only the Python algorithm reports catchment types to validate.
        check_catchment_types(catchment_types,
                              logfile=path.splitext(output_filename)[0] +
                              ".log")
    numbered_catchments = field.Field(renumber_catchments_by_size(
        catchments, loop_logfile), grid=rdirs.get_grid())
    iodriver.advanced_field_writer(target_filename=output_filename,
                                   field=numbered_catchments,
                                   fieldname=output_fieldname)
def testFieldWritingAndLoadingWithLatLongFieldWithRotationNoAdjustment(
        self):
    """Round-trip a rotated lat-lon field through the writer and loader."""
    working_field = field.makeEmptyField('Generic',
                                         dtype=np.int64,
                                         grid_type='HD')
    lats = np.linspace(89.75, -89.75, 360, endpoint=True)
    lons = np.linspace(-179.75, 179.75, 720, endpoint=True)
    working_field.set_grid_coordinates((lats, lons))
    # Place distinct markers in each quadrant so any mis-orientation on
    # reload shows up as a data mismatch.
    for (lat_idx, lon_idx), marker in zip([(20, 20), (200, 20),
                                           (20, 200), (200, 200)],
                                          (1, 2, 3, 4)):
        working_field.data[lat_idx, lon_idx] = marker
    working_field.rotate_field_by_a_hundred_and_eighty_degrees()
    test_filepath = os.path.join(self.directory,
                                 "advancedfieldwritingandloadingtest.nc")
    iodriver.advanced_field_writer(test_filepath,
                                   working_field,
                                   fieldname='test_field',
                                   clobber=True)
    # Reload with adjust_orientation disabled; the data should come back
    # exactly as written.
    reloaded_field = iodriver.advanced_field_loader(test_filepath,
                                                    fieldname='test_field',
                                                    adjust_orientation=False)
    np.testing.assert_array_equal(working_field.get_data(),
                                  reloaded_field.get_data())
def advanced_basin_evaluation_driver(
        input_minima_file, input_minima_fieldname,
        input_raw_orography_file, input_raw_orography_fieldname,
        input_corrected_orography_file, input_corrected_orography_fieldname,
        input_cell_areas_file, input_cell_areas_fieldname,
        input_prior_fine_rdirs_file, input_prior_fine_rdirs_fieldname,
        input_prior_fine_catchments_file,
        input_prior_fine_catchments_fieldname,
        input_coarse_catchment_nums_file,
        input_coarse_catchment_nums_fieldname,
        input_coarse_rdirs_file, input_coarse_rdirs_fieldname,
        combined_output_filename, output_filepath, output_filelabel,
        output_basin_catchment_nums_filepath=None):
    """Evaluate lake basins and write the resulting basin parameter fields.

    Arguments (in file/fieldname pairs):
    input_minima_file/fieldname: minima (lake centers) field
    input_raw_orography_file/fieldname: raw fine orography
    input_corrected_orography_file/fieldname: corrected fine orography
    input_cell_areas_file/fieldname: fine grid cell areas
    input_prior_fine_rdirs_file/fieldname: prior fine river directions
    input_prior_fine_catchments_file/fieldname: prior fine catchments
    input_coarse_catchment_nums_file/fieldname: coarse catchment numbers
    input_coarse_rdirs_file/fieldname: coarse river directions
    combined_output_filename: str; path for the merged output file
    output_filepath: str; directory for the individual temporary files
    output_filelabel: str; label inserted into the temporary file names
    output_basin_catchment_nums_filepath: str or None; if given, also write
        the basin catchment numbers field to this path
    Returns: nothing

    Runs the C++ basin evaluation, writes each output field to its own
    netCDF file, merges them with CDO into combined_output_filename and
    deletes the individual files.
    """
    input_minima = iodriver.advanced_field_loader(
        input_minima_file, field_type='Generic',
        fieldname=input_minima_fieldname)
    input_raw_orography = iodriver.advanced_field_loader(
        input_raw_orography_file, field_type='Orography',
        fieldname=input_raw_orography_fieldname)
    input_corrected_orography = iodriver.advanced_field_loader(
        input_corrected_orography_file, field_type='Orography',
        fieldname=input_corrected_orography_fieldname)
    input_cell_areas = iodriver.advanced_field_loader(
        input_cell_areas_file, field_type='Generic',
        fieldname=input_cell_areas_fieldname)
    input_prior_fine_rdirs = iodriver.advanced_field_loader(
        input_prior_fine_rdirs_file, field_type='RiverDirections',
        fieldname=input_prior_fine_rdirs_fieldname)
    input_prior_fine_catchments = iodriver.advanced_field_loader(
        input_prior_fine_catchments_file, field_type='Generic',
        fieldname=input_prior_fine_catchments_fieldname)
    input_coarse_catchment_nums = iodriver.advanced_field_loader(
        input_coarse_catchment_nums_file, field_type='Generic',
        fieldname=input_coarse_catchment_nums_fieldname)
    input_coarse_rdirs = iodriver.advanced_field_loader(
        input_coarse_rdirs_file, field_type='Generic',
        fieldname=input_coarse_rdirs_fieldname)
    fine_grid = input_raw_orography.get_grid()
    fine_shape = input_raw_orography.get_data().shape

    def _new_work_field(dtype):
        # Zero-initialised output field on the fine grid; C order is
        # required by the C++ wrapper.
        return field.Field(np.zeros(fine_shape, dtype=dtype, order='C'),
                           fine_grid)

    connection_volume_thresholds = _new_work_field(np.float64)
    flood_volume_thresholds = _new_work_field(np.float64)
    flood_next_cell_lat_index = _new_work_field(np.int32)
    flood_next_cell_lon_index = _new_work_field(np.int32)
    connect_next_cell_lat_index = _new_work_field(np.int32)
    connect_next_cell_lon_index = _new_work_field(np.int32)
    flood_force_merge_lat_index = _new_work_field(np.int32)
    flood_force_merge_lon_index = _new_work_field(np.int32)
    connect_force_merge_lat_index = _new_work_field(np.int32)
    connect_force_merge_lon_index = _new_work_field(np.int32)
    flood_redirect_lat_index = _new_work_field(np.int32)
    flood_redirect_lon_index = _new_work_field(np.int32)
    connect_redirect_lat_index = _new_work_field(np.int32)
    connect_redirect_lon_index = _new_work_field(np.int32)
    additional_flood_redirect_lat_index = _new_work_field(np.int32)
    additional_flood_redirect_lon_index = _new_work_field(np.int32)
    additional_connect_redirect_lat_index = _new_work_field(np.int32)
    additional_connect_redirect_lon_index = _new_work_field(np.int32)
    flood_local_redirect = _new_work_field(np.int32)
    connect_local_redirect = _new_work_field(np.int32)
    additional_flood_local_redirect = _new_work_field(np.int32)
    additional_connect_local_redirect = _new_work_field(np.int32)
    merge_points = _new_work_field(np.int32)
    # Always allocate the basin catchment numbers work array; previously it
    # was left as None when no output path was given, which made the
    # basin_catchment_numbers.get_data() call below raise AttributeError.
    basin_catchment_numbers = _new_work_field(np.int32)
    evaluate_basins_wrapper.evaluate_basins(
        minima_in_int=np.ascontiguousarray(input_minima.get_data(),
                                           dtype=np.int32),
        raw_orography_in=np.ascontiguousarray(input_raw_orography.get_data(),
                                              dtype=np.float64),
        corrected_orography_in=np.ascontiguousarray(
            input_corrected_orography.get_data(), dtype=np.float64),
        cell_areas_in=np.ascontiguousarray(input_cell_areas.get_data(),
                                           dtype=np.float64),
        connection_volume_thresholds_in=connection_volume_thresholds.
        get_data(),
        flood_volume_thresholds_in=flood_volume_thresholds.get_data(),
        prior_fine_rdirs_in=np.ascontiguousarray(
            input_prior_fine_rdirs.get_data(), dtype=np.float64),
        prior_coarse_rdirs_in=np.ascontiguousarray(
            input_coarse_rdirs.get_data(), dtype=np.float64),
        prior_fine_catchments_in=np.ascontiguousarray(
            input_prior_fine_catchments.get_data(), dtype=np.int32),
        coarse_catchment_nums_in=np.ascontiguousarray(
            input_coarse_catchment_nums.get_data(), dtype=np.int32),
        flood_next_cell_lat_index_in=flood_next_cell_lat_index.get_data(),
        flood_next_cell_lon_index_in=flood_next_cell_lon_index.get_data(),
        connect_next_cell_lat_index_in=connect_next_cell_lat_index.get_data(),
        connect_next_cell_lon_index_in=connect_next_cell_lon_index.get_data(),
        flood_force_merge_lat_index_in=flood_force_merge_lat_index.get_data(),
        flood_force_merge_lon_index_in=flood_force_merge_lon_index.get_data(),
        connect_force_merge_lat_index_in=connect_force_merge_lat_index.
        get_data(),
        connect_force_merge_lon_index_in=connect_force_merge_lon_index.
        get_data(),
        flood_redirect_lat_index_in=flood_redirect_lat_index.get_data(),
        flood_redirect_lon_index_in=flood_redirect_lon_index.get_data(),
        connect_redirect_lat_index_in=connect_redirect_lat_index.get_data(),
        connect_redirect_lon_index_in=connect_redirect_lon_index.get_data(),
        additional_flood_redirect_lat_index_in=
        additional_flood_redirect_lat_index.get_data(),
        additional_flood_redirect_lon_index_in=
        additional_flood_redirect_lon_index.get_data(),
        additional_connect_redirect_lat_index_in=
        additional_connect_redirect_lat_index.get_data(),
        additional_connect_redirect_lon_index_in=
        additional_connect_redirect_lon_index.get_data(),
        flood_local_redirect_out_int=flood_local_redirect.get_data(),
        connect_local_redirect_out_int=connect_local_redirect.get_data(),
        additional_flood_local_redirect_out_int=
        additional_flood_local_redirect.get_data(),
        additional_connect_local_redirect_out_int=
        additional_connect_local_redirect.get_data(),
        merge_points_out_int=merge_points.get_data(),
        basin_catchment_numbers_in=basin_catchment_numbers.get_data())

    def _write_individual_field(filename_prefix, field_to_write, fieldname):
        # Write one output field to its own temporary file and return the
        # filename so it can be merged and later removed.
        individual_filename = path.join(
            output_filepath, filename_prefix + output_filelabel + ".nc")
        iodriver.advanced_field_writer(individual_filename, field_to_write,
                                       fieldname=fieldname)
        return individual_filename

    individual_field_filenames = [
        _write_individual_field("connect_vts_", connection_volume_thresholds,
                                'connection_volume_thresholds'),
        _write_individual_field("flood_vts_", flood_volume_thresholds,
                                'flood_volume_thresholds'),
        _write_individual_field("flood_nci_lat_", flood_next_cell_lat_index,
                                'flood_next_cell_lat_index'),
        _write_individual_field("flood_nci_lon_", flood_next_cell_lon_index,
                                'flood_next_cell_lon_index'),
        _write_individual_field("connect_nci_lat_",
                                connect_next_cell_lat_index,
                                'connect_next_cell_lat_index'),
        _write_individual_field("connect_nci_lon_",
                                connect_next_cell_lon_index,
                                'connect_next_cell_lon_index'),
        _write_individual_field("flood_fmi_lat_", flood_force_merge_lat_index,
                                'flood_force_merge_lat_index'),
        _write_individual_field("flood_fmi_lon_", flood_force_merge_lon_index,
                                'flood_force_merge_lon_index'),
        _write_individual_field("connect_fmi_lat_",
                                connect_force_merge_lat_index,
                                'connect_force_merge_lat_index'),
        _write_individual_field("connect_fmi_lon_",
                                connect_force_merge_lon_index,
                                'connect_force_merge_lon_index'),
        _write_individual_field("flood_ri_lat_", flood_redirect_lat_index,
                                'flood_redirect_lat_index'),
        _write_individual_field("flood_ri_lon_", flood_redirect_lon_index,
                                'flood_redirect_lon_index'),
        _write_individual_field("connect_ri_lat_", connect_redirect_lat_index,
                                'connect_redirect_lat_index'),
        _write_individual_field("connect_ri_lon_", connect_redirect_lon_index,
                                'connect_redirect_lon_index'),
        _write_individual_field("additional_flood_ri_lat_",
                                additional_flood_redirect_lat_index,
                                'additional_flood_redirect_lat_index'),
        _write_individual_field("additional_flood_ri_lon_",
                                additional_flood_redirect_lon_index,
                                'additional_flood_redirect_lon_index'),
        _write_individual_field("additional_connect_ri_lat_",
                                additional_connect_redirect_lat_index,
                                'additional_connect_redirect_lat_index'),
        _write_individual_field("additional_connect_ri_lon_",
                                additional_connect_redirect_lon_index,
                                'additional_connect_redirect_lon_index'),
        _write_individual_field("flood_local_r_", flood_local_redirect,
                                'flood_local_redirect'),
        _write_individual_field("connect_local_r_", connect_local_redirect,
                                'connect_local_redirect'),
        _write_individual_field("additional_flood_local_r_",
                                additional_flood_local_redirect,
                                'additional_flood_local_redirect'),
        _write_individual_field("additional_connect_local_r_",
                                additional_connect_local_redirect,
                                'additional_connect_local_redirect'),
        _write_individual_field("merge_points_", merge_points,
                                'merge_points'),
        _write_individual_field("lake_centers_", input_minima,
                                "lake_centers")
    ]
    # Merge the individual files into one combined file then clean up.
    cdo_inst = cdo.Cdo()
    cdo_inst.merge(input=" ".join(individual_field_filenames),
                   output=combined_output_filename)
    for individual_field_filename in individual_field_filenames:
        os.remove(individual_field_filename)
    if output_basin_catchment_nums_filepath is not None:
        iodriver.advanced_field_writer(output_basin_catchment_nums_filepath,
                                       basin_catchment_numbers,
                                       fieldname="basin_catchment_numbers")
def latest_lake_version_vs_base_version_lakes_comparison():
    """Interactively compare the latest lake-model run against the base run.

    For each date in a fixed short list, loads river directions, glacier
    mask, catchments, cumulative flow, river mouth flow, lake volumes, lake
    basin numbers and corrected orography for both the latest diagnostic
    version (found on disk) and version 0 (the base), plus several static
    reference orographies and the latest true sinks field, then opens an
    InteractiveTimeslicePlots session showing both sets side by side.

    Takes no arguments and returns nothing.
    """
    # NOTE(review): all data paths below are hard-coded to one user's
    # machine - presumably intentional for a personal analysis script.
    colors = ColorPalette('default')
    # Date 0 (present day) plus a short range of years-before-present dates.
    dates = [0]
    dates.extend(list(range(15990, 15980, -10)))
    # Per-date result sequences; "latest" = newest version found on disk,
    # "base" = version 0.
    lsmask_sequence = []
    glacier_mask_sequence = []
    catchment_nums_sequence_latest = []
    river_flow_sequence_latest = []
    river_mouths_sequence_latest = []
    lake_volumes_sequence_latest = []
    lake_basin_numbers_sequence_latest = []
    fine_river_flow_sequence_latest = []
    orography_sequence_latest = []
    catchment_nums_sequence_base = []
    river_flow_sequence_base = []
    river_mouths_sequence_base = []
    lake_volumes_sequence_base = []
    lake_basin_numbers_sequence_base = []
    fine_river_flow_sequence_base = []
    orography_sequence_base = []
    analysis_base_dir = ("/Users/thomasriddick/Documents/data/"
                         "lake_analysis_runs/lake_analysis_one_21_Jun_2021/")
    base_version = 0
    for date in dates:
        # Resolve the highest diagnostic version available for this date.
        latest_lakes_version = \
            find_highest_version(analysis_base_dir +
                                 "lakes/results/"
                                 "diag_version_VERSION_NUMBER_date_{}".format(date))
        latest_version_results_base_dir = (analysis_base_dir +
                                           "lakes/results/diag_version_{}_date_{}".\
                                           format(latest_lakes_version,date))
        base_version_results_base_dir = (analysis_base_dir +
                                         "lakes/results/diag_version_{}_date_{}".\
                                         format(base_version,date))
        # The land-sea mask is derived from the latest version's river
        # directions field.
        rdirs = advanced_field_loader(filename=join(
            latest_version_results_base_dir, "30min_rdirs.nc"),
                                      time_slice=None,
                                      field_type="RiverDirections",
                                      fieldname="rdirs",
                                      adjust_orientation=True)
        lsmask_data = rdirs.get_lsmask()
        glacier_mask = advanced_field_loader(
            filename="/Users/thomasriddick/Documents/"
            "data/lake_transient_data/run_1/"
            "10min_glac_{}k.nc".format(date),
            time_slice=None,
            fieldname="glac",
            adjust_orientation=True)
        # --- Latest version fields ---
        catchment_nums_latest = advanced_field_loader(filename=join(
            latest_version_results_base_dir, "30min_connected_catchments.nc"),
                                                      time_slice=None,
                                                      fieldname="catchments",
                                                      adjust_orientation=True)
        river_flow_latest = advanced_field_loader(filename=join(
            latest_version_results_base_dir, "30min_flowtocell_connected.nc"),
                                                  time_slice=None,
                                                  fieldname="cumulative_flow",
                                                  adjust_orientation=True)
        river_mouths_latest = advanced_field_loader(
            filename=join(latest_version_results_base_dir,
                          "30min_flowtorivermouths_connected.nc"),
            time_slice=None,
            fieldname="cumulative_flow_to_ocean",
            adjust_orientation=True)
        lake_volumes_latest = advanced_field_loader(filename=join(
            latest_version_results_base_dir, "10min_lake_volumes.nc"),
                                                    time_slice=None,
                                                    fieldname="lake_volume",
                                                    adjust_orientation=True)
        lake_basin_numbers_latest = advanced_field_loader(
            filename=join(latest_version_results_base_dir,
                          "basin_catchment_numbers.nc"),
            time_slice=None,
            fieldname="basin_catchment_numbers",
            adjust_orientation=True)
        # fine_river_flow_latest = advanced_field_loader(filename=join(latest_version_results_base_dir,
        #                                                              "10min_flowtocell.nc"),
        #                                                time_slice=None,
        #                                                fieldname="cumulative_flow",
        #                                                adjust_orientation=True)
        orography_latest = advanced_field_loader(filename=join(
            latest_version_results_base_dir, "10min_corrected_orog.nc"),
                                                 time_slice=None,
                                                 fieldname="corrected_orog",
                                                 adjust_orientation=True)
        # --- Base (version 0) fields ---
        catchment_nums_base = advanced_field_loader(filename=join(
            base_version_results_base_dir, "30min_connected_catchments.nc"),
                                                    time_slice=None,
                                                    fieldname="catchments",
                                                    adjust_orientation=True)
        river_flow_base = advanced_field_loader(filename=join(
            base_version_results_base_dir, "30min_flowtocell_connected.nc"),
                                                time_slice=None,
                                                fieldname="cumulative_flow",
                                                adjust_orientation=True)
        river_mouths_base = advanced_field_loader(
            filename=join(base_version_results_base_dir,
                          "30min_flowtorivermouths_connected.nc"),
            time_slice=None,
            fieldname="cumulative_flow_to_ocean",
            adjust_orientation=True)
        lake_volumes_base = advanced_field_loader(filename=join(
            base_version_results_base_dir, "10min_lake_volumes.nc"),
                                                  time_slice=None,
                                                  fieldname="lake_volume",
                                                  adjust_orientation=True)
        lake_basin_numbers_base = advanced_field_loader(
            filename=join(base_version_results_base_dir,
                          "basin_catchment_numbers.nc"),
            time_slice=None,
            fieldname="basin_catchment_numbers",
            adjust_orientation=True)
        # fine_river_flow_base = advanced_field_loader(filename=join(base_version_results_base_dir,
        #                                                            "10min_flowtocell.nc"),
        #                                              time_slice=None,
        #                                              fieldname="cumulative_flow",
        #                                              adjust_orientation=True)
        orography_base = advanced_field_loader(filename=join(
            base_version_results_base_dir, "10min_corrected_orog.nc"),
                                               time_slice=None,
                                               fieldname="corrected_orog",
                                               adjust_orientation=True)
        # Accumulate raw data arrays for this date.
        lsmask_sequence.append(lsmask_data)
        glacier_mask_sequence.append(glacier_mask.get_data())
        catchment_nums_sequence_latest.append(catchment_nums_latest.get_data())
        river_flow_sequence_latest.append(river_flow_latest.get_data())
        river_mouths_sequence_latest.append(river_mouths_latest.get_data())
        lake_volumes_sequence_latest.append(lake_volumes_latest.get_data())
        lake_basin_numbers_sequence_latest.append(
            lake_basin_numbers_latest.get_data())
        #fine_river_flow_sequence_latest.append(fine_river_flow_latest.get_data())
        orography_sequence_latest.append(orography_latest.get_data())
        catchment_nums_sequence_base.append(catchment_nums_base.get_data())
        river_flow_sequence_base.append(river_flow_base.get_data())
        river_mouths_sequence_base.append(river_mouths_base.get_data())
        lake_volumes_sequence_base.append(lake_volumes_base.get_data())
        lake_basin_numbers_sequence_base.append(
            lake_basin_numbers_base.get_data())
        #fine_river_flow_sequence_base.append(fine_river_flow_base.get_data())
        orography_sequence_base.append(orography_base.get_data())
    # Static reference orographies (not per-date).
    super_fine_orography = advanced_field_loader(
        filename=join(
            "/Users/thomasriddick/"
            "Documents/data/HDdata/orographys", "srtm30plus_v6.nc"),
        time_slice=None,
        fieldname="topo",
        adjust_orientation=True).get_data()
    first_corrected_orography = advanced_field_loader(
        filename=join(analysis_base_dir, "corrections", "work",
                      "pre_preliminary_tweak_orography.nc"),
        time_slice=None,
        fieldname="orog",
        adjust_orientation=True).get_data()
    second_corrected_orography = advanced_field_loader(
        filename=join(analysis_base_dir, "corrections", "work",
                      "post_preliminary_tweak_orography.nc"),
        time_slice=None,
        fieldname="orog",
        adjust_orientation=True).get_data()
    third_corrected_orography = advanced_field_loader(
        filename=join(analysis_base_dir, "corrections", "work",
                      "pre_final_tweak_orography.nc"),
        time_slice=None,
        fieldname="orog",
        adjust_orientation=True).get_data()
    fourth_corrected_orography = advanced_field_loader(
        filename=join(analysis_base_dir, "corrections", "work",
                      "post_final_tweak_orography.nc"),
        time_slice=None,
        fieldname="orog",
        adjust_orientation=True).get_data()
    # Use the newest true sinks field available.
    highest_true_sinks_version = find_highest_version(
        join(analysis_base_dir, "corrections", "true_sinks_fields",
             "true_sinks_field_version_"
             "VERSION_NUMBER.nc"))
    true_sinks = advanced_field_loader(filename=join(analysis_base_dir,
                                                     "corrections","true_sinks_fields",
                                                     "true_sinks_field_version_{}.nc".\
                                                     format(highest_true_sinks_version)),
                                       time_slice=None,
                                       fieldname="true_sinks",
                                       adjust_orientation=True).get_data()
    # Hand everything to the interactive viewer; the fine river flow slots
    # are passed as None because their loading is commented out above.
    interactive_plots = InteractiveTimeslicePlots(
        colors, [
            "lakev1", "cflow1", "orog1", "catch1", "cflowandlake1",
            "firstcorrorog", "fourthcorrorog", "lakebasinnums1", "truesinks"
        ],
        lsmask_sequence,
        glacier_mask_sequence,
        catchment_nums_sequence_latest,
        catchment_nums_sequence_base,
        river_flow_sequence_latest,
        river_flow_sequence_base,
        river_mouths_sequence_latest,
        river_mouths_sequence_base,
        lake_volumes_sequence_latest,
        lake_volumes_sequence_base,
        lake_basin_numbers_sequence_latest,
        lake_basin_numbers_sequence_base,
        None, #fine_river_flow_sequence_latest,
        None, #fine_river_flow_sequence_base,
        orography_sequence_latest,
        orography_sequence_base,
        super_fine_orography,
        first_corrected_orography,
        second_corrected_orography,
        third_corrected_orography,
        fourth_corrected_orography,
        true_sinks,
        date_sequence=dates,
        minflowcutoff=100,
        use_glacier_mask=False,
        zoomed=False,
        zoomed_section_bounds={})
def rivers_from_lake_corr_and_rivers_from_original_corr_comparison(
        show_animation=True):
    """Compare river diagnostics from lake-derived vs original corrections.

    Arguments:
    show_animation: bool; if True show an ArtistAnimation stepping through
        the dates, otherwise open an InteractiveTimeslicePlots session.
    Returns: nothing

    For each date, loads catchments, cumulative flow and river mouth
    outflow produced with lake-based orography corrections ("lcorrs") and
    with the default original corrections ("ocorrs") and displays them
    side by side.
    """
    # NOTE(review): data paths are hard-coded to one user's machine.
    colors = ColorPalette('default')
    dates = [0]
    dates.extend(list(range(15990, 10990, -10)))
    lsmask_sequence = []
    glacier_mask_sequence = []
    catchment_nums_one_sequence = []
    catchment_nums_two_sequence = []
    river_flow_one_sequence = []
    river_flow_two_sequence = []
    river_mouths_one_sequence = []
    river_mouths_two_sequence = []
    default_version = 0
    analysis_base_dir = ("/Users/thomasriddick/Documents/data/"
                         "lake_analysis_runs/lake_analysis_one_21_Jun_2021/")
    use_latest_version = True
    for date in dates:
        # Pick either the newest results version on disk or the default.
        if use_latest_version:
            lcorrs_version = \
                find_highest_version(analysis_base_dir +
                                     "rivers/results/"
                                     "diag_version_VERSION_NUMBER_date_{}".format(date))
        else:
            lcorrs_version = default_version
        if use_latest_version:
            ocorrs_version = \
                find_highest_version(analysis_base_dir +
                                     "rivers/results/default_orog_corrs/"
                                     "diag_version_VERSION_NUMBER_date_{}".format(date))
        else:
            ocorrs_version = default_version
        lcorrs_results_base_dir = (
            analysis_base_dir +
            "rivers/results/diag_version_{}_date_{}".format(
                lcorrs_version, date))
        ocorrs_results_base_dir = (
            analysis_base_dir + "rivers/results/default_orog_corrs/"
            "diag_version_{}_date_{}".format(ocorrs_version, date))
        # Land-sea mask comes from the ocorrs river directions.
        rdirs = advanced_field_loader(filename=join(ocorrs_results_base_dir,
                                                    "30min_rdirs.nc"),
                                      time_slice=None,
                                      field_type="RiverDirections",
                                      fieldname="rdirs",
                                      adjust_orientation=True)
        lsmask_data = rdirs.get_lsmask()
        glacier_mask = advanced_field_loader(
            filename="/Users/thomasriddick/Documents/"
            "data/lake_transient_data/run_1/"
            "10min_glac_{}k.nc".format(date),
            time_slice=None,
            fieldname="glac",
            adjust_orientation=True)
        catchment_nums_lcorrs = advanced_field_loader(filename=join(
            lcorrs_results_base_dir, "30min_catchments.nc"),
                                                      time_slice=None,
                                                      fieldname="catchments",
                                                      adjust_orientation=True)
        catchment_nums_ocorrs = advanced_field_loader(filename=join(
            ocorrs_results_base_dir, "30min_catchments.nc"),
                                                      time_slice=None,
                                                      fieldname="catchments",
                                                      adjust_orientation=True)
        river_flow_lcorrs = advanced_field_loader(filename=join(
            lcorrs_results_base_dir, "30min_flowtocell.nc"),
                                                  time_slice=None,
                                                  fieldname="cumulative_flow",
                                                  adjust_orientation=True)
        river_flow_ocorrs = advanced_field_loader(filename=join(
            ocorrs_results_base_dir, "30min_flowtocell.nc"),
                                                  time_slice=None,
                                                  fieldname="cumulative_flow",
                                                  adjust_orientation=True)
        river_mouths_lcorrs = advanced_field_loader(
            filename=join(lcorrs_results_base_dir,
                          "30min_flowtorivermouths.nc"),
            time_slice=None,
            fieldname="cumulative_flow_to_ocean",
            adjust_orientation=True)
        river_mouths_ocorrs = advanced_field_loader(
            filename=join(ocorrs_results_base_dir,
                          "30min_flowtorivermouths.nc"),
            time_slice=None,
            fieldname="cumulative_flow_to_ocean",
            adjust_orientation=True)
        lsmask_sequence.append(lsmask_data)
        glacier_mask_sequence.append(glacier_mask.get_data())
        catchment_nums_one_sequence.append(catchment_nums_lcorrs.get_data())
        catchment_nums_two_sequence.append(catchment_nums_ocorrs.get_data())
        river_flow_one_sequence.append(river_flow_lcorrs.get_data())
        river_flow_two_sequence.append(river_flow_ocorrs.get_data())
        river_mouths_one_sequence.append(river_mouths_lcorrs.get_data())
        river_mouths_two_sequence.append(river_mouths_ocorrs.get_data())
    # For zero slice (currently only used by the commented-out call below).
    lsmask_zero = [lsmask_sequence[0]]
    # Bug fix: a stray trailing comma previously made this a 1-tuple
    # containing the list rather than a plain list.
    glacier_mask_zero = [glacier_mask_sequence[0]]
    catchment_nums_one_zero = [catchment_nums_one_sequence[0]]
    catchment_nums_two_zero = [catchment_nums_two_sequence[0]]
    river_flow_one_zero = [river_flow_one_sequence[0]]
    river_flow_two_zero = [river_flow_two_sequence[0]]
    river_mouths_one_zero = [river_mouths_one_sequence[0]]
    river_mouths_two_zero = [river_mouths_two_sequence[0]]
    if show_animation:
        #generate_catchment_and_cflow_comp_sequence(colors,
        #                                           lsmask_zero,
        #                                           glacier_mask_zero,
        #                                           catchment_nums_one_zero,
        #                                           catchment_nums_two_zero,
        #                                           river_flow_one_zero,
        #                                           river_flow_two_zero,
        #                                           river_mouths_one_zero,
        #                                           river_mouths_two_zero,
        #                                           [0],
        #                                           minflowcutoff=100,
        #                                           use_glacier_mask=False,
        #                                           zoomed=False,
        #                                           zoomed_section_bounds={})
        fig = plt.figure()
        plt.subplot(111)
        # One date label per frame; index 0 (present day) is skipped below.
        date_text_sequence = [
            fig.text(0.4, 0.075, "{} YBP".format(date)) for date in dates
        ]
        ims = generate_catchment_and_cflow_comp_sequence(
            colors,
            lsmask_sequence[1:],
            glacier_mask_sequence[1:],
            catchment_nums_one_sequence[1:],
            catchment_nums_two_sequence[1:],
            river_flow_one_sequence[1:],
            river_flow_two_sequence[1:],
            river_mouths_one_sequence[1:],
            river_mouths_two_sequence[1:],
            date_text_sequence[1:],
            minflowcutoff=100,
            use_glacier_mask=False,
            zoomed=False,
            zoomed_section_bounds={})
        # Keep a reference to the animation so it is not garbage collected
        # before plt.show() returns.
        anim = animation.ArtistAnimation(fig,
                                         ims,
                                         interval=200,
                                         blit=False,
                                         repeat_delay=500)
        plt.show()
    else:
        interactive_plots = InteractiveTimeslicePlots(
            colors, ["comp"], lsmask_sequence[1:], glacier_mask_sequence[1:],
            catchment_nums_one_sequence[1:], catchment_nums_two_sequence[1:],
            river_flow_one_sequence[1:], river_flow_two_sequence[1:],
            river_mouths_one_sequence[1:], river_mouths_two_sequence[1:],
            None, None, None, None, None, None, None, dates[1:],
            minflowcutoff=100,
            use_glacier_mask=False,
            zoomed=False,
            zoomed_section_bounds={})
def no_intermediaries_combine_hydrosheds_plus_rdirs_from_corrected_orog_driver(
        self):
    """Combines Hydrosheds river direction with those from a corrected
    orography and possibly removes sinks

    Reads all control values from the config returned by
    self._read_and_validate_config().  Two modes:
      * If self.rerun_post_processing_only is set, skip generation entirely
        and reload previously generated river directions, then redo only the
        post-processing (catchments + cumulative flow) and rewrite those
        outputs.
      * Otherwise generate river directions by sink-filling a corrected
        10-minute orography, splice in Hydrosheds directions (Australia/
        Africa/South America per the fieldname), optionally remove endorheic
        basins, break loops, then post-process and write all outputs.
    """
    config = self._read_and_validate_config()
    final_loops_log_filename = path.splitext(
        self.output_catchments_filename)[0] + '_loops.log'
    if self.rerun_post_processing_only is not None:
        # Re-processing mode: load the already-generated river directions
        # instead of recomputing them.
        final_rdirs = \
            iodriver.advanced_field_loader(self.rerun_post_processing_only,
                                           field_type="RiverDirections",
                                           fieldname=config.get("input_fieldname_options",
                                                                "river_directions_to_reprocess_fieldname"))
    else:
        orography_filename = config.get(
            "input_options", "ten_minute_corrected_orography_filename")
        orography = iodriver.advanced_field_loader(
            orography_filename,
            field_type='Orography',
            fieldname=config.get(
                "input_fieldname_options",
                "ten_minute_corrected_orography_fieldname"))
        hydrosheds_rdirs_au_af_sa_10min_filename = \
            config.get("input_options","ten_minute_hydrosheds_au_auf_sa_river_directions_filename")
        hydrosheds_rdirs = \
            iodriver.advanced_field_loader(hydrosheds_rdirs_au_af_sa_10min_filename,
                                           field_type="RiverDirections",
                                           fieldname=config.get("input_fieldname_options",
                                                                "ten_minute_hydrosheds_au_auf_sa_river_directions_fieldname"))
        # Scratch log for the intermediate catchment computation; removed at
        # the end of this method (only created in this branch).
        second_intermediate_loops_log_filename = tempfile.mkstemp(
            suffix=".txt", prefix="loops_log_second_int", dir="")[1]
        # Dummy (all-False) true-sinks field for the sinkless fill pass.
        truesinks_dummy = field.makeEmptyField("Generic",
                                               np.bool_,
                                               grid_type="LatLong10min")
        truesinks_dummy.set_all(False)
        if self.true_sinks_filename is not None:
            use_true_sinks = True
            truesinks = iodriver.advanced_field_loader(
                self.true_sinks_filename,
                field_type="Generic",
                fieldname=config.get("input_fieldname_options",
                                     "input_truesinks_fieldname"))
        else:
            use_true_sinks = False
            if config.getboolean(
                    "general_options",
                    "replace_internal_basins_with_rdirs_with_truesinks"):
                warnings.warn(
                    "Option replace_internal_basins_with_rdirs_with_truesinks "
                    "ignored when no true sinks file is specified")
        first_intermediate_rdirs = field.makeEmptyField(
            "RiverDirections", np.float64, grid_type="LatLong10min")
        if use_true_sinks:
            # Separate field to hold the sink-free direction set when a true
            # sinks file is supplied (a second fill pass then produces the
            # with-sinks set).
            first_intermediate_rdirs_no_sinks = field.makeEmptyField(
                "RiverDirections", np.float64, grid_type="LatLong10min")
        ls_mask = iodriver.advanced_field_loader(
            self.ls_mask_filename,
            field_type="Generic",
            fieldname=config.get("input_fieldname_options",
                                 "input_landsea_mask_fieldname"))
        # The C++ wrapper requires contiguous int32 input.
        ls_mask.set_data(
            np.ascontiguousarray(ls_mask.get_data(), dtype=np.int32))
        # Output buffers required by the wrapper's signature but unused here.
        next_cell_lat_index_in_dummy = np.zeros(ls_mask.get_data().shape,
                                                dtype=np.int32,
                                                order='C')
        next_cell_lon_index_in_dummy = np.zeros(ls_mask.get_data().shape,
                                                dtype=np.int32,
                                                order='C')
        catchment_nums_dummy = np.zeros(ls_mask.get_data().shape,
                                        dtype=np.int32,
                                        order='C')
        # First fill pass with NO true sinks.  When a true sinks file is in
        # use the result goes into the *_no_sinks field; a second pass below
        # then fills first_intermediate_rdirs using the real sinks.
        fill_sinks_wrapper.fill_sinks_cpp_func(orography_array=
                                               np.ascontiguousarray(orography.get_data(), #@UndefinedVariable
                                                                    dtype=np.float64),
                                               method = 4,
                                               use_ls_mask = True,
                                               landsea_in = np.ascontiguousarray(ls_mask.get_data(),
                                                                                 dtype=np.int32),
                                               set_ls_as_no_data_flag = False,
                                               use_true_sinks = False,
                                               true_sinks_in = np.ascontiguousarray(truesinks_dummy.\
                                                                                    get_data(),
                                                                                    dtype=np.int32),
                                               next_cell_lat_index_in = next_cell_lat_index_in_dummy,
                                               next_cell_lon_index_in = next_cell_lon_index_in_dummy,
                                               rdirs_in = first_intermediate_rdirs.get_data() if
                                               not use_true_sinks else
                                               first_intermediate_rdirs_no_sinks.get_data(),
                                               catchment_nums_in = catchment_nums_dummy,
                                               prefer_non_diagonal_initial_dirs = False)
        if use_true_sinks:
            # Second fill pass, this time honouring the supplied true sinks.
            fill_sinks_wrapper.fill_sinks_cpp_func(
                orography_array=np.ascontiguousarray(
                    orography.get_data(), #@UndefinedVariable
                    dtype=np.float64),
                method=4,
                use_ls_mask=True,
                landsea_in=np.ascontiguousarray(ls_mask.get_data(),
                                                dtype=np.int32),
                set_ls_as_no_data_flag=False,
                use_true_sinks=True,
                true_sinks_in=np.ascontiguousarray(truesinks.get_data(),
                                                   dtype=np.int32),
                next_cell_lat_index_in=next_cell_lat_index_in_dummy,
                next_cell_lon_index_in=next_cell_lon_index_in_dummy,
                rdirs_in=first_intermediate_rdirs.get_data(),
                catchment_nums_in=catchment_nums_dummy,
                prefer_non_diagonal_initial_dirs=False)
        # Overlay Hydrosheds directions onto the filled field where available.
        second_intermediate_rdirs = utilities.splice_rdirs(
            rdirs_matching_ls_mask=first_intermediate_rdirs,
            ls_mask=ls_mask,
            other_rdirs=hydrosheds_rdirs)
        second_intermediate_catchments = comp_catchs.compute_catchments_cpp(
            second_intermediate_rdirs.get_data(),
            second_intermediate_loops_log_filename)
        second_intermediate_catchments = field.Field(comp_catchs.\
            renumber_catchments_by_size(second_intermediate_catchments,
                                        second_intermediate_loops_log_filename),
            grid="LatLong10min")
        if config.getboolean("general_options", "keep_all_internal_basins"):
            third_intermediate_rdirs = second_intermediate_rdirs
        else:
            # Replace endorheic (internal) basins with the corresponding
            # fill-generated directions; catchment number filters come as
            # comma-separated lists in the config (empty string => no filter).
            third_intermediate_rdirs = \
                utilities.remove_endorheic_basins(rdirs=second_intermediate_rdirs,
                                                  catchments=second_intermediate_catchments,
                                                  rdirs_without_endorheic_basins=
                                                  first_intermediate_rdirs_no_sinks if
                                                  (use_true_sinks and not
                                                   config.getboolean('general_options',
                                                                     'replace_internal_basins_with_rdirs_with_truesinks'))
                                                  else first_intermediate_rdirs,
                                                  replace_only_catchments=([int(value) for value in
                                                                            config.get('general_options',
                                                                                       'replace_only_catchments').\
                                                                            split(",")]
                                                                           if config.get('general_options',
                                                                                         'replace_only_catchments')
                                                                           else []),
                                                  exclude_catchments=([int(value) for value in
                                                                       config.get('general_options',
                                                                                  'exclude_catchments').\
                                                                       split(",")]
                                                                      if config.get('general_options',
                                                                                    'exclude_catchments')
                                                                      else []))
        # nlat/nlong hard-coded to the 10-minute global grid (1080x2160).
        third_intermediate_flowtocell = field.\
            CumulativeFlow(create_hypothetical_river_paths_map(riv_dirs=third_intermediate_rdirs.get_data(),
                                                               lsmask=None,
                                                               use_f2py_func=True,
                                                               use_f2py_sparse_iterator=True,
                                                               nlat=1080,nlong=2160),
                           grid='LatLong10min')
        third_intermediate_rdirs.make_contiguous()
        third_intermediate_flowtocell.make_contiguous()
        first_intermediate_rdirs.make_contiguous()
        if use_true_sinks:
            first_intermediate_rdirs_no_sinks.make_contiguous()
        final_rdirs = utilities.replace_streams_downstream_from_loop(
            third_intermediate_rdirs,
            cumulative_flow=third_intermediate_flowtocell,
            other_rdirs=first_intermediate_rdirs if not
            use_true_sinks else first_intermediate_rdirs_no_sinks)
    # Post-processing (both modes): catchments and cumulative flow from the
    # final river directions.
    final_catchments = comp_catchs.compute_catchments_cpp(
        final_rdirs.get_data(), final_loops_log_filename)
    final_catchments = field.Field(comp_catchs.\
        renumber_catchments_by_size(final_catchments,
                                    final_loops_log_filename),
        grid="LatLong10min")
    final_flowtocell = field.CumulativeFlow(
        create_hypothetical_river_paths_map(
            riv_dirs=final_rdirs.get_data(),
            lsmask=None,
            use_f2py_func=True,
            use_f2py_sparse_iterator=True,
            nlat=1080,
            nlong=2160),
        grid='LatLong10min')
    if self.rerun_post_processing_only is None:
        # Only rewrite the river directions themselves when they were
        # (re)generated in this run.
        iodriver.advanced_field_writer(
            self.output_rdirs_filename,
            final_rdirs,
            fieldname=config.get("output_fieldname_options",
                                 "output_river_directions_fieldname"))
    iodriver.advanced_field_writer(self.output_catchments_filename,
                                   final_catchments,
                                   fieldname=config.get(
                                       "output_fieldname_options",
                                       "output_catchments_fieldname"))
    iodriver.advanced_field_writer(self.output_cumulative_flow_filename,
                                   final_flowtocell,
                                   fieldname=config.get(
                                       "output_fieldname_options",
                                       "output_cumulative_flow_fieldname"))
    if self.rerun_post_processing_only is None:
        # Clean up the scratch log created in the generation branch above.
        os.remove(second_intermediate_loops_log_filename)
def advanced_sinkless_flow_directions_generator(filename,output_filename,fieldname,
                                                output_fieldname,ls_mask_filename=None,
                                                truesinks_filename=None,
                                                catchment_nums_filename=None,
                                                ls_mask_fieldname=None,
                                                truesinks_fieldname=None,
                                                catchment_fieldname=None):
    """Generate sinkless river flow directions from an orography.

    Loads the orography, runs the C++ sink-filling algorithm (method 4) and
    writes out the resulting river directions; optionally also writes the
    catchment numbers computed alongside them.

    Arguments:
    filename: string; full path to the input orography file
    output_filename: string; full path to write the river directions to
    fieldname: string; name of the orography field in the input file
    output_fieldname: string; name to give the output river directions field
    ls_mask_filename: string; optional full path to a land-sea mask file
    truesinks_filename: string; optional full path to a true sinks file
    catchment_nums_filename: string; optional full path to write catchment
        numbers to (skipped when None)
    ls_mask_fieldname: string; field name of the land-sea mask
    truesinks_fieldname: string; field name of the true sinks field
    catchment_fieldname: string; name to give the output catchments field
    Returns: nothing

    For very large grids (at or above a hard-coded point-count threshold) a
    low-memory variant of the C++ routine is used instead; it returns the
    river directions rather than filling a preallocated array in place.
    """
    orography = iodriver.advanced_field_loader(filename,
                                               field_type='Orography',
                                               fieldname=fieldname)
    grid_dims=orography.get_grid().get_grid_dimensions()
    grid_num_points = orography.get_grid().get_npoints()
    # Grids with at least this many points use the low-memory code path.
    threshold_for_low_mem_version_usage = 500000000
    if (grid_num_points < threshold_for_low_mem_version_usage):
        # Standard path fills this preallocated array in place via rdirs_in.
        rdirs = np.empty(grid_dims,dtype=np.float64,order='C')
    if not truesinks_filename:
        # Placeholder 1x1 field; ignored since use_true_sinks is False.
        truesinks = Field(np.empty((1,1),dtype=np.int32),grid='HD')
        use_true_sinks = False;
    else:
        use_true_sinks = True;
        truesinks = iodriver.advanced_field_loader(truesinks_filename,
                                                   field_type='Generic',
                                                   fieldname=truesinks_fieldname)
    if ls_mask_filename is None:
        use_ls_mask = False
        # Placeholder mask; ignored since use_ls_mask is False.
        ls_mask = field.makeEmptyField(field_type='Generic',dtype=np.int32,grid_type='HD')
    else:
        use_ls_mask = True
        ls_mask = iodriver.advanced_field_loader(ls_mask_filename,
                                                 field_type='Generic',
                                                 fieldname=ls_mask_fieldname)
    # Output buffers filled by the C++ routine.
    catchment_nums = np.zeros(grid_dims,dtype=np.int32,order='C')
    next_cell_lat_index_in = np.zeros(grid_dims,dtype=np.int32,order='C')
    next_cell_lon_index_in = np.zeros(grid_dims,dtype=np.int32,order='C')
    if (grid_num_points >= threshold_for_low_mem_version_usage):
        # Low-memory variant returns the river directions directly.
        rdirs = fill_sinks_wrapper_low_mem.fill_sinks_cpp_func(orography_array=np.ascontiguousarray(orography.get_data(), #@UndefinedVariable
                                                                                                    dtype=np.float64),
                                                               method = 4,
                                                               use_ls_mask = use_ls_mask,
                                                               landsea_in = np.ascontiguousarray(ls_mask.get_data(),
                                                                                                 dtype=np.int32),
                                                               set_ls_as_no_data_flag = False,
                                                               use_true_sinks = use_true_sinks,
                                                               true_sinks_in = np.ascontiguousarray(truesinks.get_data(),
                                                                                                    dtype=np.int32),
                                                               next_cell_lat_index_in = next_cell_lat_index_in,
                                                               next_cell_lon_index_in = next_cell_lon_index_in,
                                                               catchment_nums_in = catchment_nums,
                                                               prefer_non_diagonal_initial_dirs = False)
    else:
        # Standard variant writes into the preallocated rdirs array in place.
        fill_sinks_wrapper.fill_sinks_cpp_func(orography_array=np.ascontiguousarray(orography.get_data(), #@UndefinedVariable
                                                                                    dtype=np.float64),
                                               method = 4,
                                               use_ls_mask = use_ls_mask,
                                               landsea_in = np.ascontiguousarray(ls_mask.get_data(),
                                                                                 dtype=np.int32),
                                               set_ls_as_no_data_flag = False,
                                               use_true_sinks = use_true_sinks,
                                               true_sinks_in = np.ascontiguousarray(truesinks.get_data(),
                                                                                    dtype=np.int32),
                                               next_cell_lat_index_in = next_cell_lat_index_in,
                                               next_cell_lon_index_in = next_cell_lon_index_in,
                                               rdirs_in = rdirs,
                                               catchment_nums_in = catchment_nums,
                                               prefer_non_diagonal_initial_dirs = False)
    iodriver.advanced_field_writer(output_filename,Field(rdirs,grid=orography.get_grid()),
                                   fieldname=output_fieldname)
    if catchment_nums_filename:
        iodriver.advanced_field_writer(catchment_nums_filename,
                                       field=Field(catchment_nums,grid=orography.get_grid()),
                                       fieldname=catchment_fieldname)
def advanced_loop_breaker_driver(input_coarse_rdirs_filepath,input_coarse_cumulative_flow_filepath,
                                 input_coarse_catchments_filepath,input_fine_rdirs_filepath,
                                 input_fine_cumulative_flow_filepath,output_updated_coarse_rdirs_filepath,
                                 input_coarse_rdirs_fieldname,input_coarse_cumulative_flow_fieldname,
                                 input_coarse_catchments_fieldname,input_fine_rdirs_fieldname,
                                 input_fine_cumulative_flow_fieldname,output_updated_coarse_rdirs_fieldname,
                                 loop_nums_list_filepath,
                                 scaling_factor):
    """Drive the FORTRAN code to remove more complex loops from a field of river directions

    Arguments:
    input_coarse_rdirs_filepath: string; full path to input coarse river directions
        to remove loops from
    input_coarse_cumulative_flow_filepath: string; full path to the input coarse
        cumulative flow file
    input_coarse_catchments_filepath: string; full path to the coarse input
        catchments file
    input_fine_rdirs_filepath: string; full path to the fine input river directions
        the coarse input river directions were upscaled from
    input_fine_cumulative_flow_filepath: string; full path to the catchments
        generated from the fine input river directions
    output_updated_coarse_rdirs_filepath: string; full path to write the coarse
        river direction with the specified loops removed too
    input_coarse_rdirs_fieldname: string; field name of the coarse river directions
    input_coarse_cumulative_flow_fieldname: string; field name of the coarse
        cumulative flow
    input_coarse_catchments_fieldname: string; field name of the coarse catchments
    input_fine_rdirs_fieldname: string; field name of the fine river directions
    input_fine_cumulative_flow_fieldname: string; field name of the fine
        cumulative flow
    output_updated_coarse_rdirs_fieldname: string; name to give the output field
    loop_nums_list_filepath: string; full path to the file contain the catchment
        numbers of the loops to remove, one per line, see code below for correct
        format for the first line
    scaling_factor: integer; ratio of the fine grid resolution to the coarse
        grid resolution
    Returns: nothing
    """
    input_coarse_rdirs_field = iodriver.advanced_field_loader(input_coarse_rdirs_filepath,
                                                              field_type='RiverDirections',
                                                              fieldname=input_coarse_rdirs_fieldname)
    coarse_cumulative_flow_field =\
        iodriver.advanced_field_loader(input_coarse_cumulative_flow_filepath,
                                       field_type='CumulativeFlow',
                                       fieldname=input_coarse_cumulative_flow_fieldname)
    coarse_catchments_field =\
        iodriver.advanced_field_loader(input_coarse_catchments_filepath,
                                       field_type='Generic',
                                       fieldname=input_coarse_catchments_fieldname)
    fine_rdirs_field = \
        iodriver.advanced_field_loader(input_fine_rdirs_filepath,
                                       field_type='RiverDirections',
                                       fieldname=input_fine_rdirs_fieldname)
    fine_cumulative_flow_field =\
        iodriver.advanced_field_loader(input_fine_cumulative_flow_filepath,
                                       field_type='CumulativeFlow',
                                       fieldname=input_fine_cumulative_flow_fieldname)
    loop_nums_list = []
    # The list file must start with this exact header line.
    first_line_pattern = re.compile(r"^Loops found in catchments:$")
    with open(loop_nums_list_filepath,'r') as f:
        if not first_line_pattern.match(f.readline().strip()):
            raise RuntimeError("Format of the file with list of catchments to remove loops from"
                               " is invalid")
        # Remaining lines: one catchment number per line.
        for line in f:
            loop_nums_list.append(int(line.strip()))
    print('Removing loops from catchments: ' + ", ".join(str(value) for
                                                         value in loop_nums_list))
    # Derive the coarse grid dimensions/coordinates from the fine grid.
    nlat_fine,nlon_fine = fine_rdirs_field.get_grid_dimensions()
    lat_pts_fine,lon_pts_fine = fine_rdirs_field.get_grid_coordinates()
    nlat_coarse,nlon_coarse,lat_pts_coarse,lon_pts_coarse = \
        coordinate_scaling_utilities.generate_coarse_coords(nlat_fine,nlon_fine,
                                                            lat_pts_fine,lon_pts_fine,
                                                            scaling_factor)
    output_coarse_rdirs_field = run_loop_breaker(input_coarse_rdirs_field,coarse_cumulative_flow_field,
                                                 coarse_catchments_field,fine_rdirs_field,
                                                 fine_cumulative_flow_field,loop_nums_list,
                                                 coarse_grid_type="LatLong",nlat=nlat_coarse,
                                                 nlong=nlon_coarse)
    output_coarse_rdirs_field.set_grid_coordinates([lat_pts_coarse,lon_pts_coarse])
    iodriver.advanced_field_writer(output_updated_coarse_rdirs_filepath,
                                   output_coarse_rdirs_field,
                                   fieldname=output_updated_coarse_rdirs_fieldname)
def apply_tweaks(self, initial_corrections_filename,
                 output_corrections_filename,
                 corrections_list_filename,
                 corrections_type):
    """Apply point-wise height tweaks to an orography corrections field.

    Applies the initial corrections to the base ICE5G 10-minute orography,
    overwrites individual cells with heights read from the corrections list
    file, then regenerates a corrections field relative to the original
    orography and writes it out.

    Arguments:
    initial_corrections_filename: string; full path to the existing orography
        corrections field to start from
    output_corrections_filename: string; full path to write the regenerated
        corrections field to
    corrections_list_filename: string; full path to a text file listing the
        tweaks.  After a header line (validated against
        self.first_line_pattern) it contains section markers matched by
        self.prelim_match/self.final_match, comment lines matched by
        self.comment_line_match, and data lines of the form
        "lat,lon,height" (lat/lon integer indices, height a float)
    corrections_type: CorrectionTypes enum value; selects which section of the
        corrections list (preliminary or final) is applied
    Returns: nothing

    Raises RuntimeError if no working directory is configured or if the list
    file's header is malformed.
    """
    if self.working_directory is None:
        raise RuntimeError("Applying tweaks requires a working directory to be specified")
    print("Note - All adjustments must be relative to a 180 degree W to 180 degree E,"
          "90 degree N to 90 degree S grid")
    original_orography_filename = join(self.orography_path,
                                       "ice5g_v1_2_00_0k_10min.nc")
    # Intermediate files are tagged by the correction stage being applied.
    intermediary_orography_filename = join(self.working_directory,
                                           "pre_{}_tweak_orography.nc".\
                                           format("final" if corrections_type == CorrectionTypes.FINAL else
                                                  "preliminary"))
    corrected_orography_filename = join(self.working_directory,
                                        "post_{}_tweak_orography.nc".\
                                        format("final" if corrections_type == CorrectionTypes.FINAL else
                                               "preliminary"))
    # Apply the existing corrections to the base orography first.
    utilities.advanced_apply_orog_correction_field(original_orography_filename=
                                                   original_orography_filename,
                                                   orography_corrections_filename=
                                                   initial_corrections_filename,
                                                   corrected_orography_filename=
                                                   intermediary_orography_filename,
                                                   original_orography_fieldname="orog",
                                                   orography_corrections_fieldname="orog",
                                                   corrected_orography_fieldname="orog")
    orography_field = iodriver.advanced_field_loader(intermediary_orography_filename,
                                                     field_type='Orography',
                                                     fieldname="orog")
    # Simple state machine: section marker lines toggle whether subsequent
    # data lines belong to the requested correction stage.
    read_lines_from_file=False
    corrections_list = []
    with open(corrections_list_filename,"r") as corr_list_file:
        if not self.first_line_pattern.match(corr_list_file.readline().strip()):
            raise RuntimeError("List of corrections being loaded has"
                               " incorrect header format")
        for line in corr_list_file:
            if self.prelim_match.match(line):
                if corrections_type == CorrectionTypes.PRELIMINARY:
                    read_lines_from_file=True
                else:
                    read_lines_from_file=False
            elif self.final_match.match(line):
                if corrections_type == CorrectionTypes.FINAL:
                    read_lines_from_file=True
                else:
                    read_lines_from_file=False
            elif self.comment_line_match.match(line):
                continue
            elif read_lines_from_file:
                # First two comma-separated values are integer indices; the
                # remainder (the height) parses as float.
                corrections_list.append(tuple(int(value) if i < 2 else float(value) \
                                              for i,value in enumerate(line.strip().split(","))))
    for lat,lon,height in corrections_list:
        print("Correcting height of lat={0},lon={1} to {2} m".format(lat,lon,height))
        orography_field.get_data()[lat,lon] = height
    iodriver.advanced_field_writer(corrected_orography_filename,
                                   orography_field,
                                   fieldname="orog")
    # Regenerate the corrections field as (tweaked - original).
    utilities.advanced_orog_correction_field_generator(original_orography_filename=
                                                       original_orography_filename,
                                                       corrected_orography_filename=
                                                       corrected_orography_filename,
                                                       orography_corrections_filename=
                                                       output_corrections_filename,
                                                       original_orography_fieldname=
                                                       "orog",
                                                       corrected_orography_fieldname=
                                                       "orog",
                                                       orography_corrections_fieldname=
                                                       "orog")
def extract_data_to_run_lake_model(self, lake_parameters_filepath,
                                   hd_parameters_filepath,
                                   section_coords,
                                   language="Fortran",
                                   scale_factor=3.0,
                                   lake_initial_conditions_filepath=None,
                                   hd_initial_conditions_filepath=None,
                                   write_to_text_file_filename=None):
    """Extract a rectangular section of lake and HD model parameters as code.

    Snaps the requested section bounds so they align with the coarse grid,
    loads each lake-parameter field (fine grid) and each HD-parameter field
    (coarse grid) in turn and renders the section as source text via each
    field's extract_data method.

    Arguments:
    lake_parameters_filepath: string; full path to the lake parameters file
    hd_parameters_filepath: string; full path to the HD parameters file
    section_coords: dict with integer "min_lat", "max_lat", "min_lon",
        "max_lon" keys giving the fine-grid section bounds; MODIFIED IN PLACE
        when snapping is needed
    language: string; target language passed through to extract_data
    scale_factor: float; ratio of coarse to fine grid resolution
    lake_initial_conditions_filepath: unused at present (reserved)
    hd_initial_conditions_filepath: unused at present (reserved)
    write_to_text_file_filename: string or None; when set the extracted text
        is written to this file, otherwise it is printed
    Returns: nothing
    """
    extracted_data = ""
    # Snap the fine-grid bounds to multiples of the scale factor so the
    # section maps cleanly onto whole coarse cells.
    # Bug fix: these expressions previously hard-coded 3.0/3 and silently
    # ignored any non-default scale_factor.
    factor = int(scale_factor)
    if section_coords["min_lat"]%scale_factor != 0.0:
        section_coords["min_lat"] = \
            int(math.ceil(section_coords["min_lat"]/scale_factor))*factor
    if section_coords["min_lon"]%scale_factor != 0.0:
        section_coords["min_lon"] = \
            int(math.ceil(section_coords["min_lon"]/scale_factor))*factor
    if (section_coords["max_lat"]+1)%scale_factor != 0.0:
        section_coords["max_lat"] = \
            int(math.floor(section_coords["max_lat"]/scale_factor))*factor - 1
    if (section_coords["max_lon"]+1)%scale_factor != 0.0:
        section_coords["max_lon"] = \
            int(math.floor(section_coords["max_lon"]/scale_factor))*factor - 1
    coarse_section_coords = {"min_lat":int(math.floor(section_coords["min_lat"]/scale_factor)),
                             "max_lat":int(math.floor(section_coords["max_lat"]/scale_factor)),
                             "min_lon":int(math.floor(section_coords["min_lon"]/scale_factor)),
                             "max_lon":int(math.floor(section_coords["max_lon"]/scale_factor))}
    print("Extracting section:")
    print("min lat: {}".format(section_coords["min_lat"]))
    print("max lat: {}".format(section_coords["max_lat"]))
    print("min lon: {}".format(section_coords["min_lon"]))
    print("max lon: {}".format(section_coords["max_lon"]))
    print("Extracting coarse section:")
    print("min lat: {}".format(coarse_section_coords["min_lat"]))
    print("max lat: {}".format(coarse_section_coords["max_lat"]))
    print("min lon: {}".format(coarse_section_coords["min_lon"]))
    print("max lon: {}".format(coarse_section_coords["max_lon"]))
    # Lake-parameter fields (fine grid), in output order; the extraction
    # label matches the netCDF fieldname for all of these.
    lake_fields = [("merge_points", "integer"),
                   ("lake_centers", "integer"),
                   ("flood_volume_thresholds", "double"),
                   ("flood_redirect_lat_index", "integer"),
                   ("flood_redirect_lon_index", "integer"),
                   ("flood_next_cell_lat_index", "integer"),
                   ("flood_next_cell_lon_index", "integer"),
                   ("flood_local_redirect", "integer"),
                   ("flood_force_merge_lat_index", "integer"),
                   ("flood_force_merge_lon_index", "integer"),
                   ("additional_flood_redirect_lat_index", "integer"),
                   ("additional_flood_redirect_lon_index", "integer"),
                   ("additional_flood_local_redirect", "integer"),
                   ("connection_volume_thresholds", "double"),
                   ("connect_redirect_lat_index", "integer"),
                   ("connect_redirect_lon_index", "integer"),
                   ("connect_next_cell_lat_index", "integer"),
                   ("connect_next_cell_lon_index", "integer"),
                   ("connect_local_redirect", "integer"),
                   ("connect_force_merge_lat_index", "integer"),
                   ("connect_force_merge_lon_index", "integer"),
                   ("additional_connect_redirect_lat_index", "integer"),
                   ("additional_connect_redirect_lon_index", "integer"),
                   ("additional_connect_local_redirect", "integer")]
    for fieldname, data_type in lake_fields:
        loaded_field = iodriver.advanced_field_loader(lake_parameters_filepath,
                                                      field_type='Generic',
                                                      fieldname=fieldname)
        extracted_data += loaded_field.extract_data(section_coords, fieldname,
                                                    data_type, language)
    # HD-parameter fields (coarse grid): (netCDF fieldname, extraction label,
    # type), in output order.
    hd_fields = [("AGF_K", "baseflow_k", "double"),
                 ("ALF_K", "landflow_k", "double"),
                 ("ALF_N", "landflow_n", "integer"),
                 ("ARF_K", "riverflow_k", "double"),
                 ("ARF_N", "riverflow_n", "integer"),
                 ("FDIR", "river_directions", "integer"),
                 ("FLAG", "landsea_mask", "integer")]
    for fieldname, label, data_type in hd_fields:
        loaded_field = iodriver.advanced_field_loader(hd_parameters_filepath,
                                                      field_type='Generic',
                                                      fieldname=fieldname)
        extracted_data += loaded_field.extract_data(coarse_section_coords, label,
                                                    data_type, language)
    if write_to_text_file_filename:
        print("Writing output to {}".format(write_to_text_file_filename))
        with open(write_to_text_file_filename, "w") as output_file:
            output_file.write(extracted_data)
    else:
        print(extracted_data)
def connect_coarse_lake_catchments_driver(coarse_catchments_filepath,
                                          lake_parameters_filepath,
                                          basin_catchment_numbers_filepath,
                                          river_directions_filepath,
                                          connected_coarse_catchments_out_filename,
                                          coarse_catchments_fieldname,
                                          connected_coarse_catchments_out_fieldname,
                                          basin_catchment_numbers_fieldname,
                                          river_directions_fieldname,
                                          cumulative_flow_filepath = None,
                                          connected_cumulative_flow_out_filepath=None,
                                          cumulative_flow_fieldname = None,
                                          connected_cumulative_flow_out_fieldname=None,
                                          scale_factor = 3):
    """Connect coarse catchments across lakes and write the result.

    Loads the coarse catchments, the lake parameter fields needed to trace
    lake outflow redirects, and the coarse river directions, then calls
    connect_coarse_lake_catchments and writes the connected catchments out.
    When a cumulative flow file is supplied the cumulative flow is corrected
    at the same time and written to its own output file.

    Arguments:
    coarse_catchments_filepath: string; full path to the coarse catchments file
    lake_parameters_filepath: string; full path to the lake parameters file
    basin_catchment_numbers_filepath: string; full path to the basin catchment
        numbers file
    river_directions_filepath: string; full path to the coarse river
        directions file
    connected_coarse_catchments_out_filename: string; full path to write the
        connected catchments to
    coarse_catchments_fieldname: string; field name of the coarse catchments
    connected_coarse_catchments_out_fieldname: string; name to give the output
        catchments field
    basin_catchment_numbers_fieldname: string; field name of the basin
        catchment numbers
    river_directions_fieldname: string; field name of the river directions
    cumulative_flow_filepath: string or None; optional full path to a
        cumulative flow file (enables cumulative flow correction)
    connected_cumulative_flow_out_filepath: string or None; full path to write
        the corrected cumulative flow to (used only when a cumulative flow
        file is given)
    cumulative_flow_fieldname: string or None; field name of the cumulative
        flow
    connected_cumulative_flow_out_fieldname: string or None; name to give the
        corrected cumulative flow output field
    scale_factor: integer; ratio of fine to coarse grid resolution
    Returns: nothing
    """
    coarse_catchments = iodriver.advanced_field_loader(coarse_catchments_filepath,
                                                       field_type='Generic',
                                                       fieldname=\
                                                       coarse_catchments_fieldname)
    basin_catchment_numbers = iodriver.advanced_field_loader(basin_catchment_numbers_filepath,
                                                             field_type='Generic',
                                                             fieldname=\
                                                             basin_catchment_numbers_fieldname)
    lake_centers = iodriver.advanced_field_loader(lake_parameters_filepath,
                                                  field_type='Generic',
                                                  fieldname=
                                                  "lake_centers")
    flood_next_cell_index_lat = iodriver.advanced_field_loader(lake_parameters_filepath,
                                                               field_type='Generic',
                                                               fieldname=
                                                               "flood_next_cell_lat_index")
    flood_next_cell_index_lon = iodriver.advanced_field_loader(lake_parameters_filepath,
                                                               field_type='Generic',
                                                               fieldname=
                                                               "flood_next_cell_lon_index")
    flood_redirect_lat = iodriver.advanced_field_loader(lake_parameters_filepath,
                                                        field_type='Generic',
                                                        fieldname=
                                                        "flood_redirect_lat_index")
    flood_redirect_lon = iodriver.advanced_field_loader(lake_parameters_filepath,
                                                        field_type='Generic',
                                                        fieldname=
                                                        "flood_redirect_lon_index")
    additional_flood_redirect_lat = iodriver.advanced_field_loader(lake_parameters_filepath,
                                                                   field_type='Generic',
                                                                   fieldname=
                                                                   "additional_flood_redirect_lat_index")
    additional_flood_redirect_lon = iodriver.advanced_field_loader(lake_parameters_filepath,
                                                                   field_type='Generic',
                                                                   fieldname=
                                                                   "additional_flood_redirect_lon_index")
    local_redirect = iodriver.advanced_field_loader(lake_parameters_filepath,
                                                    field_type='Generic',
                                                    fieldname=\
                                                    "flood_local_redirect")
    additional_local_redirect = iodriver.advanced_field_loader(lake_parameters_filepath,
                                                               field_type='Generic',
                                                               fieldname=\
                                                               "additional_flood_local_redirect")
    merge_types = iodriver.advanced_field_loader(lake_parameters_filepath,
                                                 field_type='Generic',
                                                 fieldname=\
                                                 "merge_points")
    river_directions = iodriver.advanced_field_loader(river_directions_filepath,
                                                      field_type='Generic',
                                                      fieldname=\
                                                      river_directions_fieldname)
    if cumulative_flow_filepath is not None:
        cumulative_flow = iodriver.advanced_field_loader(cumulative_flow_filepath,
                                                         field_type='Generic',
                                                         fieldname=\
                                                         cumulative_flow_fieldname)
    # Cumulative flow correction is enabled exactly when a cumulative flow
    # file was supplied.
    catchments, corrected_cumulative_flow = \
        connect_coarse_lake_catchments(coarse_catchments,lake_centers,basin_catchment_numbers,
                                       flood_next_cell_index_lat,flood_next_cell_index_lon,
                                       flood_redirect_lat,flood_redirect_lon,
                                       additional_flood_redirect_lat,
                                       additional_flood_redirect_lon,
                                       local_redirect,additional_local_redirect,
                                       merge_types,river_directions,scale_factor,
                                       cumulative_flow=(cumulative_flow
                                                        if cumulative_flow_filepath is not None
                                                        else None),
                                       correct_cumulative_flow=(True
                                                                if cumulative_flow_filepath is not None
                                                                else False))
    iodriver.advanced_field_writer(connected_coarse_catchments_out_filename,
                                   field=catchments,
                                   fieldname=connected_coarse_catchments_out_fieldname)
    if cumulative_flow_filepath is not None:
        iodriver.advanced_field_writer(connected_cumulative_flow_out_filepath,
                                       field=corrected_cumulative_flow,
                                       fieldname=connected_cumulative_flow_out_fieldname)
def advanced_drive_orography_upscaling(input_fine_orography_file,
                                       output_coarse_orography_file,
                                       input_orography_fieldname,
                                       output_coarse_orography_fieldname,
                                       landsea_file=None,
                                       true_sinks_file=None,
                                       landsea_fieldname=None,
                                       true_sinks_fieldname=None,
                                       upscaling_parameters_filename=None,
                                       scaling_factor=3):
    """Upscale a fine orography to a coarser grid via the Tarasov-style algorithm.

    Reads the upscaling tuning parameters from a config file when one is
    given (otherwise uses built-in defaults), derives the coarse grid from
    the fine grid and the scaling factor, runs the orography upscaling
    wrapper and writes the coarse orography out.

    Arguments:
    input_fine_orography_file: string; full path to the fine orography file
    output_coarse_orography_file: string; full path to write the upscaled
        orography to
    input_orography_fieldname: string; field name of the input orography
    output_coarse_orography_fieldname: string; name to give the output field
    landsea_file: string or None; optional full path to a fine land-sea mask
        (an empty mask is used when None)
    true_sinks_file: string or None; optional full path to a fine true sinks
        field (an empty field is used when None)
    landsea_fieldname: string; field name of the land-sea mask
    true_sinks_fieldname: string; field name of the true sinks field
    upscaling_parameters_filename: string or None; optional config file with
        an "orography_upscaling_parameters" section
    scaling_factor: integer; ratio of fine to coarse grid resolution
    Returns: nothing
    """
    if upscaling_parameters_filename:
        config = read_and_validate_config(upscaling_parameters_filename)
        method = config.getint("orography_upscaling_parameters", "method")
        add_slope_in = config.getboolean("orography_upscaling_parameters",
                                         "add_slope_in")
        epsilon_in = config.getfloat("orography_upscaling_parameters",
                                     "epsilon_in")
        tarasov_separation_threshold_for_returning_to_same_edge_in =\
            config.getint("orography_upscaling_parameters",
                          "tarasov_separation_threshold_for_returning_to_same_edge_in")
        tarasov_min_path_length_in = config.getfloat(
            "orography_upscaling_parameters", "tarasov_min_path_length_in")
        tarasov_include_corners_in_same_edge_criteria_in = \
            config.getboolean("orography_upscaling_parameters",
                              "tarasov_include_corners_in_same_edge_criteria_in")
    else:
        #use defaults
        method = 1
        add_slope_in = False
        epsilon_in = 0.1
        tarasov_separation_threshold_for_returning_to_same_edge_in = 5
        tarasov_min_path_length_in = 2.0
        tarasov_include_corners_in_same_edge_criteria_in = False
    input_orography = iodriver.advanced_field_loader(
        input_fine_orography_file,
        field_type='Orography',
        fieldname=input_orography_fieldname)
    # Derive the coarse grid dimensions/coordinates from the fine grid.
    nlat_fine, nlon_fine = input_orography.get_grid_dimensions()
    lat_pts_fine, lon_pts_fine = input_orography.get_grid_coordinates()
    nlat_coarse,nlon_coarse,lat_pts_coarse,lon_pts_coarse = \
        coordinate_scaling_utilities.generate_coarse_coords(nlat_fine,nlon_fine,
                                                            lat_pts_fine,lon_pts_fine,
                                                            scaling_factor)
    output_orography = field.makeEmptyField(field_type='Orography',
                                            dtype=np.float64,
                                            grid_type='LatLong',
                                            nlat=nlat_coarse,
                                            nlong=nlon_coarse)
    output_orography.set_grid_coordinates([lat_pts_coarse, lon_pts_coarse])
    if landsea_file:
        landsea_mask = iodriver.advanced_field_loader(
            landsea_file,
            field_type='Generic',
            fieldname=landsea_fieldname)
    else:
        landsea_mask = field.makeEmptyField(field_type='Generic',
                                            dtype=np.int32,
                                            grid_type='LatLong',
                                            nlat=nlat_fine,
                                            nlong=nlon_fine)
    if true_sinks_file:
        true_sinks = iodriver.advanced_field_loader(
            true_sinks_file,
            field_type='Generic',
            fieldname=true_sinks_fieldname)
    else:
        true_sinks = field.makeEmptyField(field_type='Generic',
                                          dtype=np.int32,
                                          grid_type='LatLong',
                                          nlat=nlat_fine,
                                          nlong=nlon_fine)
    # Bug fix: the original test `not dtype is np.float64()` compared the
    # dtype object with a float64 *scalar instance* by identity, which is
    # never true, so the conversion ran unconditionally.  Compare against
    # the type itself so the conversion only happens when actually needed.
    if input_orography.get_data().dtype != np.float64:
        input_orography.change_dtype(np.float64)
        #Make sure old data type array is flushed out of memory immediately
        gc.collect()
    upscale_orography_wrapper.upscale_orography(orography_in=input_orography.get_data(),
                                                orography_out=output_orography.get_data(),
                                                method=method,
                                                landsea_in=landsea_mask.get_data(),
                                                true_sinks_in=true_sinks.get_data(),
                                                add_slope_in=add_slope_in,
                                                epsilon_in=epsilon_in,
                                                tarasov_separation_threshold_for_returning_to_same_edge_in=\
                                                tarasov_separation_threshold_for_returning_to_same_edge_in,
                                                tarasov_min_path_length_in=tarasov_min_path_length_in,
                                                tarasov_include_corners_in_same_edge_criteria_in=\
                                                tarasov_include_corners_in_same_edge_criteria_in)
    # Bug fix: output_coarse_orography_fieldname was accepted but never used;
    # pass it through so the output variable gets the requested name.
    iodriver.advanced_field_writer(output_coarse_orography_file, output_orography,
                                   fieldname=output_coarse_orography_fieldname)