def test_scale_per_time_interval_apply_night_time_correction():
    """Scaling Lde by 2.1% with the Lnight time correction applied must yield
    strictly higher Lden values than the same scaling without the correction."""
    # Path to the Envira files
    file_paths = abs_path('data/H_500_00_doc29')

    # Build a hybride meteotoeslag grid for each unit
    meteotoeslag = {}
    for unit in ['Lden', 'Lnight']:
        pattern = r'[\w\d\s]+{}[\w\d\s]+\.dat'.format(unit)
        meteotoeslag[unit] = Grid.read_enviras(
            file_paths, pattern).meteotoeslag_grid_from_method('hybride')

    # Scale the meteotoeslag by 2.1%, once with and once without the
    # lnight time correction
    scaled_with_correction = meteotoeslag['Lden'].copy().scale_per_time_interval(
        meteotoeslag['Lnight'], scale_de=1.021)
    scaled_without_correction = meteotoeslag['Lden'].copy().scale_per_time_interval(
        meteotoeslag['Lnight'], scale_de=1.021,
        apply_lnight_time_correction=False)

    # Without the lnight time correction, the scaled Lden data should be lower
    assert (scaled_with_correction.data > scaled_without_correction.data).all()
def test_hg_multigrid():
    """Smoke test: the Hoeveelheid Geluid must be computable on a multigrid."""
    # Create a grid object from the Envira data files
    grid = Grid.read_enviras(abs_path('data/MINIMER2015'), r'[\w\d\s]+\.dat')

    # Calculate the Hoeveelheid Geluid
    grid.hg()
def test_meteotoeslag_from_years_doubles():
    """Smoke test: compute the meteotoeslag from a year list containing the
    same year 32 times.

    NOTE(review): no assertion follows the call — presumably the intent is
    that passing duplicate years raises (or is tolerated); confirm against
    ``meteotoeslag_from_years`` and wrap in ``pytest.raises`` if it should fail.
    """
    # Create a grid object from the Envira data files
    grid = Grid.read_enviras(abs_path('data/MINIMER2015'), r'[\w\d\s]+\.dat')

    # Calculate the meteotoeslag with 32 copies of the year 1981
    grid.meteotoeslag_from_years(np.ones((32,), dtype=int) * 1981)
def test_meteotoeslag_from_years():
    """Smoke test: compute the meteotoeslag from an explicit list of years."""
    # Create a grid object from the Envira data files
    grid = Grid.read_enviras(abs_path('data/MINIMER2015'), r'[\w\d\s]+\.dat')

    # Calculate the meteotoeslag for a hand-picked selection of years
    grid.meteotoeslag_from_years([1981, 1984, 1993, 1994, 1996, 2000, 2002, 2010])
def test_statistics_multigrid():
    """The statistics of a multigrid should be returned as a dict."""
    # Create a grid object from the Envira data files
    grid = Grid.read_enviras(abs_path('data/MINIMER2015'), r'[\w\d\s]+\.dat')

    # Calculate the statistics of the multigrid
    stats = Grid.statistics(grid)

    # The statistics should come back as a dictionary
    assert isinstance(stats, dict)
def test_read_enviras():
    """Reading two Envira files should store two data arrays and two info
    entries, both as lists."""
    # Get the path to the Envira files
    file_paths = abs_path('data/')

    # Pattern matching the 2016 and 2017 Lnight grids
    pattern = r'GP2018 - Lnight y201[67].dat'

    # Create a grid object from the data file
    grid = Grid.read_enviras(file_paths, pattern)

    # Check if the data is stored correctly
    assert isinstance(grid.data, list) and len(grid.data) == 2
    # Bug fix: this previously re-checked len(grid.data), so the length of
    # grid.info was never actually verified
    assert isinstance(grid.info, list) and len(grid.info) == 2
def test_statistics_nan():
    """Smoke test: statistics must still be computable after the grid data
    is replaced by NaN."""
    # Create a grid object from the Envira data files
    grid = Grid.read_enviras(abs_path('data/MINIMER2015'), r'[\w\d\s]+\.dat')

    # Replace the data with a single NaN value
    grid.data = np.nan

    # Check if the statistics can be calculated
    Grid.statistics(grid)
def test_scale_per_time_interval_incompatible_grids():
    """Scale an Lden grid against an Lnight grid with a different resolution.

    NOTE(review): no exception assertion is present — presumably this call is
    expected to raise for incompatible grids; verify and consider wrapping in
    ``pytest.raises``.
    """
    # Path to the Envira files
    file_paths = abs_path('data/H_500_00_doc29')

    # Build a hybride meteotoeslag grid for each unit
    meteotoeslag = {}
    for unit in ['Lden', 'Lnight']:
        pattern = r'[\w\d\s]+{}[\w\d\s]+\.dat'.format(unit)
        meteotoeslag[unit] = Grid.read_enviras(
            file_paths, pattern).meteotoeslag_grid_from_method('hybride')

    # Scale with an Lnight grid refined to half the cell size, so the two
    # grids no longer match
    meteotoeslag['Lden'].copy().scale_per_time_interval(
        meteotoeslag['Lnight'].refine(.5))
def test_meteotoeslag_from_method():
    """The hybride meteotoeslag should select 32 years and bound the data."""
    # Create a grid object from the Envira data files
    grid = Grid.read_enviras(abs_path('data/MINIMER2015'), r'[\w\d\s]+\.dat')

    # Calculate the meteotoeslag
    meteotoeslag, meteo_years = grid.meteotoeslag_from_method('hybride')

    # Exactly 32 meteorological years should be selected
    assert meteo_years.shape == (32,)
    # The meteotoeslag grid has the same shape as the yearly grids
    assert grid.data[0].shape == meteotoeslag.shape
    # The meteotoeslag is an upper bound for the yearly data
    assert np.all(grid.data[0] <= meteotoeslag)
def example_2():
    """Example: plot the 48 dB(A) Lden contour of a forecast grid and save it
    as a PDF."""
    # Collect a Grid from the Lden Envira files
    grid = Grid.read_enviras('../tests/data/MER2019 H_500_doc29_VVR',
                             r'[\w\d\s]+{}[\w\d\s]+\.dat'.format('Lden'))

    # Create a figure
    plot = GridPlot(grid)

    # Add the 48 dB contour (the comment previously claimed 58 dB, which
    # contradicted the code)
    plot.add_contours(48, default['kleuren']['schipholblauw'],
                      default['kleuren']['middagblauw'])

    # Save the figure
    plot.save('figures/plot_grid_example_2.pdf')

    # And show the plot
    plot.show()
def test_scale_per_time_interval_decrease():
    """A scale factor below 1 for Lde must decrease the Lden data."""
    # Path to the Envira files
    file_paths = abs_path('data/H_500_00_doc29')

    # Build a hybride meteotoeslag grid for each unit
    meteotoeslag = {}
    for unit in ['Lden', 'Lnight']:
        pattern = r'[\w\d\s]+{}[\w\d\s]+\.dat'.format(unit)
        meteotoeslag[unit] = Grid.read_enviras(
            file_paths, pattern).meteotoeslag_grid_from_method('hybride')

    # Scale the meteotoeslag with a scaling factor below 1
    scaled = meteotoeslag['Lden'].copy().scale_per_time_interval(
        meteotoeslag['Lnight'], scale_de=.9)

    # The Lden data should decrease due to the decrease of Lde
    assert (scaled.data < meteotoeslag['Lden'].data).all()
def test_scale_per_time_interval_no_scale():
    """Scaling with default (unit) factors must leave the Lden data unchanged."""
    # Path to the Envira files
    file_paths = abs_path('data/H_500_00_doc29')

    # Build a hybride meteotoeslag grid for each unit
    meteotoeslag = {}
    for unit in ['Lden', 'Lnight']:
        pattern = r'[\w\d\s]+{}[\w\d\s]+\.dat'.format(unit)
        meteotoeslag[unit] = Grid.read_enviras(
            file_paths, pattern).meteotoeslag_grid_from_method('hybride')

    # Scale the meteotoeslag without actually scaling
    scaled = meteotoeslag['Lden'].copy().scale_per_time_interval(
        meteotoeslag['Lnight'])

    # The values should still be the same (to 12 decimals)
    np.testing.assert_almost_equal(meteotoeslag['Lden'].data, scaled.data, 12)
def test_read_enviras_inconsistent_data():
    """Reading Envira files with inconsistent headers must raise a ValueError."""
    # Get the path to the Envira files
    directory = abs_path('data/')

    # Pattern matching both the regular and the inconsistent 'r' variant of
    # the 2016 Lnight file
    pattern = r'GP2018 - Lnight y2016r?.dat'

    try:
        # Create a grid object from the data file
        Grid.read_enviras(directory, pattern)
    except ValueError:
        # The inconsistency was detected; verify the pattern did match both files
        matched = [f for f in os.listdir(directory) if re.search(pattern, f)]
        assert matched == ['GP2018 - Lnight y2016.dat',
                           'GP2018 - Lnight y2016r.dat']
    else:
        # Reaching this point means the inconsistency went undetected
        assert False
def test_scale_multigrid():
    """Doubling the traffic must add 10*log10(2) dB to the meteotoeslag and
    leave the selected meteorological years unchanged."""
    # Create a grid object from the Envira data files
    grid = Grid.read_enviras(abs_path('data/MINIMER2015'), r'[\w\d\s]+\.dat')

    # Calculate the meteotoeslag before scaling
    meteotoeslag, meteo_years = grid.meteotoeslag_from_method('hybride')

    # Scale the grid with a factor 2
    grid.scale(2.)

    # Calculate the meteotoeslag after scaling
    scaled_meteotoeslag, scaled_meteo_years = grid.meteotoeslag_from_method('hybride')

    # The year selection must not change; the levels shift by 10*log10(2) dB
    assert np.all(meteo_years == scaled_meteo_years)
    np.testing.assert_equal(scaled_meteotoeslag, meteotoeslag + 10 * np.log10(2))
def test_gwc():
    """Validate the gelijkwaardigheidscriteria (GWC) against stored CSV
    verification data."""
    # Get the verification data
    gwc_verification = pd.read_csv(
        abs_path('data/H_500_00_doc29_gwc_verification.csv'), index_col=[0])
    gwc_description_verification = pd.read_csv(
        abs_path('data/H_500_00_doc29_gwc_description_verification.csv'),
        index_col=[0])

    # Set the dose-effect relationship arguments
    de_kwargs = dict(de='ges2002', max_noise_level=65)

    # Create a wbs object from the data file
    wbs = WBS.read_file(abs_path('../data/wbs2005.h5'))

    # Get the path to the Envira files
    file_paths = abs_path('data/H_500_00_doc29')

    # Build the multigrids and the hybride meteotoeslag grids per unit
    grids = {}
    meteotoeslag = {}
    for unit in ['Lden', 'Lnight']:
        pattern = r'[\w\d\s]+{}[\w\d\s]+\.dat'.format(unit)
        grids[unit] = Grid.read_enviras(file_paths, pattern)
        meteotoeslag[unit] = grids[unit].meteotoeslag_grid_from_method('hybride')

    # Calculate the gelijkwaardigheidscriteria (GWC) for the multigrids
    gwc = wbs.gwc(grids['Lden'], grids['Lnight'], **de_kwargs)

    # Calculate the GWC for the meteotoeslag grids (smoke call; note that the
    # dose-effect kwargs are not applied here)
    meteo_gwc = wbs.gwc(lden_grid=meteotoeslag['Lden'],
                        lnight_grid=meteotoeslag['Lnight'])

    # Calculate the GWC statistics
    gwc_statistics = gwc.agg(['mean', 'min', 'max'])

    # The statistics, rounded to hundreds, should match the description
    # verification data
    pd.testing.assert_series_equal(
        gwc_statistics['w58den'].round(-2),
        gwc_description_verification.loc['Won 58 dB(A) Lden',
                                         gwc_statistics.index],
        check_names=False)
    pd.testing.assert_series_equal(
        gwc_statistics['eh48den'].round(-2),
        gwc_description_verification.loc['EGH 48 dB(A) Lden',
                                         gwc_statistics.index],
        check_names=False)
    pd.testing.assert_series_equal(
        gwc_statistics['w48n'].round(-2),
        gwc_description_verification.loc['Won 48 dB(A) Lnight',
                                         gwc_statistics.index],
        check_names=False)
    pd.testing.assert_series_equal(
        gwc_statistics['sv40n'].round(-2),
        gwc_description_verification.loc['SV 40 dB(A) Lnight',
                                         gwc_statistics.index],
        check_names=False)

    # The per-year GWC values should match the verification data
    pd.testing.assert_series_equal(gwc['w58den'].sort_index(),
                                   gwc_verification['w58den'],
                                   check_names=False)
    pd.testing.assert_series_equal(gwc['eh48den'].sort_index(),
                                   gwc_verification['egh48den'],
                                   check_names=False)  # error
    pd.testing.assert_series_equal(gwc['w48n'].sort_index(),
                                   gwc_verification['w48n'],
                                   check_names=False)
    pd.testing.assert_series_equal(gwc['sv40n'].sort_index(),
                                   gwc_verification['sv40n'],
                                   check_names=False)  # error
def test_grid_relative_den_norm_performance():
    """
    Integration test based on the example provided to Robert Koster by Ed
    Gordijn on April 1st 2019.

    This example shows an efficient way to determine the maximum volume of
    traffic that can fit within the GWC bounds. It uses a routine to search
    for zeros, which is the case when there is no room for additional traffic.
    """

    # ------------------------------------------------------------------------
    # Directories and paths
    # ------------------------------------------------------------------------
    forecast_directory = abs_path('data/MER2019 H_500_doc29_VVR')
    wbs_file = abs_path('../data/wbs2018.h5')

    def load_validation(name):
        # Every validation file is a flat binary dump of a float array
        return np.fromfile(
            abs_path('data/validation_schaal_relatief_norm_etmaal/' + name))

    # ------------------------------------------------------------------------
    # Read Grid
    # ------------------------------------------------------------------------

    # Create a grid object from the Lden data files and scale it with 1.0319
    den_grids = Grid.read_enviras(forecast_directory,
                                  r'[\w\d\s]+{}[\w\d\s]+\.dat'.format('Lden'))
    den_grids.scale(1.0319)

    # Collect the Lden data, statistics and meteotoeslag
    den_data = np.array(den_grids.data)
    den_statistics = den_grids.statistics()
    den_meteotoeslag = den_grids.meteotoeslag_grid_from_method('hybride')

    # Create a grid object from the Lnight data files and scale it with 1.0121
    night_grids = Grid.read_enviras(
        forecast_directory, r'[\w\d\s]+{}[\w\d\s]+\.dat'.format('Lnight'))
    night_grids.scale(1.0121)

    # Collect the Lnight data, statistics and meteotoeslag
    night_data = np.array(night_grids.data)
    night_statistics = night_grids.statistics()
    night_meteotoeslag = night_grids.meteotoeslag_grid_from_method('hybride')

    # ------------------------------------------------------------------------
    # Validate the grid data against the stored reference arrays
    # ------------------------------------------------------------------------
    np.testing.assert_almost_equal(
        load_validation('dat_den_500k_dat.npy').reshape(den_data.shape),
        den_data)
    np.testing.assert_almost_equal(
        load_validation('dat_den_500k_mm.npy').reshape(
            den_meteotoeslag.data.shape),
        den_meteotoeslag.data)
    for key in ['dhi', 'dlo', 'mean', 'std']:
        np.testing.assert_almost_equal(
            load_validation('dat_den_500k_{}.npy'.format(key)).reshape(
                den_statistics[key].data.shape),
            den_statistics[key].data)
    np.testing.assert_almost_equal(
        load_validation('dat_night_500k_dat.npy').reshape(night_data.shape),
        night_data)
    np.testing.assert_almost_equal(
        load_validation('dat_night_500k_mm.npy').reshape(
            night_meteotoeslag.data.shape),
        night_meteotoeslag.data)
    for key in ['dhi', 'dlo', 'mean', 'std']:
        np.testing.assert_almost_equal(
            load_validation('dat_night_500k_{}.npy'.format(key)).reshape(
                night_statistics[key].data.shape),
            night_statistics[key].data)

    # ------------------------------------------------------------------------
    # Read the WBS file
    # ------------------------------------------------------------------------
    wbs = WBS.read_file(wbs_file)

    # ------------------------------------------------------------------------
    # Interpolate the noise levels for the WBS
    # ------------------------------------------------------------------------
    interp_den = den_meteotoeslag.interpolation_function()
    interp_night = night_meteotoeslag.interpolation_function()

    wbs.add_noise_from_grid(den_meteotoeslag)
    wbs.add_noise_from_grid(night_meteotoeslag)

    # ------------------------------------------------------------------------
    # Validate the interpolated grid data
    # ------------------------------------------------------------------------
    np.testing.assert_almost_equal(interp_den.get_coeffs(),
                                   load_validation('interp_den_coeffs.npy'))
    np.testing.assert_almost_equal(interp_den.get_knots()[0],
                                   load_validation('interp_den_knots_0.npy'))
    np.testing.assert_almost_equal(interp_den.get_knots()[1],
                                   load_validation('interp_den_knots_1.npy'))
    np.testing.assert_almost_equal(interp_den.get_residual(),
                                   load_validation('interp_den_residual.npy')[0])
    np.testing.assert_almost_equal(wbs.data['Lden'].values,
                                   load_validation('wbs_den.npy'))
    np.testing.assert_almost_equal(interp_night.get_coeffs(),
                                   load_validation('interp_night_coeffs.npy'))
    np.testing.assert_almost_equal(interp_night.get_knots()[0],
                                   load_validation('interp_night_knots_0.npy'))
    np.testing.assert_almost_equal(interp_night.get_knots()[1],
                                   load_validation('interp_night_knots_1.npy'))
    np.testing.assert_almost_equal(interp_night.get_residual(),
                                   load_validation('interp_night_residual.npy')[0])
    np.testing.assert_almost_equal(wbs.data['Lnight'].values,
                                   load_validation('wbs_night.npy'))

    # ------------------------------------------------------------------------
    # Get the optimal scaling factor that fits within the GWC
    # ------------------------------------------------------------------------
    norm = gwc['doc29_2018'].copy()

    # Run the function a single time for testing (smoke calls at the bracket
    # endpoints)
    a = relative_den_norm_performance(1, norm, wbs, den_meteotoeslag)
    b = relative_den_norm_performance(3, norm, wbs, den_meteotoeslag)

    # Get the optimal scale factor to apply
    scale_1 = brentq(relative_den_norm_performance, 1.0, 3.0, rtol=0.0001,
                     args=(norm, wbs, den_meteotoeslag))

    # Test if the result is equal to the validation case
    assert scale_1 == 1.2713069520185394

    # Apply the new scale factor to the meteotoeslag grids
    den_meteotoeslag.scale(scale_1)
    night_meteotoeslag.scale(scale_1)

    # Add the grids to the WBS
    wbs.add_noise_from_grid(den_meteotoeslag).add_noise_from_grid(
        night_meteotoeslag)

    # Count the homes and people above the GWC thresholds
    wden = wbs.count_homes_above(58, 'Lden')
    egh = wbs.count_annoyed_people(48)
    wn = wbs.count_homes_above(48, 'Lnight')
    sv = wbs.count_sleep_disturbed_people(40)

    # Validate the end results
    assert wden == 12001
    np.testing.assert_almost_equal(egh, 175405.58276207035, decimal=0)
    assert wn == 11188
    np.testing.assert_almost_equal(sv, 36117.424022735766, decimal=0)
def test_grid_scale_per_time_interval():
    """The Lden/Lnight-aware norm search must find a scale factor strictly
    inside the (1, 3) bracket."""

    # ------------------------------------------------------------------------
    # Directories and paths
    # ------------------------------------------------------------------------
    forecast_directory = abs_path('data/MER2019 H_500_doc29_VVR')
    wbs_file = abs_path('../data/wbs2018.h5')

    # ------------------------------------------------------------------------
    # Read the grids and calculate the meteotoeslag
    # ------------------------------------------------------------------------
    den_grids = Grid.read_enviras(forecast_directory,
                                  r'[\w\d\s]+{}[\w\d\s]+\.dat'.format('Lden'))
    den_meteotoeslag = den_grids.meteotoeslag_grid_from_method('hybride')

    night_grids = Grid.read_enviras(
        forecast_directory, r'[\w\d\s]+{}[\w\d\s]+\.dat'.format('Lnight'))
    night_meteotoeslag = night_grids.meteotoeslag_grid_from_method('hybride')

    # ------------------------------------------------------------------------
    # Read the WBS file and interpolate the noise levels for the WBS
    # ------------------------------------------------------------------------
    wbs = WBS.read_file(wbs_file)
    wbs.add_noise_from_grid(den_meteotoeslag)
    wbs.add_noise_from_grid(night_meteotoeslag)

    # ------------------------------------------------------------------------
    # Get the optimal scaling factor that fits within the GWC
    # ------------------------------------------------------------------------
    norm = gwc['doc29_2018'].copy()

    # Run the function a single time for testing (smoke calls)
    a = relative_den_norm_performance(1, norm, wbs, den_meteotoeslag)
    b = relative_den_norm_performance(3, norm, wbs, den_meteotoeslag)
    c = relative_den_norm_performance(3, norm, wbs, den_meteotoeslag,
                                      night_grid=night_meteotoeslag,
                                      scale_de=1,
                                      apply_lnight_time_correction=False)

    # Run the brentq function with the night grid included
    scale = brentq(relative_den_norm_performance, 1.0, 3.0, rtol=0.0001,
                   args=(norm, wbs, den_meteotoeslag, night_meteotoeslag, 1,
                         None, False))

    # The optimum must lie strictly inside the search bracket
    assert scale < 3
    assert scale > 1