def test_water_flux_unit_conversion_swe(self):
    '''Check swe values after conversion from m (all-ones data scales to 1000, i.e. +999).'''
    self.ten_year_dataset.variable = 'swe'
    self.ten_year_dataset.units = 'm'
    expected = 999 + self.ten_year_dataset.values
    dp.variable_unit_conversion(self.ten_year_dataset)
    np.testing.assert_array_equal(self.ten_year_dataset.values, expected)
def test_returned_variable_unit_swe(self):
    '''Check the variable name and unit after converting an swe dataset given in m.'''
    self.ten_year_dataset.variable = 'swe'
    self.ten_year_dataset.units = 'm'
    dp.variable_unit_conversion(self.ten_year_dataset)
    # Variable name is unchanged; only the unit string is rewritten.
    self.assertEqual(self.ten_year_dataset.variable, 'swe')
    self.assertEqual(self.ten_year_dataset.units, 'km')
def test_water_flux_unit_conversion_pr(self):
    '''Check pr values after conversion from kg m-2 s-1 (all-ones data scales to 86400, i.e. +86399).'''
    self.ten_year_dataset.variable = 'pr'
    self.ten_year_dataset.units = 'kg m-2 s-1'
    expected = 86399 + self.ten_year_dataset.values
    dp.variable_unit_conversion(self.ten_year_dataset)
    np.testing.assert_array_equal(self.ten_year_dataset.values, expected)
def test_returned_variable_unit_pr(self):
    '''Check the variable name and unit after converting a pr dataset given in kg m-2 s-1.'''
    self.ten_year_dataset.variable = 'pr'
    self.ten_year_dataset.units = 'kg m-2 s-1'
    dp.variable_unit_conversion(self.ten_year_dataset)
    # Variable name is unchanged; only the unit string is rewritten.
    self.assertEqual(self.ten_year_dataset.variable, 'pr')
    self.assertEqual(self.ten_year_dataset.units, 'mm/day')
def test_temp_unit_conversion(self):
    '''Check temperature values after conversion (all-ones data is offset by 273.15).'''
    shape = (len(self.ten_year_dataset.times),
             len(self.ten_year_dataset.lats),
             len(self.ten_year_dataset.lons))
    self.ten_year_dataset.values = np.ones(shape)
    expected = self.ten_year_dataset.values + 273.15
    dp.variable_unit_conversion(self.ten_year_dataset)
    np.testing.assert_array_equal(self.ten_year_dataset.values, expected)
def test_returned_variable_unit_kelvin(self):
    '''Check that a dataset already in K keeps its unit after conversion.'''
    self.ten_year_dataset.units = 'K'
    converted = dp.variable_unit_conversion(self.ten_year_dataset)
    self.assertEqual(converted.units, self.ten_year_dataset.units)
def test_returned_variable_unit_celsius(self):
    '''Check that a dataset whose original unit is Celsius is converted to K.'''
    dp.variable_unit_conversion(self.ten_year_dataset)
    self.assertEqual(self.ten_year_dataset.units, 'K')
print('Regridding datasets: {}'.format(config['regrid']))
# Regrid the reference only when it is not itself the regridding target.
if not config['regrid']['regrid_on_reference']:
    reference_dataset = dsp.spatial_regrid(reference_dataset, new_lat, new_lon)
    print('Reference dataset has been regridded')
for idx, dataset in enumerate(target_datasets):
    target_datasets[idx] = dsp.spatial_regrid(dataset, new_lat, new_lon,
                                              boundary_check=boundary_check)
    print('{} has been regridded'.format(target_names[idx]))

print('Propagating missing data information')
# One masking pass over the combined list keeps reference and targets
# on a shared missing-data mask.
masked_datasets = dsp.mask_missing_data([reference_dataset] + target_datasets)
reference_dataset = masked_datasets[0]
target_datasets = masked_datasets[1:]

""" Step 4: Checking and converting variable units """
print('Checking and converting variable units')
reference_dataset = dsp.variable_unit_conversion(reference_dataset)
for idx, dataset in enumerate(target_datasets):
    target_datasets[idx] = dsp.variable_unit_conversion(dataset)

print('Generating multi-model ensemble')
# An ensemble member only makes sense with at least two targets.
if len(target_datasets) >= 2:
    target_datasets.append(dsp.ensemble(target_datasets))
    target_names.append('ENS')

""" Step 5: Generate subregion average and standard deviation """
if config['use_subregions']:
    # sort the subregion by region names and make a list
    subregions = sorted(config['subregions'].items(),
                        key=operator.itemgetter(0))
    # number of subregions
    nsubregion = len(subregions)
""" Step 4: Spatial regriding of the reference datasets """ print 'Regridding datasets: ', config['regrid'] if not config['regrid']['regrid_on_reference']: ref_dataset = dsp.spatial_regrid(ref_dataset, new_lat, new_lon) print 'Reference dataset has been regridded' for idata,dataset in enumerate(model_datasets): model_datasets[idata] = dsp.spatial_regrid(dataset, new_lat, new_lon, boundary_check = boundary_check_model) print model_names[idata]+' has been regridded' print 'Propagating missing data information' ref_dataset = dsp.mask_missing_data([ref_dataset]+model_datasets)[0] model_datasets = dsp.mask_missing_data([ref_dataset]+model_datasets)[1:] """ Step 5: Checking and converting variable units """ print 'Checking and converting variable units' ref_dataset = dsp.variable_unit_conversion(ref_dataset) for idata,dataset in enumerate(model_datasets): model_datasets[idata] = dsp.variable_unit_conversion(dataset) print 'Generating multi-model ensemble' if len(model_datasets) >= 2.: model_datasets.append(dsp.ensemble(model_datasets)) model_names.append('ENS') """ Step 6: Generate subregion average and standard deviation """ if config['use_subregions']: # sort the subregion by region names and make a list subregions= sorted(config['subregions'].items(),key=operator.itemgetter(0)) # number of subregions
print 'Regridding datasets: ', config['regrid'] if not config['regrid']['regrid_on_reference']: obs_dataset = dsp.spatial_regrid(obs_dataset, new_lat, new_lon) print 'Reference dataset has been regridded' for i, dataset in enumerate(model_datasets): model_datasets[i] = dsp.spatial_regrid(dataset, new_lat, new_lon, boundary_check=boundary_check) print model_names[i] + ' has been regridded' print 'Propagating missing data information' obs_dataset = dsp.mask_missing_data([obs_dataset] + model_datasets)[0] model_datasets = dsp.mask_missing_data([obs_dataset] + model_datasets)[1:] """ Step 5: Checking and converting variable units """ print 'Checking and converting variable units' obs_dataset = dsp.variable_unit_conversion(obs_dataset) for idata, dataset in enumerate(model_datasets): model_datasets[idata] = dsp.variable_unit_conversion(dataset) print 'Generating multi-model ensemble' if len(model_datasets) >= 2.: model_datasets.append(dsp.ensemble(model_datasets)) model_names.append('ENS') """ Step 6: Generate subregion average and standard deviation """ if config['use_subregions']: # sort the subregion by region names and make a list subregions = sorted(config['subregions'].items(), key=operator.itemgetter(0)) # number of subregions nsubregion = len(subregions)
# Regrid the reference only when it is not itself the regridding target.
if not config['regrid']['regrid_on_reference']:
    reference_dataset = dsp.spatial_regrid(reference_dataset, new_lat, new_lon)
    print('Reference dataset has been regridded')
for idx, dataset in enumerate(target_datasets):
    target_datasets[idx] = dsp.spatial_regrid(dataset, new_lat, new_lon,
                                              boundary_check=boundary_check)
    print('{} has been regridded'.format(target_names[idx]))

print('Propagating missing data information')
# One masking pass over the combined list keeps reference and targets
# on a shared missing-data mask.
masked_datasets = dsp.mask_missing_data([reference_dataset] + target_datasets)
reference_dataset = masked_datasets[0]
target_datasets = masked_datasets[1:]

""" Step 4: Checking and converting variable units """
print('Checking and converting variable units')
reference_dataset = dsp.variable_unit_conversion(reference_dataset)
for idx, dataset in enumerate(target_datasets):
    target_datasets[idx] = dsp.variable_unit_conversion(dataset)

print('Generating multi-model ensemble')
# An ensemble member only makes sense with at least two targets.
if len(target_datasets) >= 2:
    target_datasets.append(dsp.ensemble(target_datasets))
    target_names.append('ENS')

""" Step 5: Generate subregion average and standard deviation """
if config['use_subregions']:
    # sort the subregion by region names and make a list
    subregions = sorted(config['subregions'].items(),
                        key=operator.itemgetter(0))
    # number of subregions
    nsubregion = len(subregions)