def test_that_file_contents_are_valid(self):
    """Round-trip a dataset through write_netcdf and verify every field survives.

    Writes ``self.ds`` to ``self.file_name``, reloads it with the local
    loader, and checks that the variable name and all coordinate/value
    arrays come back unchanged.
    """
    dp.write_netcdf(self.ds, self.file_name)
    reloaded = local.load_file(self.file_name, self.ds.variable)

    # Variable name must survive the round trip.
    self.assertEqual(self.ds.variable, reloaded.variable)

    # Coordinates, timestamps, and data values must be element-for-element identical.
    for original, restored in ((self.ds.lats, reloaded.lats),
                               (self.ds.lons, reloaded.lons),
                               (self.ds.times, reloaded.times),
                               (self.ds.values, reloaded.values)):
        np.testing.assert_array_equal(original, restored)
def test_file_write(self):
    """Verify that write_netcdf actually creates a file on disk."""
    dp.write_netcdf(self.ds, self.file_name)
    file_exists = os.path.isfile(self.file_name)
    self.assertTrue(file_exists)
# Echo the target dataset names being processed.
for target_name in target_names:
    print(target_name)

""" Step 4: Checking and converting variable units """
# NOTE(review): "Step 4" is labeled before "Step 3" below — presumably the
# step numbering drifted during refactoring; confirm intended order.
print('Checking and converting variable units')
reference_dataset = dsp.variable_unit_conversion(reference_dataset)
for i, dataset in enumerate(target_datasets):
    target_datasets[i] = dsp.variable_unit_conversion(dataset)

# Normalize the working directory path to end with a separator.
# endswith() is safe on an empty string, unlike the original workdir[-1],
# which raised IndexError for workdir == ''.
workdir = config['workdir']
if not workdir.endswith('/'):
    workdir = workdir + '/'
# Create the directory tree with the stdlib instead of shelling out via
# os.system("mkdir -p " + ...), which was non-portable and unsafe for
# paths containing shell metacharacters. exist_ok avoids a race between
# the existence check and creation.
os.makedirs(workdir, exist_ok=True)

# Persist the unit-converted reference and target datasets to NetCDF,
# one file per dataset, named <DOMAIN>_<season>_<variable>_<name>.nc.
var_name = season + '_' + data_info[0]['variable_name']
dsp.write_netcdf(reference_dataset,
                 path=workdir + domain.upper() + '_' + var_name + '_' +
                 reference_name + '.nc')
for itarget, dataset in enumerate(target_datasets):
    dsp.write_netcdf(dataset,
                     path=workdir + domain.upper() + '_' + var_name + '_' +
                     target_names[itarget] + '.nc')

""" Step 3: Spatial regriding of the datasets """
print('Regridding datasets: {}'.format(config['regrid']))
# When not regridding onto the reference grid, bring the reference onto
# the common (new_lat, new_lon) grid as well.
if not config['regrid']['regrid_on_reference']:
    reference_dataset = dsp.spatial_regrid(reference_dataset, new_lat, new_lon)
    print('Reference dataset has been regridded')
for i, dataset in enumerate(target_datasets):
    target_datasets[i] = dsp.spatial_regrid(dataset, new_lat, new_lon,
                                            boundary_check=boundary_check)