def test_header_attrs(self):
    """Check that global attributes are written and None-valued ones dropped."""
    import tempfile

    import xarray as xr
    from satpy import Scene

    scn = Scene()
    begin = datetime(2018, 5, 30, 10, 0)
    finish = datetime(2018, 5, 30, 10, 15)
    scn['test-array'] = xr.DataArray([1, 2, 3],
                                     attrs=dict(start_time=begin,
                                                end_time=finish))
    try:
        handle, filename = tempfile.mkstemp()
        os.close(handle)
        scn.save_datasets(filename=filename,
                          header_attrs={'sensor': 'SEVIRI', 'orbit': None},
                          writer='cf')
        import h5netcdf as nc4
        with nc4.File(filename) as f:
            self.assertTrue(f.attrs['sensor'] == 'SEVIRI')
            self.assertTrue('sensor' in f.attrs.keys())
            self.assertTrue('orbit' not in f.attrs.keys())
    finally:
        os.remove(filename)
def main():
    """Convert GEOCAT Level 1 and 2 files to AWIPS SCMI output via satpy."""
    import argparse

    from satpy import Scene
    from satpy.writers.scmi import add_backend_argument_groups as add_writer_argument_groups

    parser = argparse.ArgumentParser(
        description="Convert GEOCAT Level 1 and 2 to AWIPS SCMI files")
    parser.add_argument('-v', '--verbose', dest='verbosity', action="count",
                        default=0,
                        help='each occurrence increases verbosity 1 level through '
                             'ERROR-WARNING-INFO-DEBUG (default INFO)')
    parser.add_argument('-l', '--log', dest="log_fn", default=None,
                        help="specify the log filename")
    subgroups = add_scene_argument_groups(parser)
    subgroups += add_writer_argument_groups(parser)
    args = parser.parse_args()

    # Split the parsed namespace back out by the argument group each
    # option was registered under.
    group_kwargs = [{action.dest: getattr(args, action.dest)
                     for action in group._group_actions}
                    for group in subgroups]
    scene_args, load_args, writer_init_args, writer_call_args = group_kwargs[:4]

    levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
    logging.basicConfig(level=levels[min(3, args.verbosity)],
                        filename=args.log_fn)

    scn = Scene(**scene_args)
    scn.load(load_args['datasets'])
    # Init-time and call-time writer options travel through one kwargs dict.
    writer_args = {**writer_init_args, **writer_call_args}
    scn.save_datasets(writer='scmi', **writer_args)
def test_encoding_kwarg(self):
    """Test 'encoding' keyword argument."""
    import xarray as xr
    from satpy import Scene

    scn = Scene()
    t0 = datetime(2018, 5, 30, 10, 0)
    t1 = datetime(2018, 5, 30, 10, 15)
    scn['test-array'] = xr.DataArray([1, 2, 3],
                                     attrs=dict(start_time=t0, end_time=t1))
    encoding = {'test-array': {'dtype': 'int8',
                               'scale_factor': 0.1,
                               'add_offset': 0.0,
                               '_FillValue': 3}}
    with TempFile() as filename:
        scn.save_datasets(filename=filename, encoding=encoding, writer='cf')
        with xr.open_dataset(filename, mask_and_scale=False) as f:
            self.assertTrue(np.all(f['test-array'][:] == [10, 20, 30]))
            self.assertTrue(f['test-array'].attrs['scale_factor'] == 0.1)
            self.assertTrue(f['test-array'].attrs['_FillValue'] == 3)
            # check that dtype behave as int8
            self.assertTrue(np.iinfo(f['test-array'][:].dtype).max == 127)
def test_bounds_missing_time_info(self):
    """Time bounds should come from the dataset that carries time attributes."""
    import tempfile

    import xarray as xr
    from satpy import Scene

    scn = Scene()
    start = datetime(2018, 5, 30, 10, 0)
    end = datetime(2018, 5, 30, 10, 15)
    data_a = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1)
    data_b = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1)
    time_coord = {'time': [np.datetime64('2018-05-30T10:05:00')]}
    scn['test-arrayA'] = xr.DataArray(data_a, dims=['x', 'y', 'time'],
                                      coords=time_coord,
                                      attrs=dict(start_time=start,
                                                 end_time=end))
    # B intentionally carries no start/end time attributes.
    scn['test-arrayB'] = xr.DataArray(data_b, dims=['x', 'y', 'time'],
                                      coords=time_coord)
    try:
        handle, filename = tempfile.mkstemp()
        os.close(handle)
        scn.save_datasets(filename=filename, writer='cf')
        import h5netcdf as nc4
        with nc4.File(filename) as f:
            self.assertTrue(all(f['time_bnds'][:] == np.array([-300., 600.])))
    finally:
        os.remove(filename)
def test_bounds_minimum(self):
    """Test minimum bounds."""
    import xarray as xr
    from satpy import Scene

    scn = Scene()
    start_timeA = datetime(2018, 5, 30, 10, 0)   # expected to be used
    end_timeA = datetime(2018, 5, 30, 10, 20)
    start_timeB = datetime(2018, 5, 30, 10, 3)
    end_timeB = datetime(2018, 5, 30, 10, 15)    # expected to be used
    time_coord = {'time': [np.datetime64('2018-05-30T10:05:00')]}
    for name, values, attrs in [
            ('test-arrayA', np.array([[1, 2], [3, 4]]).reshape(2, 2, 1),
             dict(start_time=start_timeA, end_time=end_timeA)),
            ('test-arrayB', np.array([[1, 2], [3, 5]]).reshape(2, 2, 1),
             dict(start_time=start_timeB, end_time=end_timeB))]:
        scn[name] = xr.DataArray(values, dims=['x', 'y', 'time'],
                                 coords=time_coord, attrs=attrs)
    with TempFile() as filename:
        scn.save_datasets(filename=filename, writer='cf')
        with xr.open_dataset(filename, decode_cf=False) as f:
            # Earliest start / earliest end win -> [-300, 600] s around 10:05.
            self.assertTrue(np.all(f['time_bnds'][:] == np.array([-300., 600.])))
def test_encoding_kwarg(self):
    """Check that passing 'encoding' to the cf writer is honoured."""
    import tempfile

    import xarray as xr
    from satpy import Scene

    scn = Scene()
    begin = datetime(2018, 5, 30, 10, 0)
    end = datetime(2018, 5, 30, 10, 15)
    scn['test-array'] = xr.DataArray([1, 2, 3],
                                     attrs=dict(start_time=begin,
                                                end_time=end))
    encoding = {'test-array': {'dtype': 'int8', 'scale_factor': 0.1,
                               'add_offset': 0.0, '_FillValue': 3}}
    try:
        handle, filename = tempfile.mkstemp()
        os.close(handle)
        scn.save_datasets(filename=filename, encoding=encoding, writer='cf')
        import h5netcdf as nc4
        with nc4.File(filename) as f:
            self.assertTrue(all(f['test-array'][:] == [10, 20, 30]))
            self.assertTrue(f['test-array'].attrs['scale_factor'] == 0.1)
            self.assertTrue(f['test-array'].attrs['_FillValue'] == 3)
            # check that dtype behave as int8
            self.assertTrue(np.iinfo(f['test-array'][:].dtype).max == 127)
    finally:
        os.remove(filename)
def test_encoding_kwarg(self):
    """Verify user-supplied variable encodings reach the written netCDF file."""
    import tempfile

    import xarray as xr
    from satpy import Scene

    scn = Scene()
    attrs = dict(start_time=datetime(2018, 5, 30, 10, 0),
                 end_time=datetime(2018, 5, 30, 10, 15))
    scn['test-array'] = xr.DataArray([1, 2, 3], attrs=attrs)
    try:
        handle, filename = tempfile.mkstemp()
        os.close(handle)
        scn.save_datasets(filename=filename,
                          encoding={'test-array': {'dtype': 'int8',
                                                   'scale_factor': 0.1,
                                                   'add_offset': 0.0,
                                                   '_FillValue': 3}},
                          writer='cf')
        import h5netcdf as nc4
        with nc4.File(filename) as f:
            values = f['test-array'][:]
            self.assertTrue(all(values == [10, 20, 30]))
            self.assertTrue(f['test-array'].attrs['scale_factor'] == 0.1)
            self.assertTrue(f['test-array'].attrs['_FillValue'] == 3)
            # check that dtype behave as int8
            self.assertTrue(np.iinfo(values.dtype).max == 127)
    finally:
        os.remove(filename)
def test_save_array(self):
    """Round-trip a small array (with DatasetID prerequisites) through cf."""
    import tempfile

    import xarray as xr
    from satpy import Scene

    scn = Scene()
    scn['test-array'] = xr.DataArray(
        [1, 2, 3],
        attrs=dict(start_time=datetime(2018, 5, 30, 10, 0),
                   end_time=datetime(2018, 5, 30, 10, 15),
                   prerequisites=[DatasetID('hej')]))
    try:
        handle, filename = tempfile.mkstemp()
        os.close(handle)
        scn.save_datasets(filename=filename, writer='cf')
        import h5netcdf as nc4
        with nc4.File(filename) as f:
            self.assertTrue(all(f['test-array'][:] == [1, 2, 3]))
            # Non-serializable attributes are written as their repr strings.
            expected_prereq = ("DatasetID(name='hej', wavelength=None, "
                               "resolution=None, polarization=None, "
                               "calibration=None, level=None, modifiers=())")
            self.assertEqual(f['test-array'].attrs['prerequisites'][0],
                             np.string_(expected_prereq))
    finally:
        os.remove(filename)
def test_bounds(self):
    """Test setting time bounds."""
    import xarray as xr
    from satpy import Scene

    scn = Scene()
    start_time = datetime(2018, 5, 30, 10, 0)
    end_time = datetime(2018, 5, 30, 10, 15)
    scn['test-array'] = xr.DataArray(
        np.array([[1, 2], [3, 4]]).reshape(2, 2, 1),
        dims=['x', 'y', 'time'],
        coords={'time': [np.datetime64('2018-05-30T10:05:00')]},
        attrs=dict(start_time=start_time, end_time=end_time))
    with TempFile() as filename:
        scn.save_datasets(filename=filename, writer='cf')
        # Check decoded time coordinates & bounds
        with xr.open_dataset(filename, decode_cf=True) as f:
            bounds_exp = np.array([[start_time, end_time]],
                                  dtype='datetime64[m]')
            np.testing.assert_array_equal(f['time_bnds'], bounds_exp)
            self.assertEqual(f['time'].attrs['bounds'], 'time_bnds')
        # Check raw time coordinates & bounds
        with xr.open_dataset(filename, decode_cf=False) as f:
            np.testing.assert_almost_equal(f['time_bnds'],
                                           [[-0.0034722, 0.0069444]])
    # User-specified time encoding should have preference
    with TempFile() as filename:
        time_units = 'seconds since 2018-01-01'
        scn.save_datasets(filename=filename,
                          encoding={'time': {'units': time_units}},
                          writer='cf')
        with xr.open_dataset(filename, decode_cf=False) as f:
            np.testing.assert_array_equal(f['time_bnds'],
                                          [[12909600, 12910500]])
def save_datasets(scene: Scene, tag, folder, save_nc=False):
    """Optionally write *scene* channels as PNG images and/or netCDF files."""
    pattern = tag + '{start_time:%Y%m%d_%H%M%S}_{name}'
    if SAVE_IMAGES:
        scene.save_datasets(datasets=all_channels, base_dir=folder,
                            writer='simple_image',
                            filename=pattern + '.png')
    if save_nc:
        scene.save_datasets(datasets=all_channels, base_dir=folder,
                            writer='cf',
                            filename=pattern + '.nc')
def test_groups(self):
    """Test creating a file with groups."""
    import xarray as xr
    from satpy import Scene

    # VIS/IR channels share a 2x2 grid; HRV uses a 3x3 grid, so it must
    # live in its own group (different projection coordinates).
    tstart = datetime(2019, 4, 1, 12, 0)
    tend = datetime(2019, 4, 1, 12, 15)
    data_visir = [[1, 2], [3, 4]]
    y_visir = [1, 2]
    x_visir = [1, 2]
    time_vis006 = [1, 2]
    time_ir_108 = [3, 4]
    data_hrv = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
    y_hrv = [1, 2, 3]
    x_hrv = [1, 2, 3]
    time_hrv = [1, 2, 3]
    scn = Scene()
    scn['VIS006'] = xr.DataArray(data_visir,
                                 dims=('y', 'x'),
                                 coords={'y': y_visir, 'x': x_visir,
                                         'acq_time': ('y', time_vis006)},
                                 attrs={'name': 'VIS006',
                                        'start_time': tstart,
                                        'end_time': tend})
    scn['IR_108'] = xr.DataArray(data_visir,
                                 dims=('y', 'x'),
                                 coords={'y': y_visir, 'x': x_visir,
                                         'acq_time': ('y', time_ir_108)},
                                 attrs={'name': 'IR_108',
                                        'start_time': tstart,
                                        'end_time': tend})
    scn['HRV'] = xr.DataArray(data_hrv,
                              dims=('y', 'x'),
                              coords={'y': y_hrv, 'x': x_hrv,
                                      'acq_time': ('y', time_hrv)},
                              attrs={'name': 'HRV',
                                     'start_time': tstart,
                                     'end_time': tend})
    with TempFile() as filename:
        scn.save_datasets(filename=filename, writer='cf',
                          groups={'visir': ['IR_108', 'VIS006'],
                                  'hrv': ['HRV']},
                          pretty=True)
        nc_root = xr.open_dataset(filename)
        # The root group carries only global attributes, no variables.
        self.assertIn('history', nc_root.attrs)
        self.assertSetEqual(set(nc_root.variables.keys()), set())

        nc_visir = xr.open_dataset(filename, group='visir')
        nc_hrv = xr.open_dataset(filename, group='hrv')
        # Conflicting 'acq_time' coords in the visir group are expected to
        # get name-prefixed variables.
        self.assertSetEqual(set(nc_visir.variables.keys()),
                            {'VIS006', 'IR_108', 'y', 'x',
                             'VIS006_acq_time', 'IR_108_acq_time'})
        self.assertSetEqual(set(nc_hrv.variables.keys()),
                            {'HRV', 'y', 'x', 'acq_time'})
        for tst, ref in zip([nc_visir['VIS006'], nc_visir['IR_108'],
                             nc_hrv['HRV']],
                            [scn['VIS006'], scn['IR_108'], scn['HRV']]):
            self.assertTrue(np.all(tst.data == ref.data))
        nc_root.close()
        nc_visir.close()
        nc_hrv.close()

    # Different projection coordinates in one group are not supported
    with TempFile() as filename:
        self.assertRaises(ValueError, scn.save_datasets,
                          datasets=['VIS006', 'HRV'],
                          filename=filename, writer='cf')
def test_header_attrs(self):
    """Check master attributes are set."""
    import xarray as xr
    from satpy import Scene

    scn = Scene()
    start_time = datetime(2018, 5, 30, 10, 0)
    end_time = datetime(2018, 5, 30, 10, 15)
    scn['test-array'] = xr.DataArray([1, 2, 3],
                                     attrs=dict(start_time=start_time,
                                                end_time=end_time))
    header_attrs = {'sensor': 'SEVIRI',
                    'orbit': 99999,
                    'none': None,
                    'list': [1, 2, 3],
                    'set': {1, 2, 3},
                    'dict': {'a': 1, 'b': 2},
                    'nested': {'outer': {'inner1': 1, 'inner2': 2}},
                    'bool': True,
                    'bool_': np.bool_(True)}
    with TempFile() as filename:
        scn.save_datasets(filename=filename, header_attrs=header_attrs,
                          flatten_attrs=True, writer='cf')
        with xr.open_dataset(filename) as f:
            self.assertIn('history', f.attrs)
            self.assertEqual(f.attrs['sensor'], 'SEVIRI')
            self.assertEqual(f.attrs['orbit'], 99999)
            np.testing.assert_array_equal(f.attrs['list'], [1, 2, 3])
            if sys.version_info.major == 3:
                self.assertEqual(f.attrs['set'], '{1, 2, 3}')
            else:
                # json module seems to encode sets differently in
                # Python 2 and 3
                self.assertEqual(f.attrs['set'], u'set([1, 2, 3])')
            # Nested mappings are flattened into '_'-joined keys;
            # booleans are serialized as json strings.
            for key, expected in (('dict_a', 1), ('dict_b', 2),
                                  ('nested_outer_inner1', 1),
                                  ('nested_outer_inner2', 2),
                                  ('bool', 'true'), ('bool_', 'true')):
                self.assertEqual(f.attrs[key], expected)
            self.assertTrue('none' not in f.attrs.keys())
def test_single_time_value(self):
    """Write a dataset with a scalar time coordinate and check the bounds."""
    import xarray as xr
    from satpy import Scene

    scn = Scene()
    scn['test-array'] = xr.DataArray(
        np.array([[1, 2], [3, 4]]),
        dims=['x', 'y'],
        coords={'time': np.datetime64('2018-05-30T10:05:00')},
        attrs=dict(start_time=datetime(2018, 5, 30, 10, 0),
                   end_time=datetime(2018, 5, 30, 10, 15)))
    with TempFile() as filename:
        scn.save_datasets(filename=filename, writer='cf')
        import h5netcdf as nc4
        with nc4.File(filename) as f:
            # Bounds are seconds relative to the 10:05 time coordinate.
            self.assertTrue(all(f['time_bnds'][:] == np.array([-300., 600.])))
def test_bounds(self):
    """Test setting time bounds."""
    import xarray as xr
    from satpy import Scene

    scn = Scene()
    scn['test-array'] = xr.DataArray(
        np.array([[1, 2], [3, 4]]).reshape(2, 2, 1),
        dims=['x', 'y', 'time'],
        coords={'time': [np.datetime64('2018-05-30T10:05:00')]},
        attrs=dict(start_time=datetime(2018, 5, 30, 10, 0),
                   end_time=datetime(2018, 5, 30, 10, 15)))
    with TempFile() as filename:
        scn.save_datasets(filename=filename, writer='cf')
        with xr.open_dataset(filename, decode_cf=False) as f:
            # Raw bounds are seconds around the 10:05 time coordinate.
            self.assertTrue(np.all(f['time_bnds'][:] == np.array([-300., 600.])))
def test_unlimited_dims_kwarg(self):
    """Test specification of unlimited dimensions."""
    import xarray as xr
    from satpy import Scene

    scn = Scene()
    scn['test-array'] = xr.DataArray(
        np.array([[1, 2], [3, 4]]),
        dims=['x', 'y'],
        coords={'time': np.datetime64('2018-05-30T10:05:00')},
        attrs=dict(start_time=datetime(2018, 5, 30, 10, 0),
                   end_time=datetime(2018, 5, 30, 10, 15)))
    with TempFile() as filename:
        scn.save_datasets(filename=filename, writer='cf',
                          unlimited_dims=['time'])
        with xr.open_dataset(filename) as f:
            self.assertSetEqual(f.encoding['unlimited_dims'], {'time'})
def test_save_with_compression(self):
    """Test saving an array with compression."""
    import xarray as xr
    from satpy import Scene

    scn = Scene()
    start_time = datetime(2018, 5, 30, 10, 0)
    end_time = datetime(2018, 5, 30, 10, 15)
    comp = {'zlib': True, 'complevel': 9}
    with mock.patch('satpy.writers.cf_writer.xr.Dataset') as xrdataset, \
            mock.patch('satpy.writers.cf_writer.make_time_bounds'):
        scn['test-array'] = xr.DataArray(
            [1, 2, 3],
            attrs=dict(start_time=start_time, end_time=end_time,
                       prerequisites=[DatasetID('hej')]))
        scn.save_datasets(filename='bla', writer='cf', compression=comp)
        # The second Dataset() construction receives the datasets being
        # written; their encoding must carry the compression settings.
        args, _ = xrdataset.call_args_list[1]
        self.assertDictEqual(args[0]['test-array'].encoding, comp)
def test_basic_write(self, tmpdir, src_dtype, dst_dtype, enhance):
    """Test writing data to disk."""
    src_data_arr = _create_fake_data_arr(dtype=src_dtype)
    scn = Scene()
    scn[src_data_arr.attrs["name"]] = src_data_arr
    scn.save_datasets(writer="binary", base_dir=str(tmpdir),
                      dtype=dst_dtype, enhance=enhance)
    exp_fn = tmpdir.join(
        "noaa-20_viirs_fake_p2g_name_20210101_120000_fake_area.dat")
    assert os.path.isfile(exp_fn)
    if dst_dtype is None:
        # Writer default: keep the source dtype, except float64 -> float32.
        dst_dtype = src_dtype if src_dtype != np.float64 else np.float32
    data = np.memmap(str(exp_fn), mode="r", dtype=dst_dtype)
    exp_data = self._generate_expected_output(src_data_arr, dst_dtype, enhance)
    np.testing.assert_allclose(data, exp_data, atol=2e-7)
def test_save_array(self):
    """Write a simple array with the cf writer and read it back."""
    import tempfile

    import xarray as xr
    from satpy import Scene

    scn = Scene()
    scn['test-array'] = xr.DataArray(
        [1, 2, 3],
        attrs=dict(start_time=datetime(2018, 5, 30, 10, 0),
                   end_time=datetime(2018, 5, 30, 10, 15)))
    try:
        handle, filename = tempfile.mkstemp()
        os.close(handle)
        scn.save_datasets(filename=filename, writer='cf')
        import h5netcdf as nc4
        with nc4.File(filename) as f:
            self.assertTrue(all(f['test-array'][:] == [1, 2, 3]))
    finally:
        os.remove(filename)
def test_header_attrs(self):
    """Check master attributes are set.

    Uses ``assertEqual``/``assertIn``/``assertNotIn`` instead of
    ``assertTrue(x == y)`` so a failure reports the offending values.
    """
    import xarray as xr
    from satpy import Scene

    scn = Scene()
    start_time = datetime(2018, 5, 30, 10, 0)
    end_time = datetime(2018, 5, 30, 10, 15)
    scn['test-array'] = xr.DataArray([1, 2, 3],
                                     attrs=dict(start_time=start_time,
                                                end_time=end_time))
    with TempFile() as filename:
        header_attrs = {'sensor': 'SEVIRI', 'orbit': None}
        scn.save_datasets(filename=filename,
                          header_attrs=header_attrs,
                          writer='cf')
        with xr.open_dataset(filename) as f:
            self.assertIn('sensor', f.attrs)
            self.assertEqual(f.attrs['sensor'], 'SEVIRI')
            # None-valued attributes cannot be represented in netCDF and
            # should have been dropped by the writer.
            self.assertNotIn('orbit', f.attrs)
def test_single_time_value(self):
    """Test setting a single time value."""
    import xarray as xr
    from satpy import Scene

    scn = Scene()
    start_time = datetime(2018, 5, 30, 10, 0)
    end_time = datetime(2018, 5, 30, 10, 15)
    scn['test-array'] = xr.DataArray(
        np.array([[1, 2], [3, 4]]),
        dims=['x', 'y'],
        coords={'time': np.datetime64('2018-05-30T10:05:00')},
        attrs=dict(start_time=start_time, end_time=end_time))
    with TempFile() as filename:
        scn.save_datasets(filename=filename, writer='cf')
        with xr.open_dataset(filename, decode_cf=True) as f:
            np.testing.assert_array_equal(f['time'],
                                          scn['test-array']['time'])
            bounds_exp = np.array([[start_time, end_time]],
                                  dtype='datetime64[m]')
            np.testing.assert_array_equal(f['time_bnds'], bounds_exp)
def test_save_array(self):
    """Test saving an array to netcdf/cf."""
    import xarray as xr
    from satpy import Scene

    scn = Scene()
    scn['test-array'] = xr.DataArray(
        [1, 2, 3],
        attrs=dict(start_time=datetime(2018, 5, 30, 10, 0),
                   end_time=datetime(2018, 5, 30, 10, 15),
                   prerequisites=[DatasetID('hej')]))
    with TempFile() as filename:
        scn.save_datasets(filename=filename, writer='cf')
        with xr.open_dataset(filename) as f:
            self.assertTrue(np.all(f['test-array'][:] == [1, 2, 3]))
            # Non-serializable attrs such as DatasetID are stringified.
            expected_prereq = ("DatasetID(name='hej', wavelength=None, "
                               "resolution=None, polarization=None, "
                               "calibration=None, level=None, modifiers=())")
            self.assertEqual(f['test-array'].attrs['prerequisites'],
                             expected_prereq)
def test_temperature_difference(self, tmpdir, abi_l1b_c01_data_array):
    """Check geotiff output of a temperature-difference product."""
    arr = abi_l1b_c01_data_array.copy()
    values = da.linspace(-10, 10, arr.size).reshape(arr.shape)
    arr.data = values
    arr.attrs["name"] = "test_temperature_difference"
    scn = Scene()
    scn["test_temperature_difference"] = arr
    out_fn = str(tmpdir + "test_temperature_difference.tif")
    scn.save_datasets(filename=out_fn)
    with rasterio.open(out_fn, "r") as out_ds:
        assert out_ds.count == 2
        flat_l_data = out_ds.read(1).ravel()
    # see polar2grid/tests/etc/enhancements/generic.yaml
    flat_values = values.ravel().compute()
    exp_out = np.round(np.linspace(5.0, 205.0,
                                   flat_values.size)).astype(np.uint8)
    np.testing.assert_allclose(flat_l_data, exp_out)
def test_p2g_palettize(self, keep_palette, ds_name, tmpdir, abi_l1b_c01_data_array):
    """Check palettized geotiff output with and without an embedded palette."""
    if ds_name == "test_p2g_palettize3":
        # This enhancement needs its lookup table next to the cwd.
        shutil.copy(os.path.join(TEST_ETC_DIR, f"{ds_name}.npy"), tmpdir)
    arr = abi_l1b_c01_data_array.copy()
    arr.data = da.linspace(180, 280, arr.size).reshape(arr.shape)
    arr.attrs["name"] = ds_name
    scn = Scene()
    scn[ds_name] = arr
    out_fn = str(tmpdir + f"{ds_name}_{keep_palette}.tif")
    with easy_cwd(tmpdir):
        scn.save_datasets(filename=out_fn, keep_palette=keep_palette)
    with rasterio.open(out_fn, "r") as out_ds:
        is_palette = keep_palette and "palettize" in ds_name
        expected_bands = 1 if is_palette else 4
        assert out_ds.count == expected_bands
        if is_palette:
            assert out_ds.colormap(1) is not None
def test_save_array_coords(self):
    """Test saving array with coordinates."""
    import numpy as np
    import xarray as xr
    from satpy import Scene

    scn = Scene()
    coords = {'x': np.arange(3), 'y': np.arange(1)}
    if CRS is not None:
        proj_str = ('+proj=geos +lon_0=-95.0 +h=35786023.0 '
                    '+a=6378137.0 +b=6356752.31414 +sweep=x '
                    '+units=m +no_defs')
        coords['crs'] = CRS.from_string(proj_str)
    scn['test-array'] = xr.DataArray(
        [[1, 2, 3]],
        dims=('y', 'x'),
        coords=coords,
        attrs=dict(start_time=datetime(2018, 5, 30, 10, 0),
                   end_time=datetime(2018, 5, 30, 10, 15),
                   prerequisites=[DatasetID('hej')]))
    with TempFile() as filename:
        scn.save_datasets(filename=filename, writer='cf')
        import h5netcdf as nc4
        with nc4.File(filename) as f:
            self.assertTrue(np.all(f['test-array'][:] == [1, 2, 3]))
            self.assertTrue(np.all(f['x'][:] == [0, 1, 2]))
            self.assertTrue(np.all(f['y'][:] == [0]))
            # The CRS coordinate must not be written as a variable, and
            # index coordinates get no _FillValue.
            self.assertNotIn('crs', f)
            self.assertNotIn('_FillValue', f['x'].attrs)
            self.assertNotIn('_FillValue', f['y'].attrs)
            expected_prereq = ("DatasetID(name='hej', wavelength=None, "
                               "resolution=None, polarization=None, "
                               "calibration=None, level=None, modifiers=())")
            self.assertEqual(f['test-array'].attrs['prerequisites'][0],
                             expected_prereq)
def test_bounds_missing_time_info(self):
    """Test time bounds generation in case of missing time."""
    import xarray as xr
    from satpy import Scene

    scn = Scene()
    start_timeA = datetime(2018, 5, 30, 10, 0)
    end_timeA = datetime(2018, 5, 30, 10, 15)
    time_coord = {'time': [np.datetime64('2018-05-30T10:05:00')]}
    scn['test-arrayA'] = xr.DataArray(
        np.array([[1, 2], [3, 4]]).reshape(2, 2, 1),
        dims=['x', 'y', 'time'], coords=time_coord,
        attrs=dict(start_time=start_timeA, end_time=end_timeA))
    # test-arrayB carries no start/end time; bounds must come from A.
    scn['test-arrayB'] = xr.DataArray(
        np.array([[1, 2], [3, 5]]).reshape(2, 2, 1),
        dims=['x', 'y', 'time'], coords=time_coord)
    with TempFile() as filename:
        scn.save_datasets(filename=filename, writer='cf')
        with xr.open_dataset(filename, decode_cf=True) as f:
            bounds_exp = np.array([[start_timeA, end_timeA]],
                                  dtype='datetime64[m]')
            np.testing.assert_array_equal(f['time_bnds'], bounds_exp)
def test_bounds(self):
    """Write a dataset with a time dimension and check the time bounds."""
    import tempfile

    import xarray as xr
    from satpy import Scene

    scn = Scene()
    scn['test-array'] = xr.DataArray(
        np.array([[1, 2], [3, 4]]).reshape(2, 2, 1),
        dims=['x', 'y', 'time'],
        coords={'time': [np.datetime64('2018-05-30T10:05:00')]},
        attrs=dict(start_time=datetime(2018, 5, 30, 10, 0),
                   end_time=datetime(2018, 5, 30, 10, 15)))
    try:
        handle, filename = tempfile.mkstemp()
        os.close(handle)
        scn.save_datasets(filename=filename, writer='cf')
        import h5netcdf as nc4
        with nc4.File(filename) as f:
            self.assertTrue(all(f['time_bnds'][:] == np.array([-300., 600.])))
    finally:
        os.remove(filename)
def test_bounds_missing_time_info(self):
    """Test time bounds generation in case of missing time."""
    import xarray as xr
    from satpy import Scene

    scn = Scene()
    time_coord = {'time': [np.datetime64('2018-05-30T10:05:00')]}
    scn['test-arrayA'] = xr.DataArray(
        np.array([[1, 2], [3, 4]]).reshape(2, 2, 1),
        dims=['x', 'y', 'time'], coords=time_coord,
        attrs=dict(start_time=datetime(2018, 5, 30, 10, 0),
                   end_time=datetime(2018, 5, 30, 10, 15)))
    # Second dataset deliberately lacks start/end time attributes.
    scn['test-arrayB'] = xr.DataArray(
        np.array([[1, 2], [3, 5]]).reshape(2, 2, 1),
        dims=['x', 'y', 'time'], coords=time_coord)
    with TempFile() as filename:
        scn.save_datasets(filename=filename, writer='cf')
        import h5netcdf as nc4
        with nc4.File(filename) as f:
            self.assertTrue(
                np.all(f['time_bnds'][:] == np.array([-300., 600.])))
class TestCFReader(unittest.TestCase):
    """Test case for CF reader."""

    def setUp(self):
        """Create a test scene."""
        tstart = datetime(2019, 4, 1, 12, 0)
        tend = datetime(2019, 4, 1, 12, 15)
        data_visir = [[1, 2], [3, 4]]
        y_visir = [1, 2]
        x_visir = [1, 2]
        z_visir = [1, 2, 3, 4, 5, 6, 7]
        qual_data = [[1, 2, 3, 4, 5, 6, 7], [1, 2, 3, 4, 5, 6, 7]]
        time_vis006 = [1, 2]
        lat = 33.0 * np.array([[1, 2], [3, 4]])
        lon = -13.0 * np.array([[1, 2], [3, 4]])
        # Attributes applied to every dataset in the scene (see the loop
        # at the end of setUp).
        common_attrs = {'start_time': tstart,
                        'end_time': tend,
                        'platform_name': 'tirosn',
                        'orbit_number': 99999}
        vis006 = xr.DataArray(data_visir,
                              dims=('y', 'x'),
                              coords={'y': y_visir,
                                      'x': x_visir,
                                      'acq_time': ('y', time_vis006)},
                              attrs={'name': 'image0',
                                     'id_tag': 'ch_r06',
                                     'coordinates': 'lat lon',
                                     'resolution': 1000,
                                     'calibration': 'reflectance',
                                     'wavelength': WavelengthRange(min=0.58, central=0.63, max=0.68, unit='µm')})
        ir_108 = xr.DataArray(data_visir,
                              dims=('y', 'x'),
                              coords={'y': y_visir,
                                      'x': x_visir,
                                      'acq_time': ('y', time_vis006)},
                              attrs={'name': 'image1',
                                     'id_tag': 'ch_tb11',
                                     'coordinates': 'lat lon'})
        qual_f = xr.DataArray(qual_data,
                              dims=('y', 'z'),
                              coords={'y': y_visir,
                                      'z': z_visir,
                                      'acq_time': ('y', time_vis006)},
                              attrs={'name': 'qual_flags',
                                     'id_tag': 'qual_flags'})
        lat = xr.DataArray(lat,
                           dims=('y', 'x'),
                           coords={'y': y_visir,
                                   'x': x_visir},
                           attrs={'name': 'lat',
                                  'standard_name': 'latitude',
                                  'modifiers': np.array([])})
        lon = xr.DataArray(lon,
                           dims=('y', 'x'),
                           coords={'y': y_visir,
                                   'x': x_visir},
                           attrs={'name': 'lon',
                                  'standard_name': 'longitude',
                                  'modifiers': np.array([])})
        self.scene = Scene()
        self.scene.attrs['sensor'] = ['avhrr-1', 'avhrr-2', 'avhrr-3']
        scene_dict = {'image0': vis006,
                      'image1': ir_108,
                      'lat': lat,
                      'lon': lon,
                      'qual_flags': qual_f}
        for key in scene_dict:
            self.scene[key] = scene_dict[key]
            self.scene[key].attrs.update(common_attrs)

    def test_write_and_read(self):
        """Save a file with cf_writer and read the data again."""
        # Timestamped filename so parallel/repeated runs do not collide.
        filename = 'testingcfwriter{:s}-viirs-mband-20201007075915-20201007080744.nc'.format(
            datetime.utcnow().strftime('%Y%j%H%M%S'))
        try:
            self.scene.save_datasets(writer='cf',
                                     filename=filename,
                                     header_attrs={'instrument': 'avhrr'},
                                     engine='h5netcdf',
                                     flatten_attrs=True,
                                     pretty=True)
            scn_ = Scene(reader='satpy_cf_nc',
                         filenames=[filename])
            scn_.load(['image0', 'image1', 'lat'])
            self.assertTrue(
                np.all(scn_['image0'].data == self.scene['image0'].data))
            self.assertTrue(
                np.all(scn_['lat'].data == self.scene['lat'].data))  # lat loaded as dataset
            self.assertTrue(
                np.all(scn_['image0'].coords['lon'] == self.scene['lon'].data))  # lon loaded as coordinate
            assert isinstance(scn_['image0'].attrs['wavelength'], WavelengthRange)
        finally:
            # Windows can refuse deletion of a still-mapped file.
            with suppress(PermissionError):
                os.remove(filename)

    def test_fix_modifier_attr(self):
        """Check that fix modifier can handle empty list as modifier attribute."""
        self.reader = SatpyCFFileHandler('filename', {}, {'filetype': 'info'})
        ds_info = {'modifiers': []}
        self.reader.fix_modifier_attr(ds_info)
        self.assertEqual(ds_info['modifiers'], ())
# Collect the unique dates (chars 0-7 of the 5th "_"-separated field)
# present in the input netCDF filenames.
dates = list({f.split("_")[4][0:8] for f in glob.glob1(process_path, "*.nc")})
if not os.path.exists(os.path.join(r"outputs")):
    os.mkdir(os.path.join(r"outputs"))
hour_ = "1200"
visual = False
export = True
for date_ in dates:
    files = [os.path.join(process_path, entry)
             for entry in glob.glob1(process_path,
                                     "W_XX*" + date_ + hour_ + "*.nc")]
    print(date_, files)
    scn = Scene(reader="seviri_l1b_nc", filenames=files)
    pprint.pprint(scn.available_composite_names())
    scn.load(['natural_color', 'snow'], calibrations=['radiance'])
    if visual:
        scn.show("natural_color")
        scn.show("snow")
        scn.show("natural_enh")
    if not os.path.exists(os.path.join(r"outputs", date_)):
        os.mkdir(os.path.join(r"outputs", date_))
    if export:
        out = scn.save_datasets(
            filename='{name}_{start_time:%Y%m%d_%H%M%S}.png',
            base_dir=os.path.join(r"outputs", date_))
        # compute_writer_results(out)
end = datetime.datetime.now()
print("Duration is : ", str(end - start))
class TestCFReader(unittest.TestCase):
    """Test case for CF reader."""

    def setUp(self):
        """Create a test scene."""
        tstart = datetime(2019, 4, 1, 12, 0)
        tend = datetime(2019, 4, 1, 12, 15)
        data_visir = [[1, 2], [3, 4]]
        y_visir = [1, 2]
        x_visir = [1, 2]
        z_visir = [1, 2, 3, 4, 5, 6, 7]
        qual_data = [[1, 2, 3, 4, 5, 6, 7], [1, 2, 3, 4, 5, 6, 7]]
        time_vis006 = [1, 2]
        lat = 33.0 * np.array([[1, 2], [3, 4]])
        lon = -13.0 * np.array([[1, 2], [3, 4]])
        # Metadata applied to every dataset added to the scene below.
        common_attrs = {'start_time': tstart,
                        'end_time': tend,
                        'platform_name': 'tirosn',
                        'orbit_number': 99999}
        vis006 = xr.DataArray(data_visir,
                              dims=('y', 'x'),
                              coords={'y': y_visir, 'x': x_visir,
                                      'acq_time': ('y', time_vis006)},
                              attrs={'name': 'image0', 'id_tag': 'ch_r06',
                                     'coordinates': 'lat lon', 'resolution': 1000,
                                     'calibration': 'reflectance',
                                     'wavelength': WavelengthRange(min=0.58, central=0.63, max=0.68, unit='µm')})
        ir_108 = xr.DataArray(data_visir,
                              dims=('y', 'x'),
                              coords={'y': y_visir, 'x': x_visir,
                                      'acq_time': ('y', time_vis006)},
                              attrs={'name': 'image1', 'id_tag': 'ch_tb11',
                                     'coordinates': 'lat lon'})
        qual_f = xr.DataArray(qual_data,
                              dims=('y', 'z'),
                              coords={'y': y_visir, 'z': z_visir,
                                      'acq_time': ('y', time_vis006)},
                              attrs={'name': 'qual_flags',
                                     'id_tag': 'qual_flags'})
        lat = xr.DataArray(lat,
                           dims=('y', 'x'),
                           coords={'y': y_visir, 'x': x_visir},
                           attrs={'name': 'lat',
                                  'standard_name': 'latitude',
                                  'modifiers': np.array([])})
        lon = xr.DataArray(lon,
                           dims=('y', 'x'),
                           coords={'y': y_visir, 'x': x_visir},
                           attrs={'name': 'lon',
                                  'standard_name': 'longitude',
                                  'modifiers': np.array([])})
        self.scene = Scene()
        self.scene.attrs['sensor'] = ['avhrr-1', 'avhrr-2', 'avhrr-3']
        scene_dict = {'image0': vis006,
                      'image1': ir_108,
                      'lat': lat,
                      'lon': lon,
                      'qual_flags': qual_f}
        for key in scene_dict:
            self.scene[key] = scene_dict[key]
            self.scene[key].attrs.update(common_attrs)

    def test_write_and_read(self):
        """Save a file with cf_writer and read the data again."""
        filename = 'testingcfwriter{:s}-viirs-mband-20201007075915-20201007080744.nc'.format(
            datetime.utcnow().strftime('%Y%j%H%M%S'))
        try:
            self.scene.save_datasets(writer='cf',
                                     filename=filename,
                                     header_attrs={'instrument': 'avhrr'},
                                     engine='h5netcdf',
                                     flatten_attrs=True,
                                     pretty=True)
            scn_ = Scene(reader='satpy_cf_nc',
                         filenames=[filename])
            scn_.load(['image0', 'image1', 'lat'])
            np.testing.assert_array_equal(scn_['image0'].data, self.scene['image0'].data)
            np.testing.assert_array_equal(scn_['lat'].data, self.scene['lat'].data)  # lat loaded as dataset
            np.testing.assert_array_equal(scn_['image0'].coords['lon'], self.scene['lon'].data)  # lon loaded as coord
            assert isinstance(scn_['image0'].attrs['wavelength'], WavelengthRange)
        finally:
            with suppress(PermissionError):
                os.remove(filename)

    def test_fix_modifier_attr(self):
        """Check that fix modifier can handle empty list as modifier attribute."""
        self.reader = SatpyCFFileHandler('filename', {}, {'filetype': 'info'})
        ds_info = {'modifiers': []}
        self.reader.fix_modifier_attr(ds_info)
        # An empty modifier list must be normalized to an empty tuple.
        self.assertEqual(ds_info['modifiers'], ())

    def _dataset_for_prefix_testing(self):
        """Create a scene whose channel name ('1') starts with a digit.

        Used by the prefix tests below: names beginning with a digit are not
        valid NetCDF variable names, so the CF writer prefixes them.
        """
        data_visir = [[1, 2], [3, 4]]
        y_visir = [1, 2]
        x_visir = [1, 2]
        lat = 33.0 * np.array([[1, 2], [3, 4]])
        lon = -13.0 * np.array([[1, 2], [3, 4]])
        vis006 = xr.DataArray(data_visir,
                              dims=('y', 'x'),
                              coords={'y': y_visir, 'x': x_visir},
                              attrs={'name': '1', 'id_tag': 'ch_r06',
                                     'coordinates': 'lat lon', 'resolution': 1000,
                                     'calibration': 'reflectance',
                                     'wavelength': WavelengthRange(min=0.58, central=0.63, max=0.68, unit='µm')})
        lat = xr.DataArray(lat,
                           dims=('y', 'x'),
                           coords={'y': y_visir, 'x': x_visir},
                           attrs={'name': 'lat',
                                  'standard_name': 'latitude',
                                  'modifiers': np.array([])})
        lon = xr.DataArray(lon,
                           dims=('y', 'x'),
                           coords={'y': y_visir, 'x': x_visir},
                           attrs={'name': 'lon',
                                  'standard_name': 'longitude',
                                  'modifiers': np.array([])})
        scene = Scene()
        scene.attrs['sensor'] = ['avhrr-1', 'avhrr-2', 'avhrr-3']
        scene['1'] = vis006
        scene['lat'] = lat
        scene['lon'] = lon
        return scene

    def test_read_prefixed_channels(self):
        """Check channels starting with digit is prefixed and read back correctly."""
        scene = self._dataset_for_prefix_testing()
        # Testing with default prefixing
        filename = 'testingcfwriter{:s}-viirs-mband-20201007075915-20201007080744.nc'.format(
            datetime.utcnow().strftime('%Y%j%H%M%S'))
        try:
            scene.save_datasets(writer='cf',
                                filename=filename,
                                header_attrs={'instrument': 'avhrr'},
                                engine='netcdf4',
                                flatten_attrs=True,
                                pretty=True)
            scn_ = Scene(reader='satpy_cf_nc',
                         filenames=[filename])
            scn_.load(['1'])
            np.testing.assert_array_equal(scn_['1'].data, scene['1'].data)
            np.testing.assert_array_equal(scn_['1'].coords['lon'], scene['lon'].data)  # lon loaded as coord

            # Reading again with explicit (empty) reader_kwargs must behave the same.
            scn_ = Scene(reader='satpy_cf_nc',
                         filenames=[filename],
                         reader_kwargs={})
            scn_.load(['1'])
            np.testing.assert_array_equal(scn_['1'].data, scene['1'].data)
            np.testing.assert_array_equal(scn_['1'].coords['lon'], scene['lon'].data)  # lon loaded as coord

            # Check that variables starting with a digit is written to filename variable prefixed
            with xr.open_dataset(filename) as ds_disk:
                np.testing.assert_array_equal(ds_disk['CHANNEL_1'].data, scene['1'].data)
        finally:
            with suppress(PermissionError):
                os.remove(filename)

    def test_read_prefixed_channels_include_orig_name(self):
        """Check channels starting with digit and included orig name is prefixed and read back correctly."""
        scene = self._dataset_for_prefix_testing()
        # Testing with default prefixing
        filename = 'testingcfwriter{:s}-viirs-mband-20201007075915-20201007080744.nc'.format(
            datetime.utcnow().strftime('%Y%j%H%M%S'))
        try:
            scene.save_datasets(writer='cf',
                                filename=filename,
                                header_attrs={'instrument': 'avhrr'},
                                engine='netcdf4',
                                flatten_attrs=True,
                                pretty=True,
                                include_orig_name=True)
            scn_ = Scene(reader='satpy_cf_nc',
                         filenames=[filename])
            scn_.load(['1'])
            np.testing.assert_array_equal(scn_['1'].data, scene['1'].data)
            np.testing.assert_array_equal(scn_['1'].coords['lon'], scene['lon'].data)  # lon loaded as coord

            # The un-prefixed name is preserved in the 'original_name' attribute.
            self.assertEqual(scn_['1'].attrs['original_name'], '1')

            # Check that variables starting with a digit is written to filename variable prefixed
            with xr.open_dataset(filename) as ds_disk:
                np.testing.assert_array_equal(ds_disk['CHANNEL_1'].data, scene['1'].data)
        finally:
            with suppress(PermissionError):
                os.remove(filename)

    def test_read_prefixed_channels_by_user(self):
        """Check channels starting with digit is prefixed by user and read back correctly."""
        scene = self._dataset_for_prefix_testing()
        filename = 'testingcfwriter{:s}-viirs-mband-20201007075915-20201007080744.nc'.format(
            datetime.utcnow().strftime('%Y%j%H%M%S'))
        try:
            scene.save_datasets(writer='cf',
                                filename=filename,
                                header_attrs={'instrument': 'avhrr'},
                                engine='netcdf4',
                                flatten_attrs=True,
                                pretty=True,
                                numeric_name_prefix='USER')
            scn_ = Scene(reader='satpy_cf_nc',
                         filenames=[filename],
                         reader_kwargs={'numeric_name_prefix': 'USER'})
            scn_.load(['1'])
            np.testing.assert_array_equal(scn_['1'].data, scene['1'].data)
            np.testing.assert_array_equal(scn_['1'].coords['lon'], scene['lon'].data)  # lon loaded as coord

            # Check that variables starting with a digit is written to filename variable prefixed
            with xr.open_dataset(filename) as ds_disk:
                np.testing.assert_array_equal(ds_disk['USER1'].data, scene['1'].data)
        finally:
            with suppress(PermissionError):
                os.remove(filename)

    def test_read_prefixed_channels_by_user2(self):
        """Check channels starting with digit is prefixed by user when saving and read back correctly without prefix."""
        scene = self._dataset_for_prefix_testing()
        filename = 'testingcfwriter{:s}-viirs-mband-20201007075915-20201007080744.nc'.format(
            datetime.utcnow().strftime('%Y%j%H%M%S'))
        try:
            scene.save_datasets(writer='cf',
                                filename=filename,
                                header_attrs={'instrument': 'avhrr'},
                                engine='netcdf4',
                                flatten_attrs=True,
                                pretty=True,
                                include_orig_name=False,
                                numeric_name_prefix='USER')
            # Without a matching reader prefix the channel keeps its on-disk name.
            scn_ = Scene(reader='satpy_cf_nc',
                         filenames=[filename])
            scn_.load(['USER1'])
            np.testing.assert_array_equal(scn_['USER1'].data, scene['1'].data)
            np.testing.assert_array_equal(scn_['USER1'].coords['lon'], scene['lon'].data)  # lon loaded as coord
        finally:
            with suppress(PermissionError):
                os.remove(filename)

    def test_read_prefixed_channels_by_user_include_prefix(self):
        """Check channels starting with digit is prefixed by user and include original name when saving."""
        scene = self._dataset_for_prefix_testing()
        filename = 'testingcfwriter2{:s}-viirs-mband-20201007075915-20201007080744.nc'.format(
            datetime.utcnow().strftime('%Y%j%H%M%S'))
        try:
            scene.save_datasets(writer='cf',
                                filename=filename,
                                header_attrs={'instrument': 'avhrr'},
                                engine='netcdf4',
                                flatten_attrs=True,
                                pretty=True,
                                include_orig_name=True,
                                numeric_name_prefix='USER')
            scn_ = Scene(reader='satpy_cf_nc',
                         filenames=[filename])
            scn_.load(['1'])
            np.testing.assert_array_equal(scn_['1'].data, scene['1'].data)
            np.testing.assert_array_equal(scn_['1'].coords['lon'], scene['lon'].data)  # lon loaded as coord
        finally:
            with suppress(PermissionError):
                os.remove(filename)

    def test_read_prefixed_channels_by_user_no_prefix(self):
        """Check channels starting with digit is not prefixed by user."""
        scene = self._dataset_for_prefix_testing()
        filename = 'testingcfwriter3{:s}-viirs-mband-20201007075915-20201007080744.nc'.format(
            datetime.utcnow().strftime('%Y%j%H%M%S'))
        try:
            scene.save_datasets(writer='cf',
                                filename=filename,
                                header_attrs={'instrument': 'avhrr'},
                                engine='netcdf4',
                                flatten_attrs=True,
                                pretty=True,
                                numeric_name_prefix='')
            scn_ = Scene(reader='satpy_cf_nc',
                         filenames=[filename])
            scn_.load(['1'])
            np.testing.assert_array_equal(scn_['1'].data, scene['1'].data)
            np.testing.assert_array_equal(scn_['1'].coords['lon'], scene['lon'].data)  # lon loaded as coord
        finally:
            with suppress(PermissionError):
                os.remove(filename)
matplotlib.rcParams['axes.labelsize'] = 10 matplotlib.rcParams['legend.fontsize'] = 10 matplotlib.rcParams['font.size'] = 10 import numpy as np from satpy import available_readers, Scene from glob import glob available_readers() filenames = glob('./GIMGO-SVI01*') scn = Scene(reader='viirs_sdr', filenames=filenames) scn.load(['I01']) scn.save_datasets(writer='cf', datasets=['I01'], filename='viirs_sdr_i01_damien_test.nc', exclude_attrs=['raw_metadata']) scn.load(['I04']) scn.save_datasets(writer='cf', datasets=['I04'], filename='viirs_sdr_i04_damien_test.nc', exclude_attrs=['raw_metadata']) scn.load(['I05']) scn.save_datasets(writer='cf', datasets=['I05'], filename='viirs_sdr_i05_damien_test.nc', exclude_attrs=['raw_metadata']) #Save test figure
import context from pathlib import Path from satpy import Scene print(context.before_dir) before_files = [ str(item) for item in Path(context.before_dir).glob("*B6*.TIF") ] print(before_files) scn = Scene(reader="generic_image", filenames=before_files) scn.load(['image']) print(help(scn)) scn.save_datasets(writer='simple_image', filename='b6.png', datasets=['image'])