def process_one_scene(scene_files, out_path, use_iband_res=False, engine='h5netcdf', all_channels=False, pps_channels=False, orbit_n=0):
    """Convert one VIIRS SDR scene to a level-1c file in PPS format.

    Returns the name of the file written.
    """
    t_start = time.time()
    scn_ = Scene(reader='viirs_sdr', filenames=scene_files)

    # Select the channel lists. When both flags are given, the PPS
    # selection wins (it is applied last in the original logic).
    if pps_channels:
        mbands, ibands_i, ibands_m = MBAND_PPS, IBAND_PPS_I, IBAND_PPS_M
    elif all_channels:
        mbands, ibands_i, ibands_m = MBANDS, IBANDS, MBANDS
    else:
        mbands, ibands_i, ibands_m = MBAND_DEFAULT, IBAND_DEFAULT_I, IBAND_DEFAULT_M

    if use_iband_res:
        # Load at I-band resolution and natively resample the M-bands onto it.
        scn_.load(ibands_i + ANGLE_NAMES + ['i_latitude', 'i_longitude'],
                  resolution=371)
        scn_.load(ibands_m, resolution=742)
        scn_ = scn_.resample(resampler='native')
    else:
        scn_.load(mbands + ANGLE_NAMES + ['m_latitude', 'm_longitude'],
                  resolution=742)

    # One IR channel used as reference for times and attributes.
    irch = scn_['M15']

    set_header_and_band_attrs(scn_, orbit_n=orbit_n)
    # Rename longitude, latitude to lon, lat.
    rename_latitude_longitude(scn_)
    # Convert angles to PPS convention.
    convert_angles(scn_, delete_azimuth=True)
    update_angle_attributes(scn_, irch)

    filename = compose_filename(scn_, out_path, instrument='viirs', band=irch)
    scn_.save_datasets(writer='cf',
                       filename=filename,
                       header_attrs=get_header_attrs(scn_, band=irch,
                                                     sensor='viirs'),
                       engine=engine,
                       include_lonlats=False,
                       flatten_attrs=True,
                       encoding=get_encoding_viirs(scn_))
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename), time.time() - t_start))
    return filename
class TestBaseWriter(unittest.TestCase):
    """Test the base writer class."""

    def setUp(self):
        """Create a one-dataset scene and a temporary output directory."""
        import tempfile
        from datetime import datetime

        import dask.array as da
        from satpy.scene import Scene

        data = xr.DataArray(da.zeros((100, 200), chunks=50),
                            dims=('y', 'x'),
                            attrs={'name': 'test',
                                   'start_time': datetime(2018, 1, 1, 0, 0, 0)})
        self.scn = Scene()
        self.scn['test'] = data
        # Removed again in tearDown.
        self.base_dir = tempfile.mkdtemp()

    def tearDown(self):
        """Remove the temporary directory created for a test."""
        try:
            shutil.rmtree(self.base_dir, ignore_errors=True)
        except OSError:
            pass

    def test_save_dataset_static_filename(self):
        """Test saving a dataset with a static filename specified."""
        self.scn.save_datasets(base_dir=self.base_dir, filename='geotiff.tif')
        expected = os.path.join(self.base_dir, 'geotiff.tif')
        self.assertTrue(os.path.isfile(expected))

    def test_save_dataset_dynamic_filename(self):
        """Test saving a dataset with a format filename specified."""
        pattern = 'geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif'
        expected = 'geotiff_test_20180101_000000.tif'
        self.scn.save_datasets(base_dir=self.base_dir, filename=pattern)
        self.assertTrue(os.path.isfile(os.path.join(self.base_dir, expected)))

    def test_save_dataset_dynamic_filename_with_dir(self):
        """Test saving a dataset with a format filename that includes a directory."""
        pattern = os.path.join('{start_time:%Y%m%d}',
                               'geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif')
        expected = os.path.join('20180101', 'geotiff_test_20180101_000000.tif')
        self.scn.save_datasets(base_dir=self.base_dir, filename=pattern)
        self.assertTrue(os.path.isfile(os.path.join(self.base_dir, expected)))

        # Change the filename pattern but keep the same directory.
        pattern2 = os.path.join('{start_time:%Y%m%d}',
                                'geotiff_{name}_{start_time:%Y%m%d_%H}.tif')
        expected2 = os.path.join('20180101', 'geotiff_test_20180101_00.tif')
        self.scn.save_datasets(base_dir=self.base_dir, filename=pattern2)
        self.assertTrue(os.path.isfile(os.path.join(self.base_dir, expected2)))
        # The original file should still exist.
        self.assertTrue(os.path.isfile(os.path.join(self.base_dir, expected)))
def process_one_scene(scene_files, out_path, engine='h5netcdf', all_channels=False, pps_channels=False, orbit_n=0):
    """Convert one MODIS L1b scene to a level-1c file in PPS format.

    Returns the name of the file written.
    """
    t_start = time.time()
    scn_ = Scene(reader='modis_l1b', filenames=scene_files)

    # Select the band list; the PPS selection overrides all-channels,
    # matching the order the flags were applied originally.
    if pps_channels:
        bands = BANDNAMES_PPS
    elif all_channels:
        bands = BANDNAMES
    else:
        bands = BANDNAMES_DEFAULT
    scn_.load(bands + ['latitude', 'longitude'] + ANGLE_NAMES,
              resolution=1000)

    # One IR channel used as reference for times and attributes.
    irch = scn_['31']

    set_header_and_band_attrs(scn_, orbit_n=orbit_n)
    # Rename longitude, latitude to lon, lat.
    rename_latitude_longitude(scn_)
    # Convert angles to PPS convention.
    convert_angles(scn_, delete_azimuth=True)
    update_angle_attributes(scn_, irch)
    # Apply sunz correction to the reflective bands.
    apply_sunz_correction(scn_, REFL_BANDS)

    filename = compose_filename(scn_, out_path, instrument='modis', band=irch)
    scn_.save_datasets(writer='cf',
                       filename=filename,
                       header_attrs=get_header_attrs(scn_, band=irch,
                                                     sensor='modis'),
                       engine=engine,
                       include_lonlats=False,
                       flatten_attrs=True,
                       encoding=get_encoding_modis(scn_))
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename), time.time() - t_start))
    return filename
def process_one_scene(scene_files, out_path):
    """Make level 1c files in PPS-format.

    Load one VII (MetImage) level-1b scene, normalize it to the PPS
    level-1c layout and write it as NetCDF.

    Args:
        scene_files: List of input filenames for one scene.
        out_path: Directory where the level-1c file is written.

    Returns:
        The name of the file written.
    """
    tic = time.time()
    scn_ = Scene(reader='vii_l1b_nc', filenames=scene_files)
    scn_.load(BANDNAMES + ANGLE_NAMES + ['lat_pixels', 'lon_pixels'])
    # Transpose data to get scanlines as row dimension
    for key in BANDNAMES + ANGLE_NAMES + ['lat_pixels', 'lon_pixels']:
        if 'num_pixels' in scn_[key].dims:
            # satpy <= 0.26.0
            scn_[key] = scn_[key].transpose('num_lines', 'num_pixels')
        elif scn_[key].dims[0] == 'x':
            # First dim should be y.
            scn_[key] = scn_[key].transpose('y', 'x')
    # One IR channel used as reference for times and attributes.
    irch = scn_['vii_10690']
    # Set header and band attributes
    set_header_and_band_attrs(scn_)
    # Rename longitude, latitude to lon, lat.
    rename_latitude_longitude(scn_)
    # Adjust lons to valid range:
    adjust_lons_to_valid_range(scn_)
    # Convert angles to PPS
    convert_angles(scn_, delete_azimuth=True)
    update_angle_attributes(scn_, irch)
    # Apply sunz correction
    # apply_sunz_correction(scn_, REFL_BANDS)
    filename = compose_filename(scn_, out_path, instrument='metimage', band=irch)
    scn_.save_datasets(writer='cf',
                       filename=filename,
                       header_attrs=get_header_attrs(scn_, band=irch,
                                                     sensor='metimage'),
                       engine='h5netcdf',
                       include_lonlats=False,
                       flatten_attrs=True,
                       encoding=get_encoding_metimage(scn_))
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename), time.time() - tic))
    # Return the filename, consistent with the sibling process_one_scene
    # converters (viirs, modis, avhrr, mersi2), which all return it.
    return filename
def process_one_scene(scene_files, out_path, engine='h5netcdf', orbit_n=0):
    """Convert one AVHRR scene (EPS or AAPP level-1b) to a PPS level-1c file.

    Returns the name of the file written.
    """
    t_start = time.time()

    # Pick reader and angle dataset names from the input file naming.
    if 'AVHR_xxx' in scene_files[0]:
        reader_name, angles = 'avhrr_l1b_eps', ANGLE_NAMES_EPS
    else:
        reader_name, angles = 'avhrr_l1b_aapp', ANGLE_NAMES_AAPP

    scn_ = Scene(reader=reader_name, filenames=scene_files)
    scn_.load(BANDNAMES + ['latitude', 'longitude'] + angles)

    # One IR channel used as reference for times and attributes.
    irch = scn_['4']
    # Check if we have old hrpt format with data only every 20th line.
    check_broken_data(scn_)

    set_header_and_band_attrs(scn_, orbit_n=orbit_n)
    # Rename longitude, latitude to lon, lat.
    rename_latitude_longitude(scn_)
    # Convert angles to PPS convention.
    convert_angles(scn_, delete_azimuth=True)
    update_angle_attributes(scn_, irch)
    # Apply sunz correction to the reflective bands.
    apply_sunz_correction(scn_, REFL_BANDS)

    filename = compose_filename(scn_, out_path, instrument='avhrr', band=irch)
    scn_.save_datasets(writer='cf',
                       filename=filename,
                       header_attrs=get_header_attrs(scn_, band=irch,
                                                     sensor='avhrr'),
                       engine=engine,
                       include_lonlats=False,
                       flatten_attrs=True,
                       encoding=get_encoding_avhrr(scn_))
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename), time.time() - t_start))
    return filename
def process_one_scene(scene_files, out_path, engine='h5netcdf', all_channels=False, pps_channels=False):
    """Make level 1c files in PPS-format.

    Load one SLSTR level-1b scene, normalize it to the PPS level-1c
    layout and write it as NetCDF.

    Args:
        scene_files: List of input filenames for one scene.
        out_path: Directory where the level-1c file is written.
        engine: NetCDF engine passed to the cf writer.
        all_channels: Load every channel instead of the default set.
        pps_channels: Load the PPS channel set (overrides all_channels).

    Returns:
        The name of the file written.
    """
    tic = time.time()
    scn_ = Scene(reader='slstr_l1b', filenames=scene_files)
    MY_BANDNAMES = BANDNAMES_DEFAULT
    if all_channels:
        MY_BANDNAMES = BANDNAMES
    if pps_channels:
        MY_BANDNAMES = BANDNAMES_PPS
    scn_.load(MY_BANDNAMES + ['latitude', 'longitude'] + ANGLE_NAMES)
    # Everything should be on the same grid, to be saved as ppsleve1c
    scn_ = scn_.resample(resampler="native")
    # one ir channel
    irch = scn_['S8']
    # Set header and band attributes
    set_header_and_band_attrs(scn_)
    # Rename longitude, latitude to lon, lat.
    rename_latitude_longitude(scn_)
    # Convert angles to PPS
    convert_angles(scn_, delete_azimuth=True)
    update_angle_attributes(scn_, irch)
    filename = compose_filename(scn_, out_path, instrument='slstr', band=irch)
    scn_.save_datasets(writer='cf',
                       filename=filename,
                       header_attrs=get_header_attrs(scn_, band=irch,
                                                     sensor='slstr'),
                       engine=engine,
                       include_lonlats=False,
                       flatten_attrs=True,
                       encoding=get_encoding_slstr(scn_))
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename), time.time() - tic))
    # Return the filename, consistent with the sibling converters
    # (viirs, modis, avhrr, mersi2), which all return it.
    return filename
def process_one_scene(scene_files, out_path, engine='h5netcdf'):
    """Convert one MERSI-2 level-1b scene to a PPS level-1c file.

    Returns the name of the file written.
    """
    t_start = time.time()
    scn_ = Scene(reader='mersi2_l1b', filenames=scene_files)
    scn_.load(BANDNAMES + ['latitude', 'longitude'] + ANGLE_NAMES,
              resolution=1000)
    # Remove bad data at first and last column.
    remove_broken_data(scn_)

    # One IR channel used as reference for times and attributes.
    irch = scn_['24']

    set_header_and_band_attrs(scn_)
    # Rename longitude, latitude to lon, lat.
    rename_latitude_longitude(scn_)
    # Convert angles to PPS convention.
    convert_angles(scn_, delete_azimuth=True)
    update_angle_attributes(scn_, irch)
    # Record the reader file key on each angle dataset.
    for angle_name in ('sunzenith', 'satzenith', 'azimuthdiff'):
        file_key = ANGLE_ATTRIBUTES['mersi2_file_key'][angle_name]
        scn_[angle_name].attrs['file_key'] = file_key

    filename = compose_filename(scn_, out_path, instrument='mersi2', band=irch)
    scn_.save_datasets(writer='cf',
                       filename=filename,
                       header_attrs=get_header_attrs(scn_, band=irch,
                                                     sensor='mersi-2'),
                       engine=engine,
                       include_lonlats=False,
                       flatten_attrs=True,
                       encoding=get_encoding_mersi2(scn_))
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename), time.time() - t_start))
    return filename
class TestComputeWriterResults(unittest.TestCase):
    """Test compute_writer_results()."""

    def setUp(self):
        """Create a mock scene and a temporary directory to save files to."""
        import tempfile
        from datetime import datetime

        import dask.array as da
        from satpy.scene import Scene

        data = xr.DataArray(da.zeros((100, 200), chunks=50),
                            dims=('y', 'x'),
                            attrs={'name': 'test',
                                   'start_time': datetime(2018, 1, 1, 0, 0, 0)})
        self.scn = Scene()
        self.scn['test'] = data
        # Removed again in tearDown.
        self.base_dir = tempfile.mkdtemp()

    def tearDown(self):
        """Remove the temporary directory created for a test."""
        try:
            shutil.rmtree(self.base_dir, ignore_errors=True)
        except OSError:
            pass

    def _save_delayed(self, fname, writer):
        """Save the 'test' dataset without computing; return the delayed result."""
        return self.scn.save_datasets(filename=fname,
                                      datasets=['test'],
                                      writer=writer,
                                      compute=False)

    def test_empty(self):
        """Test an empty result list."""
        from satpy.writers import compute_writer_results
        compute_writer_results([])

    def test_simple_image(self):
        """Test writing to a PNG file."""
        from satpy.writers import compute_writer_results
        fname = os.path.join(self.base_dir, 'simple_image.png')
        compute_writer_results([self._save_delayed(fname, 'simple_image')])
        self.assertTrue(os.path.isfile(fname))

    def test_geotiff(self):
        """Test writing to a geotiff file."""
        from satpy.writers import compute_writer_results
        fname = os.path.join(self.base_dir, 'geotiff.tif')
        compute_writer_results([self._save_delayed(fname, 'geotiff')])
        self.assertTrue(os.path.isfile(fname))

    # FIXME: The mitiff and cf writers need more information than exists
    # at the moment, so those round-trips cannot be tested yet:
    # def test_mitiff(self):
    #     fname = os.path.join(self.base_dir, 'mitiff.tif')
    #     res = self.scn.save_datasets(filename=fname, datasets=['test'],
    #                                  writer='mitiff')
    #     compute_writer_results([res])
    #     self.assertTrue(os.path.isfile(fname))
    # def test_cf(self):
    #     fname = os.path.join(self.base_dir, 'cf.nc')
    #     res = self.scn.save_datasets(filename=fname, datasets=['test'],
    #                                  writer='cf')
    #     compute_writer_results([res])
    #     self.assertTrue(os.path.isfile(fname))

    def test_multiple_geotiff(self):
        """Test writing to multiple geotiff files."""
        from satpy.writers import compute_writer_results
        fname1 = os.path.join(self.base_dir, 'geotiff1.tif')
        fname2 = os.path.join(self.base_dir, 'geotiff2.tif')
        compute_writer_results([self._save_delayed(fname1, 'geotiff'),
                                self._save_delayed(fname2, 'geotiff')])
        self.assertTrue(os.path.isfile(fname1))
        self.assertTrue(os.path.isfile(fname2))

    def test_multiple_simple(self):
        """Test writing to multiple PNG files."""
        from satpy.writers import compute_writer_results
        fname1 = os.path.join(self.base_dir, 'simple_image1.png')
        fname2 = os.path.join(self.base_dir, 'simple_image2.png')
        compute_writer_results([self._save_delayed(fname1, 'simple_image'),
                                self._save_delayed(fname2, 'simple_image')])
        self.assertTrue(os.path.isfile(fname1))
        self.assertTrue(os.path.isfile(fname2))

    def test_mixed(self):
        """Test writing to multiple mixed-type files."""
        from satpy.writers import compute_writer_results
        fname1 = os.path.join(self.base_dir, 'simple_image3.png')
        fname2 = os.path.join(self.base_dir, 'geotiff3.tif')
        res1 = self._save_delayed(fname1, 'simple_image')
        res2 = self._save_delayed(fname2, 'geotiff')
        res3 = []
        compute_writer_results([res1, res2, res3])
        self.assertTrue(os.path.isfile(fname1))
        self.assertTrue(os.path.isfile(fname2))
def process_one_file(eumgacfdr_file, out_path='.', reader_kwargs=None, start_line=None, end_line=None, engine='h5netcdf', remove_broken=True):
    """Make level 1c files in PPS-format.

    Args:
        eumgacfdr_file: EUM GAC FDR NetCDF file.
        out_path: Directory where the level-1c file is written.
        reader_kwargs: Accepted for interface consistency; not used here.
        start_line: First scanline to keep, or None for no cropping.
        end_line: Last scanline to keep, or None for no cropping.
        engine: NetCDF engine passed to the cf writer.
        remove_broken: If True, set low quality data (qual_flags) to nodata.

    Returns:
        The name of the file written.
    """
    tic = time.time()
    scn_ = Scene(reader='avhrr_l1c_eum_gac_fdr_nc',
                 filenames=[eumgacfdr_file])
    scn_.load(BANDNAMES)
    scn_.load(['latitude', 'longitude', 'qual_flags',
               'equator_crossing_time', 'equator_crossing_longitude',
               'acq_time'] + ANGLENAMES)
    # Only load these if we do not crop data
    if start_line is None and end_line is None:
        scn_.load(['overlap_free_end', 'overlap_free_start', 'midnight_line'])
    # Needs to be done before everything else to avoid problems with attributes.
    if remove_broken:
        logger.info("Setting low quality data (qual_flags) to nodata.")
        remove_broken_data(scn_)
    # Crop after all renaming of variables are done
    # Problems to rename if cropping is done first.
    set_exact_time_and_crop(scn_, start_line, end_line, time_key='acq_time')
    # One ir channel. Fetched after the crop so its start/end_times are the
    # updated ones.  (The original code assigned this twice back-to-back;
    # the duplicate assignment has been removed.)
    irch = scn_['brightness_temperature_channel_4']
    # Set header and band attributes
    set_header_and_band_attrs(scn_)
    # Rename longitude, latitude to lon, lat.
    rename_latitude_longitude(scn_)
    # Convert angles to PPS
    convert_angles(scn_)
    update_angle_attributes(scn_, irch)  # Standard name etc
    # Handle gac specific datasets qual_flags and scanline_timestamps
    update_ancilliary_datasets(scn_)
    filename = compose_filename(scn_, out_path, instrument='avhrr', band=irch)
    encoding = get_encoding_gac(scn_)
    scn_.save_datasets(
        writer='cf',
        filename=filename,
        header_attrs=get_header_attrs(scn_, band=irch, sensor='avhrr'),
        engine=engine,
        flatten_attrs=True,
        include_lonlats=False,  # Included anyway as they are datasets in scn_
        pretty=True,
        encoding=encoding)
    logger.info("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename), time.time() - tic))
    return filename
class TestBaseWriter:
    """Test the base writer class."""

    def setup_method(self):
        """Create a two-dataset scene and a temporary output directory."""
        import tempfile
        from datetime import datetime

        import dask.array as da
        from satpy.scene import Scene

        ds_single = xr.DataArray(
            da.zeros((100, 200), chunks=50),
            dims=('y', 'x'),
            attrs={'name': 'test',
                   'start_time': datetime(2018, 1, 1, 0, 0, 0),
                   'sensor': 'fake_sensor'})
        # Second dataset with a *set* of sensors instead of a string.
        ds_multi = ds_single.copy()
        ds_multi.attrs['sensor'] = {'fake_sensor1', 'fake_sensor2'}
        self.scn = Scene()
        self.scn['test'] = ds_single
        self.scn['test2'] = ds_multi
        # Removed again in teardown_method.
        self.base_dir = tempfile.mkdtemp()

    def teardown_method(self):
        """Remove the temporary directory created for a test."""
        try:
            shutil.rmtree(self.base_dir, ignore_errors=True)
        except OSError:
            pass

    def test_save_dataset_static_filename(self):
        """Test saving a dataset with a static filename specified."""
        self.scn.save_datasets(base_dir=self.base_dir, filename='geotiff.tif')
        assert os.path.isfile(os.path.join(self.base_dir, 'geotiff.tif'))

    @pytest.mark.parametrize(
        ('fmt_fn', 'exp_fns'),
        [('geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif',
          ['geotiff_test_20180101_000000.tif',
           'geotiff_test2_20180101_000000.tif']),
         ('geotiff_{name}_{sensor}.tif',
          ['geotiff_test_fake_sensor.tif',
           'geotiff_test2_fake_sensor1-fake_sensor2.tif'])])
    def test_save_dataset_dynamic_filename(self, fmt_fn, exp_fns):
        """Test saving a dataset with a format filename specified."""
        self.scn.save_datasets(base_dir=self.base_dir, filename=fmt_fn)
        for expected in exp_fns:
            assert os.path.isfile(os.path.join(self.base_dir, expected))

    def test_save_dataset_dynamic_filename_with_dir(self):
        """Test saving a dataset with a format filename that includes a directory."""
        pattern = os.path.join('{start_time:%Y%m%d}',
                               'geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif')
        expected = os.path.join('20180101', 'geotiff_test_20180101_000000.tif')
        self.scn.save_datasets(base_dir=self.base_dir, filename=pattern)
        assert os.path.isfile(os.path.join(self.base_dir, expected))

        # Change the filename pattern but keep the same directory.
        pattern2 = os.path.join('{start_time:%Y%m%d}',
                                'geotiff_{name}_{start_time:%Y%m%d_%H}.tif')
        expected2 = os.path.join('20180101', 'geotiff_test_20180101_00.tif')
        self.scn.save_datasets(base_dir=self.base_dir, filename=pattern2)
        assert os.path.isfile(os.path.join(self.base_dir, expected2))
        # The original file should still exist.
        assert os.path.isfile(os.path.join(self.base_dir, expected))
def process_one_file(gac_file, out_path='.'):
    """Make level 1c files in PPS-format.

    Convert one AVHRR GAC level-1b file to a PPS level-1c NetCDF file
    written to *out_path*.  Datasets are renamed to PPS-style names
    (``image0``, ``image1``, ..., ``lon``, ``lat``) and encoded with
    int16 scale/offset compression.  Writes the file; returns None.
    """
    tic = time.time()
    image_num = 0  # name of first dataset is image0
    # platform_shortname = p__.parse(
    #     os.path.basename(tslot_files[0]))['platform_shortname']
    # start_time = p__.parse(
    #     os.path.basename(tslot_files[0]))['start_time']
    # platform_name = PLATFORM_SHORTNAMES[platform_shortname]
    # #Load channel data for one scene and set some attributes
    # coefs = get_calibration_for_time(platform=platform_shortname,
    #                                  time=start_time)
    scn_ = Scene(reader='avhrr_l1b_gaclac', filenames=[gac_file])
    # NOTE(review): if the sensor is not avhrr-3, `sensor` is never bound
    # and no channels are loaded, so the code below would raise — confirm
    # whether other AVHRR generations need handling here.
    if 'avhrr-3' in scn_.attrs['sensor']:
        sensor = 'avhrr'
        scn_.load(BANDNAMES + ['latitude', 'longitude',
                               'sensor_zenith_angle', 'solar_zenith_angle',
                               'sun_sensor_azimuth_difference_angle'])
    # Rename channels to imageN and set PPS band attributes; bands
    # missing from the scene are skipped (KeyError).
    for band in BANDNAMES:
        try:
            idtag = PPS_TAGNAMES.get(band, band)
            scn_[band].attrs['id_tag'] = idtag
            scn_[band].attrs['description'] = 'AVHRR ' + str(band)
            scn_[band].attrs['sun_earth_distance_correction_applied'] = 'False'
            scn_[band].attrs['sun_earth_distance_correction_factor'] = 1.0
            scn_[band].attrs['sun_zenith_angle_correction_applied'] = 'False'
            scn_[band].attrs['name'] = "image{:d}".format(image_num)
            scn_[band].attrs['coordinates'] = 'lon lat'
            # The area attribute cannot be flattened to NetCDF attributes.
            del scn_[band].attrs['area']
            image_num += 1
        except KeyError:
            continue
    # Set some header attributes:
    scn_.attrs['instrument'] = sensor.upper()
    scn_.attrs['source'] = "gac2pps.py"
    nowutc = datetime.utcnow()
    scn_.attrs['date_created'] = nowutc.strftime("%Y-%m-%dT%H:%M:%SZ")
    # Find lat/lon data; channel 4 is the reference for platform/orbit info.
    irch = scn_['4']
    scn_.attrs['platform'] = irch.attrs['platform_name']
    scn_.attrs['platform_name'] = irch.attrs['platform_name']
    scn_.attrs['orbit_number'] = '{:05d}'.format(irch.attrs['orbit_number'])
    scn_.attrs['orbit'] = scn_.attrs['orbit_number']
    # lons = lons.where(lons <= 360, -999.0)
    # lons = lons.where(lons >= -360, 999.0)
    # lats = lats.where(lats <= 90, -999.0)
    # lats = lats.where(lats >= -90, 999.0)
    # Rename latitude/longitude to the PPS names lat/lon and drop the
    # per-scanline acq_time coordinate (it would otherwise be written out).
    scn_['lat'] = scn_['latitude']
    del scn_['latitude']
    scn_['lat'].attrs['long_name'] = 'latitude coordinate'
    del scn_['lat'].coords['acq_time']
    scn_['lon'] = scn_['longitude']
    del scn_['longitude']
    scn_['lon'].attrs['long_name'] = 'longitude coordinate'
    del scn_['lon'].coords['acq_time']
    # Collect the imageN names given to angle datasets for the encoding below.
    angle_names = []
    # sunzenith
    scn_['sunzenith'] = scn_['solar_zenith_angle']
    del scn_['solar_zenith_angle']
    scn_['sunzenith'].attrs['id_tag'] = 'sunzenith'
    scn_['sunzenith'].attrs['long_name'] = 'sun zenith angle'
    # valid_range is in hundredths of a degree (matches the 0.01 scale_factor).
    scn_['sunzenith'].attrs['valid_range'] = [0, 18000]
    scn_['sunzenith'].attrs['name'] = "image{:d}".format(image_num)
    angle_names.append("image{:d}".format(image_num))
    scn_['sunzenith'].attrs['coordinates'] = 'lon lat'
    del scn_['sunzenith'].attrs['area']
    scn_['sunzenith'].coords['time'] = irch.attrs['start_time']
    del scn_['sunzenith'].coords['acq_time']
    image_num += 1
    # satzenith
    scn_['satzenith'] = scn_['sensor_zenith_angle']
    del scn_['sensor_zenith_angle']
    scn_['satzenith'].attrs['id_tag'] = 'satzenith'
    scn_['satzenith'].attrs['long_name'] = 'satellite zenith angle'
    scn_['satzenith'].attrs['valid_range'] = [0, 9000]
    scn_['satzenith'].attrs['name'] = "image{:d}".format(image_num)
    angle_names.append("image{:d}".format(image_num))
    scn_['satzenith'].attrs['coordinates'] = 'lon lat'
    del scn_['satzenith'].attrs['area']
    scn_['satzenith'].coords['time'] = irch.attrs['start_time']
    del scn_['satzenith'].coords['acq_time']
    image_num += 1
    # azidiff: absolute value of the sun-sensor azimuth difference.
    scn_['azimuthdiff'] = abs(scn_['sun_sensor_azimuth_difference_angle'])
    scn_['azimuthdiff'].attrs = scn_[
        'sun_sensor_azimuth_difference_angle'].attrs
    del scn_['sun_sensor_azimuth_difference_angle']
    scn_['azimuthdiff'].attrs['id_tag'] = 'azimuthdiff'
    # scn_['azimuthdiff'].attrs['standard_name'] = (
    #     'angle_of_rotation_from_solar_azimuth_to_platform_azimuth')
    scn_['azimuthdiff'].attrs[
        'long_name'] = 'absolute azimuth difference angle'
    scn_['azimuthdiff'].attrs['valid_range'] = [0, 18000]
    scn_['azimuthdiff'].attrs['name'] = "image{:d}".format(image_num)
    angle_names.append("image{:d}".format(image_num))
    scn_['azimuthdiff'].attrs['coordinates'] = 'lon lat'
    del scn_['azimuthdiff'].attrs['area']
    scn_['azimuthdiff'].coords['time'] = irch.attrs['start_time']
    del scn_['azimuthdiff'].coords['acq_time']
    image_num += 1
    # Get filename
    start_time = irch.attrs['start_time']
    end_time = irch.attrs['end_time']
    platform_name = irch.attrs['platform_name']
    orbit_number = int(scn_.attrs['orbit_number'])
    filename = os.path.join(
        out_path,
        "S_NWC_avhrr_{:s}_{:05d}_{:s}Z_{:s}Z.nc".format(
            platform_name.lower().replace('-', ''), orbit_number,
            start_time.strftime('%Y%m%dT%H%M%S%f')[:-5],
            end_time.strftime('%Y%m%dT%H%M%S%f')[:-5]))
    # NOTE(review): `attrs` is a dict, so hasattr(attrs, 'modifiers') is
    # always False — this branch never runs; was `'modifiers' in attrs`
    # intended? Confirm before changing.
    for dataset in scn_.keys():
        if hasattr(scn_[dataset], 'attrs'):
            if hasattr(scn_[dataset].attrs, 'modifiers'):
                scn_[dataset].attrs['modifiers'] = 0.0
    # Encoding for channels
    save_info = {}
    for band in BANDNAMES:
        idtag = PPS_TAGNAMES[band]
        try:
            name = scn_[band].attrs['name']
        except KeyError:
            logger.debug("No band named %s", band)
            continue
        # Add time coordinate. To make cfwriter aware that we want 3D data.
        scn_[band].coords['time'] = irch.attrs['start_time']
        del scn_[band].coords['acq_time']
        if 'tb' in idtag:
            # Brightness temperatures: offset by 273.15 K.
            save_info[name] = {'dtype': 'int16',
                               'scale_factor': 0.01,
                               '_FillValue': -32767,
                               'zlib': True,
                               'complevel': 4,
                               'add_offset': 273.15}
        else:
            save_info[name] = {'dtype': 'int16',
                               'scale_factor': 0.01,
                               'zlib': True,
                               'complevel': 4,
                               '_FillValue': -32767,
                               'add_offset': 0.0}
    # Encoding for angles and lat/lon
    for name in angle_names:
        save_info[name] = {'dtype': 'int16',
                           'scale_factor': 0.01,
                           'zlib': True,
                           'complevel': 4,
                           '_FillValue': -32767,
                           'add_offset': 0.0}
    for name in ['lon', 'lat']:
        save_info[name] = {'dtype': 'float32',
                           'zlib': True,
                           'complevel': 4,
                           '_FillValue': -999.0}
    header_attrs = scn_.attrs.copy()
    header_attrs['start_time'] = time.strftime(
        "%Y-%m-%d %H:%M:%S", irch.attrs['start_time'].timetuple())
    header_attrs['end_time'] = time.strftime(
        "%Y-%m-%d %H:%M:%S", irch.attrs['end_time'].timetuple())
    header_attrs['sensor'] = sensor.lower()
    # Flatten any dict-valued band attributes into key-suffixed scalar
    # attributes, since NetCDF attributes cannot hold dicts.
    for band in BANDNAMES:
        idtag = PPS_TAGNAMES[band]
        try:
            to_pop = []
            for attr in scn_[band].attrs.keys():
                if hasattr(scn_[band].attrs[attr], 'keys'):
                    print("found dict", attr)
                    to_pop.append(attr)
            for attr in to_pop:
                attr_dict = scn_[band].attrs[attr]
                scn_[band].attrs.pop(attr)
                for key in attr_dict.keys():
                    scn_[band].attrs[attr + str(key)] = attr_dict[key]
        except KeyError:
            continue
    scn_.save_datasets(writer='cf',
                       filename=filename,
                       header_attrs=header_attrs,
                       engine='netcdf4',
                       encoding=save_info)
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename), time.time() - tic))
def process_one_scan(tslot_files, out_path, process_buggy_satellite_zenith_angles=False):
    """Make level 1c files in PPS-format.

    Convert one SEVIRI HRIT repeat cycle (the files in *tslot_files*)
    to a PPS level-1c NetCDF file in *out_path*.  Channels are renamed
    to image0..imageN; lat/lon and sun/satellite angle fields are
    computed and added as extra datasets.  Returns the filename written.

    If *process_buggy_satellite_zenith_angles* is True, the satellite
    altitude is deliberately passed to get_observer_look() in metres
    (reproducing old, buggy zenith angles).
    """
    tic = time.time()
    image_num = 0  # name of first dataset is image0
    #if len(tslot_files) != 8 * len(BANDNAMES) + 2:
    #    raise Exception("Some data is missing")
    platform_shortname = p__.parse(os.path.basename(
        tslot_files[0]))['platform_shortname']
    start_time = p__.parse(os.path.basename(tslot_files[0]))['start_time']
    platform_name = PLATFORM_SHORTNAMES[platform_shortname]
    # Load channel data for one scene and set some attributes using
    # external calibration coefficients valid at the scan start time.
    coefs = get_calibration_for_time(platform=platform_shortname,
                                     time=start_time)
    scn_ = Scene(reader='seviri_l1b_hrit',
                 filenames=tslot_files,
                 reader_kwargs={'calib_mode': CALIB_MODE,
                                'ext_calib_coefs': coefs})
    scn_.attrs['platform_name'] = platform_name
    # SEVIRI data only
    if scn_.attrs['sensor'] == {'seviri'}:
        sensor = 'seviri'
        scn_.load(BANDNAMES)
    # Rename channels to imageN and set PPS band attributes.
    for band in BANDNAMES:
        idtag = PPS_TAGNAMES[band]
        scn_[band].attrs['id_tag'] = idtag
        scn_[band].attrs['description'] = 'SEVIRI ' + str(band)
        scn_[band].attrs['sun_earth_distance_correction_applied'] = 'False'
        scn_[band].attrs['sun_earth_distance_correction_factor'] = 1.0
        scn_[band].attrs['sun_zenith_angle_correction_applied'] = 'False'
        scn_[band].attrs['name'] = "image{:d}".format(image_num)
        scn_[band].attrs['coordinates'] = 'lon lat'
        image_num += 1
    # Correct the area: resample onto the corrected SEVIRI grid when the
    # data has not had the georeferencing offset applied.
    area_corr = pyresample.geometry.AreaDefinition(
        'seviri-corrected',
        'Corrected SEVIRI L1.5 grid (since Dec 2017)',
        'geosmsg',
        {'a': 6378169.00, 'b': 6356583.80, 'h': 35785831.0,
         'lon_0': 0.0, 'proj': 'geos', 'units': 'm'},
        3712, 3712,
        (5567248.28340708, 5570248.686685662,
         -5570248.686685662, -5567248.28340708))
    if not scn_['IR_108'].attrs['georef_offset_corrected']:
        scn_ = scn_.resample(area_corr)
    print(scn_['IR_108'].attrs['georef_offset_corrected'])
    #import pdb;pdb.set_trace()
    # Set some header attributes:
    scn_.attrs['platform'] = platform_name
    scn_.attrs['instrument'] = sensor.upper()
    scn_.attrs['source'] = "seviri2pps.py"
    scn_.attrs['orbit_number'] = "99999"
    #scn_.attrs['orbit'] = "99999"
    nowutc = datetime.utcnow()
    scn_.attrs['date_created'] = nowutc.strftime("%Y-%m-%dT%H:%M:%SZ")
    # Find lat/lon data; IR_108 is the reference channel.
    irch = scn_['IR_108']
    lons, lats = irch.attrs['area'].get_lonlats()
    # NOTE(review): lats are clamped with a 360-degree threshold like the
    # lons, not +/-90 — confirm this is intended (off-earth pixels are set
    # to -999.0 either way).
    lons[lons > 360] = -999.0
    lons[lons < -360] = -999.0
    lats[lats > 360] = -999.0
    lats[lats < -360] = -999.0
    # Find angles data
    sunalt, suna = get_alt_az(irch.attrs['start_time'],
                              *irch.attrs['area'].get_lonlats())
    suna = np.rad2deg(suna)
    sunz = sun_zenith_angle(irch.attrs['start_time'],
                            *irch.attrs['area'].get_lonlats())
    # if: Buggy data is requested buggy data is prepared!
    # elif:
    #   1) get_observer_look() gives wrong answer ...
    #      ... for satellite altitude in m. AND
    #   2) get_observer_look() gives correct answer ...
    #      .... for satellite altitude in km. AND
    #   3) Satellite altitude is m.:
    #   => Satellite alltitude need to be converted to km.
    # else:
    #   => There have been updates to SatPy and this script
    #      need to be modified.
    if process_buggy_satellite_zenith_angles:
        print(" Making buggy satellite zenith angels on purpose!")
        sata, satel = get_observer_look(
            irch.attrs['orbital_parameters']['satellite_actual_longitude'],
            irch.attrs['orbital_parameters']['satellite_actual_latitude'],
            irch.attrs['orbital_parameters']['satellite_actual_altitude'],
            irch.attrs['start_time'], lons, lats, 0)
    elif (get_observer_look(0, 0, 36000 * 1000, datetime.utcnow(),
                            np.array([16]), np.array([58]),
                            np.array([0]))[1] > 30 and
          get_observer_look(0, 0, 36000, datetime.utcnow(),
                            np.array([16]), np.array([58]),
                            np.array([0]))[1] < 23 and
          irch.attrs['orbital_parameters']['satellite_actual_altitude'] >
          38000):
        # Probe calls above verify pyorbital expects km; convert m -> km.
        sata, satel = get_observer_look(
            irch.attrs['orbital_parameters']['satellite_actual_longitude'],
            irch.attrs['orbital_parameters']['satellite_actual_latitude'],
            0.001 *
            irch.attrs['orbital_parameters']['satellite_actual_altitude'],
            irch.attrs['start_time'], lons, lats, 0)
    else:
        raise UnexpectedSatpyVersion(
            "You might have a newer version of satpy/pyorbital that"
            "handles units. In that case the m => km conversion might"
            "be unneeded and wrong.")
    # Elevation angle -> zenith angle.
    satz = 90 - satel
    azidiff = make_azidiff_angle(sata, suna, -32767)
    # Add lat/lon and angles datasets to the scene object.
    my_coords = scn_['IR_108'].coords
    my_coords['time'] = irch.attrs['start_time']
    scn_['lat'] = xr.DataArray(da.from_array(lats, chunks=(53, 3712)),
                               dims=['y', 'x'],
                               coords={'y': scn_['IR_108']['y'],
                                       'x': scn_['IR_108']['x']})
    scn_['lat'].attrs['long_name'] = 'latitude coordinate'
    scn_['lat'].attrs['standard_name'] = 'latitude'
    scn_['lat'].attrs['units'] = 'degrees_north'
    scn_['lat'].attrs['start_time'] = irch.attrs['start_time']
    scn_['lat'].attrs['end_time'] = irch.attrs['end_time']
    scn_['lon'] = xr.DataArray(da.from_array(lons, chunks=(53, 3712)),
                               dims=['y', 'x'],
                               coords={'y': scn_['IR_108']['y'],
                                       'x': scn_['IR_108']['x']})
    scn_['lon'].attrs['long_name'] = 'longitude coordinate'
    scn_['lon'].attrs['standard_name'] = 'longitude'
    scn_['lon'].attrs['units'] = 'degrees_east'
    scn_['lon'].attrs['start_time'] = irch.attrs['start_time']
    scn_['lon'].attrs['end_time'] = irch.attrs['end_time']
    # sunzenith; valid_range is in hundredths of a degree (0.01 scale).
    scn_['sunzenith'] = xr.DataArray(da.from_array(sunz[:, :],
                                                   chunks=(53, 3712)),
                                     dims=['y', 'x'], coords=my_coords)
    scn_['sunzenith'].attrs['id_tag'] = 'sunzenith'
    scn_['sunzenith'].attrs['long_name'] = 'sun zenith angle'
    scn_['sunzenith'].attrs['standard_name'] = 'solar_zenith_angle'
    scn_['sunzenith'].attrs['valid_range'] = [0, 18000]
    scn_['sunzenith'].attrs['name'] = "image{:d}".format(image_num)
    image_num += 1
    # satzenith
    scn_['satzenith'] = xr.DataArray(da.from_array(satz[:, :],
                                                   chunks=(53, 3712)),
                                     dims=['y', 'x'], coords=my_coords)
    scn_['satzenith'].attrs['id_tag'] = 'satzenith'
    scn_['satzenith'].attrs['long_name'] = 'satellite zenith angle'
    scn_['satzenith'].attrs['standard_name'] = 'platform_zenith_angle'
    scn_['satzenith'].attrs['valid_range'] = [0, 9000]
    scn_['satzenith'].attrs['name'] = "image{:d}".format(image_num)
    image_num += 1
    # azidiff
    scn_['azimuthdiff'] = xr.DataArray(da.from_array(azidiff[:, :],
                                                     chunks=(53, 3712)),
                                       dims=['y', 'x'], coords=my_coords)
    scn_['azimuthdiff'].attrs['id_tag'] = 'azimuthdiff'
    #scn_['azimuthdiff'].attrs['standard_name'] = (
    #    'angle_of_rotation_from_solar_azimuth_to_platform_azimuth')
    scn_['azimuthdiff'].attrs[
        'long_name'] = 'absoulte azimuth difference angle'
    scn_['azimuthdiff'].attrs['valid_range'] = [0, 18000]
    scn_['azimuthdiff'].attrs['name'] = "image{:d}".format(image_num)
    image_num += 1
    # Copy reference-channel metadata onto the angle datasets.
    for angle in ['azimuthdiff', 'satzenith', 'sunzenith']:
        scn_[angle].attrs['units'] = 'degree'
        for attr in irch.attrs.keys():
            if attr in ["start_time", "end_time", "navigation",
                        "georef_offset_corrected", "projection"]:
                scn_[angle].attrs[attr] = irch.attrs[attr]
    # Get filename
    start_time = scn_['IR_108'].attrs['start_time']
    end_time = scn_['IR_108'].attrs['end_time']
    filename = os.path.join(
        out_path,
        "S_NWC_seviri_{:s}_{:s}_{:s}Z_{:s}Z.nc".format(
            platform_name.lower().replace('-', ''), "99999",
            start_time.strftime('%Y%m%dT%H%M%S%f')[:-5],
            end_time.strftime('%Y%m%dT%H%M%S%f')[:-5]))
    # Encoding for channels
    save_info = {}
    for band in BANDNAMES:
        idtag = PPS_TAGNAMES[band]
        name = scn_[band].attrs['name']
        scn_[band].attrs.pop('area', None)
        # Add time coordinate. To make cfwriter aware that we want 3D data.
        my_coords = scn_[band].coords
        my_coords['time'] = irch.attrs['start_time']
        if 'tb' in idtag:
            # Brightness temperatures: offset by 273.15 K.
            save_info[name] = {'dtype': 'int16',
                               'scale_factor': 0.01,
                               '_FillValue': -32767,
                               'zlib': True,
                               'complevel': 4,
                               'add_offset': 273.15}
        else:
            save_info[name] = {'dtype': 'int16',
                               'scale_factor': 0.01,
                               'zlib': True,
                               'complevel': 4,
                               '_FillValue': -32767,
                               'add_offset': 0.0}
    # Encoding for angles and lat/lon.
    # NOTE(review): image11-13 assumes the band loop above produced
    # image0..image10 — confirm against len(BANDNAMES).
    for name in ['image11', 'image12', 'image13']:
        save_info[name] = {'dtype': 'int16',
                           'scale_factor': 0.01,
                           'zlib': True,
                           'complevel': 4,
                           '_FillValue': -32767,
                           'add_offset': 0.0}
    for name in ['lon', 'lat']:
        save_info[name] = {'dtype': 'float32',
                           'zlib': True,
                           'complevel': 4,
                           '_FillValue': -999.0}
    header_attrs = scn_.attrs.copy()
    header_attrs['start_time'] = time.strftime(
        "%Y-%m-%d %H:%M:%S", irch.attrs['start_time'].timetuple())
    header_attrs['end_time'] = time.strftime(
        "%Y-%m-%d %H:%M:%S", irch.attrs['end_time'].timetuple())
    header_attrs['sensor'] = sensor.lower()
    header_attrs.pop('platform_name', None)
    scn_.save_datasets(writer='cf',
                       filename=filename,
                       header_attrs=header_attrs,
                       engine='netcdf4',
                       encoding=save_info,
                       include_lonlats=False,
                       pretty=True,
                       flatten_attrs=True,
                       exclude_attrs=['raw_metadata'])
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename), time.time() - tic))  # About 40 seconds
    return filename
def process_one_file(gac_file, out_path='.', reader_kwargs=None, engine='h5netcdf', orbit_n=99999):
    """Make level 1c files in PPS-format from one AVHRR GAC level 1b file.

    Args:
        gac_file: Path to the AVHRR GAC level 1b file to process.
        out_path: Directory where the level 1c netCDF file is written.
        reader_kwargs: Optional extra keyword arguments for the
            'avhrr_l1b_gaclac' satpy reader. If TLE settings are missing,
            they are filled in from the pygac configuration. The caller's
            dict is not modified.
        engine: netCDF engine passed to the cf writer.
        orbit_n: Orbit number stored in the header/band attributes.

    Returns:
        The name of the level 1c file written.
    """
    tic = time.time()
    # Work on a copy so the caller's dict is never mutated.
    reader_kwargs = dict(reader_kwargs) if reader_kwargs else {}
    if 'tle_dir' not in reader_kwargs:
        # Fall back to the TLE location from the pygac configuration.
        from pygac.configuration import get_config
        conf = get_config()
        reader_kwargs['tle_dir'] = conf.get('tle', 'tledir', raw=True)
        reader_kwargs['tle_name'] = conf.get('tle', 'tlename', raw=True)
    scn_ = Scene(reader='avhrr_l1b_gaclac',
                 filenames=[gac_file],
                 reader_kwargs=reader_kwargs)
    # Loading all at once sometimes fails with newer satpy,
    # so start with BANDNAMES ...
    scn_.load(BANDNAMES)
    scn_.load([
        'latitude', 'longitude', 'qual_flags', 'sensor_zenith_angle',
        'solar_zenith_angle', 'solar_azimuth_angle', 'sensor_azimuth_angle',
        'sun_sensor_azimuth_difference_angle'
    ])
    # one ir channel
    irch = scn_['4']
    # Set header and band attributes
    set_header_and_band_attrs(scn_, orbit_n=orbit_n)
    # Rename longitude, latitude to lon, lat.
    rename_latitude_longitude(scn_)
    # Convert angles to PPS
    convert_angles(scn_)
    update_angle_attributes(scn_, irch)
    # Handle gac specific datasets qual_flags and scanline_timestamps
    update_ancilliary_datasets(scn_)
    filename = compose_filename(scn_, out_path, instrument='avhrr', band=irch)
    encoding = get_encoding_gac(scn_)
    # Remove 'units' if present; pop with a default avoids a KeyError
    # when the encoding does not carry a units entry.
    encoding['scanline_timestamps'].pop('units', None)
    scn_.save_datasets(
        writer='cf',
        filename=filename,
        header_attrs=get_header_attrs(scn_, band=irch, sensor='avhrr'),
        engine=engine,
        flatten_attrs=True,
        include_lonlats=False,  # Included anyway as they are datasets in scn_
        pretty=True,
        encoding=encoding)
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename), time.time() - tic))
    return filename
def process_one_scan(tslot_files, out_path, rotate=True, engine='h5netcdf'):
    """Make level 1c files in PPS-format from one SEVIRI HRIT scan.

    Args:
        tslot_files: HRIT segment files belonging to one time slot.
        out_path: Directory where the level 1c netCDF file is written.
        rotate: If True, rotate bands so pixel (0, 0) is N-W instead of
            the native S-E orientation.
        engine: netCDF engine passed to the cf writer.

    Returns:
        The name of the level 1c file written.

    Raises:
        FileNotFoundError: If any of the input files does not exist.
        ValueError: If the loaded scene is not SEVIRI data.
    """
    for fname in tslot_files:
        if not os.path.isfile(fname):
            raise FileNotFoundError('No such file: {}'.format(fname))
    tic = time.time()
    parser = Parser(HRIT_FILE_PATTERN)
    # Parse the file name only once; both fields come from the same result.
    parsed = parser.parse(os.path.basename(tslot_files[0]))
    platform_shortname = parsed['platform_shortname']
    start_time = parsed['start_time']
    # Load and calibrate data using inter-calibration coefficients from
    # Meirink et al
    coefs = get_calibration_for_time(platform=platform_shortname,
                                     time=start_time)
    scn_ = Scene(reader='seviri_l1b_hrit',
                 filenames=tslot_files,
                 reader_kwargs={
                     'calib_mode': CALIB_MODE,
                     'ext_calib_coefs': coefs
                 })
    if scn_.attrs['sensor'] != {'seviri'}:
        raise ValueError('Not SEVIRI data')
    scn_.load(BANDNAMES)
    # By default pixel (0,0) is S-E. Rotate bands so that (0,0) is N-W.
    if rotate:
        for band in BANDNAMES:
            rotate_band(scn_, band)
    scn_.attrs['image_rotated'] = rotate
    # Find lat/lon data
    lons, lats = get_lonlats(scn_['IR_108'])
    # Compute angles
    suna, sunz = get_solar_angles(scn_, lons=lons, lats=lats)
    sata, satz = get_satellite_angles(scn_['IR_108'], lons=lons, lats=lats)
    azidiff = make_azidiff_angle(sata, suna)
    # Update coordinates
    update_coords(scn_)
    # Add ancillary datasets to the scene
    add_ancillary_datasets(scn_,
                           lons=lons, lats=lats,
                           sunz=sunz, satz=satz,
                           azidiff=azidiff)
    add_proj_satpos(scn_)
    # Set attributes. This changes SEVIRI band names to PPS band names.
    set_attrs(scn_)
    # Write datasets to netcdf
    filename = compose_filename(scene=scn_, out_path=out_path,
                                instrument='seviri', band=scn_['IR_108'])
    scn_.save_datasets(writer='cf',
                       filename=filename,
                       header_attrs=get_header_attrs(scn_),
                       engine=engine,
                       encoding=get_encoding_seviri(scn_),
                       unlimited_dims=['time'],
                       include_lonlats=False,
                       pretty=True,
                       flatten_attrs=True,
                       exclude_attrs=['raw_metadata'])
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename), time.time() - tic))  # About 40 seconds
    return filename