def test_extrapolate_rows(self):
    """Check row extrapolation of the first tie-point field against known values."""
    tie_lons = np.arange(10).reshape((2, 5), order="F")
    tie_lats = np.arange(10).reshape((2, 5), order="C")
    row_idx = np.array([2, 7])
    col_idx = np.array([2, 7, 12, 17, 22])
    fine_rows = np.arange(10)
    fine_cols = np.arange(24)
    interp = SatelliteInterpolator((tie_lons, tie_lats),
                                   (row_idx, col_idx),
                                   (fine_rows, fine_cols))
    # 2 tie-point rows in -> 4 rows out (one extrapolated on each side).
    expected = np.array(
        [[6381081.08333225, 6381639.66045187, 6372470.10269454,
          6353590.21586788, 6325042.05851245],
         [6370997., 6366146.21816553, 6351605.98629588,
          6327412.61244969, 6293626.50067273],
         [6345786.79166939, 6327412.61244969, 6299445.69529922,
          6261968.60390423, 6215087.60607344],
         [6335702.70833714, 6311919.17016336, 6278581.57890056,
          6235791.00048604, 6183672.04823372]])
    extrapolated = interp._extrapolate_rows(interp.tie_data[0])
    self.assertTrue(np.allclose(extrapolated, expected))
def navigate(self):
    """Return the longitudes and latitudes of the scene."""
    start_time = datetime.now()
    positions = self._data["pos"]
    # Positions are stored as integer 1e-4 degrees; index 1 is lon, 0 is lat.
    lons40km = positions[:, :, 1] * 1e-4
    lats40km = positions[:, :, 0] * 1e-4
    try:
        from geotiepoints import SatelliteInterpolator
    except ImportError:
        logger.warning("Could not interpolate lon/lats, "
                       "python-geotiepoints missing.")
        self.lons, self.lats = lons40km, lats40km
    else:
        nb_lines = lons40km.shape[0]
        interpolator = SatelliteInterpolator(
            (lons40km, lats40km),
            (np.arange(nb_lines), np.arange(24, 2048, 40)),
            (np.arange(nb_lines), np.arange(2048)),
            1,   # along-track interpolation order
            3)   # cross-track interpolation order
        self.lons, self.lats = interpolator.interpolate()
    logger.debug("Navigation time %s", str(datetime.now() - start_time))
def test_fill_col_borders(self):
    """Check that column-border filling extends tie data and column indices."""
    tie_lons = np.arange(10).reshape((2, 5), order="F")
    tie_lats = np.arange(10).reshape((2, 5), order="C")
    row_idx = np.array([2, 7])
    col_idx = np.array([2, 7, 12, 17, 22])
    interp = SatelliteInterpolator((tie_lons, tie_lats),
                                   (row_idx, col_idx),
                                   (np.arange(10), np.arange(24)))
    interp._fill_col_borders()
    # One extra column is added on each side of the 5 tie-point columns.
    expected = np.array(
        [[6372937.31273379, 6370997., 6366146.21816553,
          6351605.98629588, 6327412.61244969, 6293626.50067273,
          6286869.27831734],
         [6353136.46335726, 6345786.79166939, 6327412.61244969,
          6299445.69529922, 6261968.60390423, 6215087.60607344,
          6205711.40650728]])
    self.assertTrue(np.allclose(interp.tie_data[0], expected))
    self.assertTrue(np.allclose(interp.col_indices,
                                np.array([0, 2, 7, 12, 17, 22, 23])))
def upsample_geolocation(self, dsid, info):
    """Upsample the geolocation (lon,lat) from the tiepoint grid."""
    from geotiepoints import SatelliteInterpolator
    # Read the fields needed:
    # Tie-point column/row positions of the reduced lon/lat grid.
    col_indices = self.nc['nx_reduced'].values
    row_indices = self.nc['ny_reduced'].values
    lat_reduced = self.scale_dataset(dsid, self.nc['lat_reduced'], info)
    lon_reduced = self.scale_dataset(dsid, self.nc['lon_reduced'], info)
    # Full-resolution output shape taken from the file's y/x dimensions.
    shape = (self.nc['y'].shape[0], self.nc['x'].shape[0])
    cols_full = np.arange(shape[1])
    rows_full = np.arange(shape[0])
    # Interpolate the tie-point grid up to every (row, col) of the full grid.
    satint = SatelliteInterpolator((lon_reduced.values, lat_reduced.values),
                                   (row_indices, col_indices),
                                   (rows_full, cols_full))
    lons, lats = satint.interpolate()
    # Cache the upsampled fields, preserving the reduced arrays' attributes.
    self.cache['lon'] = xr.DataArray(lons, attrs=lon_reduced.attrs,
                                     dims=['y', 'x'])
    self.cache['lat'] = xr.DataArray(lats, attrs=lat_reduced.attrs,
                                     dims=['y', 'x'])
    return
def test_extrapolate_cols(self):
    """Check column extrapolation of the first tie-point field against TIES_EXP2."""
    tie_lons = np.arange(10).reshape((2, 5), order="F")
    tie_lats = np.arange(10).reshape((2, 5), order="C")
    interp = SatelliteInterpolator((tie_lons, tie_lats),
                                   (np.array([2, 7]),
                                    np.array([2, 7, 12, 17, 22])),
                                   (np.arange(10), np.arange(24)))
    extrapolated = interp._extrapolate_cols(interp.tie_data[0])
    self.assertTrue(np.allclose(extrapolated, TIES_EXP2))
def test_fill_col_borders(self):
    """Check column-border filling matches TIES_EXP3 and updates col_indices."""
    tie_lons = np.arange(10).reshape((2, 5), order="F")
    tie_lats = np.arange(10).reshape((2, 5), order="C")
    interp = SatelliteInterpolator((tie_lons, tie_lats),
                                   (np.array([2, 7]),
                                    np.array([2, 7, 12, 17, 22])),
                                   (np.arange(10), np.arange(24)))
    interp._fill_col_borders()
    self.assertTrue(np.allclose(interp.tie_data[0], TIES_EXP3))
    self.assertTrue(np.allclose(interp.col_indices,
                                np.array([0, 2, 7, 12, 17, 22, 23])))
def test_extrapolate_cols(self):
    """Check column extrapolation of the first tie-point field against known values."""
    tie_lons = np.arange(10).reshape((2, 5), order="F")
    tie_lats = np.arange(10).reshape((2, 5), order="C")
    interp = SatelliteInterpolator((tie_lons, tie_lats),
                                   (np.array([2, 7]),
                                    np.array([2, 7, 12, 17, 22])),
                                   (np.arange(10), np.arange(24)))
    # 5 tie-point columns in -> 7 columns out (one extrapolated on each side).
    expected = np.array(
        [[6372937.31273379, 6370997., 6366146.21816553,
          6351605.98629588, 6327412.61244969, 6293626.50067273,
          6286869.27831734],
         [6353136.46335726, 6345786.79166939, 6327412.61244969,
          6299445.69529922, 6261968.60390423, 6215087.60607344,
          6205711.40650728]])
    extrapolated = interp._extrapolate_cols(interp.tie_data[0])
    self.assertTrue(np.allclose(extrapolated, expected))
def geo_interpolate(lons32km, lats32km):
    """Interpolate 32-km tie-point lons/lats to all 2048 columns."""
    from geotiepoints import SatelliteInterpolator
    nb_lines = lons32km.shape[0]
    coarse_grid = (np.arange(nb_lines), np.arange(0, 2048, 32))
    fine_grid = (np.arange(nb_lines), np.arange(2048))
    interpolator = SatelliteInterpolator(
        (lons32km, lats32km),
        coarse_grid,
        fine_grid,
        1,   # along-track interpolation order
        3)   # cross-track interpolation order
    return interpolator.interpolate()
def geo_interpolate(lons32km, lats32km):
    """Interpolate geo data."""
    along_track_order = 1
    cross_track_order = 3
    nb_rows = lons32km.shape[0]
    # Tie points every 32nd column; target grid covers all 2048 columns.
    coarse = (np.arange(nb_rows), np.arange(0, 2048, 32))
    fine = (np.arange(nb_rows), np.arange(2048))
    interpolator = SatelliteInterpolator((lons32km, lats32km),
                                         coarse, fine,
                                         along_track_order,
                                         cross_track_order)
    lons, lats = interpolator.interpolate()
    return lons, lats
def navigate(self):
    """Get the longitudes and latitudes of the scene."""
    # Positions are stored as integer 1e-4 degrees; index 1 is lon, 0 is lat.
    lons40km = self._data["pos"][:, :, 1] * 1e-4
    lats40km = self._data["pos"][:, :, 0] * 1e-4
    try:
        from geotiepoints import SatelliteInterpolator
    except ImportError:
        # Best effort: fall back to the coarse 40 km grid.
        logger.warning("Could not interpolate lon/lats, "
                       "python-geotiepoints missing.")
        self.lons, self.lats = lons40km, lats40km
    else:
        # Tie points start at column 24 and repeat every 40 columns.
        cols40km = np.arange(24, 2048, 40)
        cols1km = np.arange(2048)
        lines = lons40km.shape[0]
        rows40km = np.arange(lines)
        rows1km = np.arange(lines)
        along_track_order = 1
        cross_track_order = 3
        satint = SatelliteInterpolator((lons40km, lats40km),
                                       (rows40km, cols40km),
                                       (rows1km, cols1km),
                                       along_track_order,
                                       cross_track_order)
        # Defer the actual interpolation with dask; nout=2 because
        # interpolate() returns the (lons, lats) pair.
        self.lons, self.lats = delayed(satint.interpolate, nout=2)()
        # Wrap the delayed results as dask arrays of known shape/dtype.
        self.lons = da.from_delayed(self.lons, (lines, 2048), lons40km.dtype)
        self.lats = da.from_delayed(self.lats, (lines, 2048), lats40km.dtype)
def test_extrapolate_rows(self):
    """Test extrapolation of tie-point data along rows."""
    lons = np.arange(10).reshape((2, 5), order="F")
    lats = np.arange(10).reshape((2, 5), order="C")
    lines = np.array([2, 7])
    cols = np.array([2, 7, 12, 17, 22])
    hlines = np.arange(10)
    hcols = np.arange(24)
    satint = SatelliteInterpolator((lons, lats), (lines, cols),
                                   (hlines, hcols))
    # 2 tie-point rows in -> 4 rows out (one extrapolated on each side).
    self.assertTrue(np.allclose(
        satint._extrapolate_rows(satint.tie_data[0]),
        np.array([[6381081.08333225, 6381639.66045187, 6372470.10269454,
                   6353590.21586788, 6325042.05851245],
                  [6370997., 6366146.21816553, 6351605.98629588,
                   6327412.61244969, 6293626.50067273],
                  [6345786.79166939, 6327412.61244969, 6299445.69529922,
                   6261968.60390423, 6215087.60607344],
                  [6335702.70833714, 6311919.17016336, 6278581.57890056,
                   6235791.00048604, 6183672.04823372]])))
def upsample_geolocation(self, dsid, info):
    """Upsample the geolocation (lon,lat) from the tiepoint grid."""
    from geotiepoints import SatelliteInterpolator
    # Read the fields needed:
    # Tie-point column/row positions of the reduced lon/lat grid.
    col_indices = self.nc['nx_reduced'].values
    row_indices = self.nc['ny_reduced'].values
    lat_reduced = self.scale_dataset(dsid, self.nc['lat_reduced'], info)
    lon_reduced = self.scale_dataset(dsid, self.nc['lon_reduced'], info)
    # Full-resolution output shape taken from the file's y/x dimensions.
    shape = (self.nc['y'].shape[0], self.nc['x'].shape[0])
    cols_full = np.arange(shape[1])
    rows_full = np.arange(shape[0])
    # Interpolate the tie-point grid up to every (row, col) of the full grid.
    satint = SatelliteInterpolator((lon_reduced.values, lat_reduced.values),
                                   (row_indices, col_indices),
                                   (rows_full, cols_full))
    lons, lats = satint.interpolate()
    # Cache the upsampled fields, preserving the reduced arrays' attributes.
    self.cache['lon'] = xr.DataArray(lons, attrs=lon_reduced.attrs,
                                     dims=['y', 'x'])
    self.cache['lat'] = xr.DataArray(lats, attrs=lat_reduced.attrs,
                                     dims=['y', 'x'])
    return
def test_fillborders(self):
    """Test filling of both row and column borders on a chunked interpolator."""
    lons = np.arange(20).reshape((4, 5), order="F")
    lats = np.arange(20).reshape((4, 5), order="C")
    lines = np.array([2, 7, 12, 17])
    cols = np.array([2, 7, 12, 17, 22])
    hlines = np.arange(20)
    hcols = np.arange(24)
    # chunk_size=10 splits the 20 fine rows into two chunks, so extra border
    # rows are inserted at the chunk boundary (indices 9 and 10 below).
    satint = SatelliteInterpolator((lons, lats), (lines, cols),
                                   (hlines, hcols), chunk_size=10)
    satint.fill_borders('x', 'y')
    self.assertTrue(np.allclose(
        satint.tie_data[0],
        np.array([[6384905.78040055, 6381081.08333225, 6371519.34066148,
                   6328950.00792935, 6253610.69157758, 6145946.19489936,
                   6124413.29556372],
                  [6377591.95940176, 6370997., 6354509.6014956,
                   6305151.62592155, 6223234.99818839, 6109277.14889072,
                   6086485.57903118],
                  [6359307.40690478, 6345786.79166939, 6311985.2535809,
                   6245655.67090206, 6147295.76471541, 6017604.5338691,
                   5991666.28769983],
                  [6351993.58590599, 6335702.70833714, 6294975.51441502,
                   6221857.28889426, 6116920.07132621, 5980935.48786045,
                   5953738.5711673],
                  [6338032.26190294, 6320348.4990906, 6276139.09205974,
                   6199670.56624433, 6091551.90273768, 5952590.38414781,
                   5924798.08042984],
                  [6290665.5946295, 6270385.16249031, 6219684.08214232,
                   6137100.75832981, 6023313.2794414, 5879194.72399075,
                   5850371.01290062],
                  [6172248.92644589, 6145476.82098957, 6078546.55734877,
                   5980676.23854351, 5852716.72120069, 5695705.57359808,
                   5664303.34407756],
                  [6124882.25917245, 6095513.48438928, 6022091.54743135,
                   5918106.430629, 5784478.09790441, 5622309.91344102,
                   5589876.27654834]])))
    self.assertTrue(np.allclose(satint.row_indices,
                                np.array([0, 2, 7, 9, 10, 12, 17, 19])))
    self.assertTrue(np.allclose(satint.col_indices,
                                np.array([0, 2, 7, 12, 17, 22, 23])))
def test_fill_row_borders(self):
    """Test filling of row borders, without and with row chunking."""
    lons = np.arange(20).reshape((4, 5), order="F")
    lats = np.arange(20).reshape((4, 5), order="C")
    lines = np.array([2, 7, 12, 17])
    cols = np.array([2, 7, 12, 17, 22])
    hlines = np.arange(20)
    hcols = np.arange(24)
    # Unchunked case: one extra border row on each side of the 4 tie rows.
    satint = SatelliteInterpolator((lons, lats), (lines, cols),
                                   (hlines, hcols))
    satint._fill_row_borders()
    self.assertTrue(np.allclose(
        satint.tie_data[0],
        np.array([[6381081.08333225, 6371519.34066148, 6328950.00792935,
                   6253610.69157758, 6145946.19489936],
                  [6370997., 6354509.6014956, 6305151.62592155,
                   6223234.99818839, 6109277.14889072],
                  [6345786.79166939, 6311985.2535809, 6245655.67090206,
                   6147295.76471541, 6017604.5338691],
                  [6270385.16249031, 6219684.08214232, 6137100.75832981,
                   6023313.2794414, 5879194.72399075],
                  [6145476.82098957, 6078546.55734877, 5980676.23854351,
                   5852716.72120069, 5695705.57359808],
                  [6095513.48438928, 6022091.54743135, 5918106.430629,
                   5784478.09790441, 5622309.91344102]])))
    self.assertTrue(np.allclose(satint.row_indices,
                                np.array([0, 2, 7, 12, 17, 19])))
    # Chunked case: chunk_size=10 adds border rows at the chunk boundary
    # as well (row indices 9 and 10 below).
    satint = SatelliteInterpolator((lons, lats), (lines, cols),
                                   (hlines, hcols), chunk_size=10)
    satint._fill_row_borders()
    self.assertTrue(np.allclose(
        satint.tie_data[0],
        np.array([[6381081.08333225, 6371519.34066148, 6328950.00792935,
                   6253610.69157758, 6145946.19489936],
                  [6370997., 6354509.6014956, 6305151.62592155,
                   6223234.99818839, 6109277.14889072],
                  [6345786.79166939, 6311985.2535809, 6245655.67090206,
                   6147295.76471541, 6017604.5338691],
                  [6335702.70833714, 6294975.51441502, 6221857.28889426,
                   6116920.07132621, 5980935.48786045],
                  [6320348.4990906, 6276139.09205974, 6199670.56624433,
                   6091551.90273768, 5952590.38414781],
                  [6270385.16249031, 6219684.08214232, 6137100.75832981,
                   6023313.2794414, 5879194.72399075],
                  [6145476.82098957, 6078546.55734877, 5980676.23854351,
                   5852716.72120069, 5695705.57359808],
                  [6095513.48438928, 6022091.54743135, 5918106.430629,
                   5784478.09790441, 5622309.91344102]])))
    self.assertTrue(np.allclose(satint.row_indices,
                                np.array([0, 2, 7, 9, 10, 12, 17, 19])))
def test_fill_row_borders(self):
    """Test filling of row borders, without and with row chunking."""
    lons = np.arange(20).reshape((4, 5), order="F")
    lats = np.arange(20).reshape((4, 5), order="C")
    lines = np.array([2, 7, 12, 17])
    cols = np.array([2, 7, 12, 17, 22])
    hlines = np.arange(20)
    hcols = np.arange(24)
    # Unchunked: one extra border row on each side of the 4 tie-point rows.
    satint = SatelliteInterpolator((lons, lats), (lines, cols),
                                   (hlines, hcols))
    satint._fill_row_borders()
    self.assertTrue(np.allclose(
        satint.tie_data[0],
        np.array([[6381081.08333225, 6371519.34066148, 6328950.00792935,
                   6253610.69157758, 6145946.19489936],
                  [6370997., 6354509.6014956, 6305151.62592155,
                   6223234.99818839, 6109277.14889072],
                  [6345786.79166939, 6311985.2535809, 6245655.67090206,
                   6147295.76471541, 6017604.5338691],
                  [6270385.16249031, 6219684.08214232, 6137100.75832981,
                   6023313.2794414, 5879194.72399075],
                  [6145476.82098957, 6078546.55734877, 5980676.23854351,
                   5852716.72120069, 5695705.57359808],
                  [6095513.48438928, 6022091.54743135, 5918106.430629,
                   5784478.09790441, 5622309.91344102]])))
    self.assertTrue(np.allclose(satint.row_indices,
                                np.array([0, 2, 7, 12, 17, 19])))
    # Chunked: chunk_size=10 also inserts border rows at the chunk boundary
    # (row indices 9 and 10 below).
    satint = SatelliteInterpolator((lons, lats), (lines, cols),
                                   (hlines, hcols), chunk_size=10)
    satint._fill_row_borders()
    self.assertTrue(np.allclose(
        satint.tie_data[0],
        np.array([[6381081.08333225, 6371519.34066148, 6328950.00792935,
                   6253610.69157758, 6145946.19489936],
                  [6370997., 6354509.6014956, 6305151.62592155,
                   6223234.99818839, 6109277.14889072],
                  [6345786.79166939, 6311985.2535809, 6245655.67090206,
                   6147295.76471541, 6017604.5338691],
                  [6335702.70833714, 6294975.51441502, 6221857.28889426,
                   6116920.07132621, 5980935.48786045],
                  [6320348.4990906, 6276139.09205974, 6199670.56624433,
                   6091551.90273768, 5952590.38414781],
                  [6270385.16249031, 6219684.08214232, 6137100.75832981,
                   6023313.2794414, 5879194.72399075],
                  [6145476.82098957, 6078546.55734877, 5980676.23854351,
                   5852716.72120069, 5695705.57359808],
                  [6095513.48438928, 6022091.54743135, 5918106.430629,
                   5784478.09790441, 5622309.91344102]])))
    self.assertTrue(np.allclose(satint.row_indices,
                                np.array([0, 2, 7, 9, 10, 12, 17, 19])))
def get_lonlat_into(filename, out_lons, out_lats, out_mask):
    """Read lon,lat from hdf5 file

    Fills the preallocated *out_lons*, *out_lats* and *out_mask* arrays
    in place.  If the file only holds a reduced (tie-point) grid, the
    lon/lats are interpolated up to the full grid shape.
    """
    LOG.debug("Geo File = %s", filename)
    shape = out_lons.shape
    unzipped = unzip_file(filename)
    if unzipped:
        filename = unzipped
    mda = HDF5MetaData(filename).read()
    reduced_grid = False
    h5f = h5py.File(filename, 'r')
    # Tie-point indices may be stored under either naming scheme.
    if "column_indices" in h5f.keys():
        col_indices = h5f["column_indices"][:]
    if "row_indices" in h5f.keys():
        row_indices = h5f["row_indices"][:]
    if "nx_reduced" in h5f:
        col_indices = h5f["nx_reduced"][:]
    if "ny_reduced" in h5f:
        row_indices = h5f["ny_reduced"][:]
    for key in mda.get_data_keys():
        if ((key.endswith("lat") or key.endswith("lon")) or
                (key.endswith("lat_reduced") or key.endswith("lon_reduced"))):
            lonlat = h5f[key]
            # NOTE(review): fillvalue stays unbound if no lon/lat key is
            # present at all -- the mask computation below would then raise.
            fillvalue = lonlat.attrs["_FillValue"]
        else:
            continue
        if key.endswith("lat"):
            lonlat.read_direct(out_lats)
        elif key.endswith("lon"):
            lonlat.read_direct(out_lons)
        elif key.endswith("lat_reduced"):
            lat_reduced = lonlat[:]
            reduced_grid = True
        elif key.endswith("lon_reduced"):
            lon_reduced = lonlat[:]
    if reduced_grid:
        # Upsample the reduced tie-point grid to the full output shape.
        from geotiepoints import SatelliteInterpolator
        cols_full = np.arange(shape[1])
        rows_full = np.arange(shape[0])
        satint = SatelliteInterpolator((lon_reduced, lat_reduced),
                                       (row_indices, col_indices),
                                       (rows_full, cols_full))
        out_lons[:], out_lats[:] = satint.interpolate()
    new_mask = False
    # FIXME: this is to mask out the npp bowtie deleted pixels...
    if "NPP" in h5f.attrs['platform']:
        new_mask = np.zeros((16, 3200), dtype=bool)
        new_mask[0, :1008] = True
        new_mask[1, :640] = True
        new_mask[14, :640] = True
        new_mask[15, :1008] = True
        new_mask[14, 2560:] = True
        new_mask[1, 2560:] = True
        new_mask[0, 2192:] = True
        new_mask[15, 2192:] = True
        # BUGFIX: use floor division -- true division yields a float
        # repetition count and makes np.tile fail under Python 3
        # (identical result for integer operands under Python 2).
        new_mask = np.tile(new_mask, (out_lons.shape[0] // 16, 1))
    out_mask[:] = np.logical_or(
        new_mask,
        np.logical_and(out_lats <= fillvalue, out_lons <= fillvalue))
    h5f.close()
    if unzipped:
        os.remove(unzipped)
def get_lonlat_into(filename, out_lons, out_lats, out_mask):
    """Read lon,lat from hdf5 file

    Fills the preallocated *out_lons*, *out_lats* and *out_mask* arrays
    in place, upsampling a reduced (tie-point) grid if that is all the
    file contains, and masking VIIRS bow-tie deleted pixels for NPP.
    """
    LOG.debug("Geo File = %s", filename)
    shape = out_lons.shape
    unzipped = unzip_file(filename)
    if unzipped:
        filename = unzipped
    mda = HDF5MetaData(filename).read()
    reduced_grid = False
    h5f = h5py.File(filename, 'r')
    # Tie-point indices may be stored under either naming scheme.
    if "column_indices" in h5f.keys():
        col_indices = h5f["column_indices"][:]
    if "row_indices" in h5f.keys():
        row_indices = h5f["row_indices"][:]
    if "nx_reduced" in h5f:
        col_indices = h5f["nx_reduced"][:]
    if "ny_reduced" in h5f:
        row_indices = h5f["ny_reduced"][:]
    for key in mda.get_data_keys():
        if ((key.endswith("lat") or key.endswith("lon")) or
                (key.endswith("lat_reduced") or key.endswith("lon_reduced"))):
            lonlat = h5f[key]
            # NOTE(review): fillvalue stays unbound if no lon/lat key is
            # present at all -- the mask computation below would then raise.
            fillvalue = lonlat.attrs["_FillValue"]
        else:
            continue
        if key.endswith("lat"):
            lonlat.read_direct(out_lats)
        elif key.endswith("lon"):
            lonlat.read_direct(out_lons)
        elif key.endswith("lat_reduced"):
            lat_reduced = lonlat[:]
            reduced_grid = True
        elif key.endswith("lon_reduced"):
            lon_reduced = lonlat[:]
    if reduced_grid:
        # Upsample the reduced tie-point grid to the full output shape.
        from geotiepoints import SatelliteInterpolator
        cols_full = np.arange(shape[1])
        rows_full = np.arange(shape[0])
        satint = SatelliteInterpolator((lon_reduced, lat_reduced),
                                       (row_indices, col_indices),
                                       (rows_full, cols_full))
        out_lons[:], out_lats[:] = satint.interpolate()
    new_mask = False
    # FIXME: this is to mask out the npp bowtie deleted pixels...
    if "NPP" in h5f.attrs['platform']:
        if shape[1] == 3200:
            # M-bands:
            new_mask = np.zeros((16, 3200), dtype=bool)
            new_mask[0, :1008] = True
            new_mask[1, :640] = True
            new_mask[14, :640] = True
            new_mask[15, :1008] = True
            new_mask[14, 2560:] = True
            new_mask[1, 2560:] = True
            new_mask[0, 2192:] = True
            new_mask[15, 2192:] = True
            # BUGFIX: floor division -- true division gives a float count
            # and makes np.tile fail under Python 3.
            new_mask = np.tile(new_mask, (out_lons.shape[0] // 16, 1))
        elif shape[1] == 6400:
            # I-bands:
            LOG.info("PPS on I-band resolution. "
                     "Mask out bow-tie deletion pixels")
            LOG.warning("Not yet supported...")
            new_mask = np.zeros((32, 6400), dtype=bool)
            new_mask[0:2, :2016] = True
            new_mask[0:2, 4384:] = True
            new_mask[2:4, :1280] = True
            new_mask[2:4, 5120:] = True
            new_mask[28:30, :1280] = True
            new_mask[28:30, 5120:] = True
            new_mask[30:32, :2016] = True
            new_mask[30:32, 4384:] = True
            new_mask = np.tile(new_mask, (out_lons.shape[0] // 32, 1))
        else:
            # BUGFIX: the message had no %s placeholder, so passing
            # str(shape) as a lazy argument produced a logging format error.
            LOG.error("VIIRS shape not supported. "
                      "No handling of bow-tie deletion pixels: shape = %s",
                      str(shape))
    out_mask[:] = np.logical_or(
        new_mask,
        np.logical_and(out_lats == fillvalue, out_lons == fillvalue))
    # new_mask, np.logical_and(out_lats <= fillvalue, out_lons <= fillvalue))
    h5f.close()
    if unzipped:
        os.remove(unzipped)
def get_lonlat_into(filename, out_lons, out_lats, out_mask):
    """Read lon,lat from hdf5 file

    Fills the preallocated *out_lons*, *out_lats* and *out_mask* arrays
    in place, upsampling a reduced (tie-point) grid if that is all the
    file contains, and masking VIIRS bow-tie deleted pixels.
    """
    LOG.debug("Geo File = %s", filename)
    shape = out_lons.shape
    unzipped = unzip_file(filename)
    if unzipped:
        filename = unzipped
    mda = HDF5MetaData(filename).read()
    reduced_grid = False
    h5f = h5py.File(filename, 'r')
    # Tie-point indices may be stored under either naming scheme.
    if "column_indices" in h5f.keys():
        col_indices = h5f["column_indices"][:]
    if "row_indices" in h5f.keys():
        row_indices = h5f["row_indices"][:]
    if "nx_reduced" in h5f:
        col_indices = h5f["nx_reduced"][:]
    if "ny_reduced" in h5f:
        row_indices = h5f["ny_reduced"][:]
    for key in mda.get_data_keys():
        if ((key.endswith("lat") or key.endswith("lon")) or
                (key.endswith("lat_reduced") or key.endswith("lon_reduced"))):
            lonlat = h5f[key]
            # NOTE(review): fillvalue stays unbound if no lon/lat key is
            # present at all -- the mask computation below would then raise.
            fillvalue = lonlat.attrs["_FillValue"]
        else:
            continue
        if key.endswith("lat"):
            lonlat.read_direct(out_lats)
        elif key.endswith("lon"):
            lonlat.read_direct(out_lons)
        elif key.endswith("lat_reduced"):
            lat_reduced = lonlat[:]
            reduced_grid = True
        elif key.endswith("lon_reduced"):
            lon_reduced = lonlat[:]
    if reduced_grid:
        # Upsample the reduced tie-point grid to the full output shape.
        from geotiepoints import SatelliteInterpolator
        cols_full = np.arange(shape[1])
        rows_full = np.arange(shape[0])
        satint = SatelliteInterpolator((lon_reduced, lat_reduced),
                                       (row_indices, col_indices),
                                       (rows_full, cols_full))
        out_lons[:], out_lats[:] = satint.interpolate()
    new_mask = False
    # FIXME: this is to mask out the npp bowtie deleted pixels...
    # if "NPP" in h5f.attrs['platform']:
    if h5f.attrs['platform'] in VIIRS_PLATFORMS:
        if shape[1] == 3200:
            # M-bands:
            new_mask = np.zeros((16, 3200), dtype=bool)
            new_mask[0, :1008] = True
            new_mask[1, :640] = True
            new_mask[14, :640] = True
            new_mask[15, :1008] = True
            new_mask[14, 2560:] = True
            new_mask[1, 2560:] = True
            new_mask[0, 2192:] = True
            new_mask[15, 2192:] = True
            # BUGFIX: floor division -- true division gives a float count
            # and makes np.tile fail under Python 3.
            new_mask = np.tile(new_mask, (out_lons.shape[0] // 16, 1))
        elif shape[1] == 6400:
            # I-bands:
            LOG.info("PPS on I-band resolution. "
                     "Mask out bow-tie deletion pixels")
            LOG.warning("Not yet supported...")
            new_mask = np.zeros((32, 6400), dtype=bool)
            new_mask[0:2, :2016] = True
            new_mask[0:2, 4384:] = True
            new_mask[2:4, :1280] = True
            new_mask[2:4, 5120:] = True
            new_mask[28:30, :1280] = True
            new_mask[28:30, 5120:] = True
            new_mask[30:32, :2016] = True
            new_mask[30:32, 4384:] = True
            new_mask = np.tile(new_mask, (out_lons.shape[0] // 32, 1))
        else:
            # BUGFIX: the message had no %s placeholder, so passing
            # str(shape) as a lazy argument produced a logging format error.
            LOG.error("VIIRS shape not supported. "
                      "No handling of bow-tie deletion pixels: shape = %s",
                      str(shape))
    out_mask[:] = np.logical_or(
        new_mask,
        np.logical_and(out_lats == fillvalue, out_lons == fillvalue))
    # new_mask, np.logical_and(out_lats <= fillvalue, out_lons <= fillvalue))
    h5f.close()
    if unzipped:
        os.remove(unzipped)
def load(self, satscene, *args, **kwargs):
    """Read data from file and load it into *satscene*.

    Reads the requested PPS cloud products, then attaches geolocation,
    interpolating from a reduced tie-point grid when needed.
    """
    lonlat_is_loaded = False
    geofilename = kwargs.get('geofilename')
    prodfilename = kwargs.get('filename')
    # Map requested channel names to PPS product identifiers.
    products = []
    if "CTTH" in satscene.channels_to_load:
        products.append("ctth")
    if "CT" in satscene.channels_to_load:
        products.append("cloudtype")
    if "CMA" in satscene.channels_to_load:
        products.append("cloudmask")
    if "PC" in satscene.channels_to_load:
        products.append("precipclouds")
    if "CPP" in satscene.channels_to_load:
        products.append("cpp")
    if len(products) == 0:
        return
    try:
        area_name = satscene.area_id or satscene.area.area_id
    except AttributeError:
        area_name = "satproj_?????_?????"
    # Looking for geolocation file
    conf = ConfigParser()
    conf.read(os.path.join(CONFIG_PATH, satscene.fullname + ".cfg"))
    try:
        geodir = conf.get(satscene.instrument_name + "-level3",
                          "cloud_product_geodir",
                          vars=os.environ)
    except NoOptionError:
        LOG.warning("No option 'geodir' in level3 section")
        geodir = None
    if not geofilename and geodir:
        # Load geo file from config file:
        try:
            if not satscene.orbit:
                orbit = ""
            else:
                orbit = satscene.orbit
            geoname_tmpl = conf.get(satscene.instrument_name + "-level3",
                                    "cloud_product_geofilename",
                                    raw=True,
                                    vars=os.environ)
            filename_tmpl = (satscene.time_slot.strftime(geoname_tmpl)
                            % {"orbit": str(orbit).zfill(5) or "*",
                               "area": area_name,
                               "satellite": satscene.satname + satscene.number})
            file_list = glob.glob(os.path.join(geodir, filename_tmpl))
            if len(file_list) > 1:
                LOG.warning("More than 1 file matching for geoloaction: "
                            + str(file_list))
            elif len(file_list) == 0:
                LOG.warning(
                    "No geolocation file matching!: "
                    + os.path.join(geodir, filename_tmpl))
            else:
                geofilename = file_list[0]
        except NoOptionError:
            geofilename = None
    # Reading the products
    classes = {"ctth": CloudTopTemperatureHeight,
               "cloudtype": CloudType,
               "cloudmask": CloudMask,
               "precipclouds": PrecipitationClouds,
               "cpp": CloudPhysicalProperties
               }
    nodata_mask = False
    area = None
    lons = None
    lats = None
    chn = None
    shape = None
    read_external_geo = {}
    for product in products:
        LOG.debug("Loading " + product)
        if isinstance(prodfilename, (list, tuple, set)):
            # A collection of files: recurse once per file, then stop.
            for fname in prodfilename:
                kwargs['filename'] = fname
                self.load(satscene, *args, **kwargs)
            return
        elif (prodfilename and
              os.path.basename(prodfilename).startswith('S_NWC')):
            if os.path.basename(prodfilename).split("_")[2] == NEW_PRODNAMES[product]:
                filename = prodfilename
            else:
                continue
        else:
            # Build the product path from the config and glob for it.
            filename = conf.get(satscene.instrument_name + "-level3",
                                "cloud_product_filename",
                                raw=True,
                                vars=os.environ)
            directory = conf.get(satscene.instrument_name + "-level3",
                                 "cloud_product_dir",
                                 vars=os.environ)
            pathname_tmpl = os.path.join(directory, filename)
            LOG.debug("Path = " + str(pathname_tmpl))
            if not satscene.orbit:
                orbit = ""
            else:
                orbit = satscene.orbit
            filename_tmpl = (satscene.time_slot.strftime(pathname_tmpl)
                             % {"orbit": str(orbit).zfill(5) or "*",
                                "area": area_name,
                                "satellite": satscene.satname + satscene.number,
                                "product": product})
            file_list = glob.glob(filename_tmpl)
            if len(file_list) == 0:
                # Retry with the new product naming scheme.
                product_name = NEW_PRODNAMES.get(product, product)
                LOG.info("No " + str(product)
                         + " product in old format matching")
                filename_tmpl = (satscene.time_slot.strftime(pathname_tmpl)
                                 % {"orbit": str(orbit).zfill(5) or "*",
                                    "area": area_name,
                                    "satellite": satscene.satname + satscene.number,
                                    "product": product_name})
                file_list = glob.glob(filename_tmpl)
            if len(file_list) > 1:
                LOG.warning("More than 1 file matching for " + product + "! "
                            + str(file_list))
                continue
            elif len(file_list) == 0:
                LOG.warning(
                    "No " + product + " matching!: " + filename_tmpl)
                continue
            else:
                filename = file_list[0]
        chn = classes[product]()
        chn.read(filename, lonlat_is_loaded == False)
        satscene.channels.append(chn)
        # Check if geolocation is loaded:
        if not chn.area:
            read_external_geo[product] = chn
            shape = chn.shape
    # Check if some 'channel'/product needs geolocation. If some product does
    # not have geolocation, get it from the geofilename:
    if not read_external_geo:
        LOG.info("Loading PPS parameters done.")
        return
    # Load geolocation
    interpolate = False
    if geofilename:
        geodict = get_lonlat(geofilename)
        lons, lats = geodict['lon'], geodict['lat']
        if lons.shape != shape or lats.shape != shape:
            # Geo file holds a reduced tie-point grid; interpolation needed.
            interpolate = True
            row_indices = geodict['row_indices']
            column_indices = geodict['col_indices']
        lonlat_is_loaded = True
    else:
        LOG.warning("No Geo file specified: "
                    + "Geolocation will be loaded from product")
    if lonlat_is_loaded:
        if interpolate:
            from geotiepoints import SatelliteInterpolator
            cols_full = np.arange(shape[1])
            rows_full = np.arange(shape[0])
            satint = SatelliteInterpolator((lons, lats),
                                           (row_indices, column_indices),
                                           (rows_full, cols_full))
            # satint.fill_borders("y", "x")
            lons, lats = satint.interpolate()
        try:
            from pyresample import geometry
            lons = np.ma.masked_array(lons, nodata_mask)
            lats = np.ma.masked_array(lats, nodata_mask)
            area = geometry.SwathDefinition(lons=lons, lats=lats)
        except ImportError:
            area = None
    # Attach the geolocation to every product that lacked it.
    for chn in read_external_geo.values():
        if area:
            chn.area = area
        else:
            chn.lat = lats
            chn.lon = lons
    LOG.info("Loading PPS parameters done.")
    return
# Script fragment (Python 2: note the print statements below).
# NOTE(review): tie_lons is referenced but assigned before this fragment --
# presumably params['tiepoints']['lons']; confirm against the full script.
tie_lats = params['tiepoints']['lats']
tie_cols = params['tiepoints']['cols']
tie_rows = params['tiepoints']['rows']
# From tie_cols and tie_rows, generate a regular grid
#fine_rows = np.arange(0, 3085, 257)
#fine_cols = np.arange(0, 6313, 332)
fine_rows = np.arange(0, 15436, 250)
fine_cols = np.arange(0, 31561, 250)
#print params
#fine_cols = np.arange(0, data.shape[1])
#fine_rows = np.arange(0, data.shape[0])
# Orders 1 (along-track) and 3 (cross-track) for the spline interpolation.
interpolator = SatelliteInterpolator((tie_lons, tie_lats),
                                     (tie_rows, tie_cols),
                                     (fine_rows, fine_cols),
                                     1, 3)
#np.save('tie_lons.npy', tie_lons)
#np.save('tie_lats.npy', tie_lats)
#np.save('tie_cols.npy', tie_cols)
#np.save('tie_rows.npy', tie_rows)
#np.save('fine_cols.npy', fine_cols)
#np.save('fine_rows.npy', fine_rows)
lons, lats = interpolator.interpolate()
print 'RESULT :'
print lons
print lats
np.save('result_lons.npy', lons)
np.save('result_lats.npy', lats)
#print 'DATA'
#print data.shape
def load(self, satscene, *args, **kwargs):
    """Read data from file and load it into *satscene*.

    Reads the requested PPS cloud products, then attaches geolocation,
    interpolating from a reduced tie-point grid when needed.
    """
    lonlat_is_loaded = False
    geofilename = kwargs.get('geofilename')
    prodfilename = kwargs.get('filename')
    # Map requested channel names to PPS product identifiers.
    products = []
    if "CTTH" in satscene.channels_to_load:
        products.append("ctth")
    if "CT" in satscene.channels_to_load:
        products.append("cloudtype")
    if "CMA" in satscene.channels_to_load:
        products.append("cloudmask")
    if "PC" in satscene.channels_to_load:
        products.append("precipclouds")
    if "CPP" in satscene.channels_to_load:
        products.append("cpp")
    if len(products) == 0:
        return
    try:
        area_name = satscene.area_id or satscene.area.area_id
    except AttributeError:
        area_name = "satproj_?????_?????"
    # Looking for geolocation file
    conf = ConfigParser()
    conf.read(os.path.join(CONFIG_PATH, satscene.fullname + ".cfg"))
    try:
        geodir = conf.get(satscene.instrument_name + "-level3",
                          "cloud_product_geodir",
                          vars=os.environ)
    except NoOptionError:
        LOG.warning("No option 'geodir' in level3 section")
        geodir = None
    if not geofilename and geodir:
        # Load geo file from config file:
        try:
            if not satscene.orbit:
                orbit = ""
            else:
                orbit = satscene.orbit
            geoname_tmpl = conf.get(satscene.instrument_name + "-level3",
                                    "cloud_product_geofilename",
                                    raw=True,
                                    vars=os.environ)
            filename_tmpl = (satscene.time_slot.strftime(geoname_tmpl)
                            % {"orbit": str(orbit).zfill(5) or "*",
                               "area": area_name,
                               "satellite": satscene.satname + satscene.number})
            file_list = glob.glob(os.path.join(geodir, filename_tmpl))
            if len(file_list) > 1:
                LOG.warning("More than 1 file matching for geoloaction: "
                            + str(file_list))
            elif len(file_list) == 0:
                LOG.warning(
                    "No geolocation file matching!: " + filename_tmpl)
            else:
                geofilename = file_list[0]
        except NoOptionError:
            geofilename = None
    # Reading the products
    classes = {"ctth": CloudTopTemperatureHeight,
               "cloudtype": CloudType,
               "cloudmask": CloudMask,
               "precipclouds": PrecipitationClouds,
               "cpp": CloudPhysicalProperties
               }
    nodata_mask = False
    area = None
    lons = None
    lats = None
    chn = None
    shape = None
    read_external_geo = {}
    for product in products:
        LOG.debug("Loading " + product)
        if isinstance(prodfilename, (list, tuple, set)):
            # A collection of files: recurse once per file, then stop.
            for fname in prodfilename:
                kwargs['filename'] = fname
                self.load(satscene, *args, **kwargs)
            return
        elif (prodfilename and
              os.path.basename(prodfilename).startswith('S_NWC')):
            if os.path.basename(prodfilename).split("_")[2] == NEW_PRODNAMES[product]:
                filename = prodfilename
            else:
                continue
        else:
            # Build the product path from the config and glob for it.
            filename = conf.get(satscene.instrument_name + "-level3",
                                "cloud_product_filename",
                                raw=True,
                                vars=os.environ)
            directory = conf.get(satscene.instrument_name + "-level3",
                                 "cloud_product_dir",
                                 vars=os.environ)
            pathname_tmpl = os.path.join(directory, filename)
            LOG.debug("Path = " + str(pathname_tmpl))
            if not satscene.orbit:
                orbit = ""
            else:
                orbit = satscene.orbit
            filename_tmpl = (satscene.time_slot.strftime(pathname_tmpl)
                             % {"orbit": str(orbit).zfill(5) or "*",
                                "area": area_name,
                                "satellite": satscene.satname + satscene.number,
                                "product": product})
            file_list = glob.glob(filename_tmpl)
            if len(file_list) == 0:
                # Retry with the new product naming scheme.
                product_name = NEW_PRODNAMES.get(product, product)
                LOG.info("No " + str(product)
                         + " product in old format matching")
                filename_tmpl = (satscene.time_slot.strftime(pathname_tmpl)
                                 % {"orbit": str(orbit).zfill(5) or "*",
                                    "area": area_name,
                                    "satellite": satscene.satname + satscene.number,
                                    "product": product_name})
                file_list = glob.glob(filename_tmpl)
            if len(file_list) > 1:
                LOG.warning("More than 1 file matching for " + product + "! "
                            + str(file_list))
                continue
            elif len(file_list) == 0:
                LOG.warning(
                    "No " + product + " matching!: " + filename_tmpl)
                continue
            else:
                filename = file_list[0]
        chn = classes[product]()
        chn.read(filename, lonlat_is_loaded == False)
        satscene.channels.append(chn)
        # Check if geolocation is loaded:
        if not chn.area:
            read_external_geo[product] = chn
            shape = chn.shape
    # Check if some 'channel'/product needs geolocation. If some product does
    # not have geolocation, get it from the geofilename:
    if not read_external_geo:
        LOG.info("Loading PPS parameters done.")
        return
    # Load geolocation
    interpolate = False
    if geofilename:
        geodict = get_lonlat(geofilename)
        lons, lats = geodict['lon'], geodict['lat']
        if lons.shape != shape or lats.shape != shape:
            # Geo file holds a reduced tie-point grid; interpolation needed.
            interpolate = True
            row_indices = geodict['row_indices']
            column_indices = geodict['col_indices']
        lonlat_is_loaded = True
    else:
        LOG.warning("No Geo file specified: "
                    + "Geolocation will be loaded from product")
    if lonlat_is_loaded:
        if interpolate:
            from geotiepoints import SatelliteInterpolator
            cols_full = np.arange(shape[1])
            rows_full = np.arange(shape[0])
            satint = SatelliteInterpolator((lons, lats),
                                           (row_indices, column_indices),
                                           (rows_full, cols_full))
            #satint.fill_borders("y", "x")
            lons, lats = satint.interpolate()
        try:
            from pyresample import geometry
            lons = np.ma.masked_array(lons, nodata_mask)
            lats = np.ma.masked_array(lats, nodata_mask)
            area = geometry.SwathDefinition(lons=lons, lats=lats)
        except ImportError:
            area = None
    # Attach the geolocation to every product that lacked it.
    for chn in read_external_geo.values():
        if area:
            chn.area = area
        else:
            chn.lat = lats
            chn.lon = lons
    LOG.info("Loading PPS parameters done.")
    return
def read(self, filename, load_lonlat=True):
    """Read a PPS product in HDF5 format from *filename*.

    If *filename* is not a plain HDF5 file it is first assumed to be
    bzip2-compressed and is decompressed to a temporary file.  Every
    top-level dataset is attached to ``self`` as an ``InfoObject`` with
    its attributes in ``.info`` and its array in ``.data``.  When
    *load_lonlat* is true, geolocation is set up afterwards: either from
    full-resolution lon/lat arrays, from a tiepoint grid (interpolated
    with python-geotiepoints), or from an area definition stored in the
    file.

    :param filename: path to the (possibly bzipped) HDF5 product file
    :param load_lonlat: skip the geolocation setup when False
    :raises IOError: when the file cannot be read or is not HDF5, or
        when interpolation is needed but no tiepoint grid is available
    """
    LOG.debug("Filename: %s" % filename)

    is_temp = False
    if not h5py.is_hdf5(filename):
        # Try to see if it is bzipped:
        import bz2
        bz2file = bz2.BZ2File(filename)
        import tempfile
        # NOTE(review): tempfile.mktemp() is race-prone; consider
        # tempfile.mkstemp() — left unchanged to preserve behavior.
        tmpfilename = tempfile.mktemp()
        try:
            ofpt = open(tmpfilename, 'wb')
            ofpt.write(bz2file.read())
            ofpt.close()
            is_temp = True
        except IOError:
            import traceback
            traceback.print_exc()
            raise IOError("Failed to read the file %s" % filename)

        filename = tmpfilename

    if not h5py.is_hdf5(filename):
        if is_temp:
            os.remove(filename)
        # Fixed: the original message had no %s placeholder, so the
        # formatting itself raised TypeError instead of this IOError.
        raise IOError("File %s is not a hdf5 file!" % filename)

    h5f = h5py.File(filename, "r")

    # Read the global attributes
    self._md = dict(h5f.attrs)
    self._md["satellite"] = h5f.attrs['satellite_id']
    self._md["orbit"] = h5f.attrs['orbit_number']
    self._md["time_slot"] = (timedelta(seconds=long(h5f.attrs['sec_1970']))
                             + datetime(1970, 1, 1, 0, 0))

    # Read the data and attributes.
    # This covers only one level of data. This could be made recursive.
    for key, dataset in h5f.iteritems():
        setattr(self, key, InfoObject())
        getattr(self, key).info = dict(dataset.attrs)
        for skey, value in dataset.attrs.iteritems():
            if isinstance(value, h5py.h5r.Reference):
                self._refs[(key, skey)] = h5f[value].name.split("/")[1]

        if type(dataset.id) is h5py.h5g.GroupID:
            LOG.warning("Format reader does not support groups")
            continue

        try:
            getattr(self, key).data = dataset[:]
            is_palette = (dataset.attrs.get("CLASS", None) == "PALETTE")
            # Multi-dimensional, non-palette, non-geolocation datasets
            # are the actual projectable products; they must all share
            # one shape.
            if(len(dataset.shape) > 1 and
               not is_palette and
               key not in ["lon", "lat",
                           "row_indices", "column_indices"]):
                self._projectables.append(key)
                if self.shape is None:
                    self.shape = dataset.shape
                elif self.shape != dataset.shape:
                    raise ValueError("Different variable shapes !")
            else:
                self._keys.append(key)
        except TypeError:
            # Scalar/empty datasets cannot be sliced; keep the dtype.
            setattr(self, key, np.dtype(dataset))
            self._keys.append(key)

    h5f.close()
    if is_temp:
        os.remove(filename)

    if not load_lonlat:
        return

    # Setup geolocation
    # We need a no-data mask from one of the projectables to
    # mask out bow-tie deletion pixels from the geolocation array
    # So far only relevant for VIIRS.
    # Preferably the lon-lat data in the PPS VIIRS geolocation
    # file should already be masked.
    # The no-data values in the products are not only where geo-location
    # is absent. Only the Cloud Type can be used as a proxy so far.
    # Adam Dybbroe, 2012-08-31
    nodata_mask = False  # np.ma.masked_equal(np.ones(self.shape), 0).mask
    for key in self._projectables:
        projectable = getattr(self, key)
        if key in ['cloudtype']:
            nodata_array = np.ma.array(projectable.data)
            nodata_mask = np.ma.masked_equal(nodata_array, 0).mask
            break

    try:
        from pyresample import geometry
    except ImportError:
        return

    tiepoint_grid = False
    if hasattr(self, "row_indices") and hasattr(self, "column_indices"):
        column_indices = self.column_indices.data
        row_indices = self.row_indices.data
        tiepoint_grid = True

    interpolate = False
    if hasattr(self, "lon") and hasattr(self, "lat"):
        # 'intercept' and 'offset' are alternative attribute names for
        # the same scaling parameter, depending on the file version.
        if 'intercept' in self.lon.info:
            offset_lon = self.lon.info["intercept"]
        elif 'offset' in self.lon.info:
            offset_lon = self.lon.info["offset"]
        if 'gain' in self.lon.info:
            gain_lon = self.lon.info["gain"]
        lons = self.lon.data * gain_lon + offset_lon

        if 'intercept' in self.lat.info:
            offset_lat = self.lat.info["intercept"]
        elif 'offset' in self.lat.info:
            offset_lat = self.lat.info["offset"]
        if 'gain' in self.lat.info:
            gain_lat = self.lat.info["gain"]
        lats = self.lat.data * gain_lat + offset_lat

        if lons.shape != self.shape or lats.shape != self.shape:
            # Data on tiepoint grid:
            interpolate = True
            if not tiepoint_grid:
                # Fixed: added the missing space between the two
                # concatenated message fragments.
                errmsg = ("Interpolation needed but insufficient "
                          "information on the tiepoint grid")
                raise IOError(errmsg)
        else:
            # Geolocation available on the full grid:
            # We need to mask out nodata (VIIRS Bow-tie deletion...)
            # We do it for all instruments, checking only against the
            # nodata
            lons = np.ma.masked_array(lons, nodata_mask)
            lats = np.ma.masked_array(lats, nodata_mask)
            self.area = geometry.SwathDefinition(lons=lons, lats=lats)

    elif hasattr(self, "region") and self.region.data["area_extent"].any():
        region = self.region.data
        proj_dict = dict([elt.split('=')
                          for elt in region["pcs_def"].split(',')])
        self.area = geometry.AreaDefinition(region["id"],
                                            region["name"],
                                            region["proj_id"],
                                            proj_dict,
                                            region["xsize"],
                                            region["ysize"],
                                            region["area_extent"])

    if interpolate:
        from geotiepoints import SatelliteInterpolator

        cols_full = np.arange(self.shape[1])
        rows_full = np.arange(self.shape[0])

        satint = SatelliteInterpolator((lons, lats),
                                       (row_indices, column_indices),
                                       (rows_full, cols_full))
        #satint.fill_borders("y", "x")
        lons, lats = satint.interpolate()
        self.area = geometry.SwathDefinition(lons=lons, lats=lats)
def load(scene, geofilename=None, **kwargs):
    """Load PPS cloud products (old-style product names) into *scene*.

    For every requested channel ("CTTH", "CloudType", "CMa", "PC",
    "CPP") the matching product file is located via the scene's config
    file, read, and appended to ``scene.channels``.  Geolocation is then
    taken from *geofilename* (or a geo file found through the config)
    and, when it is on a tiepoint grid, interpolated to the full grid
    with python-geotiepoints.

    :param scene: the satellite scene to fill in
    :param geofilename: optional explicit geolocation file
    :param kwargs: ignored
    """
    del kwargs
    import glob

    lonlat_is_loaded = False

    products = []
    if "CTTH" in scene.channels_to_load:
        products.append("CTTH")
    if "CloudType" in scene.channels_to_load:
        products.append("CT")
    if "CMa" in scene.channels_to_load:
        products.append("CMA")
    if "PC" in scene.channels_to_load:
        products.append("PC")
    if "CPP" in scene.channels_to_load:
        products.append("CPP")
    if len(products) == 0:
        return

    try:
        area_name = scene.area_id or scene.area.area_id
    except AttributeError:
        area_name = "satproj_?????_?????"

    conf = ConfigParser.ConfigParser()
    conf.read(os.path.join(CONFIG_PATH, scene.fullname + ".cfg"))
    directory = conf.get(scene.instrument_name + "-level3", "dir")
    try:
        geodir = conf.get(scene.instrument_name + "-level3", "geodir")
    except NoOptionError:
        LOG.warning("No option 'geodir' in level3 section")
        geodir = None

    filename = conf.get(scene.instrument_name + "-level3", "filename",
                        raw=True)
    pathname_tmpl = os.path.join(directory, filename)

    if not geofilename and geodir:
        # Load geo file from config file:
        try:
            if not scene.orbit:
                orbit = ""
            else:
                orbit = scene.orbit
            geoname_tmpl = conf.get(scene.instrument_name + "-level3",
                                    "geofilename", raw=True)
            # Fixed: str() guards against non-string orbit numbers
            # (orbit.zfill would raise AttributeError on an int),
            # consistent with the other loaders in this module.
            filename_tmpl = (scene.time_slot.strftime(geoname_tmpl)
                             % {"orbit": str(orbit).zfill(5) or "*",
                                "area": area_name,
                                "satellite": scene.satname + scene.number})

            file_list = glob.glob(os.path.join(geodir, filename_tmpl))
            if len(file_list) > 1:
                LOG.warning("More than 1 file matching for geoloaction: "
                            + str(file_list))
            elif len(file_list) == 0:
                LOG.warning("No geolocation file matching!: "
                            + filename_tmpl)
            else:
                geofilename = file_list[0]
        except NoOptionError:
            geofilename = None

    classes = {"CTTH": CloudTopTemperatureHeight,
               "CT": CloudType,
               "CMA": CloudMask,
               "PC": PrecipitationClouds,
               "CPP": CloudPhysicalProperties
               }

    nodata_mask = False

    chn = None
    for product in products:
        LOG.debug("Loading " + product)

        if not scene.orbit:
            orbit = ""
        else:
            orbit = scene.orbit

        # Fixed: str(orbit) here as well (same AttributeError hazard).
        filename_tmpl = (scene.time_slot.strftime(pathname_tmpl)
                         % {"orbit": str(orbit).zfill(5) or "*",
                            "area": area_name,
                            "satellite": scene.satname + scene.number,
                            "product": product})

        file_list = glob.glob(filename_tmpl)
        if len(file_list) > 1:
            LOG.warning("More than 1 file matching for " + product + "! "
                        + str(file_list))
            continue
        elif len(file_list) == 0:
            LOG.warning("No " + product + " matching!: " + filename_tmpl)
            continue
        else:
            filename = file_list[0]
            chn = classes[product]()
            # Only load lon/lat from the product while no geo file has
            # provided them yet.
            chn.read(filename, not lonlat_is_loaded)
            scene.channels.append(chn)

        # Setup geolocation
        # We need a no-data mask from one of the projectables to
        # mask out bow-tie deletion pixels from the geolocation array
        # So far only relevant for VIIRS.
        # Preferably the lon-lat data in the PPS VIIRS geolocation
        # file should already be masked.
        # The no-data values in the products are not only where
        # geo-location is absent. Only the Cloud Type can be used as a
        # proxy so far.
        # Adam Dybbroe, 2012-08-31
        if hasattr(chn, '_projectables'):
            for key in chn._projectables:
                projectable = getattr(chn, key)
                if key in ['ct']:
                    nodata_array = np.ma.array(projectable.data)
                    nodata_mask = np.ma.masked_equal(
                        nodata_array, projectable.info["_FillValue"]).mask
                    break
        else:
            LOG.warning("Channel has no '_projectables' member."
                        + " No nodata-mask set...")

    if chn is None:
        return

    # Is this safe!? AD 2012-08-25
    shape = chn.shape

    interpolate = False
    if geofilename:
        geodict = get_lonlat(geofilename)
        lons, lats = geodict['lon'], geodict['lat']
        if lons.shape != shape or lats.shape != shape:
            interpolate = True
            row_indices = geodict['row_indices']
            column_indices = geodict['column_indices']

        lonlat_is_loaded = True
    else:
        LOG.warning("No Geo file specified: "
                    "Geolocation will be loaded from product")

    if lonlat_is_loaded:
        if interpolate:
            from geotiepoints import SatelliteInterpolator

            cols_full = np.arange(shape[1])
            rows_full = np.arange(shape[0])

            satint = SatelliteInterpolator((lons, lats),
                                           (row_indices, column_indices),
                                           (rows_full, cols_full))
            #satint.fill_borders("y", "x")
            lons, lats = satint.interpolate()

        try:
            from pyresample import geometry
            lons = np.ma.masked_array(lons, nodata_mask)
            lats = np.ma.masked_array(lats, nodata_mask)
            scene.area = geometry.SwathDefinition(lons=lons, lats=lats)
        except ImportError:
            # pyresample missing: fall back to bare lon/lat attributes.
            scene.area = None
            scene.lat = lats
            scene.lon = lons

    LOG.info("Loading PPS parameters done.")
def test_fillborders(self):
    """Check that fill_borders('x', 'y') extrapolates tiepoints to both
    the row and column borders of a chunked swath.

    A 4x5 tiepoint grid (rows split in two chunks of 10 scanlines) is
    extrapolated; the expected cartesian x-values, the augmented row
    indices (chunk boundaries 9/10 inserted) and the augmented column
    indices (borders 0 and 23 added) are asserted.
    """
    lons = np.arange(20).reshape((4, 5), order="F")
    lats = np.arange(20).reshape((4, 5), order="C")
    lines = np.array([2, 7, 12, 17])
    cols = np.array([2, 7, 12, 17, 22])
    hlines = np.arange(20)
    hcols = np.arange(24)
    # chunk_size=10 forces border filling at the chunk boundary too.
    satint = SatelliteInterpolator((lons, lats), (lines, cols),
                                   (hlines, hcols), chunk_size=10)
    satint.fill_borders('x', 'y')
    # Expected cartesian x-coordinates (metres) after border filling.
    self.assertTrue(
        np.allclose(
            satint.tie_data[0],
            np.array(
                [[6384905.78040055, 6381081.08333225, 6371519.34066148,
                  6328950.00792935, 6253610.69157758, 6145946.19489936,
                  6124413.29556372],
                 [6377591.95940176, 6370997., 6354509.6014956,
                  6305151.62592155, 6223234.99818839, 6109277.14889072,
                  6086485.57903118],
                 [6359307.40690478, 6345786.79166939, 6311985.2535809,
                  6245655.67090206, 6147295.76471541, 6017604.5338691,
                  5991666.28769983],
                 [6351993.58590599, 6335702.70833714, 6294975.51441502,
                  6221857.28889426, 6116920.07132621, 5980935.48786045,
                  5953738.5711673],
                 [6338032.26190294, 6320348.4990906, 6276139.09205974,
                  6199670.56624433, 6091551.90273768, 5952590.38414781,
                  5924798.08042984],
                 [6290665.5946295, 6270385.16249031, 6219684.08214232,
                  6137100.75832981, 6023313.2794414, 5879194.72399075,
                  5850371.01290062],
                 [6172248.92644589, 6145476.82098957, 6078546.55734877,
                  5980676.23854351, 5852716.72120069, 5695705.57359808,
                  5664303.34407756],
                 [6124882.25917245, 6095513.48438928, 6022091.54743135,
                  5918106.430629, 5784478.09790441, 5622309.91344102,
                  5589876.27654834]])))
    # Row indices gain the borders of each 10-line chunk (0, 9, 10, 19).
    self.assertTrue(
        np.allclose(satint.row_indices,
                    np.array([0, 2, 7, 9, 10, 12, 17, 19])))
    # Column indices gain the swath borders 0 and 23.
    self.assertTrue(
        np.allclose(satint.col_indices,
                    np.array([0, 2, 7, 12, 17, 22, 23])))
def load(scene, geofilename=None, **kwargs):
    """Load PPS cloud products (new-style product names) into *scene*.

    For every requested channel ("CTTH", "CloudType", "CMa", "PC",
    "CPP") the matching product file ("ctth", "cloudtype", "cloudmask",
    "precipclouds", "cpp") is located via the scene's config file, read,
    and appended to ``scene.channels``.  Geolocation is then taken from
    *geofilename* (or a geo file found through the config) and, when it
    is on a tiepoint grid, interpolated to the full grid with
    python-geotiepoints.

    :param scene: the satellite scene to fill in
    :param geofilename: optional explicit geolocation file
    :param kwargs: ignored
    """
    del kwargs
    import glob

    lonlat_is_loaded = False

    products = []
    if "CTTH" in scene.channels_to_load:
        products.append("ctth")
    if "CloudType" in scene.channels_to_load:
        products.append("cloudtype")
    if "CMa" in scene.channels_to_load:
        products.append("cloudmask")
    if "PC" in scene.channels_to_load:
        products.append("precipclouds")
    if "CPP" in scene.channels_to_load:
        products.append("cpp")
    if len(products) == 0:
        return

    try:
        area_name = scene.area_id or scene.area.area_id
    except AttributeError:
        area_name = "satproj_?????_?????"

    conf = ConfigParser.ConfigParser()
    conf.read(os.path.join(CONFIG_PATH, scene.fullname + ".cfg"))
    directory = conf.get(scene.instrument_name + "-level3", "dir")
    try:
        geodir = conf.get(scene.instrument_name + "-level3", "geodir")
    except NoOptionError:
        LOG.warning("No option 'geodir' in level3 section")
        geodir = None

    filename = conf.get(scene.instrument_name + "-level3", "filename",
                        raw=True)
    pathname_tmpl = os.path.join(directory, filename)

    if not geofilename and geodir:
        # Load geo file from config file:
        try:
            if not scene.orbit:
                orbit = ""
            else:
                orbit = scene.orbit
            geoname_tmpl = conf.get(scene.instrument_name + "-level3",
                                    "geofilename", raw=True)
            # Fixed: str() guards against non-string orbit numbers
            # (orbit.zfill would raise AttributeError on an int),
            # consistent with the other loaders in this module.
            filename_tmpl = (scene.time_slot.strftime(geoname_tmpl)
                             % {"orbit": str(orbit).zfill(5) or "*",
                                "area": area_name,
                                "satellite": scene.satname + scene.number})

            file_list = glob.glob(os.path.join(geodir, filename_tmpl))
            if len(file_list) > 1:
                LOG.warning("More than 1 file matching for geoloaction: "
                            + str(file_list))
            elif len(file_list) == 0:
                LOG.warning("No geolocation file matching!: "
                            + filename_tmpl)
            else:
                geofilename = file_list[0]
        except NoOptionError:
            geofilename = None

    classes = {"ctth": CloudTopTemperatureHeight,
               "cloudtype": CloudType,
               "cloudmask": CloudMask,
               "precipclouds": PrecipitationClouds,
               "cpp": CloudPhysicalProperties
               }

    nodata_mask = False

    chn = None
    for product in products:
        LOG.debug("Loading " + product)

        if not scene.orbit:
            orbit = ""
        else:
            orbit = scene.orbit

        # Fixed: str(orbit) here as well (same AttributeError hazard).
        filename_tmpl = (scene.time_slot.strftime(pathname_tmpl)
                         % {"orbit": str(orbit).zfill(5) or "*",
                            "area": area_name,
                            "satellite": scene.satname + scene.number,
                            "product": product})

        file_list = glob.glob(filename_tmpl)
        if len(file_list) > 1:
            LOG.warning("More than 1 file matching for " + product + "! "
                        + str(file_list))
            continue
        elif len(file_list) == 0:
            LOG.warning("No " + product + " matching!: " + filename_tmpl)
            continue
        else:
            filename = file_list[0]
            chn = classes[product]()
            # Only load lon/lat from the product while no geo file has
            # provided them yet.
            chn.read(filename, not lonlat_is_loaded)
            scene.channels.append(chn)

        # Setup geolocation
        # We need a no-data mask from one of the projectables to
        # mask out bow-tie deletion pixels from the geolocation array
        # So far only relevant for VIIRS.
        # Preferably the lon-lat data in the PPS VIIRS geolocation
        # file should already be masked.
        # The no-data values in the products are not only where
        # geo-location is absent. Only the Cloud Type can be used as a
        # proxy so far.
        # Adam Dybbroe, 2012-08-31
        if hasattr(chn, '_projectables'):
            for key in chn._projectables:
                projectable = getattr(chn, key)
                if key in ['cloudtype']:
                    nodata_array = np.ma.array(projectable.data)
                    nodata_mask = np.ma.masked_equal(nodata_array, 0).mask
                    break
        else:
            LOG.warning("Channel has no '_projectables' member."
                        + " No nodata-mask set...")

    if chn is None:
        return

    # Is this safe!? AD 2012-08-25
    shape = chn.shape

    interpolate = False
    if geofilename:
        geodict = get_lonlat(geofilename)
        lons, lats = geodict['lon'], geodict['lat']
        if lons.shape != shape or lats.shape != shape:
            interpolate = True
            row_indices = geodict['row_indices']
            column_indices = geodict['column_indices']

        lonlat_is_loaded = True
    else:
        LOG.warning("No Geo file specified: "
                    "Geolocation will be loaded from product")

    if lonlat_is_loaded:
        if interpolate:
            from geotiepoints import SatelliteInterpolator

            cols_full = np.arange(shape[1])
            rows_full = np.arange(shape[0])

            satint = SatelliteInterpolator((lons, lats),
                                           (row_indices, column_indices),
                                           (rows_full, cols_full))
            #satint.fill_borders("y", "x")
            lons, lats = satint.interpolate()

        try:
            from pyresample import geometry
            lons = np.ma.masked_array(lons, nodata_mask)
            lats = np.ma.masked_array(lats, nodata_mask)
            scene.area = geometry.SwathDefinition(lons=lons, lats=lats)
        except ImportError:
            # pyresample missing: fall back to bare lon/lat attributes.
            scene.area = None
            scene.lat = lats
            scene.lon = lons

    LOG.info("Loading PPS parameters done.")
def read(self, filename, load_lonlat=True):
    """Read a PPS product in HDF5 format from *filename*.

    Variant using the 'platform' global attribute for the satellite id.
    If *filename* is not a plain HDF5 file it is first assumed to be
    bzip2-compressed and is decompressed to a temporary file.  Every
    top-level dataset is attached to ``self`` as an ``InfoObject`` with
    its attributes in ``.info`` and its array in ``.data``.  When
    *load_lonlat* is true, geolocation is set up afterwards: either from
    full-resolution lon/lat arrays, from a tiepoint grid (interpolated
    with python-geotiepoints), or from an area definition stored in the
    file.

    :param filename: path to the (possibly bzipped) HDF5 product file
    :param load_lonlat: skip the geolocation setup when False
    :raises IOError: when the file cannot be read or is not HDF5, or
        when interpolation is needed but no tiepoint grid is available
    """
    LOG.debug("Filename: %s" % filename)

    is_temp = False
    if not h5py.is_hdf5(filename):
        # Try to see if it is bzipped:
        import bz2
        bz2file = bz2.BZ2File(filename)
        import tempfile
        # NOTE(review): tempfile.mktemp() is race-prone; consider
        # tempfile.mkstemp() — left unchanged to preserve behavior.
        tmpfilename = tempfile.mktemp()
        try:
            ofpt = open(tmpfilename, 'wb')
            ofpt.write(bz2file.read())
            ofpt.close()
            is_temp = True
        except IOError:
            import traceback
            traceback.print_exc()
            raise IOError("Failed to read the file %s" % filename)

        filename = tmpfilename

    if not h5py.is_hdf5(filename):
        if is_temp:
            os.remove(filename)
        # Fixed: the original message had no %s placeholder, so the
        # formatting itself raised TypeError instead of this IOError.
        raise IOError("File %s is not a hdf5 file!" % filename)

    h5f = h5py.File(filename, "r")

    # Read the global attributes
    self._md = dict(h5f.attrs)
    self._md["satellite"] = h5f.attrs['platform']
    self._md["orbit"] = h5f.attrs['orbit_number']
    self._md["time_slot"] = (timedelta(seconds=long(h5f.attrs['sec_1970']))
                             + datetime(1970, 1, 1, 0, 0))

    # Read the data and attributes.
    # This covers only one level of data. This could be made recursive.
    for key, dataset in h5f.iteritems():
        setattr(self, key, InfoObject())
        getattr(self, key).info = dict(dataset.attrs)
        for skey, value in dataset.attrs.iteritems():
            if isinstance(value, h5py.h5r.Reference):
                self._refs[(key, skey)] = h5f[value].name.split("/")[1]

        if type(dataset.id) is h5py.h5g.GroupID:
            LOG.warning("Format reader does not support groups")
            continue

        try:
            getattr(self, key).data = dataset[:]
            is_palette = (dataset.attrs.get("CLASS", None) == "PALETTE")
            # Multi-dimensional, non-palette, non-geolocation datasets
            # are the actual projectable products; they must all share
            # one shape.
            if(len(dataset.shape) > 1 and
               not is_palette and
               key not in ["lon", "lat",
                           "row_indices", "column_indices"]):
                self._projectables.append(key)
                if self.shape is None:
                    self.shape = dataset.shape
                elif self.shape != dataset.shape:
                    raise ValueError("Different variable shapes !")
            else:
                self._keys.append(key)
        except TypeError:
            # Scalar/empty datasets cannot be sliced; keep the dtype.
            setattr(self, key, np.dtype(dataset))
            self._keys.append(key)

    h5f.close()
    if is_temp:
        os.remove(filename)

    if not load_lonlat:
        return

    # Setup geolocation
    # We need a no-data mask from one of the projectables to
    # mask out bow-tie deletion pixels from the geolocation array
    # So far only relevant for VIIRS.
    # Preferably the lon-lat data in the PPS VIIRS geolocation
    # file should already be masked.
    # The no-data values in the products are not only where geo-location
    # is absent. Only the Cloud Type can be used as a proxy so far.
    # Adam Dybbroe, 2012-08-31
    nodata_mask = False  # np.ma.masked_equal(np.ones(self.shape), 0).mask
    for key in self._projectables:
        projectable = getattr(self, key)
        if key in ['cloudtype']:
            nodata_array = np.ma.array(projectable.data)
            nodata_mask = np.ma.masked_equal(nodata_array, 0).mask
            break

    try:
        from pyresample import geometry
    except ImportError:
        return

    tiepoint_grid = False
    if hasattr(self, "row_indices") and hasattr(self, "column_indices"):
        column_indices = self.column_indices.data
        row_indices = self.row_indices.data
        tiepoint_grid = True

    interpolate = False
    if hasattr(self, "lon") and hasattr(self, "lat"):
        # 'intercept' and 'offset' are alternative attribute names for
        # the same scaling parameter, depending on the file version.
        if 'intercept' in self.lon.info:
            offset_lon = self.lon.info["intercept"]
        elif 'offset' in self.lon.info:
            offset_lon = self.lon.info["offset"]
        if 'gain' in self.lon.info:
            gain_lon = self.lon.info["gain"]
        lons = self.lon.data * gain_lon + offset_lon

        if 'intercept' in self.lat.info:
            offset_lat = self.lat.info["intercept"]
        elif 'offset' in self.lat.info:
            offset_lat = self.lat.info["offset"]
        if 'gain' in self.lat.info:
            gain_lat = self.lat.info["gain"]
        lats = self.lat.data * gain_lat + offset_lat

        if lons.shape != self.shape or lats.shape != self.shape:
            # Data on tiepoint grid:
            interpolate = True
            if not tiepoint_grid:
                # Fixed: added the missing space between the two
                # concatenated message fragments.
                errmsg = ("Interpolation needed but insufficient "
                          "information on the tiepoint grid")
                raise IOError(errmsg)
        else:
            # Geolocation available on the full grid:
            # We need to mask out nodata (VIIRS Bow-tie deletion...)
            # We do it for all instruments, checking only against the
            # nodata
            lons = np.ma.masked_array(lons, nodata_mask)
            lats = np.ma.masked_array(lats, nodata_mask)
            self.area = geometry.SwathDefinition(lons=lons, lats=lats)

    elif hasattr(self, "region") and self.region.data["area_extent"].any():
        region = self.region.data
        proj_dict = dict([elt.split('=')
                          for elt in region["pcs_def"].split(',')])
        self.area = geometry.AreaDefinition(region["id"],
                                            region["name"],
                                            region["proj_id"],
                                            proj_dict,
                                            region["xsize"],
                                            region["ysize"],
                                            region["area_extent"])

    if interpolate:
        from geotiepoints import SatelliteInterpolator

        cols_full = np.arange(self.shape[1])
        rows_full = np.arange(self.shape[0])

        satint = SatelliteInterpolator((lons, lats),
                                       (row_indices, column_indices),
                                       (rows_full, cols_full))
        #satint.fill_borders("y", "x")
        lons, lats = satint.interpolate()
        self.area = geometry.SwathDefinition(lons=lons, lats=lats)
# Debug/driver script fragment: interpolate tiepoint lon/lats to a fine
# grid and dump the result to .npy files.
# NOTE(review): 'params', 'tie_lons' and 'tie_lats' are defined earlier
# in the script (outside this excerpt); presumably 'params' comes from a
# parsed product header — verify upstream.
tie_cols = params['tiepoints']['cols']
tie_rows = params['tiepoints']['rows']

# From tie_cols and tie_rows, generate a regular grid
#fine_rows = np.arange(0, 3085, 257)
#fine_cols = np.arange(0, 6313, 332)
# Hard-coded full-resolution grid extents (steps of 250 rows/cols).
fine_rows = np.arange(0, 15436, 250)
fine_cols = np.arange(0, 31561, 250)
#print params
#fine_cols = np.arange(0, data.shape[1])
#fine_rows = np.arange(0, data.shape[0])

# Cubic interpolation across track (order 3), linear along track (order 1).
interpolator = SatelliteInterpolator((tie_lons, tie_lats),
                                     (tie_rows, tie_cols),
                                     (fine_rows, fine_cols),
                                     1, 3)
#np.save('tie_lons.npy', tie_lons)
#np.save('tie_lats.npy', tie_lats)
#np.save('tie_cols.npy', tie_cols)
#np.save('tie_rows.npy', tie_rows)
#np.save('fine_cols.npy', fine_cols)
#np.save('fine_rows.npy', fine_rows)
lons, lats = interpolator.interpolate()

# Python 2 print statements: this script predates Python 3.
print 'RESULT :'
print lons
print lats
np.save('result_lons.npy', lons)
np.save('result_lats.npy', lats)
#print 'DATA'
#print data.shape