def __call__(self, projectables, *args, **kwargs):
    """Create the SAR Ice composite."""
    (mhh, mhv) = projectables
    green = overlay(mhh, mhv)
    green.attrs = combine_metadata(mhh, mhv)

    return super(SARIce, self).__call__((mhv, green, mhh), *args, **kwargs)
def __call__(self, projectables, nonprojectables=None, **info):
    if len(projectables) != 2:
        raise ValueError("Expected 2 datasets, got %d" % (len(projectables), ))

    info = combine_metadata(*projectables)
    info['name'] = self.attrs['name']

    return Dataset(projectables[0] - projectables[1], **info)
def __call__(self, projectables, *args, **kwargs):
    """Create the SAR QuickLook composite."""
    (mhh, mhv) = projectables

    blue = mhv / mhh
    blue.attrs = combine_metadata(mhh, mhv)

    return super(SARQuickLook, self).__call__((mhh, mhv, blue), *args, **kwargs)
def sub_arrays(proj1, proj2):
    """Subtract two DataArrays and combine their attrs."""
    attrs = combine_metadata(proj1.attrs, proj2.attrs)
    if (attrs.get('area') is None and
            proj1.attrs.get('area') is not None and
            proj2.attrs.get('area') is not None):
        raise IncompatibleAreas
    res = proj1 - proj2
    res.attrs = attrs
    return res
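A minimal usage sketch of the pattern above, with hypothetical band attributes (the `Meteosat-11`/`VIS006`/`VIS008` values are made up for illustration): plain xarray arithmetic produces a result with empty attrs, and `combine_metadata` keeps only the keys that are equal on both inputs.

import numpy as np
import xarray as xr
from satpy.dataset import combine_metadata

a = xr.DataArray(np.ones((2, 2)), dims=('y', 'x'),
                 attrs={'platform_name': 'Meteosat-11', 'units': '%', 'name': 'VIS006'})
b = xr.DataArray(np.zeros((2, 2)), dims=('y', 'x'),
                 attrs={'platform_name': 'Meteosat-11', 'units': '%', 'name': 'VIS008'})

diff = a - b                      # xarray arithmetic drops the attrs
diff.attrs = combine_metadata(a, b)
# 'platform_name' and 'units' survive because they are equal on both inputs;
# 'name' differs between the inputs, so it is not carried over.
print(diff.attrs)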
def __call__(self, projectables, **kwargs):
    day_data = projectables[0]
    night_data = projectables[1]

    lim_low = np.cos(np.deg2rad(self.lim_low))
    lim_high = np.cos(np.deg2rad(self.lim_high))
    try:
        coszen = xu.cos(xu.deg2rad(projectables[2]))
    except IndexError:
        from pyorbital.astronomy import cos_zen
        LOG.debug("Computing sun zenith angles.")
        # Get chunking that matches the data
        try:
            chunks = day_data.sel(bands=day_data['bands'][0]).chunks
        except KeyError:
            chunks = day_data.chunks
        lons, lats = day_data.attrs["area"].get_lonlats_dask(chunks)
        coszen = xr.DataArray(cos_zen(day_data.attrs["start_time"], lons, lats),
                              dims=['y', 'x'],
                              coords=[day_data['y'], day_data['x']])

    # Calculate blending weights
    coszen -= np.min((lim_high, lim_low))
    coszen /= np.abs(lim_low - lim_high)
    coszen = coszen.clip(0, 1)

    # Apply enhancements to get images
    day_data = enhance2dataset(day_data)
    night_data = enhance2dataset(night_data)

    # Adjust bands so that they match
    # L/RGB -> RGB/RGB
    # LA/RGB -> RGBA/RGBA
    # RGB/RGBA -> RGBA/RGBA
    day_data = add_bands(day_data, night_data['bands'])
    night_data = add_bands(night_data, day_data['bands'])

    # Get merged metadata
    attrs = combine_metadata(day_data, night_data)

    # Blend the two images together
    data = (1 - coszen) * night_data + coszen * day_data
    data.attrs = attrs

    # Split to separate bands so the mode is correct
    data = [data.sel(bands=b) for b in data['bands']]

    res = super(DayNightCompositor, self).__call__(data, **kwargs)

    return res
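For reference, a small standalone sketch of the blending-weight arithmetic used above, with hypothetical transition limits of 85 and 88 degrees solar zenith angle (the actual limits come from the compositor configuration): the cosine of the solar zenith angle is shifted and scaled so the weight runs from 1 on the day side to 0 on the night side.

import numpy as np

lim_low_deg, lim_high_deg = 85., 88.   # hypothetical day/night transition limits
lim_low = np.cos(np.deg2rad(lim_low_deg))
lim_high = np.cos(np.deg2rad(lim_high_deg))

sza = np.array([80., 86.5, 90.])       # sample solar zenith angles in degrees
coszen = np.cos(np.deg2rad(sza))

# Same arithmetic as in the compositor: 0 => pure night data, 1 => pure day data
weight = (coszen - min(lim_high, lim_low)) / abs(lim_low - lim_high)
weight = np.clip(weight, 0, 1)
print(weight)                          # roughly [1.0, 0.5, 0.0]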
def combine_info(self, all_infos):
    """Combine metadata for multiple datasets.

    When loading data from multiple files it can be non-trivial to combine
    things like start_time, end_time, start_orbit, end_orbit, etc.

    By default this method will produce a dictionary containing all values
    that were equal across **all** provided info dictionaries.

    Additionally it performs the logical comparisons to produce the
    following if they exist:

     - start_time
     - end_time
     - start_orbit
     - end_orbit
     - satellite_altitude
     - satellite_latitude
     - satellite_longitude

    Also, concatenate the areas.

    """
    combined_info = combine_metadata(*all_infos)

    new_dict = self._combine(all_infos, min, 'start_time', 'start_orbit')
    new_dict.update(self._combine(all_infos, max, 'end_time', 'end_orbit'))
    new_dict.update(self._combine(all_infos, np.mean,
                                  'satellite_longitude',
                                  'satellite_latitude',
                                  'satellite_altitude'))

    try:
        area = SwathDefinition(lons=np.ma.vstack([info['area'].lons for info in all_infos]),
                               lats=np.ma.vstack([info['area'].lats for info in all_infos]))
        area.name = '_'.join([info['area'].name for info in all_infos])
        combined_info['area'] = area
    except KeyError:
        pass

    new_dict.update(combined_info)
    return new_dict
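The `self._combine` helper is not shown in the snippet above. A hypothetical sketch of what such a helper could look like, consistent with how it is called (apply the given reduction to each requested key that is present in every info dictionary):

from datetime import datetime

def _combine(infos, func, *keys):
    # Hypothetical sketch of the helper used above: apply `func` across the
    # values of each key that appears in every info dict.
    combined = {}
    for key in keys:
        values = [info[key] for info in infos if key in info]
        if len(values) == len(infos):
            combined[key] = func(values)
    return combined

infos = [{'start_time': datetime(2018, 1, 1, 12, 0), 'start_orbit': 101},
         {'start_time': datetime(2018, 1, 1, 12, 5), 'start_orbit': 102}]
print(_combine(infos, min, 'start_time', 'start_orbit'))
# {'start_time': datetime(2018, 1, 1, 12, 0), 'start_orbit': 101}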
def __call__(self, projectables, *args, **kwargs):
    """Call the compositor."""
    projectables = self.match_data_arrays(projectables)

    # Get enhanced datasets
    foreground = enhance2dataset(projectables[0])
    background = enhance2dataset(projectables[1])

    # Adjust bands so that they match
    # L/RGB -> RGB/RGB
    # LA/RGB -> RGBA/RGBA
    # RGB/RGBA -> RGBA/RGBA
    foreground = add_bands(foreground, background['bands'])
    background = add_bands(background, foreground['bands'])

    # Get merged metadata
    attrs = combine_metadata(foreground, background)
    if attrs.get('sensor') is None:
        # sensor can be a set
        attrs['sensor'] = self._get_sensors(projectables)

    # Stack the images
    if 'A' in foreground.attrs['mode']:
        # Use alpha channel as weight and blend the two composites
        alpha = foreground.sel(bands='A')
        data = []
        # NOTE: there's no alpha band in the output image, it will
        # be added by the data writer
        for band in foreground.attrs['mode'][:-1]:
            fg_band = foreground.sel(bands=band)
            bg_band = background.sel(bands=band)
            chan = (fg_band * alpha + bg_band * (1 - alpha))
            chan = xr.where(chan.isnull(), bg_band, chan)
            data.append(chan)
    else:
        data = xr.where(foreground.isnull(), background, foreground)
        # Split to separate bands so the mode is correct
        data = [data.sel(bands=b) for b in data['bands']]

    res = super(BackgroundCompositor, self).__call__(data, **kwargs)
    res.attrs.update(attrs)
    return res
def __call__(self, projectables, nonprojectables=None, **info):
    """Generate a SnowAge RGB composite.

    The algorithm and the product are described in this presentation:
    http://www.ssec.wisc.edu/meetings/cspp/2015/Agenda%20PDF/Wednesday/Roquet_snow_product_cspp2015.pdf
    For further information you may contact
    Bernard Bellec at [email protected]
    or
    Pascale Roquet at [email protected]

    """
    if len(projectables) != 5:
        raise ValueError("Expected 5 datasets, got %d" % (len(projectables), ))

    # Collect information that is the same between the projectables
    info = combine_metadata(*projectables)
    # Update that information with configured information (including name)
    info.update(self.attrs)
    # Force certain pieces of metadata that we *know* to be true
    info["wavelength"] = None

    m07 = projectables[0] * 255. / 160.
    m08 = projectables[1] * 255. / 160.
    m09 = projectables[2] * 255. / 160.
    m10 = projectables[3] * 255. / 160.
    m11 = projectables[4] * 255. / 160.

    refcu = m11 - m10
    refcu = refcu.clip(min=0)

    ch1 = m07 - refcu / 2. - m09 / 4.
    ch2 = m08 + refcu / 4. + m09 / 4.
    ch3 = m11 + m09

    # GenericCompositor needs valid DataArrays with 'area' metadata
    ch1.attrs = info
    ch2.attrs = info
    ch3.attrs = info

    return super(SnowAge, self).__call__([ch1, ch2, ch3], **info)
def __call__(self, projectables, nonprojectables=None, **attrs):
    """Build the composite."""
    num = len(projectables)
    mode = attrs.get('mode')
    if mode is None:
        # num may not be in `self.modes` so only check if we need to
        mode = self.modes[num]
    if len(projectables) > 1:
        data = self._concat_datasets(projectables, mode)
    else:
        data = projectables[0]

    # if inputs have a time coordinate that may differ slightly between
    # themselves then find the mid time and use that as the single
    # time coordinate value
    if len(projectables) > 1:
        time = check_times(projectables)
        if time is not None and 'time' in data.dims:
            data['time'] = [time]

    new_attrs = combine_metadata(*projectables)
    # remove metadata that shouldn't make sense in a composite
    new_attrs["wavelength"] = None
    new_attrs.pop("units", None)
    new_attrs.pop('calibration', None)
    new_attrs.pop('modifiers', None)

    new_attrs.update({key: val
                      for (key, val) in attrs.items()
                      if val is not None})
    new_attrs.update(self.attrs)
    new_attrs["sensor"] = self._get_sensors(projectables)
    new_attrs["mode"] = mode

    return xr.DataArray(data=data.data, attrs=new_attrs,
                        dims=data.dims, coords=data.coords)
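For context, the `self.modes` lookup above maps the number of input bands to an image mode. A sketch of such a mapping, assuming the usual single-band, luminance+alpha, RGB and RGBA modes:

# Assumed mapping from number of input bands to image mode,
# consistent with how `self.modes[num]` is used above.
modes = {1: 'L', 2: 'LA', 3: 'RGB', 4: 'RGBA'}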
def __call__(self, datasets, optional_datasets=None, **info):
    if len(datasets) != 3:
        raise ValueError("Expected 3 datasets, got %d" % (len(datasets), ))
    if not all(x.shape == datasets[0].shape for x in datasets[1:]) or \
            (optional_datasets and
             optional_datasets[0].shape != datasets[0].shape):
        raise IncompatibleAreas('RatioSharpening requires datasets of '
                                'the same size. Must resample first.')

    new_attrs = {}
    if optional_datasets:
        datasets = self.check_areas(datasets + optional_datasets)
        high_res = datasets[-1]
        p1, p2, p3 = datasets[:3]
        if 'rows_per_scan' in high_res.attrs:
            new_attrs.setdefault('rows_per_scan', high_res.attrs['rows_per_scan'])
        new_attrs.setdefault('resolution', high_res.attrs['resolution'])
        if self.high_resolution_band == "red":
            LOG.debug("Sharpening image with high resolution red band")
            ratio = high_res / p1
            # make ratio a no-op (multiply by 1) where the ratio is NaN or
            # infinity or it is negative.
            ratio = ratio.where(xu.isfinite(ratio) & (ratio >= 0), 1.)
            r = high_res
            g = p2 * ratio
            b = p3 * ratio
            g.attrs = p2.attrs.copy()
            b.attrs = p3.attrs.copy()
        elif self.high_resolution_band == "green":
            LOG.debug("Sharpening image with high resolution green band")
            ratio = high_res / p2
            ratio = ratio.where(xu.isfinite(ratio) & (ratio >= 0), 1.)
            r = p1 * ratio
            g = high_res
            b = p3 * ratio
            r.attrs = p1.attrs.copy()
            b.attrs = p3.attrs.copy()
        elif self.high_resolution_band == "blue":
            LOG.debug("Sharpening image with high resolution blue band")
            ratio = high_res / p3
            ratio = ratio.where(xu.isfinite(ratio) & (ratio >= 0), 1.)
            r = p1 * ratio
            g = p2 * ratio
            b = high_res
            r.attrs = p1.attrs.copy()
            g.attrs = p2.attrs.copy()
        else:
            # no sharpening
            r = p1
            g = p2
            b = p3
    else:
        datasets = self.check_areas(datasets)
        r, g, b = datasets[:3]

    # combine the masks
    mask = ~(da.isnull(r.data) | da.isnull(g.data) | da.isnull(b.data))
    r = r.where(mask)
    g = g.where(mask)
    b = b.where(mask)

    # Collect information that is the same between the projectables
    # we want to use the metadata from the original datasets since the
    # new r, g, b arrays may have lost their metadata during calculations
    info = combine_metadata(*datasets)
    info.update(new_attrs)
    # Update that information with configured information (including name)
    info.update(self.attrs)
    # Force certain pieces of metadata that we *know* to be true
    info.setdefault("standard_name", "true_color")
    return super(RatioSharpenedRGB, self).__call__((r, g, b), **info)
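As a numerical illustration of the ratio-sharpening idea used above (hypothetical reflectance values, not tied to any instrument): the low-resolution band matching the high-resolution one is replaced outright, and the remaining bands are scaled by the same ratio so that the band ratios of the original RGB are preserved.

# Hypothetical low-resolution reflectances (%) for one pixel
red_lo, green_lo, blue_lo = 20., 30., 40.
# Hypothetical high-resolution red reflectance at the same location
red_hi = 25.

ratio = red_hi / red_lo                       # 1.25
red, green, blue = red_hi, green_lo * ratio, blue_lo * ratio
print(red, green, blue)                       # 25.0 37.5 50.0
# green/red and blue/red stay at 1.5 and 2.0, as in the original pixel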
def combine_info(self, all_infos):
    """Combine metadata for multiple datasets.

    When loading data from multiple files it can be non-trivial to combine
    things like start_time, end_time, start_orbit, end_orbit, etc.

    By default this method will produce a dictionary containing all values
    that were equal across **all** provided info dictionaries.

    Additionally it performs the logical comparisons to produce the
    following if they exist:

     - start_time
     - end_time
     - start_orbit
     - end_orbit
     - satellite_altitude
     - satellite_latitude
     - satellite_longitude
     - orbital_parameters

    Also, concatenate the areas.

    """
    combined_info = combine_metadata(*all_infos)

    new_dict = self._combine(all_infos, min, 'start_time', 'start_orbit')
    new_dict.update(self._combine(all_infos, max, 'end_time', 'end_orbit'))
    new_dict.update(self._combine(all_infos, np.mean,
                                  'satellite_longitude',
                                  'satellite_latitude',
                                  'satellite_altitude'))

    # Average orbital parameters
    orb_params = [info.get('orbital_parameters', {}) for info in all_infos]
    if all(orb_params):
        # Collect all available keys
        orb_params_comb = {}
        for d in orb_params:
            orb_params_comb.update(d)

        # Average known keys
        keys = ['projection_longitude', 'projection_latitude', 'projection_altitude',
                'satellite_nominal_longitude', 'satellite_nominal_latitude',
                'satellite_actual_longitude', 'satellite_actual_latitude',
                'satellite_actual_altitude', 'nadir_longitude', 'nadir_latitude']
        orb_params_comb.update(self._combine(orb_params, np.mean, *keys))
        new_dict['orbital_parameters'] = orb_params_comb

    try:
        area = SwathDefinition(lons=np.ma.vstack([info['area'].lons for info in all_infos]),
                               lats=np.ma.vstack([info['area'].lats for info in all_infos]))
        area.name = '_'.join([info['area'].name for info in all_infos])
        combined_info['area'] = area
    except KeyError:
        pass

    new_dict.update(combined_info)
    return new_dict
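A small illustration of the orbital-parameter merge above, with made-up values: keys from all per-file dicts are collected, and the known positional keys are then averaged across files (mirroring the `self._combine(..., np.mean, ...)` call).

import numpy as np

orb_params = [{'projection_longitude': 0.0, 'satellite_actual_longitude': 0.25},
              {'projection_longitude': 0.0, 'satellite_actual_longitude': 0.75}]

orb_params_comb = {}
for d in orb_params:
    orb_params_comb.update(d)            # collect all available keys

# average the keys present in every dict
for key in ('projection_longitude', 'satellite_actual_longitude'):
    orb_params_comb[key] = np.mean([d[key] for d in orb_params])
print(orb_params_comb)
# {'projection_longitude': 0.0, 'satellite_actual_longitude': 0.5}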
def msg1NDVI(dateSnap, avail_times, fldrs):
    """
    What does this function do?
    This definition/function is meant for computing NDVI from SEVIRI data

    Ref: https://nbviewer.jupyter.org/github/pytroll/pytroll-examples/blob/master/satpy/hrit_msg_tutorial.ipynb

    :param dateSnap:
    :param avail_times:
    :param fldrs:
    :return: NDVI
    """
    # Start the logic
    import os, sys, glob
    from satpy.utils import debug_on
    from satpy.scene import Scene
    from satpy.dataset import combine_metadata
    from datetime import datetime
    from myDefinitions import nc_write_sat_level_2, embellish, imResize

    debug_on()
    print("\n \t \t \t STARTING THE msg1NDVI run @ time: %s \t \t \t \n \n" % str(datetime.now()))
    print("\n.Processing Date set is: %s" % dateSnap)

    # Test whether all data folders are appropriately set or not.
    basDir, datDir, outDir, logDir, webDir, geoTdir, GSHHS_ROOT = fldrs
    print("\n.Base directory is set to: %s" % basDir)
    print("\n.Data directory is set to %s" % datDir)
    print("\n.NetCDF output directory is set to: %s" % outDir)
    print("\n.Log directory is set to: %s" % logDir)
    print("\n.Web directory is set to: %s" % webDir)
    print("\n.GeoTiff directory is set to: %s" % geoTdir)

    avail_times = str(avail_times).split()
    for tt in avail_times:
        # Start for-loop-1
        print("..Started processing for time: %s" % tt)
        files = glob.glob(datDir + 'H-000-MSG1*' + dateSnap + tt + '-*')
        print(">>>>>>>>>>> Testing 123: <<<<<<<<<<<<<<<\n")
        print(files)

        # Start reading filename in satpy
        scn = Scene(filenames=files, reader='hrit_msg')

        # start the NDVI computation
        scn.load(['VIS006', 0.6])
        scn.load(['VIS008', 0.8])
        ndvi = (scn[0.8] - scn[0.6]) / (scn[0.8] + scn[0.6])
        ndvi.attrs = combine_metadata(scn[0.6], scn[0.8])
        scn['ndvi'] = ndvi
        composite = 'ndvi'
        prodStr = 'ndvi'
        capStr = 'NDVI'

        # resample the data to Indian region
        indScn = scn.resample('IndiaSC')

        # save the data
        # Save as netCDF data ---- TO BE IMPLEMENTED ----
        outImgStr1 = outDir + 'ind_MSG-1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.nc'
        nc_write_sat_level_2(indScn, outImgStr1, prodStr)

        # Save as Full Resolution GeoTIFF files
        outImgStr2 = geoTdir + 'ind_MSG-1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.tiff'
        indScn.save_dataset(composite, filename=outImgStr2, writer='geotiff')
        # Add graphics
        # img2 = embellish(basDir, GSHHS_ROOT, outImgStr2, capStr, dateSnap, tt)
        # img2.save(outImgStr2)

        # Save the data as resized png files
        outImgStr3 = webDir + 'ind_MSG1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.png'
        indScn.save_dataset(composite, filename=outImgStr3, writer="simple_image")
        outImgStr3 = imResize(outImgStr3)

        # Add graphics
        img3 = embellish(basDir, GSHHS_ROOT, outImgStr3, capStr, dateSnap, tt)
        img3.save(outImgStr3)

        print("msg1NDVI() says: Finished with processing of time-slot - %s - at: %s " % (tt, str(datetime.now())))
def load_data(self):
    self.scene.load(["M15", "M16"])
    self.scene["btd"] = self.scene["M15"] - self.scene["M16"]
    self.scene["btd"].attrs = combine_metadata(self.scene["M15"],
                                               self.scene["M16"])
def msg1NDVI(dateSnap, avail_times, fldrs):
    """
    What does this function do?
    This definition/function is meant for computing NDVI from SEVIRI data

    Ref: https://nbviewer.jupyter.org/github/pytroll/pytroll-examples/blob/master/satpy/hrit_msg_tutorial.ipynb

    :param dateSnap:
    :param avail_times:
    :param fldrs:
    :return: NDVI
    """
    # Start the logic
    import os, sys, glob
    # from satpy.utils import debug_on
    from satpy.scene import Scene
    from satpy.dataset import combine_metadata
    from datetime import datetime
    from trollimage.colormap import greys, greens
    from trollimage.image import Image
    from myDefinitions import nc_write_sat_level_2, embellish, imResize

    # debug_on()
    print("\n \t \t \t STARTING THE msg1NDVI run @ time: %s \t \t \t \n \n" % str(datetime.now()))
    print("\n.Processing Date set is: %s" % dateSnap)

    # Test whether all data folders are appropriately set or not.
    basDir, datDir, outDir, logDir, webDir, geoTdir, msg1Src, exeDir, GSHHS_ROOT, tmpDir = fldrs
    print("\n.Base directory is set to: %s" % basDir)
    print("\n.Data directory is set to %s" % datDir)
    print("\n.NetCDF output directory is set to: %s" % outDir)
    print("\n.Log directory is set to: %s" % logDir)
    print("\n.Web directory is set to: %s" % webDir)
    print("\n.GeoTiff directory is set to: %s" % geoTdir)
    print("\n.msg1Src directory is set to: %s" % msg1Src)
    print("\n.exeDir directory is set to: %s" % exeDir)
    print("\n.GSHHS directory is set to: %s" % GSHHS_ROOT)
    print("\n.tmpDir directory is set to: %s" % tmpDir)

    avail_times = str(avail_times).split()
    for tt in avail_times:
        # Start for-loop-1
        print("..Started processing for time: %s" % tt)
        searchStr = datDir + 'H-000-MSG1*' + dateSnap + tt + '-*'
        print("\n \t \t Testing 123: \n \n ")
        print(searchStr)
        files = glob.glob(searchStr)
        # print("\n Testing 123: \n")
        # print(files)

        # Start reading filename in satpy
        scn = Scene(filenames=files, reader='hrit_msg')

        # start the NDVI computation
        scn.load(['VIS006', 0.6])
        scn.load(['VIS008', 0.8])
        ndvi = (scn[0.8] - scn[0.6]) / (scn[0.8] + scn[0.6])
        ndvi.attrs = combine_metadata(scn[0.6], scn[0.8])
        scn['ndvi'] = ndvi
        composite = 'ndvi'
        prodStr = 'NDVI'
        capStr = 'NDVI'

        # resample the data to Indian region
        indScn = scn.resample('India_SC')

        # save the data
        # # Save as netCDF data ---- TO BE IMPLEMENTED ----
        # outImgStr1 = outDir + 'ind_MSG-1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.nc'
        # nc_write_sat_level_2(indScn, outImgStr1, prodStr)

        # # Save as Full Resolution GeoTIFF files
        # outImgStr2 = geoTdir + 'ind_MSG-1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.tiff'
        # indScn.save_dataset(composite, filename=outImgStr2, writer='geotiff')
        # # Add graphics
        # # img2 = embellish(basDir, GSHHS_ROOT, outImgStr2, capStr, dateSnap, tt)
        # # img2.save(outImgStr2)

        # Save the data as resized png files
        outImgStr3 = tmpDir + 'ind_MSG1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.png'
        outImgStr3w = webDir + 'ind_MSG1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.png'

        # Apply color palette from trollimage
        ndvi_data = indScn['ndvi'].compute().data
        ndvi_img = Image(ndvi_data, mode="L")
        # greys.set_range(ndvi_data.min(), -0.00001)
        # greens.set_range(0, ndvi_data.max())
        greys.set_range(-0.8, -0.00001)
        greens.set_range(0, 0.8)
        my_cm = greys + greens
        ndvi_img.colorize(my_cm)
        ndvi_img.save(outImgStr3)
        # indScn.save_dataset(composite, filename=outImgStr3, writer="simple_image")
        outImgStr3 = imResize(outImgStr3)

        # Add graphics
        img3 = embellish(basDir, GSHHS_ROOT, outImgStr3, capStr, dateSnap, tt)
        img3.save(outImgStr3)

        # move the tmp files to proper web area
        mv2WebCmd = 'mv ' + outImgStr3 + ' ' + outImgStr3w
        os.system(mv2WebCmd)

        print("msg1NDVI() says: Finished with processing of time-slot - %s - at: %s " % (tt, str(datetime.now())))