def resample(self, destination_area, **kwargs):
    """Project this projectable onto *destination_area* and return the result.

    Args:
        destination_area: Target of the projection; either a full area
            definition object or the name of an area from the area file.
        **kwargs: Extra parameters forwarded to the resampling functions.

    Returns:
        A resampled projectable whose ``info["area"]`` is the destination.
    """
    # Imported here (not at module level) to avoid circular imports;
    # this is just a convenience wrapper anyway.
    from satpy.resample import resample, get_area_def

    def _as_area(area):
        # Resolve area names to full definitions; pass objects through.
        if isinstance(area, (str, six.text_type)):
            return get_area_def(area)
        return area

    src_area = _as_area(self.info["area"])
    destination_area = _as_area(destination_area)
    # Bands-first arrays are moved to bands-last for the resampler.
    payload = np.rollaxis(self, 0, 3) if self.ndim == 3 else self
    projected = resample(src_area, payload, destination_area, **kwargs)
    if projected.ndim == 3:
        # Restore the bands-first layout.
        projected = np.rollaxis(projected, 2)
    # FIXME: is this necessary with the ndarray subclass ?
    out = Projectable(projected, **self.info)
    out.info["area"] = destination_area
    return out
def resample(self, destination_area, **kwargs):
    """Project this projectable to *destination_area*.

    Args:
        destination_area: Either an area definition or the name of an
            area defined in the area file.
        **kwargs: Extra parameters handed over to the resampling functions.

    Returns:
        A resampled projectable with ``info["area"]`` set to the destination.
    """
    # Local import keeps module import light and avoids circular imports.
    from satpy.resample import resample, get_area_def
    source_area = self.info["area"]
    # Names are resolved to full area definitions up front.
    if isinstance(source_area, (str, six.text_type)):
        source_area = get_area_def(source_area)
    if isinstance(destination_area, (str, six.text_type)):
        destination_area = get_area_def(destination_area)
    bands_first = self.ndim == 3
    # The resampler expects band-last arrays.
    to_resample = np.rollaxis(self, 0, 3) if bands_first else self
    resampled = resample(source_area, to_resample, destination_area, **kwargs)
    if resampled.ndim == 3:
        resampled = np.rollaxis(resampled, 2)
    # FIXME: is this necessary with the ndarray subclass ?
    result = Projectable(resampled, **self.info)
    result.info["area"] = destination_area
    return result
def get_region(areaid):
    """Look up and return the area definition named *areaid*."""
    try:
        # satpy is the preferred provider of area definitions.
        from satpy.resample import get_area_def
    except ImportError:
        # Fall back to the legacy mpop implementation.
        from mpop.projector import get_area_def
    lookup = get_area_def
    return lookup(areaid)
def get_region(areaid):
    """Return the area definition registered under *areaid*.

    satpy's area registry is preferred; the legacy mpop projector is
    used as a fallback when satpy is unavailable.
    """
    try:
        from satpy.resample import get_area_def
    except ImportError:
        from mpop.projector import get_area_def
    return get_area_def(areaid)
def add_overlay(orig, area, coast_dir, color=(0, 0, 0), width=0.5, resolution=None):
    """Add coastline and political borders to image, using *color* (tuple
    of integers between 0 and 255). Warning: Loses the masks !

    *resolution* is chosen automatically if None (default), otherwise it
    should be one of:

    +-----+-------------------------+---------+
    | 'f' | Full resolution         | 0.04 km |
    | 'h' | High resolution         | 0.2 km  |
    | 'i' | Intermediate resolution | 1.0 km  |
    | 'l' | Low resolution          | 5.0 km  |
    | 'c' | Crude resolution        | 25 km   |
    +-----+-------------------------+---------+
    """
    if area is None:
        raise ValueError("Area of image is None, can't add overlay.")
    # Render the PIL image only after the area has been validated.
    img = orig.pil_image()
    from satpy.resample import get_area_def
    if isinstance(area, str):
        area = get_area_def(area)
    LOG.info("Add coastlines and political borders to image.")
    if resolution is None:
        # Pick the coastline resolution from the pixel size in
        # projection units.
        x_resolution = ((area.area_extent[2] - area.area_extent[0]) /
                        area.x_size)
        y_resolution = ((area.area_extent[3] - area.area_extent[1]) /
                        area.y_size)
        res = min(x_resolution, y_resolution)
        if res > 25000:
            resolution = "c"
        elif res > 5000:
            resolution = "l"
        elif res > 1000:
            resolution = "i"
        elif res > 200:
            resolution = "h"
        else:
            resolution = "f"
        LOG.debug("Automagically choose resolution " + resolution)
    from pycoast import ContourWriterAGG
    cw_ = ContourWriterAGG(coast_dir)
    cw_.add_coastlines(img, area, outline=color, resolution=resolution,
                       width=width)
    cw_.add_borders(img, area, outline=color, resolution=resolution,
                    width=width)
    arr = np.array(img)
    if len(orig.channels) == 1:
        # BUG FIX: was "orgi.channels[0]" — a typo that raised NameError
        # whenever a single-channel image was processed.
        orig.channels[0] = np.ma.array(arr[:, :] / 255.0)
    else:
        for idx in range(len(orig.channels)):
            orig.channels[idx] = np.ma.array(arr[:, :, idx] / 255.0)
def _get_area_def_from_name(area_name, input_scene, grid_manager, custom_areas):
    """Resolve *area_name* to an area definition.

    Handles the special names "MAX"/"MIN", user-supplied custom areas,
    grid-manager definitions (freezing dynamic ones when a cell size is
    given) and, as a last resort, satpy's builtin areas.  ``None`` means
    no resampling.
    """
    if area_name is None:
        return None  # no resampling requested
    if area_name == "MAX":
        return input_scene.max_area()
    if area_name == "MIN":
        return input_scene.min_area()
    if area_name in custom_areas:
        return custom_areas[area_name]
    if area_name in grid_manager:
        p2g_def = grid_manager[area_name]
        area_def = p2g_def.to_satpy_area()
        is_dynamic = isinstance(area_def, DynamicAreaDefinition)
        if is_dynamic and p2g_def["cell_width"] is not None:
            logger.info("Computing dynamic grid parameters...")
            cell_res = (abs(p2g_def["cell_width"]),
                        abs(p2g_def["cell_height"]))
            area_def = area_def.freeze(input_scene.max_area(),
                                       resolution=cell_res)
            logger.debug("Frozen dynamic area: %s", area_def)
        return area_def
    return get_area_def(area_name)
def _construct_area_def(self, dataset_id):
    """Construct the area definition.

    Returns:
        AreaDefinition: A pyresample AreaDefinition object containing
        the area definition.

    Raises:
        NotImplementedError: If the standardized area's size does not
            match the file's ``ncols``/``nlines``.
    """
    res = dataset_id.resolution
    area_naming_input_dict = {'platform_name': 'mtg',
                              'instrument_name': 'fci',
                              'resolution': res,
                              }
    area_naming = get_geos_area_naming({**area_naming_input_dict,
                                        **get_service_mode('fci', self.ssp_lon)})
    # Construct area definition from standardized area definition.
    stand_area_def = get_area_def(area_naming['area_id'])
    # Idiom fix: use short-circuiting "or" instead of bitwise "|" for a
    # boolean disjunction (same result for bools, clearer intent).
    if (stand_area_def.x_size != self.ncols) or (stand_area_def.y_size != self.nlines):
        raise NotImplementedError('Unrecognised AreaDefinition.')
    mod_area_extent = self._modify_area_extent(stand_area_def.area_extent)
    area_def = geometry.AreaDefinition(
        stand_area_def.area_id,
        stand_area_def.description,
        "",
        stand_area_def.proj_dict,
        stand_area_def.x_size,
        stand_area_def.y_size,
        mod_area_extent)
    return area_def
def _construct_area_def(self, dataset_id):
    """Construct a standardized AreaDefinition based on satellite,
    instrument, resolution and sub-satellite point.

    Returns:
        AreaDefinition: A pyresample AreaDefinition object containing
        the area definition.
    """
    naming_input = {
        'platform_name': 'msg',
        'instrument_name': 'seviri',
        'resolution': dataset_id['resolution'],
    }
    naming_input.update(get_service_mode('seviri', self.ssp_lon))
    area_naming = get_geos_area_naming(naming_input)
    # Datasets with a segment size of 3 pixels extend outside the
    # original SEVIRI 3km grid (with 1238 x 1238 segments a 3 pixels).
    # Hence, we need to use corresponding area definitions in areas.yaml.
    if self.seg_size == 3:
        area_naming['area_id'] += '_ext'
        area_naming['description'] += ' (extended outside original 3km grid)'
    # Look up the standardized area definition from areas.yaml.
    return get_area_def(area_naming['area_id'])
def add_overlay(orig, area, coast_dir, color=(0, 0, 0), width=0.5, resolution=None,
                level_coast=1, level_borders=1):
    """Add coastline and political borders to image, using *color* (tuple
    of integers between 0 and 255). Warning: Loses the masks !

    *resolution* is chosen automatically if None (default), otherwise it
    should be one of:

    +-----+-------------------------+---------+
    | 'f' | Full resolution         | 0.04 km |
    | 'h' | High resolution         | 0.2 km  |
    | 'i' | Intermediate resolution | 1.0 km  |
    | 'l' | Low resolution          | 5.0 km  |
    | 'c' | Crude resolution        | 25 km   |
    +-----+-------------------------+---------+
    """
    if area is None:
        raise ValueError("Area of image is None, can't add overlay.")
    from pycoast import ContourWriterAGG
    if isinstance(area, str):
        area = get_area_def(area)
    LOG.info("Add coastlines and political borders to image.")
    if resolution is None:
        # Derive the pixel size (projection units) and pick the coarsest
        # coastline resolution that is still finer than the data.
        xres = (area.area_extent[2] - area.area_extent[0]) / area.x_size
        yres = (area.area_extent[3] - area.area_extent[1]) / area.y_size
        finest = min(xres, yres)
        resolution = "f"
        for limit, code in ((25000, "c"), (5000, "l"), (1000, "i"), (200, "h")):
            if finest > limit:
                resolution = code
                break
        LOG.debug("Automagically choose resolution %s", resolution)
    img = orig.pil_image()
    writer = ContourWriterAGG(coast_dir)
    writer.add_coastlines(img, area, outline=color, resolution=resolution,
                          width=width, level=level_coast)
    writer.add_borders(img, area, outline=color, resolution=resolution,
                       width=width, level=level_borders)
    arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE)
    orig.data = xr.DataArray(arr, dims=['y', 'x', 'bands'],
                             coords={'y': orig.data.coords['y'],
                                     'x': orig.data.coords['x'],
                                     'bands': list(img.mode)})
def _resampled_scene(self, new_scn, destination_area, **resample_kwargs):
    """Resample `datasets` to the `destination` area.

    Walks all datasets (including ancillary ones) of *new_scn*, reduces
    each to the slice overlapping the destination when possible, and
    resamples it.  Resamplers are cached per source area so identical
    geometries are only prepared once.
    """
    new_datasets = {}
    datasets = list(new_scn.datasets.values())
    max_area = None
    # Area names are resolved to full definitions first.
    if isinstance(destination_area, (str, six.text_type)):
        destination_area = get_area_def(destination_area)
    if hasattr(destination_area, 'freeze'):
        # Dynamic areas need a concrete extent before resampling.
        try:
            max_area = new_scn.max_area()
            destination_area = destination_area.freeze(max_area)
        except ValueError:
            raise ValueError("No dataset areas available to freeze "
                             "DynamicAreaDefinition.")
    resamplers = {}
    for dataset, parent_dataset in dataset_walker(datasets):
        ds_id = DatasetID.from_dict(dataset.attrs)
        pres = None
        if parent_dataset is not None:
            pres = new_datasets[DatasetID.from_dict(parent_dataset.attrs)]
        if ds_id in new_datasets:
            # Already resampled (e.g. shared ancillary dataset): just
            # re-link it to the resampled parent.
            replace_anc(dataset, pres)
            continue
        if dataset.attrs.get('area') is None:
            # No geolocation: keep the dataset untouched.
            if parent_dataset is None:
                new_scn.datasets[ds_id] = dataset
            else:
                replace_anc(dataset, pres)
            continue
        LOG.debug("Resampling %s", ds_id)
        source_area = dataset.attrs['area']
        try:
            # Reduce the source to the slice covering the destination
            # before resampling, when the geometry supports it.
            slice_x, slice_y = source_area.get_area_slices(
                destination_area)
            source_area = source_area[slice_y, slice_x]
            dataset = dataset.isel(x=slice_x, y=slice_y)
            assert ('x', source_area.x_size) in dataset.sizes.items()
            assert ('y', source_area.y_size) in dataset.sizes.items()
            dataset.attrs['area'] = source_area
        except NotImplementedError:
            LOG.info("Not reducing data before resampling.")
        if source_area not in resamplers:
            # Prepare one resampler per distinct source geometry and
            # remember it on the Scene for future reuse.
            key, resampler = prepare_resampler(
                source_area, destination_area, **resample_kwargs)
            resamplers[source_area] = resampler
            self.resamplers[key] = resampler
        kwargs = resample_kwargs.copy()
        kwargs['resampler'] = resamplers[source_area]
        res = resample_dataset(dataset, destination_area, **kwargs)
        new_datasets[ds_id] = res
        if parent_dataset is None:
            new_scn.datasets[ds_id] = res
        else:
            replace_anc(res, pres)
def check_file_covers_area(file_handler, check_area):
    """Check whether the file's data intersects *check_area*.

    Returns True when the file overlaps the area, and also when the
    file handler cannot compute a bounding box at all (in that case the
    file cannot be ruled out).
    """
    try:
        file_boundary = Boundary(*file_handler.get_bounding_box())
    except NotImplementedError as err:
        logger.debug("Bounding box computation not implemented: %s",
                     str(err))
        return True
    area_boundary = AreaDefBoundary(get_area_def(check_area), frequency=1000)
    overlap = file_boundary.contour_poly.intersection(area_boundary.contour_poly)
    return bool(overlap)
def covers(overpass, area_name, min_coverage, logger):
    """Return True when *overpass* covers more than *min_coverage* percent
    of the named area.

    A zero threshold, a missing overpass, or any failure to compute the
    coverage all count as covered.
    """
    try:
        area_def = get_area_def(area_name)
        if overpass is None or min_coverage == 0:
            return True
        threshold = min_coverage / 100.0
        fraction = overpass.area_coverage(area_def)
        if fraction <= threshold:
            logger.info("Coverage too small %.1f%% (out of %.1f%%) "
                        "with %s", fraction * 100, threshold * 100,
                        area_name)
            return False
        logger.info("Coverage %.1f%% with %s", fraction * 100, area_name)
    except AttributeError:
        logger.warning("Can't compute area coverage with %s!", area_name)
    return True
def check_file_covers_area(file_handler, check_area):
    """Return False only when the file is known not to overlap *check_area*.

    When the handler cannot compute a bounding box the file is kept
    (the function returns True).
    """
    try:
        granule_boundary = Boundary(*file_handler.get_bounding_box())
    except NotImplementedError as err:
        logger.debug("Bounding box computation not implemented: %s",
                     str(err))
        return True
    area_boundary = AreaDefBoundary(get_area_def(check_area), frequency=1000)
    if not granule_boundary.contour_poly.intersection(area_boundary.contour_poly):
        return False
    return True
def __init__(self, name, filename=None, area=None, **kwargs):
    """Collect custom configuration values.

    Args:
        filename (str): Filename of the image to load; environment
            variables are expanded.
        area (str): Name of area definition for the image. Optional for
            images with built-in area definitions (geotiff).
    """
    if filename is None:
        raise ValueError("No image configured for static image compositor")
    self.filename = os.path.expandvars(filename)
    if area is None:
        self.area = None
    else:
        # Lazy import, as done for the other area lookups in this code base.
        from satpy.resample import get_area_def
        self.area = get_area_def(area)
    super(StaticImageCompositor, self).__init__(name, **kwargs)
def check_file_covers_area(file_handler, check_area):
    """Check whether the file overlaps *check_area*.

    Files whose handlers cannot compute a bounding box are assumed to
    cover the area (the check returns True).
    """
    from trollsched.boundary import AreaDefBoundary, Boundary
    from satpy.resample import get_area_def
    try:
        file_boundary = Boundary(*file_handler.get_bounding_box())
    except NotImplementedError:
        # No bounding box available -> cannot exclude the file.
        return True
    area_poly = AreaDefBoundary(get_area_def(check_area),
                                frequency=1000).contour_poly
    return bool(file_boundary.contour_poly.intersection(area_poly))
def __init__(self, name, filename=None, url=None, known_hash=None, area=None, **kwargs):
    """Collect custom configuration values.

    Args:
        filename (str): Name to use when storing and referring to the file
            in the ``data_dir`` cache. If ``url`` is provided (preferred),
            then this is used as the filename in the cache and will be
            appended to ``<data_dir>/composites/<class_name>/``. If ``url``
            is provided and ``filename`` is not then the ``filename`` will
            be guessed from the ``url``. If ``url`` is not provided, then
            it is assumed ``filename`` refers to a local file with an
            absolute path. Environment variables are expanded.
        url (str): URL to remote file. When the composite is created the
            file will be downloaded and cached in Satpy's ``data_dir``.
            Environment variables are expanded.
        known_hash (str or None): Hash of the remote file used to verify a
            successful download. If not provided then the download will
            not be verified. See :func:`satpy.aux_download.register_file`
            for more information.
        area (str): Name of area definition for the image. Optional for
            images with built-in area definitions (geotiff).

    """
    filename, url = self._get_cache_filename_and_url(filename, url)
    self._cache_filename = filename
    self._url = url
    self._known_hash = known_hash
    self.area = None
    if area is not None:
        # Imported lazily, as for the other area lookups in this code base.
        from satpy.resample import get_area_def
        self.area = get_area_def(area)
    super(StaticImageCompositor, self).__init__(name, **kwargs)
    # Register the (possibly remote) data file after base initialization;
    # the first returned cache key identifies this compositor's file.
    cache_keys = self.register_data_files([])
    self._cache_key = cache_keys[0]
def _get_mandatory_config_items(self):
    """Get mandatory config items and log possible errors.

    Reads 'areaname', 'in_pattern' and 'out_pattern' from the config
    section named after the message subject.  A missing area definition
    is tolerated (``self.area_def`` becomes None); a missing option or
    section is fatal and re-raised as ``KeyError``.
    """
    try:
        self.areaname = self.config.get(self.subject, 'areaname')
        try:
            # The area definition itself is optional: continue with
            # None when it cannot be loaded.
            self.area_def = get_area_def(self.areaname)
        except (IOError, NoOptionError):
            self.area_def = None
            logging.warning("Area definition not available")
        self.in_pattern = self.config.get(self.subject, 'in_pattern')
        self.out_pattern = self.config.get(self.subject, 'out_pattern')
    except NoOptionError:
        logging.error("Required option missing!")
        logging.error(
            "Check that 'areaname', 'in_pattern' and "
            "'out_pattern' are all defined under section %s",
            self.subject)
        raise KeyError("Required config item missing")
    except NoSectionError:
        logging.error("No config section for message subject %s",
                      self.subject)
        raise KeyError("Missing config section")
def __init__(self, config):
    """Initialize from *config*.

    Side effects: starts a posttroll listener on the configured topics
    and starts a NoisyPublisher for outgoing messages.
    """
    self.config = config
    self.slots = {}
    # Structure of self.slots is:
    #     slots = {datetime(): {composite: {"img": None,
    #                                       "num": 0},
    #                           "timeout": None}}
    self._parse_settings()
    self._listener = ListenerContainer(topics=config["topics"])
    self._set_message_settings()
    self._publisher = \
        NoisyPublisher("WorldCompositePublisher",
                       port=self.port,
                       aliases=self.aliases,
                       broadcast_interval=self.broadcast_interval,
                       nameservers=self.nameservers)
    self._publisher.start()
    self._loop = False
    # "area_def" may be given either as an area name (string) or as a
    # ready-made area definition object.
    if isinstance(config["area_def"], str):
        self.adef = get_area_def(config["area_def"])
    else:
        self.adef = config["area_def"]
def _get_area_def_from_name(area_name, input_scene, grid_manager, yaml_areas):
    """Resolve *area_name* to an area definition, freezing dynamic areas.

    ``None`` disables resampling; "MAX"/"MIN" pick the finest/coarsest
    area of the scene; otherwise the name is looked up in the YAML areas,
    then the grid manager, and finally satpy's builtin areas.
    """
    if area_name is None:
        return None  # no resampling
    if area_name == "MAX":
        area_def = input_scene.finest_area()
    elif area_name == "MIN":
        area_def = input_scene.coarsest_area()
    elif area_name in yaml_areas:
        area_def = yaml_areas[area_name]
    elif area_name in grid_manager:
        area_def = grid_manager[area_name].to_satpy_area()
    else:
        # Fall back to satpy's builtin area registry.
        area_def = get_area_def(area_name)
    if isinstance(area_def, DynamicAreaDefinition):
        logger.info("Computing dynamic grid parameters...")
        area_def = area_def.freeze(input_scene.max_area())
        logger.debug("Frozen dynamic area: %s", area_def)
    return area_def
def area_def_names_to_extent(area_def_names, proj4_str,
                             default_extent=(-5567248.07, -5570248.48,
                                             5570248.48, 5567248.07)):
    '''Convert a list of *area_def_names* to maximal area extent in
    destination projection defined by *proj4_str*. *default_extent*
    gives the extreme values.  Default value is MSG3 extents at
    lat0=0.0. If a boundary of one of the area_defs is entirely
    invalid, the *default_extent* is taken.
    '''
    # Accept a single name as well as a collection of names.
    if not isinstance(area_def_names, (list, tuple, set)):
        area_def_names = [area_def_names]
    areas = []
    for name in area_def_names:
        try:
            areas.append(get_area_def(name))
        except pyresample.utils.AreaNotFound:
            # Idiom fix: lazy %-formatting instead of eager string
            # concatenation in the logging call.
            LOGGER.warning('Area definition not found %s', name)
            continue
    return area_defs_to_extent(areas, proj4_str, default_extent)
def check_sunlight_coverage(job):
    """Remove products with too low/high sunlight coverage.

    This plugins looks for a dictionary called `sunlight_coverage` in
    the product list, with members `min` and/or `max` that define the
    minimum and/or maximum allowed sunlight coverage within the scene.
    The limits are expressed in % (so between 0 and 100).  If the
    sunlit fraction is outside the set limits, the affected products
    will be discarded.  It is also possible to define
    `check_pass: True` in this dictionary to check the sunlit fraction
    within the overpass of an polar-orbiting satellite.
    """
    if get_twilight_poly is None:
        # Optional dependency missing: keep everything rather than fail.
        LOG.error("Trollsched import failed, sunlight coverage calculation not possible")
        LOG.info("Keeping all products")
        return
    scn_mda = _get_scene_metadata(job)
    scn_mda.update(job['input_mda'])
    platform_name = scn_mda['platform_name']
    start_time = scn_mda['start_time']
    end_time = scn_mda['end_time']
    sensor = scn_mda['sensor']
    if isinstance(sensor, (list, tuple, set)):
        # Only a single sensor is supported for coverage computations.
        sensor = list(sensor)
        if len(sensor) > 1:
            LOG.warning("Multiple sensors given, taking only one for "
                        "coverage calculations: %s", sensor[0])
        sensor = sensor[0]
    product_list = job['product_list']
    areas = list(product_list['product_list']['areas'].keys())
    for area in areas:
        products = list(
            product_list['product_list']['areas'][area]['products'].keys())
        try:
            area_def = get_area_def(area)
        except AreaNotFound:
            area_def = None
        # Coverage is cached per area, separately for the "whole scene"
        # and the "within overpass" (check_pass) variants.
        coverage = {True: None, False: None}
        overpass = None
        for product in products:
            prod_path = "/product_list/areas/%s/products/%s" % (area,
                                                                product)
            config = get_config_value(product_list, prod_path,
                                      "sunlight_coverage")
            if config is None:
                continue
            min_day = config.get('min')
            max_day = config.get('max')
            check_pass = config.get('check_pass', False)
            if min_day is None and max_day is None:
                LOG.debug("Sunlight coverage not configured for %s / %s",
                          product, area)
                continue
            if area_def is None:
                # Fall back to the area attached to the product itself.
                area_def = _get_product_area_def(job, area, product)
                if area_def is None:
                    continue
            if check_pass and overpass is None:
                # The overpass is only computed once, on first use.
                overpass = Pass(platform_name, start_time, end_time,
                                instrument=sensor)
            if coverage[check_pass] is None:
                coverage[check_pass] = _get_sunlight_coverage(area_def,
                                                              start_time,
                                                              overpass)
            area_conf = product_list['product_list']['areas'][area]
            area_conf['area_sunlight_coverage_percent'] = \
                coverage[check_pass] * 100
            if min_day is not None and coverage[check_pass] < (min_day / 100.0):
                LOG.info("Not enough sunlight coverage for "
                         f"product '{product!s}', removed. Needs at least "
                         f"{min_day:.1f}%, got {coverage[check_pass]:.1%}.")
                dpath.util.delete(product_list, prod_path)
            if max_day is not None and coverage[check_pass] > (max_day / 100.0):
                LOG.info("Too much sunlight coverage for "
                         f"product '{product!s}', removed. Needs at most "
                         f"{max_day:.1f}%, got {coverage[check_pass]:.1%}.")
                dpath.util.delete(product_list, prod_path)
def get_scene_coverage(platform_name, start_time, end_time, sensor, area_id):
    """Return the scene's coverage of the area as a percentage (0-100)."""
    satellite_pass = Pass(platform_name, start_time, end_time,
                          instrument=sensor)
    fraction = satellite_pass.area_coverage(get_area_def(area_id))
    return 100 * fraction
from satpy.utils import debug_on
from my_msg_module import get_last_SEVIRI_date

debug_on()

if __name__ == '__main__':
    scn = Scene(sensor='ahi',
                start_time=datetime(2016, 11, 25, 8, 0),
                end_time=datetime(2016, 11, 25, 8, 10),
                base_dir="/data/cinesat/in/eumetcast1/")
    # BUG FIX: Python 2 "print x" statements are syntax errors on
    # Python 3; converted to print() calls (same output for one arg).
    print(scn)
    print(scn.available_composites())
    composite = 'B09'
    areadef = get_area_def("australia")
    scn.load(['true_color'])
    # Mask and clip reflectances to the 0-110 range before stretching.
    scn['true_color'].mask[scn['true_color'] < 0] = True
    scn['true_color'][scn['true_color'] < 0] = 0
    scn['true_color'][scn['true_color'] > 110] = 110
    import numpy as np
    info = scn['true_color'].info
    # Logarithmic stretch of the reflectances (given in %) to 0-100.
    scn['true_color'] = np.ma.log10(scn['true_color'] / 100.0)
    scn['true_color'] = (scn['true_color'] - np.log10(0.0223)) / \
        (1.0 - np.log10(0.0223)) * 100
    scn['true_color'].info = info
    scn.show('true_color')
def add_overlay(orig, area, coast_dir, color=(0, 0, 0), width=0.5, resolution=None,
                level_coast=1, level_borders=1, fill_value=None):
    """Add coastline and political borders to image.

    Uses ``color`` for feature colors where ``color`` is a 3-element
    tuple of integers between 0 and 255 representing (R, G, B).

    .. warning::

        This function currently loses the data mask (alpha band).

    ``resolution`` is chosen automatically if None (default), otherwise
    it should be one of:

    +-----+-------------------------+---------+
    | 'f' | Full resolution         | 0.04 km |
    | 'h' | High resolution         | 0.2 km  |
    | 'i' | Intermediate resolution | 1.0 km  |
    | 'l' | Low resolution          | 5.0 km  |
    | 'c' | Crude resolution        | 25 km   |
    +-----+-------------------------+---------+
    """
    if area is None:
        raise ValueError("Area of image is None, can't add overlay.")
    from pycoast import ContourWriterAGG
    if isinstance(area, str):
        area = get_area_def(area)
    LOG.info("Add coastlines and political borders to image.")
    if resolution is None:
        # The pixel size in projection units decides the coastline
        # resolution to use.
        x_resolution = ((area.area_extent[2] - area.area_extent[0]) /
                        area.x_size)
        y_resolution = ((area.area_extent[3] - area.area_extent[1]) /
                        area.y_size)
        res = min(x_resolution, y_resolution)
        if res > 25000:
            resolution = "c"
        elif res > 5000:
            resolution = "l"
        elif res > 1000:
            resolution = "i"
        elif res > 200:
            resolution = "h"
        else:
            resolution = "f"
        LOG.debug("Automagically choose resolution %s", resolution)
    if hasattr(orig, 'convert'):
        # image must be in RGB space to work with pycoast/pydecorate
        orig = orig.convert('RGBA' if orig.mode.endswith('A') else 'RGB')
    elif not orig.mode.startswith('RGB'):
        raise RuntimeError("'trollimage' 1.6+ required to support adding "
                           "overlays/decorations to non-RGB data.")
    img = orig.pil_image(fill_value=fill_value)
    cw_ = ContourWriterAGG(coast_dir)
    cw_.add_coastlines(img, area, outline=color, resolution=resolution,
                       width=width, level=level_coast)
    cw_.add_borders(img, area, outline=color, resolution=resolution,
                    width=width, level=level_borders)
    # Burned-in image is scaled back to 0-1 floats and wrapped in a
    # dask-backed DataArray with the original coordinates/attributes.
    arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE)
    new_data = xr.DataArray(arr, dims=['y', 'x', 'bands'],
                            coords={'y': orig.data.coords['y'],
                                    'x': orig.data.coords['x'],
                                    'bands': list(img.mode)},
                            attrs=orig.data.attrs)
    return XRImage(new_data)
def _is_georef_offset_present(date): # Reference: Product User Manual, section 3. # https://doi.org/10.5676/EUM_SAF_CM/CLAAS/V002_01 return date < datetime.date(2017, 12, 6) def _adjust_area_to_match_shifted_data(area): # Reference: # https://github.com/pytroll/satpy/wiki/SEVIRI-georeferencing-offset-correction offset = area.pixel_size_x / 2 llx, lly, urx, ury = area.area_extent new_extent = [llx + offset, lly - offset, urx + offset, ury - offset] return area.copy(area_extent=new_extent) FULL_DISK = get_area_def("msg_seviri_fes_3km") FULL_DISK_WITH_OFFSET = _adjust_area_to_match_shifted_data(FULL_DISK) class CLAAS2(NetCDF4FileHandler): """Handle CMSAF CLAAS-2 files.""" grid_size = 3636 def __init__(self, *args, **kwargs): """Initialise class.""" super().__init__(*args, **kwargs, cache_handle=False, auto_maskandscale=True) @property def start_time(self):
def setup(decoder):
    """Setup the granule triggerer.

    Builds one trigger per configuration section: a file-system watcher
    (WatchDogTrigger) when a "watcher" option names a watchdog observer,
    otherwise a posttroll-message based PostTrollTrigger.
    """
    granule_triggers = []
    for section in CONFIG.sections():
        # One region collector per configured region.
        regions = [get_area_def(region)
                   for region in CONFIG.get(section, "regions").split()]
        timeliness = timedelta(minutes=CONFIG.getint(section, "timeliness"))
        try:
            duration = timedelta(seconds=CONFIG.getfloat(section, "duration"))
        except NoOptionError:
            duration = None
        collectors = [region_collector.RegionCollector(region, timeliness,
                                                       duration)
                      for region in regions]
        # Optional watchdog configuration; absence selects posttroll.
        try:
            observer_class = CONFIG.get(section, "watcher")
            pattern = CONFIG.get(section, "pattern")
            parser = Parser(pattern)
            glob = parser.globify()
        except NoOptionError:
            observer_class = None
        try:
            publish_topic = CONFIG.get(section, "publish_topic")
        except NoOptionError:
            publish_topic = None
        try:
            nameserver = CONFIG.get(section, "nameserver")
        except NoOptionError:
            nameserver = "localhost"
        try:
            publish_message_after_each_reception = CONFIG.get(
                section, "publish_message_after_each_reception")
            LOGGER.debug(
                "Publish message after each reception config: {}".format(
                    publish_message_after_each_reception))
        except NoOptionError:
            publish_message_after_each_reception = False
        if observer_class in ["PollingObserver", "Observer"]:
            LOGGER.debug("Using %s for %s", observer_class, section)
            granule_trigger = \
                trigger.WatchDogTrigger(collectors, terminator, decoder,
                                        [glob], observer_class,
                                        publish_topic=publish_topic)
        else:
            LOGGER.debug("Using posttroll for %s", section)
            granule_trigger = trigger.PostTrollTrigger(
                collectors, terminator,
                CONFIG.get(section, 'service').split(','),
                CONFIG.get(section, 'topics').split(','),
                publish_topic=publish_topic,
                nameserver=nameserver,
                publish_message_after_each_reception=
                publish_message_after_each_reception)
        granule_triggers.append(granule_trigger)
    return granule_triggers
def select_files(self, base_dir=None, filenames=None, sensor=None): res = super(FileYAMLReader, self).select_files(base_dir, filenames, sensor) # Organize filenames in to file types and create file handlers remaining_filenames = set(self.info['filenames']) for filetype, filetype_info in self.config['file_types'].items(): filetype_cls = filetype_info['file_reader'] patterns = filetype_info['file_patterns'] file_handlers = [] for pattern in patterns: used_filenames = set() levels = len(pattern.split('/')) # correct separator if needed pattern = os.path.join(*pattern.split('/')) for filename in remaining_filenames: filebase = os.path.join( *filename.split(os.path.sep)[-levels:]) if fnmatch(filebase, globify(pattern)): # we know how to use this file (even if we may not use # it later) used_filenames.add(filename) filename_info = parse(pattern, filebase) file_handler = filetype_cls(filename, filename_info, filetype_info) # Only add this file handler if it is within the time # we want if self._start_time and file_handler.start_time < self._start_time: continue if self._end_time and file_handler.end_time > self._end_time: continue if self._area: from trollsched.boundary import AreaDefBoundary, Boundary from satpy.resample import get_area_def try: gbb = Boundary( *file_handler.get_bounding_box()) except NotImplementedError: pass else: abb = AreaDefBoundary( get_area_def(self._area), frequency=1000) intersection = gbb.contour_poly.intersection( abb.contour_poly) if not intersection: continue file_handlers.append(file_handler) remaining_filenames -= used_filenames # Only create an entry in the file handlers dictionary if # we have those files if file_handlers: # Sort the file handlers by start time file_handlers.sort(key=lambda fh: fh.start_time) self.file_handlers[filetype] = file_handlers return res
def add_overlay(orig_img, area, coast_dir, color=None, width=None,
                resolution=None, level_coast=None, level_borders=None,
                fill_value=None, grid=None, overlays=None):
    """Add coastline, political borders and grid(graticules) to image.

    Uses ``color`` for feature colors where ``color`` is a 3-element
    tuple of integers between 0 and 255 representing (R, G, B).

    .. warning::

        This function currently loses the data mask (alpha band).

    ``resolution`` is chosen automatically if None (default), otherwise
    it should be one of:

    +-----+-------------------------+---------+
    | 'f' | Full resolution         | 0.04 km |
    +-----+-------------------------+---------+
    | 'h' | High resolution         | 0.2 km  |
    +-----+-------------------------+---------+
    | 'i' | Intermediate resolution | 1.0 km  |
    +-----+-------------------------+---------+
    | 'l' | Low resolution          | 5.0 km  |
    +-----+-------------------------+---------+
    | 'c' | Crude resolution        | 25 km   |
    +-----+-------------------------+---------+

    ``grid`` is a dictionary with key values as documented in detail in
    pycoast, e.g.::

        overlay={'grid': {'major_lonlat': (10, 10),
                          'write_text': False,
                          'outline': (224, 224, 224),
                          'width': 0.5}}

    Here major_lonlat is plotted every 10 deg for both longitude and
    latitude, no labels for the grid lines are plotted, the color used
    for the grid lines is light gray, and the width of the gratucules
    is 0.5 pixels.

    For grid if aggdraw is used, font option is mandatory, if not
    ``write_text`` is set to False::

        font = aggdraw.Font('black',
                            '/usr/share/fonts/truetype/msttcorefonts/Arial.ttf',
                            opacity=127, size=16)
    """
    if area is None:
        raise ValueError("Area of image is None, can't add overlay.")
    from pycoast import ContourWriterAGG
    if isinstance(area, str):
        area = get_area_def(area)
    LOG.info("Add coastlines and political borders to image.")
    # The individual styling arguments are legacy; the 'overlays' dict
    # is the supported interface.
    old_args = [color, width, resolution, grid, level_coast, level_borders]
    if any(arg is not None for arg in old_args):
        warnings.warn(
            "'color', 'width', 'resolution', 'grid', 'level_coast', 'level_borders'"
            " arguments will be deprecated soon. Please use 'overlays' instead.",
            DeprecationWarning)
    if hasattr(orig_img, 'convert'):
        # image must be in RGB space to work with pycoast/pydecorate
        # NOTE(review): res_mode is only bound on this branch but is
        # used below in apply_pil — presumably 'convert' is always
        # available on supported trollimage versions; confirm.
        res_mode = ('RGBA' if orig_img.final_mode(fill_value).endswith('A')
                    else 'RGB')
        orig_img = orig_img.convert(res_mode)
    elif not orig_img.mode.startswith('RGB'):
        raise RuntimeError("'trollimage' 1.6+ required to support adding "
                           "overlays/decorations to non-RGB data.")
    if overlays is None:
        # Build an overlays dict from the legacy arguments.
        overlays = dict()
        # fill with sensible defaults
        general_params = {'outline': color or (0, 0, 0),
                          'width': width or 0.5}
        for key, val in general_params.items():
            if val is not None:
                overlays.setdefault('coasts', {}).setdefault(key, val)
                overlays.setdefault('borders', {}).setdefault(key, val)
        if level_coast is None:
            level_coast = 1
        overlays.setdefault('coasts', {}).setdefault('level', level_coast)
        if level_borders is None:
            level_borders = 1
        overlays.setdefault('borders', {}).setdefault('level', level_borders)
        if grid is not None:
            # Translate the major/minor lonlat shorthand to pycoast's
            # Dlonlat/dlonlat parameters.
            if 'major_lonlat' in grid and grid['major_lonlat']:
                major_lonlat = grid.pop('major_lonlat')
                minor_lonlat = grid.pop('minor_lonlat', major_lonlat)
                grid.update({'Dlonlat': major_lonlat,
                             'dlonlat': minor_lonlat})
            for key, val in grid.items():
                overlays.setdefault('grid', {}).setdefault(key, val)
    cw_ = ContourWriterAGG(coast_dir)
    new_image = orig_img.apply_pil(_burn_overlay, res_mode,
                                   None, {'fill_value': fill_value},
                                   (area, cw_, overlays), None)
    return new_image
def main(argv=sys.argv[1:]):
    """Command-line entry point: load, composite, resample, and save datasets.

    Parses command-line arguments in two passes (reader/writer choice first,
    then writer-specific options), configures logging and dask workers,
    builds a satpy ``Scene`` from the input files, optionally resamples it to
    one or more target grids/areas, and hands the results to the requested
    writers.

    Args:
        argv: Argument list (defaults to ``sys.argv[1:]``).
            NOTE(review): the default is evaluated once at import time; fine
            for a script entry point, but callers passing no argument after
            mutating ``sys.argv`` will see the import-time value.

    Returns:
        0 on success, -1 on error; may also exit via ``parser.exit``.
    """
    global LOG
    from satpy import Scene
    from satpy.resample import get_area_def
    from satpy.writers import compute_writer_results
    from dask.diagnostics import ProgressBar
    from polar2grid.core.script_utils import (
        setup_logging, rename_log_file, create_exc_handler)
    import argparse
    prog = os.getenv('PROG_NAME', sys.argv[0])
    # "usage: " will be printed at the top of this:
    usage = """
    %(prog)s -h
see available products:
    %(prog)s -r <reader> -w <writer> --list-products -f file1 [file2 ...]
basic processing:
    %(prog)s -r <reader> -w <writer> [options] -f file1 [file2 ...]
basic processing with limited products:
    %(prog)s -r <reader> -w <writer> [options] -p prod1 prod2 -f file1 [file2 ...]
"""
    parser = argparse.ArgumentParser(prog=prog, usage=usage,
                                     description="Load, composite, resample, and save datasets.")
    parser.add_argument('-v', '--verbose', dest='verbosity', action="count", default=0,
                        help='each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG (default INFO)')
    parser.add_argument('-l', '--log', dest="log_fn", default=None,
                        help="specify the log filename")
    parser.add_argument('--progress', action='store_true',
                        help="show processing progress bar (not recommended for logged output)")
    parser.add_argument('--num-workers', type=int, default=4,
                        help="specify number of worker threads to use (default: 4)")
    parser.add_argument('--match-resolution', dest='preserve_resolution', action='store_false',
                        help="When using the 'native' resampler for composites, don't save data "
                             "at its native resolution, use the resolution used to create the "
                             "composite.")
    parser.add_argument('-w', '--writers', nargs='+',
                        help='writers to save datasets with')
    parser.add_argument("--list-products", dest="list_products", action="store_true",
                        help="List available reader products and exit")
    subgroups = add_scene_argument_groups(parser)
    subgroups += add_resample_argument_groups(parser)

    # First pass: parse only the arguments known so far (reader/writer choice)
    # so we can discover which writer-specific argument groups to add before
    # the final, strict parse below.  Help flags are stripped so -h is handled
    # by the second parse, after all groups exist.
    argv_without_help = [x for x in argv if x not in ["-h", "--help"]]
    args, remaining_args = parser.parse_known_args(argv_without_help)

    # get the logger if we know the readers and writers that will be used
    if args.reader is not None and args.writers is not None:
        glue_name = args.reader + "_" + "-".join(args.writers or [])
        LOG = logging.getLogger(glue_name)
    # add writer arguments
    if args.writers is not None:
        for writer in (args.writers or []):
            parser_func = WRITER_PARSER_FUNCTIONS.get(writer)
            if parser_func is None:
                continue
            subgroups += parser_func(parser)
    # Second pass: full parse now that writer-specific groups are registered.
    args = parser.parse_args(argv)

    if args.reader is None:
        parser.print_usage()
        parser.exit(1, "\nERROR: Reader must be provided (-r flag).\n"
                       "Supported readers:\n\t{}\n".format('\n\t'.join(['abi_l1b', 'ahi_hsd', 'hrit_ahi'])))
    if args.writers is None:
        parser.print_usage()
        parser.exit(1, "\nERROR: Writer must be provided (-w flag) with one or more writer.\n"
                       "Supported writers:\n\t{}\n".format('\n\t'.join(['geotiff'])))

    # Collect the parsed values belonging to one argument group into a dict.
    def _args_to_dict(group_actions):
        return {ga.dest: getattr(args, ga.dest)
                for ga in group_actions if hasattr(args, ga.dest)}
    # subgroups layout: [0]=scene args, [1]=load args, [2]=resample args,
    # then two groups per writer (added by the writer parser functions above).
    scene_args = _args_to_dict(subgroups[0]._group_actions)
    load_args = _args_to_dict(subgroups[1]._group_actions)
    resample_args = _args_to_dict(subgroups[2]._group_actions)
    writer_args = {}
    for idx, writer in enumerate(args.writers):
        sgrp1, sgrp2 = subgroups[3 + idx * 2: 5 + idx * 2]
        wargs = _args_to_dict(sgrp1._group_actions)
        if sgrp2 is not None:
            wargs.update(_args_to_dict(sgrp2._group_actions))
        writer_args[writer] = wargs
        # get default output filename
        if 'filename' in wargs and wargs['filename'] is None:
            wargs['filename'] = get_default_output_filename(args.reader, writer)

    if not args.filenames:
        parser.print_usage()
        parser.exit(1, "\nERROR: No data files provided (-f flag)\n")

    # Prepare logging
    rename_log = False
    if args.log_fn is None:
        # Start with a "fail" name; renamed after the Scene start time is
        # known (see rename_log below) so a crash leaves an obvious log file.
        rename_log = True
        args.log_fn = glue_name + "_fail.log"
    levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
    setup_logging(console_level=levels[min(3, args.verbosity)], log_filename=args.log_fn)
    logging.getLogger('rasterio').setLevel(levels[min(2, args.verbosity)])
    sys.excepthook = create_exc_handler(LOG.name)
    if levels[min(3, args.verbosity)] > logging.DEBUG:
        import warnings
        warnings.filterwarnings("ignore")
    LOG.debug("Starting script with arguments: %s", " ".join(sys.argv))

    # Set up dask and the number of workers
    if args.num_workers:
        from multiprocessing.pool import ThreadPool
        dask.config.set(pool=ThreadPool(args.num_workers))

    # Parse provided files and search for files if provided directories
    scene_args['filenames'] = get_input_files(scene_args['filenames'])
    # Create a Scene, analyze the provided files
    LOG.info("Sorting and reading input files...")
    try:
        scn = Scene(**scene_args)
    except ValueError as e:
        LOG.error("{} | Enable debug message (-vvv) or see log file for details.".format(str(e)))
        LOG.debug("Further error information: ", exc_info=True)
        return -1
    except OSError:
        LOG.error("Could not open files. Enable debug message (-vvv) or see log file for details.")
        LOG.debug("Further error information: ", exc_info=True)
        return -1

    if args.list_products:
        print("\n".join(sorted(scn.available_dataset_names(composites=True))))
        return 0

    # Rename the log file
    if rename_log:
        rename_log_file(glue_name + scn.attrs['start_time'].strftime("_%Y%m%d_%H%M%S.log"))

    # Load the actual data arrays and metadata (lazy loaded as dask arrays)
    if load_args['products'] is None:
        # No products requested: fall back to the reader module's defaults.
        try:
            reader_mod = importlib.import_module('polar2grid.readers.' + scene_args['reader'])
            load_args['products'] = reader_mod.DEFAULT_PRODUCTS
            LOG.info("Using default product list: {}".format(load_args['products']))
        except (ImportError, AttributeError):
            LOG.error("No default products list set, please specify with `--products`.")
            return -1
    LOG.info("Loading product metadata from files...")
    scn.load(load_args['products'])

    resample_kwargs = resample_args.copy()
    areas_to_resample = resample_kwargs.pop('grids')
    grid_configs = resample_kwargs.pop('grid_configs')
    resampler = resample_kwargs.pop('resampler')

    if areas_to_resample is None and resampler in [None, 'native']:
        # no areas specified
        areas_to_resample = ['MAX']
    elif areas_to_resample is None:
        raise ValueError("Resampling method specified (--method) without any destination grid/area (-g flag).")
    elif not areas_to_resample:
        # they don't want any resampling (they used '-g' with no args)
        areas_to_resample = [None]

    has_custom_grid = any(g not in ['MIN', 'MAX', None] for g in areas_to_resample)
    if has_custom_grid and resampler == 'native':
        LOG.error("Resampling method 'native' can only be used with 'MIN' or 'MAX' grids "
                  "(use 'nearest' method instead).")
        return -1

    # Grid configs can be legacy polar2grid '.conf' files or pyresample
    # area YAML files; split them by extension and load with the right tool.
    p2g_grid_configs = [x for x in grid_configs if x.endswith('.conf')]
    pyresample_area_configs = [x for x in grid_configs if not x.endswith('.conf')]
    if not grid_configs or p2g_grid_configs:
        # if we were given p2g grid configs or we weren't given any to choose from
        from polar2grid.grids import GridManager
        grid_manager = GridManager(*p2g_grid_configs)
    else:
        grid_manager = {}

    if pyresample_area_configs:
        from pyresample.utils import parse_area_file
        custom_areas = parse_area_file(pyresample_area_configs)
        custom_areas = {x.area_id: x for x in custom_areas}
    else:
        custom_areas = {}

    ll_bbox = resample_kwargs.pop('ll_bbox')
    if ll_bbox:
        scn = scn.crop(ll_bbox=ll_bbox)

    wishlist = scn.wishlist.copy()
    preserve_resolution = get_preserve_resolution(args, resampler, areas_to_resample)
    if preserve_resolution:
        # Products already present at native resolution are written as-is;
        # only the remainder goes through resampling below.
        preserved_products = set(wishlist) & set(scn.datasets.keys())
        resampled_products = set(wishlist) - preserved_products

        # original native scene
        to_save = write_scene(scn, args.writers, writer_args, preserved_products)
    else:
        preserved_products = set()
        resampled_products = set(wishlist)
        to_save = []

    LOG.debug("Products to preserve resolution for: {}".format(preserved_products))
    LOG.debug("Products to use new resolution for: {}".format(resampled_products))
    for area_name in areas_to_resample:
        if area_name is None:
            # no resampling
            area_def = None
        elif area_name == 'MAX':
            area_def = scn.max_area()
        elif area_name == 'MIN':
            area_def = scn.min_area()
        elif area_name in custom_areas:
            area_def = custom_areas[area_name]
        elif area_name in grid_manager:
            from pyresample.geometry import DynamicAreaDefinition
            p2g_def = grid_manager[area_name]
            area_def = p2g_def.to_satpy_area()
            # Freeze dynamic areas to a concrete grid using the scene extent
            # when the p2g grid definition specifies a cell size.
            if isinstance(area_def, DynamicAreaDefinition) and p2g_def['cell_width'] is not None:
                area_def = area_def.freeze(scn.max_area(),
                                           resolution=(abs(p2g_def['cell_width']),
                                                       abs(p2g_def['cell_height'])))
        else:
            area_def = get_area_def(area_name)

        if resampler is None and area_def is not None:
            # Default: 'native' for MIN/MAX pseudo-grids, 'nearest' otherwise.
            rs = 'native' if area_name in ['MIN', 'MAX'] else 'nearest'
            LOG.debug("Setting default resampling to '{}' for grid '{}'".format(rs, area_name))
        else:
            rs = resampler

        if area_def is not None:
            LOG.info("Resampling data to '%s'", area_name)
            new_scn = scn.resample(area_def, resampler=rs, **resample_kwargs)
        elif not preserve_resolution:
            # the user didn't want to resample to any areas
            # the user also requested that we don't preserve resolution
            # which means we have to save this Scene's datasets
            # because they won't be saved
            new_scn = scn

        to_save = write_scene(new_scn, args.writers, writer_args, resampled_products, to_save=to_save)

    if args.progress:
        pbar = ProgressBar()
        pbar.register()

    LOG.info("Computing products and saving data to writers...")
    # Trigger the (lazy) dask computations and flush all writer outputs.
    compute_writer_results(to_save)
    LOG.info("SUCCESS")
    return 0
def select_files(self, base_dir=None, filenames=None, sensor=None): res = super(FileYAMLReader, self).select_files(base_dir, filenames, sensor) # Organize filenames in to file types and create file handlers remaining_filenames = set(self.info['filenames']) for filetype, filetype_info in self.config['file_types'].items(): filetype_cls = filetype_info['file_reader'] patterns = filetype_info['file_patterns'] file_handlers = [] for pattern in patterns: used_filenames = set() levels = len(pattern.split('/')) # correct separator if needed pattern = os.path.join(*pattern.split('/')) for filename in remaining_filenames: filebase = os.path.join( *filename.split(os.path.sep)[-levels:]) if fnmatch(filebase, globify(pattern)): # we know how to use this file (even if we may not use # it later) used_filenames.add(filename) filename_info = parse(pattern, filebase) file_handler = filetype_cls(filename, filename_info, filetype_info) # Only add this file handler if it is within the time # we want if self._start_time and file_handler.start_time < self._start_time: continue if self._end_time and file_handler.end_time > self._end_time: continue if self._area: from trollsched.boundary import AreaDefBoundary, Boundary from satpy.resample import get_area_def try: gbb = Boundary( *file_handler.get_bounding_box()) except NotImplementedError: pass else: abb = AreaDefBoundary(get_area_def(self._area), frequency=1000) intersection = gbb.contour_poly.intersection( abb.contour_poly) if not intersection: continue file_handlers.append(file_handler) remaining_filenames -= used_filenames # Only create an entry in the file handlers dictionary if # we have those files if file_handlers: # Sort the file handlers by start time file_handlers.sort(key=lambda fh: fh.start_time) self.file_handlers[filetype] = file_handlers return res
def _auto_resolution(area):
    """Pick a pycoast resolution letter ('f'/'h'/'i'/'l'/'c') from pixel size.

    Uses the coarser-granularity match for the area's smallest pixel
    dimension (meters per pixel computed from the area extent).
    """
    x_resolution = ((area.area_extent[2] - area.area_extent[0]) /
                    area.x_size)
    y_resolution = ((area.area_extent[3] - area.area_extent[1]) /
                    area.y_size)
    res = min(x_resolution, y_resolution)

    if res > 25000:
        resolution = "c"
    elif res > 5000:
        resolution = "l"
    elif res > 1000:
        resolution = "i"
    elif res > 200:
        resolution = "h"
    else:
        resolution = "f"

    LOG.debug("Automagically choose resolution %s", resolution)
    return resolution


def add_overlay(orig, area, coast_dir, color=(0, 0, 0),
                width=0.5, resolution=None,
                level_coast=1, level_borders=1, fill_value=None,
                grid=None):
    """Add coastline, political borders and grid(graticules) to image.

    Uses ``color`` for feature colors where ``color`` is a 3-element tuple
    of integers between 0 and 255 representing (R, G, B).

    .. warning::

        This function currently loses the data mask (alpha band).

    ``resolution`` is chosen automatically if None (default), otherwise it
    should be one of:

    +-----+-------------------------+---------+
    | 'f' | Full resolution         | 0.04 km |
    +-----+-------------------------+---------+
    | 'h' | High resolution         | 0.2 km  |
    +-----+-------------------------+---------+
    | 'i' | Intermediate resolution | 1.0 km  |
    +-----+-------------------------+---------+
    | 'l' | Low resolution          | 5.0 km  |
    +-----+-------------------------+---------+
    | 'c' | Crude resolution        | 25  km  |
    +-----+-------------------------+---------+

    ``grid`` is a dictionary with key values as documented in detail in
    pycoast

    eg. overlay={'grid': {'major_lonlat': (10, 10),
                          'write_text': False,
                          'outline': (224, 224, 224),
                          'width': 0.5}}

    Here major_lonlat is plotted every 10 deg for both longitude and
    latitude, no labels for the grid lines are plotted, the color used for
    the grid lines is light gray, and the width of the gratucules is 0.5
    pixels.

    For grid if aggdraw is used, font option is mandatory, if not
    ``write_text`` is set to False::

        font = aggdraw.Font('black', '/usr/share/fonts/truetype/msttcorefonts/Arial.ttf',
                            opacity=127, size=16)

    Args:
        orig: Image (trollimage ``XRImage`` or similar) to draw onto.
        area: Area definition or area name string resolvable via
            ``get_area_def``.
        coast_dir: Path to the GSHHS coastline data for pycoast.
        color: Outline color tuple for coastlines and borders.
        width: Line width for coastlines and borders.
        resolution: pycoast resolution letter, or None for automatic.
        level_coast: pycoast coastline detail level.
        level_borders: pycoast border detail level.
        fill_value: Fill value passed to ``pil_image`` when flattening.
        grid: Optional graticule options dict (see above).  The caller's
            dict is not modified.

    Returns:
        A new ``XRImage`` with the overlays burned in.

    Raises:
        ValueError: If ``area`` is None.
        RuntimeError: If the image is non-RGB and cannot be converted.
    """
    if area is None:
        raise ValueError("Area of image is None, can't add overlay.")

    from pycoast import ContourWriterAGG
    if isinstance(area, str):
        area = get_area_def(area)
    LOG.info("Add coastlines and political borders to image.")

    if resolution is None:
        resolution = _auto_resolution(area)

    if hasattr(orig, 'convert'):
        # image must be in RGB space to work with pycoast/pydecorate
        orig = orig.convert('RGBA' if orig.mode.endswith('A') else 'RGB')
    elif not orig.mode.startswith('RGB'):
        raise RuntimeError("'trollimage' 1.6+ required to support adding "
                           "overlays/decorations to non-RGB data.")
    img = orig.pil_image(fill_value=fill_value)
    cw_ = ContourWriterAGG(coast_dir)
    cw_.add_coastlines(img, area, outline=color,
                       resolution=resolution, width=width,
                       level=level_coast)
    cw_.add_borders(img, area, outline=color,
                    resolution=resolution, width=width,
                    level=level_borders)
    # Only add grid if major_lonlat is given.
    if grid and 'major_lonlat' in grid and grid['major_lonlat']:
        # BUGFIX: work on a copy so the caller's dict keeps its
        # 'major_lonlat'/'minor_lonlat' entries (previously popping them
        # here made a second call with the same dict silently skip the grid).
        grid_kwargs = dict(grid)
        major_lonlat = grid_kwargs.pop('major_lonlat')
        minor_lonlat = grid_kwargs.pop('minor_lonlat', major_lonlat)

        cw_.add_grid(img, area, major_lonlat, minor_lonlat, **grid_kwargs)

    arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE)
    new_data = xr.DataArray(arr, dims=['y', 'x', 'bands'],
                            coords={'y': orig.data.coords['y'],
                                    'x': orig.data.coords['x'],
                                    'bands': list(img.mode)},
                            attrs=orig.data.attrs)
    return XRImage(new_data)