def invoke(self, context):
        """Invoke.

        Optionally synchronise with the previous worker via
        ``context["prev_lock"]``, then either forward a terminator
        (``None``) downstream or process the scene, and finally release
        the previous worker's lock.
        """
        # Set locking status, default to False
        self.use_lock = context.get("use_lock", False)
        self.logger.debug("Locking is used in resampler: %s",
                          str(self.use_lock))
        if self.use_lock:
            self.logger.debug(
                "Compositor acquires lock of previous "
                "worker: %s", str(context["prev_lock"]))
            utils.acquire_lock(context["prev_lock"])

        # Check for terminator
        if context["content"] is None:
            # Propagate the terminator so downstream workers also stop
            context["output_queue"].put(None)
        else:
            # Process the scene
            self._process(context)

        # After all the items have been processed, release the lock for
        # the previous step
        utils.release_locks([context["prev_lock"]],
                            log=self.logger.debug,
                            log_msg="Resampler releases lock of previous " +
                            "worker: %s" % str(context["prev_lock"]))
Example 2
    def invoke(self, context):
        """Invoke.

        Check the pass coverage of each configured area for the scene in
        ``context["content"]`` and forward the scene with only those
        areas that have sufficient coverage.  Areas without a configured
        minimum coverage are kept unconditionally.
        """
        self.use_lock = context.get("use_lock", False)
        self.logger.debug("Locking is used in coverage checker: %s",
                          str(self.use_lock))
        if self.use_lock:
            # NOTE: messages previously said "Scene loader" here, a
            # copy-paste leftover; this worker is the coverage checker
            self.logger.debug(
                "Coverage checker acquires lock of previous "
                "worker: %s", str(context["prev_lock"]))
            utils.acquire_lock(context["prev_lock"])

        scene = context["content"]
        # Build the overpass description used for coverage computations
        overpass = Pass(scene.info["platform_name"],
                        scene.info['start_time'],
                        scene.info['end_time'],
                        instrument=scene.info["sensor"][0])
        areas = []
        for area_name in scene.info["areas"]:
            self.logger.info("Checking coverage of %s", area_name)

            try:
                min_coverage = context["min_coverage"][area_name]
            except KeyError:
                # No threshold configured: keep the area unconditionally
                self.logger.warning("No minimum coverage given, "
                                    "assuming 0 % coverage needed")
                areas.append(area_name)
                continue

            if utils.covers(overpass, area_name, min_coverage, self.logger):
                areas.append(area_name)
            else:
                self.logger.info("Area coverage too low, skipping %s",
                                 area_name)
                continue

        if len(areas) > 0:
            scene.info["areas"] = areas
            context["output_queue"].put(scene)
        else:
            self.logger.info("No areas with enough coverage")

        if utils.release_locks([context["lock"]]):
            self.logger.debug("Coverage checker releases own lock %s",
                              str(context["lock"]))
            # Give the next worker time to acquire the lock
            time.sleep(1)

        # Wait until the lock has been released downstream
        if self.use_lock:
            utils.acquire_lock(context["lock"])
            utils.release_locks([context["lock"]])

        # After all the items have been processed, release the lock for
        # the previous step
        utils.release_locks([context["prev_lock"]],
                            log=self.logger.debug,
                            log_msg="Coverage checker releases lock of " +
                            "previous worker")
Example 3
    def invoke(self, context):
        """Invoke.

        Apply pansharpening to the channels listed in
        ``context["pan_sharpen_chans"]`` on the scene in
        ``context["content"]`` and put the enhanced scene to the output
        queue.  Worker-to-worker locking is handled around the
        processing.
        """
        # Set locking status, default to False
        self.use_lock = context.get("use_lock", False)
        self.logger.debug("Locking is used in pansharpener: %s",
                          str(self.use_lock))
        if self.use_lock:
            self.logger.debug(
                "Pansharpener acquires lock of previous "
                "worker: %s", str(context["prev_lock"]))
            utils.acquire_lock(context["prev_lock"])

        glbl = context["content"]
        # Read list of channels to be sharpened
        pan_chans = context["pan_sharpen_chans"]
        self.logger.info("Applying pansharpening to channels: %s",
                         str(pan_chans))
        # Check if the original data should be overwritten (default)
        # or create a new channel named "pan_"+chan.name
        overwrite = context.get("overwrite", True)

        if overwrite:
            self.logger.info("Original data will be overwritten.")
        else:
            self.logger.info("Pansharpened channels will be named with "
                             "'pan_' prefix.")

        # Apply pansharpening
        pansharpen(glbl, pan_chans, overwrite)

        # Put enhanced data to output queue
        context["output_queue"].put(glbl)

        if utils.release_locks([context["lock"]]):
            self.logger.debug("Pansharpener releases own lock %s",
                              str(context["lock"]))
            # Give the next worker time to acquire the lock
            time.sleep(1)

        # Wait until the lock has been released downstream
        if self.use_lock:
            utils.acquire_lock(context["lock"])
            utils.release_locks([context["lock"]])

        # After all the items have been processed, release the lock for
        # the previous step
        utils.release_locks([context["prev_lock"]],
                            log=self.logger.debug,
                            log_msg="Pansharpener releases lock of " +
                            "previous worker")
Example 4
    def run(self):
        """Run the thread.

        Consume items from ``self.queue`` and process each one; a
        ``None`` item triggers computation of the collected data.  The
        loop runs until ``self._loop`` is set to False.  The previous
        worker's lock, if any, is acquired around each item.
        """
        self._loop = True
        # Get save settings
        kwargs = self._save_settings.copy()

        # Initialize publisher context
        with Publish("l2producer",
                     port=self._port,
                     nameservers=self._nameservers) as self.pub:

            while self._loop:
                if self.queue is not None:
                    try:
                        # Block for at most 1 second so self._loop is
                        # re-checked regularly
                        data = self.queue.get(True, 1)
                        if self.prev_lock is not None:
                            self.logger.debug(
                                "Writer acquires lock of "
                                "previous worker: %s", str(self.prev_lock))
                            utils.acquire_lock(self.prev_lock)
                        self.queue.task_done()
                    except queue_empty:
                        continue

                    if data is None:
                        # Terminator: compute and reset the accumulators
                        self._compute()
                        self.data = []
                        self.messages = []
                    else:
                        self._process(data, **kwargs)

                    # After all the items have been processed, release the
                    # lock for the previous worker

                    if self.prev_lock is not None:
                        utils.release_locks(
                            [self.prev_lock], self.logger.debug,
                            "Writer releases lock of "
                            "previous worker: %s" % str(self.prev_lock))
                else:
                    time.sleep(1)
Example 5
    def invoke(self, context):
        """Invoke.

        Create a Satpy Scene from the incoming message and, for each
        configured group, load the composites that are within their
        valid solar-zenith ranges and put the Scene to the output queue.
        Worker-to-worker locking and optional monitoring messages are
        handled around the processing.
        """
        # Set locking status, default to False
        self.use_lock = context.get("use_lock", False)
        self.logger.debug("Locking is used in resampler: %s",
                          str(self.use_lock))
        if self.use_lock:
            self.logger.debug("Compositor acquires lock of previous "
                              "worker: %s", str(context["prev_lock"]))
            utils.acquire_lock(context["prev_lock"])

        instruments = context.get("instruments", None)
        if instruments is None:
            # Nothing can be done without instruments; release both locks
            # so the pipeline does not deadlock, then bail out
            utils.release_locks([context["lock"], context["prev_lock"]],
                                log=self.logger.error,
                                log_msg="No instruments configured!")
            return

        # NOTE(review): yaml.load() without an explicit Loader is unsafe
        # for untrusted input; consider yaml.safe_load()
        with open(context["product_list"], "r") as fid:
            product_config = yaml.load(fid)
        msg = deepcopy(context['content'])
        # Drop metadata items configured to be ignored
        # ("ignore_<key>": True in the context)
        for key, val in context.items():
            if key.startswith('ignore_') and val is True:
                msg.data.pop(key[7:], None)

        global_data = self.create_scene_from_message(msg, instruments)
        if global_data is None:
            utils.release_locks([context["lock"], context["prev_lock"]],
                                log=self.logger.info,
                                log_msg="Unable to create Scene, " +
                                "skipping data")
            return

        # Optionally announce processing start on a monitoring topic
        monitor_topic = context.get("monitor_topic", None)
        if monitor_topic is not None:
            nameservers = context.get("nameservers", None)
            port = context.get("port", 0)
            service = context.get("service", None)
            monitor_metadata = utils.get_monitor_metadata(msg.data,
                                                          status="start",
                                                          service=service)
            utils.send_message(monitor_topic,
                               "monitor",
                               monitor_metadata,
                               nameservers=nameservers, port=port)

        # use_extern_calib = product_config["common"].get("use_extern_calib",
        #                                                 "False")

        for group in product_config["groups"]:
            # Set lock if locking is used
            if self.use_lock:
                self.logger.debug("Compositor acquires own lock %s",
                                  str(context["lock"]))
                utils.acquire_lock(context["lock"])

            # Skip groups that do not match the collection area, if given
            if "collection_area_id" in msg.data:
                if group != msg.data["collection_area_id"]:
                    utils.release_locks([context["lock"]],
                                        log=self.logger.debug,
                                        log_msg="Collection not for this " +
                                        "area, skipping")
                    continue

            all_composites = \
                utils.get_satpy_group_composite_names(product_config,
                                                      group)

            # Check solar elevations and remove those composites that
            # are outside of their specified ranges
            composites = set()

            # Scene metadata lives in .attrs (newer Satpy) or .info (older)
            try:
                start_time = global_data.attrs['start_time']
            except AttributeError:
                start_time = global_data.info['start_time']

            for composite in all_composites:
                if utils.bad_sunzen_range_satpy(
                        product_config,
                        group, composite,
                        start_time):
                    self.logger.info("Removing composite '%s'; out of "
                                     "valid solar angle range", composite)
                else:
                    composites.add(composite)

            # Unload datasets left over from the previous group that are
            # not needed for this one
            prev_reqs = {itm.name for itm in global_data.datasets}
            reqs_to_unload = prev_reqs - composites
            if len(reqs_to_unload) > 0:
                self.logger.debug("Unloading unnecessary channels: %s",
                                  str(sorted(reqs_to_unload)))
                global_data.unload(list(reqs_to_unload))
            self.logger.info("Loading required data for this group: %s",
                             ', '.join(sorted(composites)))

            # use_extern_calib=use_extern_calib
            global_data.load(composites)
            try:
                global_data.attrs['products'] = composites
            except AttributeError:
                global_data.info['products'] = composites
            context["output_queue"].put(global_data)

            if utils.release_locks([context["lock"]]):
                self.logger.debug("Compositor releases own lock %s",
                                  str(context["lock"]))
                # Wait 1 second to ensure next worker has time to acquire the
                # lock
                time.sleep(1)

        # Help the garbage collector release the scene
        del global_data
        global_data = None

        # Wait until the lock has been released downstream
        if self.use_lock:
            utils.acquire_lock(context["lock"])
            utils.release_locks([context["lock"]])

        # Announce completion on the monitoring topic, if configured
        if monitor_topic is not None:
            monitor_metadata["status"] = "completed"
            utils.send_message(monitor_topic,
                               "monitor",
                               monitor_metadata,
                               nameservers=nameservers,
                               port=port)

        # After all the items have been processed, release the lock for
        # the previous step
        utils.release_locks([context["prev_lock"]], log=self.logger.debug,
                            log_msg="Compositor releases lock of previous "
                            "worker")
Example 6
    def run(self):
        """Run the thread.

        Consume composed scenes from ``self.queue``, save each available
        product to disk with the configured writer, and publish a
        message for every file written.  The loop runs until
        ``self._loop`` is set to False.
        """
        self._loop = True
        # Parse settings for saving
        compression = self._save_settings.get('compression', 6)
        tags = self._save_settings.get('tags', None)
        fformat = self._save_settings.get('fformat', None)
        gdal_options = self._save_settings.get('gdal_options', None)
        blocksize = self._save_settings.get('blocksize', None)

        kwargs = {
            'compression': compression,
            'tags': tags,
            'fformat': fformat,
            'gdal_options': gdal_options,
            'blocksize': blocksize
        }

        # Initialize publisher context
        with Publish("l2producer",
                     port=self._port,
                     nameservers=self._nameservers) as pub:

            while self._loop:
                if self.queue is not None:
                    try:
                        # Block for at most 1 second so self._loop is
                        # re-checked regularly
                        lcl = self.queue.get(True, 1)
                        if self.prev_lock is not None:
                            self.logger.debug(
                                "Writer acquires lock of "
                                "previous worker: %s", str(self.prev_lock))
                            utils.acquire_lock(self.prev_lock)
                        self.queue.task_done()
                    except Queue.Empty:
                        # After all the items have been processed, release the
                        # lock for the previous worker
                        continue

                    # Scene metadata lives in .attrs (newer Satpy) or
                    # .info (older versions)
                    try:
                        info = lcl.attrs.copy()
                        product_config = lcl.attrs["product_config"]
                        products = lcl.attrs["products"]
                    except AttributeError:
                        info = lcl.info.copy()
                        product_config = lcl.info["product_config"]
                        products = lcl.info["products"]

                    # Available composite names
                    composite_names = [dset.name for dset in lcl.keys()]

                    for i, prod in enumerate(products):
                        # Skip the removed composites
                        if prod not in composite_names:
                            continue
                        fnames, _ = utils.create_fnames(
                            info, product_config, prod)
                        # Some of the files might have specific
                        # writers, use them if configured
                        writers = utils.get_writer_names(
                            product_config, prod, info["area_id"])

                        for j, fname in enumerate(fnames):
                            if writers[j]:
                                self.logger.info("Saving %s with writer %s",
                                                 fname, writers[j])
                            else:
                                self.logger.info(
                                    "Saving %s with default writer", fname)

                            lcl.save_dataset(prod,
                                             filename=fname,
                                             writer=writers[j],
                                             **kwargs)

                            self.logger.info("Saved %s", fname)

                            try:
                                area = lcl[prod].attrs.get("area")
                            except AttributeError:
                                area = lcl[prod].info.get("area")

                            # Collect area information for the message;
                            # None if the dataset has no area definition
                            try:
                                area_data = {
                                    "name": area.name,
                                    "area_id": area.area_id,
                                    "proj_id": area.proj_id,
                                    "proj4": area.proj4_string,
                                    "shape": (area.x_size, area.y_size)
                                }
                            except AttributeError:
                                area_data = None

                            # '*' in publish_vars means "send all metadata"
                            to_send = dict(info) if '*' \
                                in self._publish_vars else {}

                            for dest_key in self._publish_vars:
                                if dest_key != '*':
                                    to_send[dest_key] = info.get(
                                        self._publish_vars[dest_key]
                                        if isinstance(self._publish_vars, dict
                                                      ) else dest_key)

                            # These fields are always included in the message
                            to_send_fix = {
                                "nominal_time": info["start_time"],
                                "uid": os.path.basename(fname),
                                "uri": os.path.abspath(fname),
                                "area": area_data,
                                "productname": info["productname"]
                            }
                            to_send.update(to_send_fix)

                            if self._topic is not None:
                                topic = self._topic
                                if area_data is not None:
                                    topic = compose(topic, area_data)
                                else:
                                    # No area definition: data is in
                                    # satellite projection
                                    topic = compose(topic,
                                                    {'area_id': 'satproj'})

                                msg = Message(topic, "file", to_send)
                                pub.send(str(msg))
                                self.logger.debug("Sent message: %s", str(msg))

                    # Help the garbage collector release the scene
                    del lcl
                    lcl = None
                    # After all the items have been processed, release the
                    # lock for the previous worker
                    # NOTE(review): "releses" typo in the log message below
                    if self.prev_lock is not None:
                        utils.release_locks(
                            [self.prev_lock], self.logger.debug,
                            "Writer releses lock of "
                            "previous worker: %s" % str(self.prev_lock))
                else:
                    time.sleep(1)
Example 7
    def invoke(self, context):
        """Invoke.

        Create a scene from the incoming message and, for each
        configured group, load the required channels for the group's
        areas and put the scene to the output queue.  Worker-to-worker
        locking and optional monitoring messages are handled around the
        processing.
        """
        # Set locking status, default to False
        self.use_lock = context.get("use_lock", False)
        self.logger.debug("Locking is used in compositor: %s",
                          str(self.use_lock))
        if self.use_lock:
            self.logger.debug(
                "Scene loader acquires lock of previous "
                "worker: %s", str(context["prev_lock"]))
            utils.acquire_lock(context["prev_lock"])

        instruments = context.get("instruments", None)
        if instruments is None:
            # Nothing can be done without instruments; release both locks
            # so the pipeline does not deadlock, then bail out
            utils.release_locks([context["lock"], context["prev_lock"]],
                                log=self.logger.error,
                                log_msg="No instruments configured!")
            return

        # NOTE(review): yaml.load() without an explicit Loader is unsafe
        # for untrusted input; consider yaml.safe_load()
        with open(context["product_list"], "r") as fid:
            product_config = yaml.load(fid)

        # Read message
        msg = context['content']

        global_data = self.create_scene_from_message(msg, instruments)

        if global_data is None:
            utils.release_locks([context["lock"], context["prev_lock"]],
                                log=self.logger.info,
                                log_msg="Unable to create Scene, " +
                                "skipping data")
            return

        # Optionally announce processing start on a monitoring topic
        monitor_topic = context.get("monitor_topic", None)
        if monitor_topic is not None:
            nameservers = context.get("nameservers", None)
            port = context.get("port", 0)
            service = context.get("service", None)
            monitor_metadata = utils.get_monitor_metadata(msg,
                                                          status="start",
                                                          service=service)
            utils.send_message(monitor_topic,
                               "monitor",
                               monitor_metadata,
                               nameservers=nameservers,
                               port=port)

        # Determine the files to load from, if listed in the message
        fnames = get_data_fnames(msg)
        use_extern_calib = product_config["common"].get(
            "use_extern_calib", "False")
        keywords = {'use_extern_calib': use_extern_calib, 'load_again': True}
        if fnames is not None:
            keywords['filename'] = fnames
            self.logger.debug("Loading from files: %s", str(fnames))

        for group in product_config["groups"]:
            # Set lock if locking is used
            if self.use_lock:
                self.logger.debug("Scene loader acquires own lock %s",
                                  str(context["lock"]))
                utils.acquire_lock(context["lock"])

            # Skip groups that do not match the collection area, if given
            if "collection_area_id" in msg.data:
                if group != msg.data["collection_area_id"]:
                    utils.release_locks([context["lock"]],
                                        log=self.logger.debug,
                                        log_msg="Collection not for this " +
                                        "area, skipping")
                    continue

            grp_area_def_names = product_config["groups"][group]

            self.logger.debug("Loading data for group %s with areas %s", group,
                              str(grp_area_def_names))

            reqs = utils.get_prerequisites_yaml(global_data,
                                                product_config["product_list"],
                                                grp_area_def_names)

            self.logger.info("Loading required channels for this group: %s",
                             str(sorted(reqs)))

            try:
                # "satproj" means no reprojection: load in satellite
                # projection and drop any area restriction
                if "satproj" in grp_area_def_names:
                    try:
                        del keywords["area_def_names"]
                    except KeyError:
                        pass
                    global_data.load(reqs, **keywords)
                else:
                    keywords["area_def_names"] = grp_area_def_names
                    global_data.load(reqs, **keywords)
            except (TypeError, StructError, IOError) as err:
                # Unreadable data: release own lock and move to next group
                utils.release_locks([context["lock"]],
                                    log=self.logger.error,
                                    log_msg="Data could not be read: %s" %
                                    str(err))
                continue

            global_data.info["areas"] = grp_area_def_names
            context["output_queue"].put(global_data)

            if utils.release_locks([context["lock"]]):
                self.logger.debug("Scene loader releases own lock %s",
                                  str(context["lock"]))
                # Wait 1 second to ensure next worker has time to acquire the
                # lock
                time.sleep(1)

        # Help the garbage collector release the scene
        del global_data
        global_data = None

        # Wait until the lock has been released downstream
        if self.use_lock:
            utils.acquire_lock(context["lock"])
            utils.release_locks([context["lock"]])

        # Announce completion on the monitoring topic, if configured
        if monitor_topic is not None:
            monitor_metadata["status"] = "completed"
            utils.send_message(monitor_topic,
                               "monitor",
                               monitor_metadata,
                               nameservers=nameservers,
                               port=port)

        # After all the items have been processed, release the lock for
        # the previous step
        utils.release_locks([context["prev_lock"]],
                            log=self.logger.debug,
                            log_msg="Scene loader releases lock of " +
                            "previous worker")
    def invoke(self, context):
        """Invoke.

        Create a Scene from the incoming message (applying any
        configured instrument aliases), load the composites for every
        area in the product list, and put ``{'scene', 'extra_metadata'}``
        dicts to the output queue followed by ``None`` terminators that
        trigger downstream computation.  Worker-to-worker locking and
        optional monitoring messages are handled around the processing.
        """
        # Set locking status, default to False
        self.use_lock = context.get("use_lock", False)
        self.logger.debug("Locking is used in compositor: %s",
                          str(self.use_lock))
        if self.use_lock:
            self.logger.debug(
                "Compositor acquires lock of previous "
                "worker: %s", str(context["prev_lock"]))
            utils.acquire_lock(context["prev_lock"])

        instruments = context.get("instruments", None)
        if instruments is None:
            # Nothing can be done without instruments; release both locks
            # so the pipeline does not deadlock, then bail out
            utils.release_locks([context["lock"], context["prev_lock"]],
                                log=self.logger.error,
                                log_msg="No instruments configured!")
            return

        readers = context.get("readers", None)

        with open(context["product_list"], "r") as fid:
            product_config = ordered_load(fid)
        msg = deepcopy(context['content'])
        # Drop metadata items configured to be ignored
        # ("ignore_<key>": True in the context)
        for key, val in context.items():
            if key.startswith('ignore_') and val is True:
                msg.data.pop(key[7:], None)

        # Rename the instrument in the message if an alias is given for it
        instrument_aliases = context.get("instrument_aliases", {})
        if instrument_aliases:
            orig_sensor = msg.data['sensor']
            if isinstance(orig_sensor, list):
                orig_sensor = orig_sensor[0]
            sensor = instrument_aliases.get(orig_sensor, orig_sensor)
            if sensor != orig_sensor:
                msg.data['sensor'] = sensor
                self.logger.info(
                    "Adjusted message instrument name from %s to %s",
                    orig_sensor, sensor)

        global_data = self.create_scene_from_message(msg,
                                                     instruments,
                                                     readers=readers)
        if global_data is None:
            utils.release_locks([context["lock"], context["prev_lock"]],
                                log=self.logger.info,
                                log_msg="Unable to create Scene, " +
                                "skipping data")
            return

        # Optionally announce processing start on a monitoring topic
        monitor_topic = context.get("monitor_topic", None)
        if monitor_topic is not None:
            nameservers = context.get("nameservers", None)
            port = context.get("port", 0)
            service = context.get("service", None)
            monitor_metadata = utils.get_monitor_metadata(msg.data,
                                                          status="start",
                                                          service=service)
            utils.send_message(monitor_topic,
                               "monitor",
                               monitor_metadata,
                               nameservers=nameservers,
                               port=port)

        # TODO: add usage of external calibration coefficients
        # use_extern_calib = product_config["common"].get("use_extern_calib",
        #                                                 "False")

        # process_by_area=True emits a terminator after every area so
        # downstream computation starts per area rather than per scene
        process_by_area = product_config["common"].get("process_by_area", True)
        # Set lock if locking is used
        if self.use_lock:
            self.logger.debug("Compositor acquires own lock %s",
                              str(context["lock"]))
            utils.acquire_lock(context["lock"])

        for area_id in product_config["product_list"]:
            extra_metadata = {}

            # Check if the data was collected for specific area
            if "collection_area_id" in msg.data:
                if area_id != msg.data["collection_area_id"]:
                    utils.release_locks([context["lock"]],
                                        log=self.logger.debug,
                                        log_msg="Collection not for this " +
                                        "area, skipping")
                    continue

            # Load and unload composites for this area
            composites = self.load_composites(global_data, product_config,
                                              area_id)

            extra_metadata['products'] = composites
            extra_metadata['area_id'] = area_id
            context["output_queue"].put({
                'scene': global_data,
                'extra_metadata': extra_metadata
            })
            if process_by_area:
                context["output_queue"].put(None)

        # Add "terminator" to the queue to trigger computations for
        # this global scene, if not already done
        if not process_by_area:
            context["output_queue"].put(None)

        if utils.release_locks([context["lock"]]):
            self.logger.debug("Compositor releases own lock %s",
                              str(context["lock"]))
            # Wait 1 second to ensure next worker has time to acquire the
            # lock
            time.sleep(1)

        # Help the garbage collector release the scene
        del global_data
        global_data = None

        # Wait until the lock has been released downstream
        if self.use_lock:
            utils.acquire_lock(context["lock"])
            utils.release_locks([context["lock"]])

        # Announce completion on the monitoring topic, if configured
        if monitor_topic is not None:
            monitor_metadata = utils.get_monitor_metadata(msg.data,
                                                          status="completed",
                                                          service=service)
            utils.send_message(monitor_topic,
                               "monitor",
                               monitor_metadata,
                               nameservers=nameservers,
                               port=port)

        # After all the items have been processed, release the lock for
        # the previous step
        utils.release_locks([context["prev_lock"]],
                            log=self.logger.debug,
                            log_msg="Compositor releases lock of previous "
                            "worker")
Example 9
    def invoke(self, context):
        """Create composites for every product of the scene in *context*.

        Reads the scene from ``context["content"]``, builds each composite
        listed in ``data.info["products"]`` and puts the resulting images on
        ``context["output_queue"]``.  Inter-worker locks are honoured when
        ``context["use_lock"]`` is set.
        """
        # Set locking status, default to False
        self.use_lock = context.get("use_lock", False)
        self.logger.debug("Locking is used in compositor: %s",
                          str(self.use_lock))
        if self.use_lock:
            self.logger.debug(
                "Compositor acquires lock of previous "
                "worker: %s", str(context["prev_lock"]))
            utils.acquire_lock(context["prev_lock"])

        data = context["content"]

        with open(context["product_list"], "r") as fid:
            # NOTE(review): yaml.load() without an explicit Loader is unsafe
            # on untrusted input and deprecated since PyYAML 5.1 -- prefer
            # yaml.safe_load() if the product list needs no custom tags.
            product_config = yaml.load(fid)

        for prod in data.info["products"]:
            # Serialize composite generation between workers when locking
            # is used
            if self.use_lock:
                self.logger.debug("Compositor acquires own lock %s",
                                  str(context["lock"]))
                utils.acquire_lock(context["lock"])

            if utils.bad_sunzen_range(data.area, product_config,
                                      data.info["area_id"], prod,
                                      data.time_slot):
                utils.release_locks([context["lock"]],
                                    log=self.logger.info,
                                    log_msg="Sun zenith angle out of valid " +
                                    "range, skipping")
                continue

            self.logger.info("Creating composite %s", prod)
            try:
                func = getattr(data.image, prod)
                img = func()
                if img is None:
                    utils.release_locks([context["lock"]])
                    continue
                img.info.update(data.info)
            except (AttributeError, KeyError, NoSectionError):
                utils.release_locks([context["lock"]],
                                    log=self.logger.warning,
                                    log_msg="Invalid composite, skipping")
                continue

            # Get filename and product name from product config
            fnames, productname = utils.create_fnames(data.info,
                                                      product_config, prod)

            if fnames is None:
                self.logger.error("Could not generate valid filename(s), "
                                  "product not saved!")
            else:
                img.info["fnames"] = fnames
                img.info["productname"] = productname
                context["output_queue"].put(img)
            # Drop the reference so the image can be garbage collected
            img = None

            if self.use_lock:
                utils.release_locks([context["lock"]],
                                    log=self.logger.debug,
                                    log_msg="Compositor releases own lock %s" %
                                    str(context["lock"]))
                # Wait 1 second to ensure next worker has time to acquire the
                # lock
                time.sleep(1)

        # Wait until the lock has been released downstream
        if self.use_lock:
            utils.acquire_lock(context["lock"])
            utils.release_locks([context["lock"]])

        # After all the items have been processed, release the lock for
        # the previous step
        utils.release_locks([context["prev_lock"]],
                            log=self.logger.debug,
                            log_msg="Compositor releases lock of "
                            "previous worker: %s" % str(context["prev_lock"]))
# --- Example 10 (separator from scraped example collection) ---
    def invoke(self, context):
        """Resample the scene in *context* to every configured area.

        Each resampled scene is put on ``context["output_queue"]``.
        Inter-worker locks are honoured when ``context["use_lock"]`` is set.
        """
        # Set locking status, default to False
        self.use_lock = context.get("use_lock", False)
        self.logger.debug("Locking is used in resampler: %s",
                          str(self.use_lock))
        if self.use_lock:
            self.logger.debug("Resampler acquires lock of previous worker: %s",
                              str(context["prev_lock"]))
            utils.acquire_lock(context["prev_lock"])

        glbl = context["content"]
        with open(context["product_list"], "r") as fid:
            # NOTE(review): yaml.load() without an explicit Loader is unsafe
            # on untrusted input and deprecated since PyYAML 5.1 -- prefer
            # yaml.safe_load() if the product list needs no custom tags.
            product_config = yaml.load(fid)

        # Handle config options.  The debug messages are deliberately only
        # emitted when the option is explicitly present in the context.
        try:
            precompute = context["precompute"]
            self.logger.debug("Setting precompute to %s", str(precompute))
        except KeyError:
            precompute = False
        try:
            nprocs = context["nprocs"]
            self.logger.debug("Using %d CPUs for resampling.", nprocs)
        except KeyError:
            nprocs = 1
        try:
            proj_method = context["proj_method"]
            self.logger.debug("Using resampling method: '%s'.", proj_method)
        except KeyError:
            proj_method = "nearest"
        try:
            radius = context["radius"]
        except (AttributeError, KeyError):
            radius = None

        if radius is None:
            self.logger.debug("Using default search radius.")
        else:
            self.logger.debug("Using search radius %d meters.", int(radius))

        prod_list = product_config["product_list"]
        for area_id in prod_list:
            # Set lock if locking is used
            if self.use_lock:
                self.logger.debug("Resampler acquires own lock %s",
                                  str(context["lock"]))
                utils.acquire_lock(context["lock"])
            if area_id not in glbl.info["areas"]:
                utils.release_locks([context["lock"]])
                continue

            # Reproject only needed channels
            channels = utils.get_prerequisites_yaml(glbl, prod_list, [
                area_id,
            ])
            if area_id == "satproj":
                self.logger.info("Using satellite projection")
                lcl = glbl
            else:
                self.logger.info("Resampling to area %s", area_id)
                lcl = glbl.project(area_id,
                                   channels=channels,
                                   precompute=precompute,
                                   mode=proj_method,
                                   radius=radius,
                                   nprocs=nprocs)
            lcl.info["area_id"] = area_id
            lcl.info["products"] = prod_list[area_id]['products']
            context["output_queue"].put(lcl)
            # Drop the reference so the scene can be garbage collected
            lcl = None
            if utils.release_locks([context["lock"]]):
                self.logger.debug("Resampler releases own lock %s",
                                  str(context["lock"]))
                # Wait 1 second to ensure next worker has time to acquire the
                # lock
                time.sleep(1)

        # Wait until the lock has been released downstream
        if self.use_lock:
            utils.acquire_lock(context["lock"])
            utils.release_locks([context["lock"]])

        # After all the items have been processed, release the lock for
        # the previous step
        utils.release_locks([context["prev_lock"]],
                            log=self.logger.debug,
                            log_msg="Resampler releases lock of previous " +
                            "worker: %s" % str(context["prev_lock"]))
    def _process(self, context):
        """Resample one scene from *context* and queue it for the writer.

        The content is a dict with keys ``scene`` and ``extra_metadata``;
        the target area ID comes from ``extra_metadata["area_id"]``.  The
        resampled scene (plus metadata) is put on
        ``context["output_queue"]`` as a dict with the same two keys.
        """
        glbl = context["content"]["scene"]
        extra_metadata = context["content"]["extra_metadata"]

        with open(context["product_list"], "r") as fid:
            product_config = ordered_load(fid)

        # Handle config options
        kwargs = {}

        kwargs['mask_area'] = context.get('mask_area', True)
        self.logger.debug("Setting area masking to %s",
                          str(kwargs['mask_area']))

        kwargs['nprocs'] = context.get('nprocs', 1)
        self.logger.debug("Using %d CPUs for resampling.", kwargs['nprocs'])

        kwargs['resampler'] = context.get('resampler', "nearest")
        self.logger.debug("Using resampling method: '%s'.",
                          kwargs['resampler'])

        try:
            kwargs['cache_dir'] = context['cache_dir']
            self.logger.debug("Setting projection cache dir to %s",
                              kwargs['cache_dir'])
        except (AttributeError, KeyError):
            pass

        prod_list = product_config["product_list"]

        # Overpass for coverage calculations; ``Pass`` may be unavailable
        # (falsy import fallback), in which case coverage checking is skipped
        scn_metadata = glbl.attrs
        if product_config['common'].get('coverage_check', True) and Pass:
            overpass = Pass(scn_metadata['platform_name'],
                            scn_metadata['start_time'],
                            scn_metadata['end_time'],
                            instrument=scn_metadata['sensor'][0])
        else:
            overpass = None

        # Get the area ID from metadata dict
        area_id = extra_metadata['area_id']

        # Check for area coverage; skip the whole area if below the minimum
        if overpass is not None:
            min_coverage = prod_list[area_id].get("min_coverage", 0.0)
            if not utils.covers(overpass, area_id, min_coverage, self.logger):
                return

        # Search radius: per-area "srch_radius", else context "radius",
        # else the 10 km default.  (The initial None assignment that used
        # to precede this block was dead -- it was always overwritten.)
        try:
            area_config = product_config["product_list"][area_id]
            kwargs['radius_of_influence'] = \
                area_config.get("srch_radius", context["radius"])
        except (AttributeError, KeyError):
            kwargs['radius_of_influence'] = 10000.

        if kwargs['radius_of_influence'] is None:
            self.logger.debug("Using default search radius.")
        else:
            self.logger.debug("Using search radius %d meters.",
                              int(kwargs['radius_of_influence']))
        # Set lock if locking is used
        if self.use_lock:
            self.logger.debug("Resampler acquires own lock %s",
                              str(context["lock"]))
            utils.acquire_lock(context["lock"])

        if area_id == "satproj":
            self.logger.info("Using satellite projection")
            lcl = glbl
        else:
            metadata = glbl.attrs
            self.logger.info("Resampling time slot %s to area %s",
                             metadata["start_time"], area_id)
            lcl = glbl.resample(area_id, **kwargs)

        # Add area ID to the scene attributes so everything needed
        # in filename composing is in the same dictionary
        lcl.attrs["area_id"] = area_id

        metadata = extra_metadata.copy()
        metadata["product_config"] = product_config
        metadata["products"] = prod_list[area_id]['products']

        self.logger.debug(
            "Inserting lcl (area: %s, start_time: %s) "
            "to writer's queue", area_id, str(scn_metadata["start_time"]))
        context["output_queue"].put({'scene': lcl, 'extra_metadata': metadata})

        if utils.release_locks([context["lock"]]):
            self.logger.debug("Resampler releases own lock %s",
                              str(context["lock"]))
            # Wait 1 second to ensure next worker has time to acquire the
            # lock
            time.sleep(1)

        # Wait until the lock has been released downstream
        if self.use_lock:
            utils.acquire_lock(context["lock"])
            utils.release_locks([context["lock"]])

        # Drop the reference so the scene can be garbage collected
        lcl = None
# --- Example 12 (separator from scraped example collection) ---
 def test_acquire_lock(self, trollflow_acquire_lock):
     """Check that utils.acquire_lock() delegates to the patched helper."""
     trollflow_acquire_lock.return_value = 'foo'
     result = utils.acquire_lock(None)
     self.assertTrue(trollflow_acquire_lock.called)
     self.assertEqual(result, 'foo')
# --- Example 13 (separator from scraped example collection) ---
    def run(self):
        """Run the writer thread.

        Pulls image objects from ``self.queue``, saves each file listed in
        ``obj.info["fnames"]`` and, when a topic is configured, publishes a
        posttroll message for every saved file.  Honours the lock of the
        previous worker when ``self.prev_lock`` is set.
        """
        self._loop = True
        # Parse settings for saving
        compression = self._save_settings.get('compression', 6)
        tags = self._save_settings.get('tags', None)
        fformat = self._save_settings.get('fformat', None)
        gdal_options = self._save_settings.get('gdal_options', None)
        blocksize = self._save_settings.get('blocksize', None)

        # Initialize publisher context
        with Publish("l2producer",
                     port=self._port,
                     nameservers=self._nameservers) as pub:

            while self._loop:
                if self.queue is not None:
                    try:
                        obj = self.queue.get(True, 1)
                        if self.prev_lock is not None:
                            self.logger.debug(
                                "Writer acquires lock of "
                                "previous worker: %s", str(self.prev_lock))
                            utils.acquire_lock(self.prev_lock)
                        self.queue.task_done()
                    except Queue.Empty:
                        # No data within the 1 s timeout; poll again
                        continue
                    for fname in obj.info["fnames"]:
                        self.logger.info("Saving %s", fname)
                        obj.save(fname,
                                 compression=compression,
                                 tags=tags,
                                 fformat=fformat,
                                 gdal_options=gdal_options,
                                 blocksize=blocksize)

                        area = getattr(obj, "area")
                        try:
                            area_data = {
                                "name": area.name,
                                "area_id": area.area_id,
                                "proj_id": area.proj_id,
                                "proj4": area.proj4_string,
                                "shape": (area.x_size, area.y_size)
                            }
                        except AttributeError:
                            # Swath data has no area definition attributes
                            area_data = None

                        to_send = {
                            "nominal_time": getattr(obj, "time_slot"),
                            "uid": os.path.basename(fname),
                            "uri": os.path.abspath(fname),
                            "area": area_data,
                            "productname": obj.info["productname"]
                        }
                        if self._topic is not None:
                            topic = self._topic
                            if area_data is not None:
                                topic = compose(topic, area_data)
                            else:
                                topic = compose(topic, {'area_id': 'satproj'})
                            msg = Message(topic, "file", to_send)
                            pub.send(str(msg))
                            self.logger.debug("Sent message: %s", str(msg))
                        self.logger.info("Saved %s", fname)

                    # Drop the reference so the image can be garbage collected
                    obj = None
                    # After all the items have been processed, release the
                    # lock for the previous worker
                    if self.prev_lock is not None:
                        utils.release_locks(
                            [self.prev_lock], self.logger.debug,
                            "Writer releases lock of "
                            "previous worker: %s" % str(self.prev_lock))
                else:
                    time.sleep(1)
# --- Example 14 (separator from scraped example collection) ---
    def invoke(self, context):
        """Resample the scene in *context* to all configured areas.

        Performs an optional coverage check per area, resamples the scene
        and puts each resulting local scene on ``context["output_queue"]``.
        Inter-worker locks are honoured when ``context["use_lock"]`` is set.
        """
        # Set locking status, default to False
        self.use_lock = context.get("use_lock", False)
        self.logger.debug("Locking is used in resampler: %s",
                          str(self.use_lock))
        if self.use_lock:
            self.logger.debug(
                "Resampler acquires lock of previous "
                "worker: %s", str(context["prev_lock"]))
            utils.acquire_lock(context["prev_lock"])

        glbl = context["content"]
        with open(context["product_list"], "r") as fid:
            # NOTE(review): yaml.load() without an explicit Loader is unsafe
            # on untrusted input and deprecated since PyYAML 5.1 -- prefer
            # yaml.safe_load() if the product list needs no custom tags.
            product_config = yaml.load(fid)

        # Handle config options
        kwargs = {}

        kwargs['precompute'] = context.get('precompute', False)
        kwargs['mask_area'] = context.get('mask_area', True)
        self.logger.debug("Setting precompute to %s and masking to %s",
                          str(kwargs['precompute']), str(kwargs['mask_area']))

        kwargs['nprocs'] = context.get('nprocs', 1)
        self.logger.debug("Using %d CPUs for resampling.", kwargs['nprocs'])

        kwargs['resampler'] = context.get('proj_method', "nearest")
        self.logger.debug("Using resampling method: '%s'.",
                          kwargs['resampler'])

        try:
            kwargs['cache_dir'] = context['cache_dir']
            self.logger.debug("Setting projection cache dir to %s",
                              kwargs['cache_dir'])
        except (AttributeError, KeyError):
            pass

        prod_list = product_config["product_list"]

        # Overpass for coverage calculations; scenes expose metadata either
        # as .attrs (satpy) or .info (mpop)
        try:
            metadata = glbl.attrs
        except AttributeError:
            metadata = glbl.info
        if product_config['common'].get('coverage_check', True):
            overpass = Pass(metadata['platform_name'],
                            metadata['start_time'],
                            metadata['end_time'],
                            instrument=metadata['sensor'][0])
        else:
            overpass = None

        for area_id in prod_list:
            # Check for area coverage; skip areas below minimum coverage
            if overpass is not None:
                min_coverage = prod_list[area_id].get("min_coverage", 0.0)
                if not utils.covers(overpass, area_id, min_coverage,
                                    self.logger):
                    continue

            # Search radius: per-area "srch_radius", else context "radius",
            # else the 10 km default
            kwargs['radius_of_influence'] = None
            try:
                area_config = product_config["product_list"][area_id]
                kwargs['radius_of_influence'] = \
                    area_config.get("srch_radius", context["radius"])
            except (AttributeError, KeyError):
                kwargs['radius_of_influence'] = 10000.

            if kwargs['radius_of_influence'] is None:
                self.logger.debug("Using default search radius.")
            else:
                self.logger.debug("Using search radius %d meters.",
                                  int(kwargs['radius_of_influence']))
            # Set lock if locking is used
            if self.use_lock:
                self.logger.debug("Resampler acquires own lock %s",
                                  str(context["lock"]))
                utils.acquire_lock(context["lock"])

            if area_id == "satproj":
                self.logger.info("Using satellite projection")
                lcl = glbl
            else:
                try:
                    metadata = glbl.attrs
                except AttributeError:
                    metadata = glbl.info
                self.logger.info("Resampling time slot %s to area %s",
                                 metadata["start_time"], area_id)
                lcl = glbl.resample(area_id, **kwargs)
            try:
                metadata = lcl.attrs
            except AttributeError:
                metadata = lcl.info
            metadata["product_config"] = product_config
            metadata["area_id"] = area_id
            metadata["products"] = prod_list[area_id]['products']

            self.logger.debug(
                "Inserting lcl (area: %s, start_time: %s) "
                "to writer's queue", area_id, str(metadata["start_time"]))
            context["output_queue"].put(lcl)
            # Drop the reference so the scene can be garbage collected
            lcl = None

            if utils.release_locks([context["lock"]]):
                self.logger.debug("Resampler releases own lock %s",
                                  str(context["lock"]))
                # Wait 1 second to ensure next worker has time to acquire the
                # lock
                time.sleep(1)

        # Wait until the lock has been released downstream
        if self.use_lock:
            utils.acquire_lock(context["lock"])
            utils.release_locks([context["lock"]])

        # After all the items have been processed, release the lock for
        # the previous step
        utils.release_locks([context["prev_lock"]],
                            log=self.logger.debug,
                            log_msg="Resampler releases lock of previous " +
                            "worker: %s" % str(context["prev_lock"]))