Example #1
 def __init__(self, config):
     # Configuration dictionary for ForestFire class
     self.config = config
     # Unprojected data with all the required channels, angles and
     # coordinates
     self.data = None
     # Channel metadata
     self.metadata = None
     # Common mask for all the datasets in self.data
     # All invalid pixels are set to True.  After processing, the locations
     # marked as False are the valid forest fires.
     self.mask = None
     # Built-in cloud mask
     self.cloud_mask = None
     # NWC SAF PPS cloud mask
     self.nwc_mask = None
     # Result of fire mapping
     self.fires = {}
     # Publisher, if configured
     if "publisher" in self.config and NoisyPublisher:
         self._pub = NoisyPublisher("satfire", **self.config["publisher"])
         self.pub = self._pub.start()
     else:
         self._pub = None
         self.pub = None
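Across all of the examples below the lifecycle is the same: construct a NoisyPublisher (which advertises the service on the posttroll nameserver), call start() to obtain the underlying publisher, send Message objects as strings, and call stop() on the NoisyPublisher when done. A minimal sketch, assuming a posttroll nameserver is running; the service name and topic are placeholders:

from posttroll.message import Message
from posttroll.publisher import NoisyPublisher

# Hypothetical minimal lifecycle of a NoisyPublisher.
np_ = NoisyPublisher("my_service")
pub = np_.start()  # returns the underlying Publisher
try:
    msg = Message("/my/topic", "info", {"status": "ok"})
    pub.send(str(msg))
finally:
    np_.stop()  # stop the NoisyPublisher, not the returned Publisher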
Example #2
    def __init__(self,
                 topic,
                 instrument,
                 config_item,
                 posttroll_port=0,
                 filepattern=None,
                 aliases=None,
                 tbus_orbit=False,
                 history=0,
                 granule_length=0,
                 custom_vars=None,
                 nameservers=[],
                 watchManager=None):
        super(EventHandler, self).__init__()

        self._pub = NoisyPublisher("trollstalker_" + config_item,
                                   posttroll_port,
                                   topic,
                                   nameservers=nameservers)
        self.pub = self._pub.start()
        self.topic = topic
        self.info = OrderedDict()
        if filepattern is None:
            filepattern = '{filename}'
        self.file_parser = Parser(filepattern)
        self.instrument = instrument
        self.aliases = aliases
        self.custom_vars = custom_vars
        self.tbus_orbit = tbus_orbit
        self.granule_length = granule_length
        self._deque = deque([], history)
        self._watchManager = watchManager
        self._watched_dirs = dict()
Example #3
    def __init__(self, config):
        self.config = config.copy()
        if isinstance(config["filepattern"], (str, bytes)):
            self.config["filepattern"] = [self.config["filepattern"]]

        self.parsers = [
            Parser(filepattern) for filepattern in self.config["filepattern"]
        ]

        self.aliases = parse_aliases(config)

        self.topic = self.config["topic"]
        self.tbus_orbit = self.config.get("tbus_orbit", False)
        logger.debug("Looking for: %s",
                     str([parser.globify() for parser in self.parsers]))
        AbstractWatchDogProcessor.__init__(
            self, [parser.globify() for parser in self.parsers],
            config.get("watcher", "Observer"))

        self._pub = NoisyPublisher("trollstalker",
                                   int(self.config["posttroll_port"]),
                                   self.config["topic"])
        self.pub = None

        obsolete_keys = [
            "topic", "filepattern", "tbus_orbit", "posttroll_port", "watch",
            "config_item", "configuration_file"
        ]

        for key in list(self.config.keys()):
            if key.startswith("alias_") or key in obsolete_keys:
                del self.config[key]
Example #4
 def __new__(cls):
     """Create new instance."""
     self = super().__new__(cls)
     LOG.debug('Starting publisher')
     self.pub = NoisyPublisher('l2processor')
     self.pub.start()
     return self
Example #5
 def _setup_and_start_communication(self):
     """Set up the Posttroll communication and start the publisher."""
     LOG.debug("Input topic: %s", self.input_topic)
     self.listener = ListenerContainer(topics=[self.input_topic])
     self.publisher = NoisyPublisher("end_user_notifier")
     self.publisher.start()
     self.loop = True
     signal.signal(signal.SIGTERM, self.signal_shutdown)
Example #6
def send_message(topic, info, message_type):
    '''Send message with the given topic and info'''
    pub_ = NoisyPublisher("dummy_sender", 0, topic)
    pub = pub_.start()
    time.sleep(2)
    msg = Message(topic, message_type, info)
    print "Sending message: %s" % str(msg)
    pub.send(str(msg))
    pub_.stop()
Example #7
 def __setstate__(self, kwargs):
     """Set things running even when loading from YAML."""
     LOG.debug('Starting publisher')
     self.port = kwargs.get('port', 0)
     self.nameservers = kwargs.get('nameservers', None)
     self.pub = NoisyPublisher('l2processor',
                               port=self.port,
                               nameservers=self.nameservers)
     self.pub.start()
Example #8
def create_publisher(cfgfile):
    cfg = ConfigParser()
    cfg.read(cfgfile)
    try:
        publisher = cfg.get("local_reception", "publisher")
    except NoOptionError:
        return None
    if publisher:
        from posttroll.publisher import NoisyPublisher
        publisher = NoisyPublisher(publisher, 0)
        publisher.start()
        return publisher
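A sketch of the configuration this function expects; only the publisher option of the local_reception section is read, and the option value shown is a placeholder:

from configparser import ConfigParser

# Hypothetical configuration contents for create_publisher() above.
cfg = ConfigParser()
cfg.read_string('''
[local_reception]
publisher = my_reception_publisher
''')
print(cfg.get("local_reception", "publisher"))  # -> my_reception_publisher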
Example #9
 def __setstate__(self, kwargs):
     """Set things running even when loading from YAML."""
     LOG.debug('Starting publisher')
     self.port = kwargs.get('port', 0)
     self.nameservers = kwargs.get('nameservers', "")
     if self.nameservers is None:
         self.pub = Publisher("tcp://*:" + str(self.port), "l2processor")
     else:
         self.pub = NoisyPublisher('l2processor',
                                   port=self.port,
                                   nameservers=self.nameservers)
         self.pub.start()
Example #10
    def _setup_and_start_communication(self):
        """Set up the Posttroll communication and start the publisher."""
        logger.debug("Starting up... Input topic: %s", self.input_topic)
        now = datetime_from_utc_to_local(datetime.now(), self.timezone)
        logger.debug("Output times for timezone: {zone} Now = {time}".format(
            zone=str(self.timezone), time=now))

        self.listener = ListenerContainer(topics=[self.input_topic])
        self.publisher = NoisyPublisher("active_fires_postprocessing")
        self.publisher.start()
        self.loop = True
        signal.signal(signal.SIGTERM, self.signal_shutdown)
Example #11
def create_publisher(cfgfile):
    cfg = ConfigParser()
    cfg.read(cfgfile)
    try:
        publisher = cfg.get("local_reception", "publisher")
    except NoOptionError:
        return None
    if publisher:
        from posttroll.publisher import NoisyPublisher

        publisher = NoisyPublisher(publisher, 0)
        publisher.start()
        return publisher
Example #12
 def setup_publisher(self):
     """Initialize publisher."""
     if self._np is None:
         try:
             nameservers = self._config["nameservers"]
             if nameservers:
                 nameservers = nameservers.split()
             self._np = NoisyPublisher("move_it_" + self._name,
                                       port=self._config["publish_port"],
                                       nameservers=nameservers)
             self.publisher = self._np.start()
         except (KeyError, NameError):
             pass
Example #13
 def __init__(self, pub, interval=30, **kwargs):
     Thread.__init__(self)
     self._loop = True
     self._event = Event()
     self._to_send = kwargs
     self._interval = interval
     if pub is not None:
         self._pub = pub
         self._stop_pub = False
     else:
         self._pub = NoisyPublisher("Heart", 0)
         self._pub.start()
         self._stop_pub = True
Example #14
    def __init__(self, config):
        self.config = config.copy()
        if isinstance(config["filepattern"], (str, unicode)):
            self.config["filepattern"] = [self.config["filepattern"]]

        self.parsers = [Parser(filepattern)
                        for filepattern in self.config["filepattern"]]

        self.aliases = parse_aliases(config)

        self.topic = self.config["topic"]
        self.tbus_orbit = self.config.get("tbus_orbit", False)
        LOGGER.debug("Looking for: %s", str([parser.globify() for parser in self.parsers]))
        AbstractWatchDogProcessor.__init__(self,
                                           [parser.globify()
                                            for parser in self.parsers],
                                           config.get("watcher",
                                                      "Observer"))

        self._pub = NoisyPublisher("trollstalker",
                                   int(self.config["posttroll_port"]),
                                   self.config["topic"])
        self.pub = None

        obsolete_keys = ["topic", "filepattern", "tbus_orbit",
                         "posttroll_port", "watch", "config_item", "configuration_file"]

        # Iterate over a copy of the keys so entries can be deleted.
        for key in list(self.config.keys()):
            if key.startswith("alias_") or key in obsolete_keys:
                del self.config[key]
Example #15
 def __init__(self,
              config_file,
              publish_port=None,
              publish_nameservers=None):
     """Initialize dispatcher class."""
     super().__init__()
     self.config = None
     self.topics = None
     self.listener = None
     self.publisher = None
     if publish_port is not None:
         self.publisher = NoisyPublisher("dispatcher",
                                         port=publish_port,
                                         nameservers=publish_nameservers)
         self.publisher.start()
     self.loop = True
     self.config_handler = DispatchConfig(config_file, self.update_config)
     signal.signal(signal.SIGTERM, self.signal_shutdown)
Example #16
    def __init__(self,
                 topic,
                 instrument,
                 posttroll_port=0,
                 filepattern=None,
                 aliases=None,
                 tbus_orbit=False):
        super(EventHandler, self).__init__()

        self._pub = NoisyPublisher("trollstalker", posttroll_port, topic)
        self.pub = self._pub.start()
        self.topic = topic
        self.info = {}
        if filepattern is None:
            filepattern = '{filename}'
        self.file_parser = Parser(filepattern)
        self.instrument = instrument
        self.aliases = aliases
        self.tbus_orbit = tbus_orbit
Example #17
class FilePublisher(object):
    # todo add support for custom port and nameserver
    def __new__(cls):
        self = super().__new__(cls)
        LOG.debug('Starting publisher')
        self.pub = NoisyPublisher('l2processor')
        self.pub.start()
        return self

    def __call__(self, job):
        # Create and send a "file" message for each produced file.
        mda = job['input_mda'].copy()
        mda.pop('dataset', None)
        mda.pop('collection', None)
        topic = job['product_list']['common']['publish_topic']
        for area, config in job['product_list']['product_list'].items():
            for prod, pconfig in config['products'].items():
                for fmat in pconfig['formats']:
                    file_mda = mda.copy()
                    file_mda['uri'] = fmat['filename']
                    file_mda['uid'] = os.path.basename(fmat['filename'])
                    msg = Message(topic, 'file', file_mda)
                    LOG.debug('Publishing %s', str(msg))
                    self.pub.send(str(msg))
        self.pub.stop()
Example #18
    def __init__(self, name, config):
        """Init a chain object."""
        super(Chain, self).__init__()
        self._config = config
        self._name = name
        self.publisher = None
        self.listeners = {}
        self.listener_died_event = Event()
        self.running = True

        # Setup publisher
        try:
            nameservers = self._config["nameservers"]
            if nameservers:
                nameservers = nameservers.split()
            self.publisher = NoisyPublisher(
                "move_it_" + self._name,
                port=self._config["publish_port"],
                nameservers=nameservers)
            self.publisher.start()
        except (KeyError, NameError):
            pass
Example #19
 def __init__(self, config):
     self.config = config
     self.slots = {}
     # Structure of self.slots is:
     # slots = {datetime(): {composite: {"img": None,
     #                                   "num": 0},
     #                       "timeout": None}}
     self._parse_settings()
     self._listener = ListenerContainer(topics=config["topics"])
     self._set_message_settings()
     self._publisher = \
         NoisyPublisher("WorldCompositePublisher",
                        port=self.port,
                        aliases=self.aliases,
                        broadcast_interval=self.broadcast_interval,
                        nameservers=self.nameservers)
     self._publisher.start()
     self._loop = False
     if isinstance(config["area_def"], str):
         self.adef = get_area_def(config["area_def"])
     else:
         self.adef = config["area_def"]
Example #20
    def __init__(self, topic, instrument, posttroll_port=0, filepattern=None,
                 aliases=None, tbus_orbit=False):
        super(EventHandler, self).__init__()

        self._pub = NoisyPublisher("trollstalker", posttroll_port, topic)
        self.pub = self._pub.start()
        self.topic = topic
        self.info = {}
        if filepattern is None:
            filepattern = '{filename}'
        self.file_parser = Parser(filepattern)
        self.instrument = instrument
        self.aliases = aliases
        self.tbus_orbit = tbus_orbit
Example #21
    def test_listener_container(self):
        """Test listener container"""
        pub = NoisyPublisher("test")
        pub.start()
        sub = ListenerContainer(topics=["/counter"])
        time.sleep(2)
        for counter in range(5):
            tested = False
            msg_out = Message("/counter", "info", str(counter))
            pub.send(str(msg_out))

            msg_in = sub.output_queue.get(True, 1)
            if msg_in is not None:
                self.assertEqual(str(msg_in), str(msg_out))
                tested = True
            self.assertTrue(tested)
        pub.stop()
        sub.stop()
Example #22
    def test_listener_container(self):
        """Test listener container"""
        from posttroll.message import Message
        from posttroll.publisher import NoisyPublisher
        from posttroll.listener import ListenerContainer

        pub = NoisyPublisher("test")
        pub.start()
        sub = ListenerContainer(topics=["/counter"])
        time.sleep(2)
        for counter in range(5):
            tested = False
            msg_out = Message("/counter", "info", str(counter))
            pub.send(str(msg_out))

            msg_in = sub.output_queue.get(True, 1)
            if msg_in is not None:
                self.assertEqual(str(msg_in), str(msg_out))
                tested = True
            self.assertTrue(tested)
        pub.stop()
        sub.stop()
Example #23
class FilePublisher(object):
    """Publisher for generated files."""

    # todo add support for custom port and nameserver
    def __new__(cls):
        """Create new instance."""
        self = super().__new__(cls)
        LOG.debug('Starting publisher')
        self.pub = NoisyPublisher('l2processor')
        self.pub.start()
        return self

    def __call__(self, job):
        """Call the publisher."""
        mda = job['input_mda'].copy()
        mda.pop('dataset', None)
        mda.pop('collection', None)
        for fmat, _fmat_config in plist_iter(
                job['product_list']['product_list']):
            prod_path = "/product_list/areas/%s/products/%s" % (
                fmat['area'], fmat['product'])
            topic_pattern = get_config_value(job['product_list'], prod_path,
                                             "publish_topic")

            file_mda = mda.copy()
            try:
                file_mda['uri'] = fmat['filename']
            except KeyError:
                continue
            file_mda['uid'] = os.path.basename(fmat['filename'])
            topic = compose(topic_pattern, fmat)
            msg = Message(topic, 'file', file_mda)
            LOG.debug('Publishing %s', str(msg))
            self.pub.send(str(msg))
        self.pub.stop()

    def __del__(self):
        """Stop the publisher when last reference is deleted."""
        self.pub.stop()
Example #24
class PytrollHandler(logging.Handler):
    """Sends the record through a pytroll publisher.
    """
    def __init__(self, name, port=0):
        logging.Handler.__init__(self)
        self._publisher = NoisyPublisher(name, port)
        self._publisher.start()

    def emit(self, record):
        message = self.format(record)
        self._publisher.send(message)

    def close(self):
        self._publisher.stop()
        logging.Handler.close(self)
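The handler plugs into the standard logging machinery like any other handler. A minimal wiring sketch, assuming a reachable nameserver; the logger and service names are placeholders:

import logging

# Hypothetical wiring of the PytrollHandler defined above.
logger = logging.getLogger("my_app")
handler = PytrollHandler("my_app_logs")
handler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
logger.addHandler(handler)
logger.warning("processing failed")  # record is published via posttroll
handler.close()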
Example #25
class MoveItClient(MoveItBase):
    """Trollmoves client class."""
    def __init__(self, cmd_args):
        """Initialize client."""
        super(MoveItClient, self).__init__(cmd_args, "client")
        self._np = NoisyPublisher("move_it_client")
        self.sync_publisher = self._np.start()
        self.setup_watchers(cmd_args)

    def run(self):
        """Start the transfer chains."""
        signal.signal(signal.SIGTERM, self.chains_stop)
        signal.signal(signal.SIGHUP, self.signal_reload_cfg_file)
        self.notifier.start()
        self.running = True
        while self.running:
            time.sleep(1)
            self.sync_publisher.heartbeat(30)
            for chain_name in self.chains:
                if not self.chains[chain_name].is_alive():
                    self.chains[chain_name] = self.chains[chain_name].restart()
Example #26
class PytrollHandler(logging.Handler):

    """Sends the record through a pytroll publisher.
    """

    def __init__(self, name, port=0):
        logging.Handler.__init__(self)
        self._publisher = NoisyPublisher(name, port)
        self._publisher.start()

    def emit(self, record):
        message = self.format(record)
        self._publisher.send(message)

    def close(self):
        self._publisher.stop()
        logging.Handler.close(self)
Example #27
class Heart(Thread):
    """Send heartbeats once in a while.

    *pub* is the publisher to use. If it's None, a new publisher
    will be created.
    *interval* is the interval to send heartbeat on, in seconds.
    *kwargs* is the things you want to send with the beats.
    """

    def __init__(self, pub, interval=30, **kwargs):
        Thread.__init__(self)
        self._loop = True
        self._event = Event()
        self._to_send = kwargs
        self._interval = interval
        if pub is not None:
            self._pub = pub
            self._stop_pub = False
        else:
            self._pub = NoisyPublisher("Heart", 0)
            self._pub.start()
            self._stop_pub = True

    def run(self):
        while self._loop:
            msg = Message("/heart/minion", "heartbeat", self._to_send).encode()
            self._pub.send(msg)
            self._event.wait(self._interval)

    def stop(self):
        """Cardiac arrest
        """
        self._loop = False
        if self._stop_pub:
            self._pub.stop()
        self._event.set()
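A usage sketch for the Heart thread above: passing pub=None makes it create (and later stop) its own NoisyPublisher; the extra keyword arguments are a hypothetical payload:

import time

heart = Heart(None, interval=10, addr="tcp://myhost:9010", status="up")
heart.start()
time.sleep(30)  # a heartbeat is sent every 10 seconds meanwhile
heart.stop()
heart.join()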
Example #28
class EventHandler(ProcessEvent):
    """
    Event handler class for inotify.
     *topic* - topic of the published messages
     *posttroll_port* - port number to publish the messages on
     *filepattern* - filepattern for finding information from the filename
    """
    def __init__(self,
                 topic,
                 instrument,
                 config_item,
                 posttroll_port=0,
                 filepattern=None,
                 aliases=None,
                 tbus_orbit=False,
                 history=0,
                 granule_length=0,
                 custom_vars=None,
                 nameservers=[],
                 watchManager=None):
        super(EventHandler, self).__init__()

        self._pub = NoisyPublisher("trollstalker_" + config_item,
                                   posttroll_port,
                                   topic,
                                   nameservers=nameservers)
        self.pub = self._pub.start()
        self.topic = topic
        self.info = OrderedDict()
        if filepattern is None:
            filepattern = '{filename}'
        self.file_parser = Parser(filepattern)
        self.instrument = instrument
        self.aliases = aliases
        self.custom_vars = custom_vars
        self.tbus_orbit = tbus_orbit
        self.granule_length = granule_length
        self._deque = deque([], history)
        self._watchManager = watchManager
        self._watched_dirs = dict()

    def stop(self):
        '''Stop publisher.
        '''
        self._pub.stop()

    def __clean__(self):
        '''Clean instance attributes.
        '''
        self.info = OrderedDict()

    def process_IN_CLOSE_WRITE(self, event):
        """When a file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_CLOSE_WRITE")
        self.process(event)

    def process_IN_CLOSE_NOWRITE(self, event):
        """When a nonwritable file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_CREATE")
        self.process(event)

    def process_IN_MOVED_TO(self, event):
        """When a file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_MOVED_TO")
        self.process(event)

    def process_IN_CREATE(self, event):
        """When a file is created, process the associated event.
        """
        LOGGER.debug("trigger: IN_CREATE")
        self.process(event)

    def process_IN_CLOSE_MODIFY(self, event):
        """When a file is modified and closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_MODIFY")
        self.process(event)

    def process_IN_DELETE(self, event):
        """On delete."""
        if (event.mask & pyinotify.IN_ISDIR):
            try:
                try:
                    self._watchManager.rm_watch(
                        self._watched_dirs[event.pathname], quiet=False)
                except pyinotify.WatchManagerError:
                    # The directory was already deleted, so removing the watch
                    # raises an error in pyinotify. This is OK; just log it
                    # and move on.
                    LOGGER.debug("Removed watch: {}".format(event.pathname))
                finally:
                    del self._watched_dirs[event.pathname]
            except KeyError:
                LOGGER.warning(
                    "Dir {} not watched by inotify. Can not delete watch.".
                    format(event.pathname))
        return

    def process(self, event):
        '''Process the event'''
        # New file created and closed
        if not event.dir:
            LOGGER.debug("processing %s", event.pathname)
            # parse information and create self.info OrderedDict{}
            self.parse_file_info(event)
            if len(self.info) > 0:
                # Check if this file has been recently dealt with
                if event.pathname not in self._deque:
                    self._deque.append(event.pathname)
                    message = self.create_message()
                    LOGGER.info("Publishing message %s", str(message))
                    self.pub.send(str(message))
                else:
                    LOGGER.info("Data has been published recently, skipping.")
            self.__clean__()
        elif (event.mask & pyinotify.IN_ISDIR):
            tmask = (pyinotify.IN_CLOSE_WRITE | pyinotify.IN_MOVED_TO
                     | pyinotify.IN_CREATE | pyinotify.IN_DELETE)
            try:
                self._watched_dirs.update(
                    self._watchManager.add_watch(event.pathname, tmask))
                LOGGER.debug("Added watch on dir: {}".format(event.pathname))
            except AttributeError:
                LOGGER.error(
                    "No watch manager given. Cannot add watch on {}".format(
                        event.pathname))

    def create_message(self):
        """Create broadcasted message
        """
        return Message(self.topic, 'file', dict(self.info))

    def parse_file_info(self, event):
        '''Parse satellite and orbit information from the filename.
        Message is sent, if a matching filepattern is found.
        '''
        try:
            LOGGER.debug("filter: %s\t event: %s", self.file_parser.fmt,
                         event.pathname)
            pathname_join = os.path.basename(event.pathname)
            # Guard against self.custom_vars being None.
            if self.custom_vars and \
                    'origin_inotify_base_dir_skip_levels' in self.custom_vars:
                pathname_list = event.pathname.split('/')
                pathname_join = "/".join(pathname_list[int(
                    self.custom_vars['origin_inotify_base_dir_skip_levels']):])
            else:
                LOGGER.debug(
                    "No origin_inotify_base_dir_skip_levels in self.custom_vars"
                )

            self.info = OrderedDict()
            self.info.update(self.file_parser.parse(pathname_join))
            LOGGER.debug("Extracted: %s", str(self.info))
        except ValueError:
            # Filename didn't match pattern, so empty the info dict
            LOGGER.info("Couldn't extract any usefull information")
            self.info = OrderedDict()
        else:
            self.info['uri'] = event.pathname
            self.info['uid'] = os.path.basename(event.pathname)
            self.info['sensor'] = self.instrument.split(',')
            LOGGER.debug("self.info['sensor']: " + str(self.info['sensor']))

            if self.tbus_orbit and "orbit_number" in self.info:
                LOGGER.info("Changing orbit number by -1!")
                self.info["orbit_number"] -= 1

            # replace values with corresponding aliases, if any are given
            if self.aliases:
                info = self.info.copy()
                for key in info:
                    if key in self.aliases:
                        self.info['orig_' + key] = self.info[key]
                        self.info[key] = self.aliases[key][str(self.info[key])]

            # add start_time and end_time if not present
            try:
                base_time = self.info["time"]
            except KeyError:
                try:
                    base_time = self.info["nominal_time"]
                except KeyError:
                    base_time = self.info["start_time"]
            if "start_time" not in self.info:
                self.info["start_time"] = base_time
            if "start_date" in self.info:
                self.info["start_time"] = \
                    dt.datetime.combine(self.info["start_date"].date(),
                                        self.info["start_time"].time())
                if "end_date" not in self.info:
                    self.info["end_date"] = self.info["start_date"]
                del self.info["start_date"]
            if "end_date" in self.info:
                self.info["end_time"] = \
                    dt.datetime.combine(self.info["end_date"].date(),
                                        self.info["end_time"].time())
                del self.info["end_date"]
            if "end_time" not in self.info and self.granule_length > 0:
                self.info["end_time"] = base_time + \
                    dt.timedelta(seconds=self.granule_length)

            if "end_time" in self.info:
                while self.info["start_time"] > self.info["end_time"]:
                    self.info["end_time"] += dt.timedelta(days=1)

            if self.custom_vars is not None:
                for var_name in self.custom_vars:
                    var_pattern = self.custom_vars[var_name]
                    var_val = None
                    if '%' in var_pattern:
                        var_val = helper_functions.create_aligned_datetime_var(
                            var_pattern, self.info)
                    if var_val is None:
                        var_val = compose(var_pattern, self.info)
                    self.info[var_name] = var_val
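The handler above is written for pyinotify. A sketch of wiring it to a watch manager and notifier; the topic, instrument, config item and watched directory are placeholders:

import pyinotify

wm = pyinotify.WatchManager()
handler = EventHandler("/my/topic", "viirs", "my_config_item",
                       filepattern="{filename}", watchManager=wm)
notifier = pyinotify.Notifier(wm, handler)
mask = (pyinotify.IN_CLOSE_WRITE | pyinotify.IN_MOVED_TO
        | pyinotify.IN_CREATE | pyinotify.IN_DELETE)
wm.add_watch("/data/incoming", mask, rec=True)
try:
    notifier.loop()
finally:
    handler.stop()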
Example #29
class Dispatcher(Thread):
    """Class that dispatches files."""
    def __init__(self,
                 config_file,
                 publish_port=None,
                 publish_nameservers=None):
        """Initialize dispatcher class."""
        super().__init__()
        self.config = None
        self.topics = None
        self.listener = None
        self.publisher = None
        if publish_port is not None:
            self.publisher = NoisyPublisher("dispatcher",
                                            port=publish_port,
                                            nameservers=publish_nameservers)
            self.publisher.start()
        self.loop = True
        self.config_handler = DispatchConfig(config_file, self.update_config)
        signal.signal(signal.SIGTERM, self.signal_shutdown)

    def signal_shutdown(self, *args, **kwargs):
        """Shutdown dispatcher."""
        self.close()

    def update_config(self, new_config):
        """Update configuration and reload listeners."""
        old_config = self.config
        topics = set()
        try:
            for _client, client_config in new_config.items():
                topics |= set(
                    sum([
                        item['topics']
                        for item in client_config['dispatch_configs']
                    ], []))
            if self.topics != topics:
                if self.listener is not None:
                    # FIXME: make sure to get the last messages though
                    self.listener.stop()
                self.config = new_config
                addresses = client_config.get('subscribe_addresses', None)
                nameserver = client_config.get('nameserver', 'localhost')
                services = client_config.get('subscribe_services', '')
                self.listener = ListenerContainer(topics=topics,
                                                  addresses=addresses,
                                                  nameserver=nameserver,
                                                  services=services)
                self.topics = topics

        except KeyError as err:
            logger.warning(
                'Invalid config for %s, keeping the old one running: %s',
                _client, str(err))
            self.config = old_config

    def run(self):
        """Run dispatcher."""
        while self.loop:
            try:
                msg = self.listener.output_queue.get(timeout=1)
            except Empty:
                continue
            else:
                if msg.type != 'file':
                    continue
                destinations = self.get_destinations(msg)
                if destinations:
                    success = dispatch(msg.data['uri'], destinations)
                    if self.publisher:
                        self._publish(msg, destinations, success)

    def _publish(self, msg, destinations, success):
        """Publish a message.

        The URI is replaced with the URI on the target server.

        """
        for url, params, client in destinations:
            if not success[client]:
                continue
            del params
            info = msg.data.copy()
            info["uri"] = urlsplit(url).path
            topic = self.config[client].get("publish_topic")
            if topic is None:
                logger.error("Publish topic not configured for '%s'", client)
                continue
            topic = compose(topic, info)
            msg = Message(topic, 'file', info)
            logger.debug('Publishing %s', str(msg))
            self.publisher.send(str(msg))

    def get_destinations(self, msg):
        """Get the destinations for this message."""
        destinations = []
        for client, config in self.config.items():
            for disp_config in config['dispatch_configs']:
                for topic in disp_config['topics']:
                    if msg.subject.startswith(topic):
                        break
                else:
                    continue
                if check_conditions(msg, disp_config):
                    destinations.append(
                        self.create_dest_url(msg, client, disp_config))
        return destinations

    def create_dest_url(self, msg, client, disp_config):
        """Create the destination URL and the connection parameters."""
        defaults = self.config[client]
        info_dict = dict()
        for key in ['host', 'directory', 'filepattern']:
            try:
                info_dict[key] = disp_config[key]
            except KeyError:
                info_dict[key] = defaults[key]
        connection_parameters = disp_config.get(
            'connection_parameters', defaults.get('connection_parameters'))
        host = info_dict['host']
        path = os.path.join(info_dict['directory'], info_dict['filepattern'])
        mda = msg.data.copy()

        for key, aliases in defaults.get('aliases', {}).items():
            if isinstance(aliases, dict):
                aliases = [aliases]

            for alias in aliases:
                new_key = alias.pop("_alias_name", key)
                if key in msg.data:
                    mda[new_key] = alias.get(msg.data[key], msg.data[key])

        path = compose(path, mda)
        parts = urlsplit(host)
        host_path = urlunsplit(
            (parts.scheme, parts.netloc, path, parts.query, parts.fragment))
        return host_path, connection_parameters, client

    def close(self):
        """Shutdown the dispatcher."""
        logger.info('Terminating dispatcher.')
        self.loop = False
        try:
            self.listener.stop()
        except Exception:
            logger.exception("Couldn't stop listener.")
        if self.publisher:
            try:
                self.publisher.stop()
            except Exception:
                logger.exception("Couldn't stop publisher.")
        try:
            self.config_handler.close()
        except Exception:
            logger.exception("Couldn't stop config handler.")
Example #30
class FilePublisher(AbstractWatchDogProcessor):
    def __init__(self, config):
        self.config = config.copy()
        if isinstance(config["filepattern"], (str, bytes)):
            self.config["filepattern"] = [self.config["filepattern"]]

        self.parsers = [
            Parser(filepattern) for filepattern in self.config["filepattern"]
        ]

        self.aliases = parse_aliases(config)

        self.topic = self.config["topic"]
        self.tbus_orbit = self.config.get("tbus_orbit", False)
        logger.debug("Looking for: %s",
                     str([parser.globify() for parser in self.parsers]))
        AbstractWatchDogProcessor.__init__(
            self, [parser.globify() for parser in self.parsers],
            config.get("watcher", "Observer"))

        self._pub = NoisyPublisher("trollstalker",
                                   int(self.config["posttroll_port"]),
                                   self.config["topic"])
        self.pub = None

        obsolete_keys = [
            "topic", "filepattern", "tbus_orbit", "posttroll_port", "watch",
            "config_item", "configuration_file"
        ]

        for key in list(self.config.keys()):
            if key.startswith("alias_") or key in obsolete_keys:
                del self.config[key]

    def start(self):
        AbstractWatchDogProcessor.start(self)
        self.pub = self._pub.start()

    def stop(self):
        self._pub.stop()
        AbstractWatchDogProcessor.stop(self)

    def process(self, pathname):
        '''Process the event'''
        # New file created and closed
        logger.debug("processing %s", pathname)
        # Parse the file information into the metadata dict.
        metadata = self.config.copy()
        success = False
        for parser in self.parsers:
            try:
                metadata.update(parser.parse(pathname))
                success = True
                break
            except ValueError:
                pass
        if not success:
            logger.warning("Could not find a matching pattern for %s",
                           pathname)

        metadata['uri'] = pathname
        metadata['uid'] = os.path.basename(pathname)

        if self.tbus_orbit and "orbit_number" in metadata:
            logger.info("Changing orbit number by -1!")
            metadata["orbit_number"] -= 1

        # replace values with corresponding aliases, if any are given
        if self.aliases:
            for key in metadata:
                if key in self.aliases:
                    metadata[key] = self.aliases[key][str(metadata[key])]

        message = Message(self.topic, 'file', metadata)
        logger.info("Publishing message %s" % str(message))
        self.pub.send(str(message))
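The Parser objects used above come from trollsift: globify() turns the pattern into a glob usable by the file watcher, and parse() extracts metadata from a matching filename, raising ValueError on a mismatch. A sketch with a hypothetical pattern:

from trollsift import Parser

p = Parser("hrpt_{platform_name}_{start_time:%Y%m%d_%H%M}.l1b")
p.globify()  # a glob pattern for the watcher, roughly 'hrpt_*_*.l1b'
meta = p.parse("hrpt_noaa19_20210301_1200.l1b")
# meta == {'platform_name': 'noaa19',
#          'start_time': datetime(2021, 3, 1, 12, 0)}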
Example #31
class FilePublisher:
    """Publisher for generated files."""
    def __init__(self, port=0, nameservers=""):
        """Create new instance."""
        self.pub = None
        self.port = port
        self.nameservers = nameservers
        self.__setstate__({'port': port, 'nameservers': nameservers})

    def __setstate__(self, kwargs):
        """Set things running even when loading from YAML."""
        LOG.debug('Starting publisher')
        self.port = kwargs.get('port', 0)
        self.nameservers = kwargs.get('nameservers', "")
        if self.nameservers is None:
            self.pub = Publisher("tcp://*:" + str(self.port), "l2processor")
        else:
            self.pub = NoisyPublisher('l2processor',
                                      port=self.port,
                                      nameservers=self.nameservers)
            self.pub.start()

    @staticmethod
    def create_message(fmat, mda):
        """Create a message topic and mda."""
        topic_pattern = fmat["publish_topic"]
        file_mda = mda.copy()
        file_mda.update(fmat.get('extra_metadata', {}))

        file_mda['uri'] = os.path.abspath(fmat['filename'])

        file_mda['uid'] = os.path.basename(fmat['filename'])
        file_mda['product'] = fmat['product']
        file_mda['area'] = fmat['area']
        for key in ['productname', 'areaname', 'format']:
            try:
                file_mda[key] = fmat[key]
            except KeyError:
                pass
        for extra_info in [
                'area_coverage_percent', 'area_sunlight_coverage_percent'
        ]:
            try:
                file_mda[extra_info] = fmat[extra_info]
            except KeyError:
                pass

        topic = compose(topic_pattern, fmat)
        return topic, file_mda

    @staticmethod
    def create_dispatch_uri(ditem, fmat):
        """Create a uri from dispatch info."""
        path = compose(ditem['path'], fmat)
        netloc = ditem.get('hostname', '')

        return urlunsplit((ditem.get('scheme', ''), netloc, path, '', ''))

    def send_dispatch_messages(self, fmat, fmat_config, topic, file_mda):
        """Send dispatch messages corresponding to a file."""
        for dispatch_item in fmat_config.get('dispatch', []):
            mda = {
                'file_mda': file_mda,
                'source': fmat_config['filename'],
                'target': self.create_dispatch_uri(dispatch_item, fmat)
            }
            msg = Message(topic, 'dispatch', mda)
            LOG.debug('Sending dispatch order: %s', str(msg))
            self.pub.send(str(msg))

    def __call__(self, job):
        """Call the publisher."""
        mda = job['input_mda'].copy()
        mda.pop('dataset', None)
        mda.pop('collection', None)
        for fmat, fmat_config in plist_iter(
                job['product_list']['product_list'], mda):
            resampled_scene = job['resampled_scenes'].get(fmat['area'], [])
            if product_missing_from_scene(fmat['product'], resampled_scene):
                LOG.debug('Not publishing missing product %s.', str(fmat))
                continue
            try:
                topic, file_mda = self.create_message(fmat, mda)
            except KeyError:
                LOG.debug('Could not create a message for %s.', str(fmat))
                continue
            msg = Message(topic, 'file', file_mda)
            LOG.info('Publishing %s', str(msg))
            self.pub.send(str(msg))
            self.send_dispatch_messages(fmat, fmat_config, topic, file_mda)

    def stop(self):
        """Stop the publisher."""
        if self.pub:
            self.pub.stop()

    def __del__(self):
        """Stop the publisher when last reference is deleted."""
        self.stop()
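The start-up logic lives in __setstate__ because objects built by the YAML loader bypass __init__: for a mapping-style !!python/object node, PyYAML creates the instance and hands the mapping to __setstate__ instead. A sketch of the mechanism; the tag path is an assumption:

import yaml

doc = '''
!!python/object:trollflow2.plugins.FilePublisher
port: 0
nameservers: null
'''
publisher = yaml.load(doc, Loader=yaml.UnsafeLoader)  # calls __setstate__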
Example #32
 def __init__(self, name, port=0):
     logging.Handler.__init__(self)
     self._publisher = NoisyPublisher(name, port)
     self._publisher.start()
Example #33
def reload_config(filename, disable_backlog=False):
    """Rebuild chains if needed (if the configuration changed) from *filename*.
    """
    LOGGER.debug("New config file detected! %s", filename)

    new_chains = read_config(filename)

    old_glob = []

    config_changed = False
    for key, val in new_chains.items():
        identical = True
        if key in chains:
            for key2, val2 in new_chains[key].items():
                if ((key2 not in ["notifier", "publisher"])
                        and ((key2 not in chains[key]) or
                             (chains[key][key2] != val2))):
                    identical = False
                    config_changed = True
                    break
            if identical:
                continue

            chains[key]["notifier"].stop()
            if "publisher" in chains[key]:
                chains[key]["publisher"].stop()

        chains[key] = val
        try:
            chains[key]["publisher"] = NoisyPublisher("move_it_" + key,
                                                      val["publish_port"])
        except (KeyError, NameError):
            pass
        chains[key]["notifier"] = create_notifier(val)
        # create logger too!
        if "publisher" in chains[key]:
            pub = chains[key]["publisher"].start()
        chains[key]["notifier"].start()
        old_glob.append(globify(val["origin"]))

        if "publisher" in chains[key]:

            def copy_hook(pathname, dest, val=val, pub=pub):
                fname = os.path.basename(pathname)

                destination = urlunparse((dest.scheme, dest.hostname,
                                          os.path.join(dest.path,
                                                       fname), dest.params,
                                          dest.query, dest.fragment))
                info = val.get("info", "")
                if info:
                    info = dict(
                        (elt.strip().split('=') for elt in info.split(";")))
                    for infokey, infoval in info.items():
                        if "," in infoval:
                            info[infokey] = infoval.split(",")
                else:
                    info = {}
                try:
                    info.update(
                        parse(os.path.basename(val["origin"]),
                              os.path.basename(pathname)))
                except ValueError:
                    info.update(
                        parse(
                            os.path.basename(
                                os.path.splitext(val["origin"])[0]),
                            os.path.basename(pathname)))

                info['uri'] = destination
                info['uid'] = fname
                msg = Message(val["topic"], 'file', info)
                pub.send(str(msg))
                LOGGER.debug("Message sent: %s", str(msg))

            chains[key]["copy_hook"] = copy_hook

            def delete_hook(pathname, val=val, pub=pub):
                fname = os.path.basename(pathname)
                info = val.get("info", "")
                if info:
                    info = dict(
                        (elt.strip().split('=') for elt in info.split(";")))
                info['uri'] = pathname
                info['uid'] = fname
                msg = Message(val["topic"], 'del', info)
                pub.send(str(msg))
                LOGGER.debug("Message sent: %s", str(msg))

            chains[key]["delete_hook"] = delete_hook

        if not identical:
            LOGGER.debug("Updated %s", key)
        else:
            LOGGER.debug("Added %s", key)

    for key in (set(chains.keys()) - set(new_chains.keys())):
        chains[key]["notifier"].stop()
        del chains[key]
        LOGGER.debug("Removed %s", key)

    if config_changed:
        LOGGER.debug("Reloaded config from %s", filename)
    else:
        LOGGER.debug("No changes to reload in %s", filename)

    if old_glob and not disable_backlog:
        fnames = []
        for pattern in old_glob:
            fnames += glob.glob(pattern)
        if fnames:
            time.sleep(3)
            LOGGER.debug("Touching old files")
            for fname in fnames:
                if os.path.exists(fname):
                    fp_ = open(fname, "ab")
                    fp_.close()
        old_glob = []
        LOGGER.info("Old files transferred")
Example #34
class FilePublisher(AbstractWatchDogProcessor):

    def __init__(self, config):
        self.config = config.copy()
        if isinstance(config["filepattern"], (str, unicode)):
            self.config["filepattern"] = [self.config["filepattern"]]

        self.parsers = [Parser(filepattern)
                        for filepattern in self.config["filepattern"]]

        self.aliases = parse_aliases(config)

        self.topic = self.config["topic"]
        self.tbus_orbit = self.config.get("tbus_orbit", False)
        LOGGER.debug("Looking for: %s", str([parser.globify() for parser in self.parsers]))
        AbstractWatchDogProcessor.__init__(self,
                                           [parser.globify()
                                            for parser in self.parsers],
                                           config.get("watcher",
                                                      "Observer"))

        self._pub = NoisyPublisher("trollstalker",
                                   int(self.config["posttroll_port"]),
                                   self.config["topic"])
        self.pub = None

        obsolete_keys = ["topic", "filepattern", "tbus_orbit",
                         "posttroll_port", "watch", "config_item", "configuration_file"]

        # Iterate over a copy of the keys so entries can be deleted.
        for key in list(self.config.keys()):
            if key.startswith("alias_") or key in obsolete_keys:
                del self.config[key]

    def start(self):
        AbstractWatchDogProcessor.start(self)
        self.pub = self._pub.start()

    def stop(self):
        self._pub.stop()
        AbstractWatchDogProcessor.stop(self)

    def process(self, pathname):
        '''Process the event'''
        # New file created and closed
        LOGGER.debug("processing %s", pathname)
        # Parse the file information into the metadata dict.
        metadata = self.config.copy()
        success = False
        for parser in self.parsers:
            try:
                metadata.update(parser.parse(pathname))
                success = True
                break
            except ValueError:
                pass
        if not success:
            LOGGER.warning("Could not find a matching pattern for %s",
                           pathname)

        metadata['uri'] = pathname
        metadata['uid'] = os.path.basename(pathname)

        if self.tbus_orbit and "orbit_number" in metadata:
            LOGGER.info("Changing orbit number by -1!")
            metadata["orbit_number"] -= 1

        # replace values with corresponding aliases, if any are given
        if self.aliases:
            for key in metadata:
                if key in self.aliases:
                    metadata[key] = self.aliases[key][str(metadata[key])]

        message = Message(self.topic, 'file', metadata)
        LOGGER.info("Publishing message %s" % str(message))
        self.pub.send(str(message))
Example #35
def reload_config(filename, chains, callback=request_push, pub_instance=None):
    """Rebuild chains if needed (if the configuration changed) from *filename*.
    """

    LOGGER.debug("New config file detected! " + filename)

    new_chains = read_config(filename)

    # setup new chains

    for key, val in new_chains.items():
        identical = True
        if key in chains:
            for key2, val2 in new_chains[key].items():
                if ((key2 not in ["listeners", "publisher"])
                        and ((key2 not in chains[key]) or
                             (chains[key][key2] != val2))):
                    identical = False
                    break
            if identical:
                continue

            if "publisher" in chains[key]:
                chains[key]["publisher"].stop()
            for provider in chains[key]["providers"]:
                chains[key]["listeners"][provider].stop()
                del chains[key]["listeners"][provider]

        chains[key] = val
        try:
            chains[key]["publisher"] = NoisyPublisher("move_it_" + key,
                                                      val["publish_port"])
        except (KeyError, NameError):
            pass

        chains[key].setdefault("listeners", {})
        try:
            topics = []
            if "topic" in val:
                topics.append(val["topic"])
            if val.get("heartbeat", False):
                topics.append(HEARTBEAT_TOPIC)
            for provider in chains[key]["providers"]:
                chains[key]["listeners"][provider] = Listener(
                    provider,
                    topics,
                    callback,
                    pub_instance=pub_instance,
                    **chains[key])
                chains[key]["listeners"][provider].start()
        except Exception as err:
            LOGGER.exception(str(err))
            raise

        # create logger too!
        if "publisher" in chains[key]:
            chains[key]["publisher"].start()

        if not identical:
            LOGGER.debug("Updated " + key)
        else:
            LOGGER.debug("Added " + key)

    # disable old chains

    for key in (set(chains.keys()) - set(new_chains.keys())):
        # Stop and remove the listeners; iterate over a copy so the
        # dictionary can be modified while looping.
        for provider, listener in list(chains[key]["listeners"].items()):
            listener.stop()
            del chains[key]["listeners"][provider]

        if "publisher" in chains[key]:
            chains[key]["publisher"].stop()

        del chains[key]
        LOGGER.debug("Removed " + key)

    LOGGER.debug("Reloaded config from " + filename)
Example #36
 def __init__(self, cmd_args):
     """Initialize client."""
     super(MoveItClient, self).__init__(cmd_args, "client")
     self._np = NoisyPublisher("move_it_client")
     self.sync_publisher = self._np.start()
     self.setup_watchers(cmd_args)
Example #37
class ActiveFiresPostprocessing(Thread):
    """The active fires post processor."""
    def __init__(self,
                 configfile,
                 shp_boarders,
                 shp_mask,
                 regional_filtermask=None):
        """Initialize the active fires post processor class."""
        super().__init__()
        self.shp_boarders = shp_boarders
        self.shp_filtermask = shp_mask
        self.regional_filtermask = regional_filtermask
        self.configfile = configfile
        self.options = {}

        config = read_config(self.configfile)
        self._set_options_from_config(config)

        self.host = socket.gethostname()

        self.timezone = self.options.get('timezone', 'GMT')

        self.input_topic = self.options['subscribe_topics'][0]
        self.output_topic = self.options['publish_topic']
        self.infile_pattern = self.options.get('af_pattern_ibands')
        self.outfile_pattern_national = self.options.get(
            'geojson_file_pattern_national')
        self.outfile_pattern_regional = self.options.get(
            'geojson_file_pattern_regional')
        self.output_dir = self.options.get('output_dir', '/tmp')

        frmt = self.options['regional_shapefiles_format']
        self.regional_shapefiles_globstr = globify(frmt)

        self.listener = None
        self.publisher = None
        self.loop = False
        self._setup_and_start_communication()

    def _setup_and_start_communication(self):
        """Set up the Posttroll communication and start the publisher."""
        logger.debug("Starting up... Input topic: %s", self.input_topic)
        now = datetime_from_utc_to_local(datetime.now(), self.timezone)
        logger.debug("Output times for timezone: {zone} Now = {time}".format(
            zone=str(self.timezone), time=now))

        self.listener = ListenerContainer(topics=[self.input_topic])
        self.publisher = NoisyPublisher("active_fires_postprocessing")
        self.publisher.start()
        self.loop = True
        signal.signal(signal.SIGTERM, self.signal_shutdown)

    def _set_options_from_config(self, config):
        """From the configuration on disk set the option dictionary, holding all metadata for processing."""
        for item in config:
            if not isinstance(config[item], dict):
                self.options[item] = config[item]

        if isinstance(self.options.get('subscribe_topics'), str):
            # Split the comma separated string and drop any empty items.
            subscribe_topics = [
                item for item in
                self.options.get('subscribe_topics').split(',') if item
            ]
            self.options['subscribe_topics'] = subscribe_topics

        if isinstance(self.options.get('publish_topics'), str):
            # Split the comma separated string and drop any empty items.
            publish_topics = [
                item for item in
                self.options.get('publish_topics').split(',') if item
            ]
            self.options['publish_topics'] = publish_topics

    def signal_shutdown(self, *args, **kwargs):
        """Shutdown the Active Fires postprocessing."""
        self.close()

    def run(self):
        """Run the AF post processing."""
        while self.loop:
            try:
                msg = self.listener.output_queue.get(timeout=1)
                logger.debug("Message: %s", str(msg.data))
            except Empty:
                continue
            else:
                if msg.type not in ['file', 'collection', 'dataset']:
                    logger.debug("Message type not supported: %s",
                                 str(msg.type))
                    continue

                platform_name = msg.data.get('platform_name')
                filename = get_filename_from_uri(msg.data.get('uri'))
                if not os.path.exists(filename):
                    logger.warning("File does not exist!")
                    continue

                file_ok = check_file_type_okay(msg.data.get('type'))
                no_fires_text = 'No fire detections for this granule'
                output_messages = self._generate_no_fires_messages(
                    msg, no_fires_text)
                if not file_ok:
                    for output_msg in output_messages:
                        logger.debug("Sending message: %s", str(output_msg))
                        self.publisher.send(str(output_msg))
                    continue

                af_shapeff = ActiveFiresShapefileFiltering(
                    filename,
                    platform_name=platform_name,
                    timezone=self.timezone)
                afdata = af_shapeff.get_af_data(self.infile_pattern)

                if len(afdata) == 0:
                    # No detections: send the "no fires" messages created above.
                    for output_msg in output_messages:
                        logger.debug("Sending message: %s", str(output_msg))
                        self.publisher.send(str(output_msg))
                    continue

                output_messages, afdata = self.fires_filtering(msg, af_shapeff)

                for output_msg in output_messages:
                    if output_msg:
                        logger.debug("Sending message: %s", str(output_msg))
                        self.publisher.send(str(output_msg))

                # Do the regional filtering now:
                if not self.regional_filtermask:
                    logger.info("No regional filtering is attempted.")
                    continue

                if len(afdata) == 0:
                    logger.debug(
                        "No fires - so no regional filtering to be done!")
                    continue

                # FIXME! If afdata is empty (len=0) then it seems all data are inside all regions!
                af_shapeff = ActiveFiresShapefileFiltering(
                    afdata=afdata,
                    platform_name=platform_name,
                    timezone=self.timezone)
                regional_fmask = af_shapeff.get_regional_filtermasks(
                    self.regional_filtermask,
                    globstr=self.regional_shapefiles_globstr)
                regional_messages = self.regional_fires_filtering_and_publishing(
                    msg, regional_fmask, af_shapeff)
                for region_msg in regional_messages:
                    logger.debug("Sending message: %s", str(region_msg))
                    self.publisher.send(str(region_msg))

    def regional_fires_filtering_and_publishing(self, msg, regional_fmask,
                                                afsff_obj):
        """From the regional-fires-filter-mask and the fire detection data send regional messages."""
        logger.debug(
            "Perform regional masking on VIIRS AF detections and publish accordingly."
        )

        afdata = afsff_obj.get_af_data()
        fmda = afsff_obj.metadata

        fmda['platform'] = afsff_obj.platform_name

        pout = Parser(self.outfile_pattern_regional)

        output_messages = []
        regions_with_detections = 0
        for region_name in regional_fmask:
            if not regional_fmask[region_name]['some_inside_test_area']:
                continue

            regions_with_detections += 1
            fmda['region_name'] = regional_fmask[region_name]['attributes'][
                'Kod_omr']

            out_filepath = os.path.join(self.output_dir, pout.compose(fmda))
            logger.debug("Output file path = %s", out_filepath)
            data_in_region = afdata[regional_fmask[region_name]['mask']]
            filepath = store_geojson(out_filepath,
                                     data_in_region,
                                     platform_name=fmda['platform'])
            if not filepath:
                logger.warning(
                    "Something went wrong when storing regional data "
                    "to GeoJSON - area: %s", str(region_name))
                continue

            outmsg = self._generate_output_message(filepath, msg,
                                                   regional_fmask[region_name])
            output_messages.append(outmsg)
            logger.info("Geojson file created! Number of fires in region = %d",
                        len(data_in_region))

        logger.debug(
            "Regional masking done. Number of regions with fire "
            "detections in this granule: %s", str(regions_with_detections))
        return output_messages

    def fires_filtering(self, msg, af_shapeff):
        """Read Active Fire data and perform spatial filtering removing false detections.

        Do the national filtering first, and then filter out potential false
        detections by the special mask for that.

        """
        logger.debug(
            "Read VIIRS AF detections and perform quality control and spatial filtering"
        )

        fmda = af_shapeff.metadata
        # The metadata contains the time and more, but it is not being
        # transferred to the dataframe attrs.

        pout = Parser(self.outfile_pattern_national)
        out_filepath = os.path.join(self.output_dir, pout.compose(fmda))
        logger.debug("Output file path = %s", out_filepath)

        # National filtering:
        af_shapeff.fires_filtering(self.shp_boarders)
        # Metadata should be transferred here!
        afdata_ff = af_shapeff.get_af_data()

        if len(afdata_ff) > 0:
            af_shapeff.fires_filtering(self.shp_filtermask,
                                       start_geometries_index=0,
                                       inside=False)
            afdata_ff = af_shapeff.get_af_data()

        filepath = store_geojson(out_filepath,
                                 afdata_ff,
                                 platform_name=af_shapeff.platform_name)
        out_messages = self.get_output_messages(filepath, msg, len(afdata_ff))

        return out_messages, afdata_ff

    def get_output_messages(self, filepath, msg, number_of_data):
        """Generate the adequate output message(s) depending on if an output file was created or not."""
        if filepath:
            logger.info(
                "GeoJSON file created! Number of fires after filtering = %d",
                number_of_data)
            return [self._generate_output_message(filepath, msg)]
        else:
            logger.info(
                "No GeoJSON file created, number of fires after filtering = %d",
                number_of_data)
            return self._generate_no_fires_messages(
                msg, 'No true fire detections inside National borders')

    def _generate_output_message(self, filepath, input_msg, region=None):
        """Create the output message to publish."""

        output_topic = generate_posttroll_topic(self.output_topic, region)
        to_send = prepare_posttroll_message(input_msg, region)
        to_send['uri'] = ('ssh://%s/%s' % (self.host, filepath))
        to_send['uid'] = os.path.basename(filepath)
        to_send['type'] = 'GEOJSON-filtered'
        to_send['format'] = 'geojson'
        to_send['product'] = 'afimg'
        pubmsg = Message(output_topic, 'file', to_send)
        return pubmsg

    def _generate_no_fires_messages(self, input_msg, msg_string):
        """Create the output messages to publish."""

        to_send = prepare_posttroll_message(input_msg)
        to_send['info'] = msg_string
        publish_messages = []
        for ext in ['National', 'Regional']:
            topic = self.output_topic + '/' + ext
            publish_messages.append(Message(topic, 'info', to_send))

        return publish_messages

    def close(self):
        """Shutdown the Active Fires postprocessing."""
        logger.info('Terminating Active Fires post processing.')
        self.loop = False
        try:
            self.listener.stop()
        except Exception:
            logger.exception("Couldn't stop listener.")
        if self.publisher:
            try:
                self.publisher.stop()
            except Exception:
                logger.exception("Couldn't stop publisher.")
Example No. 38
import logging
import socket


from posttroll.publisher import NoisyPublisher
from posttroll.subscriber import Subscriber
from posttroll.message import Message
from posttroll import context

from threading import Lock, Thread
from zmq import Poller, REQ, POLLIN, NOBLOCK, LINGER, zmq_version
from Queue import Queue, Empty  # Python 2; on Python 3 this is the queue module
from collections import deque
import netifaces


NP = NoisyPublisher("move_it_client")
PUB = NP.start()

queue = Queue()

LOGGER = logging.getLogger("move_it_client")

REQ_TIMEOUT = 1000
if zmq_version().startswith("2."):
    # zmq 2.x expresses poll timeouts in microseconds rather than milliseconds.
    REQ_TIMEOUT *= 1000

chains = {}
listeners = {}
file_cache = deque(maxlen=100)
cache_lock = Lock()
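The globals above only set up the client's shared state; the Poller/REQ/LINGER imports and REQ_TIMEOUT point at the classic "REQ socket with a poll timeout" pattern, where a request is abandoned if no reply arrives in time. A hedged sketch of such a helper, assuming a plain pyzmq context (the request function and the server address are illustrative, not from the original module):

from zmq import Context, Poller, LINGER, POLLIN, REQ

REQ_TIMEOUT = 1000  # milliseconds (see the zmq 2.x adjustment above)


def request(server_address, payload, timeout=REQ_TIMEOUT):
    """Send *payload* to *server_address*; return the reply, or None on timeout."""
    sock = Context.instance().socket(REQ)
    # LINGER=0 so close() does not block on an undelivered request.
    sock.setsockopt(LINGER, 0)
    sock.connect(server_address)
    try:
        sock.send_string(payload)
        poller = Poller()
        poller.register(sock, POLLIN)
        if poller.poll(timeout):
            return sock.recv_string()
        return None  # timed out; a REQ socket is then unusable, use a fresh one
    finally:
        sock.close()

For example, request("tcp://localhost:9900", "ping") would return the server's reply string, or None after one second.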
Example No. 39
class EventHandler(ProcessEvent):

    """
    Event handler class for inotify.
     *topic* - topic of the published messages
     *posttroll_port* - port number to publish the messages on
     *filepattern* - filepattern for finding information from the filename
    """

    def __init__(self, topic, instrument, posttroll_port=0, filepattern=None,
                 aliases=None, tbus_orbit=False):
        super(EventHandler, self).__init__()

        self._pub = NoisyPublisher("trollstalker", posttroll_port, topic)
        self.pub = self._pub.start()
        self.topic = topic
        self.info = {}
        if filepattern is None:
            filepattern = '{filename}'
        self.file_parser = Parser(filepattern)
        self.instrument = instrument
        self.aliases = aliases
        self.tbus_orbit = tbus_orbit

    def stop(self):
        '''Stop publisher.
        '''
        self._pub.stop()

    def __clean__(self):
        '''Clean instance attributes.
        '''
        self.info = {}

    def process_IN_CLOSE_WRITE(self, event):
        """When a file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_CLOSE_WRITE")
        self.process(event)

    def process_IN_CLOSE_NOWRITE(self, event):
        """When a nonwritable file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_CLOSE_NOWRITE")
        self.process(event)

    def process_IN_MOVED_TO(self, event):
        """When a file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_MOVED_TO")
        self.process(event)

    def process_IN_CREATE(self, event):
        """When a file is created, process the associated event.
        """
        LOGGER.debug("trigger: IN_CREATE")
        self.process(event)

    def process_IN_CLOSE_MODIFY(self, event):
        """When a file is modified and closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_CLOSE_MODIFY")
        self.process(event)

    def process(self, event):
        '''Process the event'''
        # New file created and closed
        if not event.dir:
            LOGGER.debug("processing %s", event.pathname)
            # parse information and create self.info dict{}
            self.parse_file_info(event)
            if len(self.info) > 0:
                message = self.create_message()
                LOGGER.info("Publishing message %s" % str(message))
                self.pub.send(str(message))
            self.__clean__()

    def create_message(self):
        """Create broadcasted message
        """
        return Message(self.topic, 'file', self.info)

    def parse_file_info(self, event):
        '''Parse satellite and orbit information from the filename.
        Message is sent, if a matching filepattern is found.
        '''
        try:
            LOGGER.debug("filter: %s\t event: %s",
                         self.file_parser.fmt, event.pathname)
            self.info = self.file_parser.parse(
                os.path.basename(event.pathname))
            LOGGER.debug("Extracted: %s", str(self.info))
        except ValueError:
            # Filename didn't match pattern, so empty the info dict
            LOGGER.info("Couldn't extract any usefull information")
            self.info = {}
        else:
            self.info['uri'] = event.pathname
            self.info['uid'] = os.path.basename(event.pathname)
            self.info['sensor'] = self.instrument.split(',')
            LOGGER.debug("self.info['sensor']: " + str(self.info['sensor']))

            if self.tbus_orbit and "orbit_number" in self.info:
                LOGGER.info("Changing orbit number by -1!")
                self.info["orbit_number"] -= 1

            # replace values with corresponding aliases, if any are given
            if self.aliases:
                for key in self.info:
                    if key in self.aliases:
                        self.info[key] = self.aliases[key][str(self.info[key])]
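parse_file_info above leans on trollsift's Parser to pull metadata out of a filename and then rewrites selected values through the alias table. A small standalone illustration of those two steps; the pattern, filename, and alias table are invented for the example:

from trollsift import Parser

# Hypothetical pattern and alias table, for illustration only.
file_parser = Parser("{platform_name}_{start_time:%Y%m%d_%H%M}.h5")
aliases = {"platform_name": {"npp": "Suomi-NPP"}}

info = file_parser.parse("npp_20240101_1200.h5")
# -> {'platform_name': 'npp', 'start_time': datetime.datetime(2024, 1, 1, 12, 0)}

# Replace parsed values with their aliases, as the handler does.
for key, mapping in aliases.items():
    if key in info:
        info[key] = mapping[str(info[key])]

print(info["platform_name"])  # Suomi-NPP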
Example No. 40
class EventHandler(ProcessEvent):

    """
    Event handler class for inotify.
     *topic* - topic of the published messages
     *posttroll_port* - port number to publish the messages on
     *filepattern* - filepattern for finding information from the filename
    """

    def __init__(self, topic, instrument, posttroll_port=0, filepattern=None,
                 aliases=None, tbus_orbit=False, history=0, granule_length=0):
        super(EventHandler, self).__init__()

        self._pub = NoisyPublisher("trollstalker", posttroll_port, topic)
        self.pub = self._pub.start()
        self.topic = topic
        self.info = {}
        if filepattern is None:
            filepattern = '{filename}'
        self.file_parser = Parser(filepattern)
        self.instrument = instrument
        self.aliases = aliases
        self.tbus_orbit = tbus_orbit
        self.granule_length = granule_length
        self._deque = deque([], history)

    def stop(self):
        '''Stop publisher.
        '''
        self._pub.stop()

    def __clean__(self):
        '''Clean instance attributes.
        '''
        self.info = {}

    def process_IN_CLOSE_WRITE(self, event):
        """When a file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_CLOSE_WRITE")
        self.process(event)

    def process_IN_CLOSE_NOWRITE(self, event):
        """When a nonwritable file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_CLOSE_NOWRITE")
        self.process(event)

    def process_IN_MOVED_TO(self, event):
        """When a file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_MOVED_TO")
        self.process(event)

    def process_IN_CREATE(self, event):
        """When a file is created, process the associated event.
        """
        LOGGER.debug("trigger: IN_CREATE")
        self.process(event)

    def process_IN_CLOSE_MODIFY(self, event):
        """When a file is modified and closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_CLOSE_MODIFY")
        self.process(event)

    def process(self, event):
        '''Process the event'''
        # New file created and closed
        if not event.dir:
            LOGGER.debug("processing %s", event.pathname)
            # parse information and create self.info dict{}
            self.parse_file_info(event)
            if len(self.info) > 0:
                # Check if this file has been recently dealt with
                if event.pathname not in self._deque:
                    self._deque.append(event.pathname)
                    message = self.create_message()
                    LOGGER.info("Publishing message %s", str(message))
                    self.pub.send(str(message))
                else:
                    LOGGER.info("Data has been published recently, skipping.")
            self.__clean__()

    def create_message(self):
        """Create broadcasted message
        """
        return Message(self.topic, 'file', self.info)

    def parse_file_info(self, event):
        '''Parse satellite and orbit information from the filename.
        Message is sent, if a matching filepattern is found.
        '''
        try:
            LOGGER.debug("filter: %s\t event: %s",
                         self.file_parser.fmt, event.pathname)
            self.info = self.file_parser.parse(
                os.path.basename(event.pathname))
            LOGGER.debug("Extracted: %s", str(self.info))
        except ValueError:
            # Filename didn't match pattern, so empty the info dict
            LOGGER.info("Couldn't extract any usefull information")
            self.info = {}
        else:
            self.info['uri'] = event.pathname
            self.info['uid'] = os.path.basename(event.pathname)
            self.info['sensor'] = self.instrument.split(',')
            LOGGER.debug("self.info['sensor']: " + str(self.info['sensor']))

            if self.tbus_orbit and "orbit_number" in self.info:
                LOGGER.info("Changing orbit number by -1!")
                self.info["orbit_number"] -= 1

            # replace values with corresponding aliases, if any are given
            if self.aliases:
                info = self.info.copy()
                for key in info:
                    if key in self.aliases:
                        self.info['orig_'+key] = self.info[key]
                        self.info[key] = self.aliases[key][str(self.info[key])]

            # add start_time and end_time if not present
            try:
                base_time = self.info["time"]
            except KeyError:
                try:
                    base_time = self.info["nominal_time"]
                except KeyError:
                    base_time = self.info["start_time"]
            if "start_time" not in self.info:
                self.info["start_time"] = base_time
            if "start_date" in self.info:
                self.info["start_time"] = \
                    dt.datetime.combine(self.info["start_date"].date(),
                                        self.info["start_time"].time())
                if "end_date" not in self.info:
                    self.info["end_date"] = self.info["start_date"]
                del self.info["start_date"]
            if "end_date" in self.info:
                self.info["end_time"] = \
                    dt.datetime.combine(self.info["end_date"].date(),
                                        self.info["end_time"].time())
                del self.info["end_date"]
            if "end_time" not in self.info and self.granule_length > 0:
                self.info["end_time"] = base_time + dt.timedelta(seconds=self.granule_length)

            if "end_time" in self.info:
                while self.info["start_time"] > self.info["end_time"]:
                    self.info["end_time"] += dt.timedelta(days=1)