Example #1
class FilePublisher(object):
    # todo add support for custom port and nameserver
    def __new__(cls):
        self = super().__new__(cls)
        LOG.debug('Starting publisher')
        self.pub = NoisyPublisher('l2processor')
        self.pub.start()
        return self

    def __call__(self, job):
        # create message
        # send message
        mda = job['input_mda'].copy()
        mda.pop('dataset', None)
        mda.pop('collection', None)
        topic = job['product_list']['common']['publish_topic']
        for area, config in job['product_list']['product_list'].items():
            for prod, pconfig in config['products'].items():
                for fmat in pconfig['formats']:
                    file_mda = mda.copy()
                    file_mda['uri'] = fmat['filename']
                    file_mda['uid'] = os.path.basename(fmat['filename'])
                    msg = Message(topic, 'file', file_mda)
                    LOG.debug('Publishing %s', str(msg))
                    self.pub.send(str(msg))
        self.pub.stop()
Example #2
def send_message(topic, info, message_type):
    '''Send message with the given topic and info'''
    pub_ = NoisyPublisher("dummy_sender", 0, topic)
    pub = pub_.start()
    time.sleep(2)
    msg = Message(topic, message_type, info)
    print "Sending message: %s" % str(msg)
    pub.send(str(msg))
    pub_.stop()
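
A minimal invocation sketch for the helper above; the topic and payload are made up, and a running posttroll nameserver is assumed so the NoisyPublisher can register itself:

if __name__ == '__main__':
    # Hypothetical call: broadcast one 'info' message on /test/counter
    send_message("/test/counter", {"status": "ok"}, "info")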
Example #3
class PytrollHandler(logging.Handler):
    """Sends the record through a pytroll publisher.
    """
    def __init__(self, name, port=0):
        logging.Handler.__init__(self)
        self._publisher = NoisyPublisher(name, port)
        self._publisher.start()

    def emit(self, record):
        message = self.format(record)
        self._publisher.send(message)

    def close(self):
        self._publisher.stop()
        logging.Handler.close(self)
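
A sketch of attaching the handler above to the standard logging machinery; the logger name and the messages are illustrative:

import logging

logger = logging.getLogger("my_component")      # hypothetical logger name
handler = PytrollHandler("log_publisher")       # each record goes out via posttroll
handler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
logger.addHandler(handler)
logger.warning("something worth broadcasting")  # published by the handler
handler.close()                                 # stops the underlying NoisyPublisher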
Example #5
    def test_listener_container(self):
        """Test listener container"""
        pub = NoisyPublisher("test")
        pub.start()
        sub = ListenerContainer(topics=["/counter"])
        time.sleep(2)
        for counter in range(5):
            tested = False
            msg_out = Message("/counter", "info", str(counter))
            pub.send(str(msg_out))

            msg_in = sub.output_queue.get(True, 1)
            if msg_in is not None:
                self.assertEqual(str(msg_in), str(msg_out))
                tested = True
            self.assertTrue(tested)
        pub.stop()
        sub.stop()
Example #6
    def test_listener_container(self):
        """Test listener container"""
        from posttroll.message import Message
        from posttroll.publisher import NoisyPublisher
        from posttroll.listener import ListenerContainer

        pub = NoisyPublisher("test")
        pub.start()
        sub = ListenerContainer(topics=["/counter"])
        time.sleep(2)
        for counter in range(5):
            tested = False
            msg_out = Message("/counter", "info", str(counter))
            pub.send(str(msg_out))

            msg_in = sub.output_queue.get(True, 1)
            if msg_in is not None:
                self.assertEqual(str(msg_in), str(msg_out))
                tested = True
            self.assertTrue(tested)
        pub.stop()
        sub.stop()
Example #7
class FilePublisher(object):
    """Publisher for generated files."""

    # todo add support for custom port and nameserver
    def __new__(cls):
        """Create new instance."""
        self = super().__new__(cls)
        LOG.debug('Starting publisher')
        self.pub = NoisyPublisher('l2processor')
        self.pub.start()
        return self

    def __call__(self, job):
        """Call the publisher."""
        mda = job['input_mda'].copy()
        mda.pop('dataset', None)
        mda.pop('collection', None)
        for fmat, _fmat_config in plist_iter(
                job['product_list']['product_list']):
            prod_path = "/product_list/areas/%s/products/%s" % (
                fmat['area'], fmat['product'])
            topic_pattern = get_config_value(job['product_list'], prod_path,
                                             "publish_topic")

            file_mda = mda.copy()
            try:
                file_mda['uri'] = fmat['filename']
            except KeyError:
                continue
            file_mda['uid'] = os.path.basename(fmat['filename'])
            topic = compose(topic_pattern, fmat)
            msg = Message(topic, 'file', file_mda)
            LOG.debug('Publishing %s', str(msg))
            self.pub.send(str(msg))
        self.pub.stop()

    def __del__(self):
        """Stop the publisher when last reference is deleted."""
        self.pub.stop()
Example #8
class Heart(Thread):
    """Send heartbeats once in a while.

    *pub* is the publisher to use. If it's None, a new publisher
    will be created.
    *interval* is the interval to send heartbeat on, in seconds.
    *kwargs* is the things you want to send with the beats.
    """

    def __init__(self, pub, interval=30, **kwargs):
        Thread.__init__(self)
        self._loop = True
        self._event = Event()
        self._to_send = kwargs
        self._interval = interval
        if pub is not None:
            self._pub = pub
            self._stop_pub = False
        else:
            self._pub = NoisyPublisher("Heart", 0)
            self._pub.start()
            self._stop_pub = True

    def run(self):
        while self._loop:
            msg = Message("/heart/minion", "heartbeat", self._to_send).encode()
            self._pub.send(msg)
            self._event.wait(self._interval)

    def stop(self):
        """Cardiac arrest
        """
        self._loop = False
        if self._stop_pub:
            self._pub.stop()
        self._event.set()
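
A short usage sketch for the Heart thread above; the interval and the extra payload are invented:

heart = Heart(None, interval=10, hostname="processing-node-1")  # pub=None: Heart creates its own publisher
heart.start()
# ... do the actual work while heartbeats go out every 10 seconds ...
heart.stop()  # ends the loop and stops the internally created publisher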
Example #9
class Dispatcher(Thread):
    """Class that dispatches files."""
    def __init__(self,
                 config_file,
                 publish_port=None,
                 publish_nameservers=None):
        """Initialize dispatcher class."""
        super().__init__()
        self.config = None
        self.topics = None
        self.listener = None
        self.publisher = None
        if publish_port is not None:
            self.publisher = NoisyPublisher("dispatcher",
                                            port=publish_port,
                                            nameservers=publish_nameservers)
            self.publisher.start()
        self.loop = True
        self.config_handler = DispatchConfig(config_file, self.update_config)
        signal.signal(signal.SIGTERM, self.signal_shutdown)

    def signal_shutdown(self, *args, **kwargs):
        """Shutdown dispatcher."""
        self.close()

    def update_config(self, new_config):
        """Update configuration and reload listeners."""
        old_config = self.config
        topics = set()
        try:
            for _client, client_config in new_config.items():
                topics |= set(
                    sum([
                        item['topics']
                        for item in client_config['dispatch_configs']
                    ], []))
            if self.topics != topics:
                if self.listener is not None:
                    # FIXME: make sure to get the last messages though
                    self.listener.stop()
                self.config = new_config
                addresses = client_config.get('subscribe_addresses', None)
                nameserver = client_config.get('nameserver', 'localhost')
                services = client_config.get('subscribe_services', '')
                self.listener = ListenerContainer(topics=topics,
                                                  addresses=addresses,
                                                  nameserver=nameserver,
                                                  services=services)
                self.topics = topics

        except KeyError as err:
            logger.warning(
                'Invalid config for %s, keeping the old one running: %s',
                _client, str(err))
            self.config = old_config

    def run(self):
        """Run dispatcher."""
        while self.loop:
            try:
                msg = self.listener.output_queue.get(timeout=1)
            except Empty:
                continue
            else:
                if msg.type != 'file':
                    continue
                destinations = self.get_destinations(msg)
                if destinations:
                    success = dispatch(msg.data['uri'], destinations)
                    if self.publisher:
                        self._publish(msg, destinations, success)

    def _publish(self, msg, destinations, success):
        """Publish a message.

        The URI is replaced with the URI on the target server.

        """
        for url, params, client in destinations:
            if not success[client]:
                continue
            del params
            info = msg.data.copy()
            info["uri"] = urlsplit(url).path
            topic = self.config[client].get("publish_topic")
            if topic is None:
                logger.error("Publish topic not configured for '%s'", client)
                continue
            topic = compose(topic, info)
            msg = Message(topic, 'file', info)
            logger.debug('Publishing %s', str(msg))
            self.publisher.send(str(msg))

    def get_destinations(self, msg):
        """Get the destinations for this message."""
        destinations = []
        for client, config in self.config.items():
            for disp_config in config['dispatch_configs']:
                for topic in disp_config['topics']:
                    if msg.subject.startswith(topic):
                        break
                else:
                    continue
                if check_conditions(msg, disp_config):
                    destinations.append(
                        self.create_dest_url(msg, client, disp_config))
        return destinations

    def create_dest_url(self, msg, client, disp_config):
        """Create the destination URL and the connection parameters."""
        defaults = self.config[client]
        info_dict = dict()
        for key in ['host', 'directory', 'filepattern']:
            try:
                info_dict[key] = disp_config[key]
            except KeyError:
                info_dict[key] = defaults[key]
        connection_parameters = disp_config.get(
            'connection_parameters', defaults.get('connection_parameters'))
        host = info_dict['host']
        path = os.path.join(info_dict['directory'], info_dict['filepattern'])
        mda = msg.data.copy()

        for key, aliases in defaults.get('aliases', {}).items():
            if isinstance(aliases, dict):
                aliases = [aliases]

            for alias in aliases:
                new_key = alias.pop("_alias_name", key)
                if key in msg.data:
                    mda[new_key] = alias.get(msg.data[key], msg.data[key])

        path = compose(path, mda)
        parts = urlsplit(host)
        host_path = urlunsplit(
            (parts.scheme, parts.netloc, path, parts.query, parts.fragment))
        return host_path, connection_parameters, client

    def close(self):
        """Shutdown the dispatcher."""
        logger.info('Terminating dispatcher.')
        self.loop = False
        try:
            self.listener.stop()
        except Exception:
            logger.exception("Couldn't stop listener.")
        if self.publisher:
            try:
                self.publisher.stop()
            except Exception:
                logger.exception("Couldn't stop publisher.")
        try:
            self.config_handler.close()
        except Exception:
            logger.exception("Couldn't stop config handler.")
Example #10
class ActiveFiresPostprocessing(Thread):
    """The active fires post processor."""
    def __init__(self,
                 configfile,
                 shp_boarders,
                 shp_mask,
                 regional_filtermask=None):
        """Initialize the active fires post processor class."""
        super().__init__()
        self.shp_boarders = shp_boarders
        self.shp_filtermask = shp_mask
        self.regional_filtermask = regional_filtermask
        self.configfile = configfile
        self.options = {}

        config = read_config(self.configfile)
        self._set_options_from_config(config)

        self.host = socket.gethostname()

        self.timezone = self.options.get('timezone', 'GMT')

        self.input_topic = self.options['subscribe_topics'][0]
        self.output_topic = self.options['publish_topic']
        self.infile_pattern = self.options.get('af_pattern_ibands')
        self.outfile_pattern_national = self.options.get(
            'geojson_file_pattern_national')
        self.outfile_pattern_regional = self.options.get(
            'geojson_file_pattern_regional')
        self.output_dir = self.options.get('output_dir', '/tmp')

        frmt = self.options['regional_shapefiles_format']
        self.regional_shapefiles_globstr = globify(frmt)

        self.listener = None
        self.publisher = None
        self.loop = False
        self._setup_and_start_communication()

    def _setup_and_start_communication(self):
        """Set up the Posttroll communication and start the publisher."""
        logger.debug("Starting up... Input topic: %s", self.input_topic)
        now = datetime_from_utc_to_local(datetime.now(), self.timezone)
        logger.debug("Output times for timezone: {zone} Now = {time}".format(
            zone=str(self.timezone), time=now))

        self.listener = ListenerContainer(topics=[self.input_topic])
        self.publisher = NoisyPublisher("active_fires_postprocessing")
        self.publisher.start()
        self.loop = True
        signal.signal(signal.SIGTERM, self.signal_shutdown)

    def _set_options_from_config(self, config):
        """From the configuration on disk set the option dictionary, holding all metadata for processing."""
        for item in config:
            if not isinstance(config[item], dict):
                self.options[item] = config[item]

        if isinstance(self.options.get('subscribe_topics'), str):
            # Filter empty strings out with a comprehension; removing items
            # while iterating over the list would skip elements.
            subscribe_topics = self.options.get('subscribe_topics').split(',')
            self.options['subscribe_topics'] = [
                item for item in subscribe_topics if len(item) > 0
            ]

        if isinstance(self.options.get('publish_topics'), str):
            publish_topics = self.options.get('publish_topics').split(',')
            self.options['publish_topics'] = [
                item for item in publish_topics if len(item) > 0
            ]

    def signal_shutdown(self, *args, **kwargs):
        """Shutdown the Active Fires postprocessing."""
        self.close()

    def run(self):
        """Run the AF post processing."""
        while self.loop:
            try:
                msg = self.listener.output_queue.get(timeout=1)
                logger.debug("Message: %s", str(msg.data))
            except Empty:
                continue
            else:
                if msg.type not in ['file', 'collection', 'dataset']:
                    logger.debug("Message type not supported: %s",
                                 str(msg.type))
                    continue

                platform_name = msg.data.get('platform_name')
                filename = get_filename_from_uri(msg.data.get('uri'))
                if not os.path.exists(filename):
                    logger.warning("File does not exist!")
                    continue

                file_ok = check_file_type_okay(msg.data.get('type'))
                no_fires_text = 'No fire detections for this granule'
                output_messages = self._generate_no_fires_messages(
                    msg, no_fires_text)
                if not file_ok:
                    for output_msg in output_messages:
                        logger.debug("Sending message: %s", str(output_msg))
                        self.publisher.send(str(output_msg))
                    continue

                af_shapeff = ActiveFiresShapefileFiltering(
                    filename,
                    platform_name=platform_name,
                    timezone=self.timezone)
                afdata = af_shapeff.get_af_data(self.infile_pattern)

                if len(afdata) == 0:
                    # No detections: send the prepared "no fires" messages.
                    for output_msg in output_messages:
                        logger.debug("Sending message: %s", str(output_msg))
                        self.publisher.send(str(output_msg))
                    continue

                output_messages, afdata = self.fires_filtering(msg, af_shapeff)

                for output_msg in output_messages:
                    if output_msg:
                        logger.debug("Sending message: %s", str(output_msg))
                        self.publisher.send(str(output_msg))

                # Do the regional filtering now:
                if not self.regional_filtermask:
                    logger.info("No regional filtering is attempted.")
                    continue

                if len(afdata) == 0:
                    logger.debug(
                        "No fires - so no regional filtering to be done!")
                    continue

                # FIXME! If afdata is empty (len=0) then it seems all data are inside all regions!
                af_shapeff = ActiveFiresShapefileFiltering(
                    afdata=afdata,
                    platform_name=platform_name,
                    timezone=self.timezone)
                regional_fmask = af_shapeff.get_regional_filtermasks(
                    self.regional_filtermask,
                    globstr=self.regional_shapefiles_globstr)
                regional_messages = self.regional_fires_filtering_and_publishing(
                    msg, regional_fmask, af_shapeff)
                for region_msg in regional_messages:
                    logger.debug("Sending message: %s", str(region_msg))
                    self.publisher.send(str(region_msg))

    def regional_fires_filtering_and_publishing(self, msg, regional_fmask,
                                                afsff_obj):
        """From the regional-fires-filter-mask and the fire detection data send regional messages."""
        logger.debug(
            "Perform regional masking on VIIRS AF detections and publish accordingly."
        )

        afdata = afsff_obj.get_af_data()
        fmda = afsff_obj.metadata

        fmda['platform'] = afsff_obj.platform_name

        pout = Parser(self.outfile_pattern_regional)

        output_messages = []
        regions_with_detections = 0
        for region_name in regional_fmask:
            if not regional_fmask[region_name]['some_inside_test_area']:
                continue

            regions_with_detections = regions_with_detections + 1
            fmda['region_name'] = regional_fmask[region_name]['attributes'][
                'Kod_omr']

            out_filepath = os.path.join(self.output_dir, pout.compose(fmda))
            logger.debug("Output file path = %s", out_filepath)
            data_in_region = afdata[regional_fmask[region_name]['mask']]
            filepath = store_geojson(out_filepath,
                                     data_in_region,
                                     platform_name=fmda['platform'])
            if not filepath:
                logger.warning(
                    "Something wrong happended storing regional " +
                    "data to Geojson - area: {name}".format(str(region_name)))
                continue

            outmsg = self._generate_output_message(filepath, msg,
                                                   regional_fmask[region_name])
            output_messages.append(outmsg)
            logger.info("Geojson file created! Number of fires in region = %d",
                        len(data_in_region))

        logger.debug(
            "Regional masking done. Number of regions with fire " +
            "detections on this granule: %s", str(regions_with_detections))
        return output_messages

    def fires_filtering(self, msg, af_shapeff):
        """Read Active Fire data and perform spatial filtering removing false detections.

        Do the national filtering first, and then filter out potential false
        detections by the special mask for that.

        """
        logger.debug(
            "Read VIIRS AF detections and perform quality control and spatial filtering"
        )

        fmda = af_shapeff.metadata
        # metadata contains time and everything, but it is not transferred to the dataframe.attrs

        pout = Parser(self.outfile_pattern_national)
        out_filepath = os.path.join(self.output_dir, pout.compose(fmda))
        logger.debug("Output file path = %s", out_filepath)

        # National filtering:
        af_shapeff.fires_filtering(self.shp_boarders)
        # Metadata should be transferred here!
        afdata_ff = af_shapeff.get_af_data()

        if len(afdata_ff) > 0:
            af_shapeff.fires_filtering(self.shp_filtermask,
                                       start_geometries_index=0,
                                       inside=False)
            afdata_ff = af_shapeff.get_af_data()

        filepath = store_geojson(out_filepath,
                                 afdata_ff,
                                 platform_name=af_shapeff.platform_name)
        out_messages = self.get_output_messages(filepath, msg, len(afdata_ff))

        return out_messages, afdata_ff

    def get_output_messages(self, filepath, msg, number_of_data):
        """Generate the adequate output message(s) depending on if an output file was created or not."""
        if filepath:
            logger.info(
                "geojson file created! Number of fires after filtering = %d",
                number_of_data)
            return [self._generate_output_message(filepath, msg)]
        else:
            logger.info(
                "No geojson file created, number of fires after filtering = %d",
                number_of_data)
            return self._generate_no_fires_messages(
                msg, 'No true fire detections inside National borders')

    def _generate_output_message(self, filepath, input_msg, region=None):
        """Create the output message to publish."""

        output_topic = generate_posttroll_topic(self.output_topic, region)
        to_send = prepare_posttroll_message(input_msg, region)
        to_send['uri'] = ('ssh://%s/%s' % (self.host, filepath))
        to_send['uid'] = os.path.basename(filepath)
        to_send['type'] = 'GEOJSON-filtered'
        to_send['format'] = 'geojson'
        to_send['product'] = 'afimg'
        pubmsg = Message(output_topic, 'file', to_send)
        return pubmsg

    def _generate_no_fires_messages(self, input_msg, msg_string):
        """Create the output messages to publish."""

        to_send = prepare_posttroll_message(input_msg)
        to_send['info'] = msg_string
        publish_messages = []
        for ext in ['National', 'Regional']:
            topic = self.output_topic + '/' + ext
            publish_messages.append(Message(topic, 'info', to_send))

        return publish_messages

    def close(self):
        """Shutdown the Active Fires postprocessing."""
        logger.info('Terminating Active Fires post processing.')
        self.loop = False
        try:
            self.listener.stop()
        except Exception:
            logger.exception("Couldn't stop listener.")
        if self.publisher:
            try:
                self.publisher.stop()
            except Exception:
                logger.exception("Couldn't stop publisher.")
Example #11
class FilePublisher:
    """Publisher for generated files."""
    def __init__(self, port=0, nameservers=""):
        """Create new instance."""
        self.pub = None
        self.port = port
        self.nameservers = nameservers
        self.__setstate__({'port': port, 'nameservers': nameservers})

    def __setstate__(self, kwargs):
        """Set things running even when loading from YAML."""
        LOG.debug('Starting publisher')
        self.port = kwargs.get('port', 0)
        self.nameservers = kwargs.get('nameservers', "")
        if self.nameservers is None:
            self.pub = Publisher("tcp://*:" + str(self.port), "l2processor")
        else:
            self.pub = NoisyPublisher('l2processor',
                                      port=self.port,
                                      nameservers=self.nameservers)
            self.pub.start()

    @staticmethod
    def create_message(fmat, mda):
        """Create a message topic and mda."""
        topic_pattern = fmat["publish_topic"]
        file_mda = mda.copy()
        file_mda.update(fmat.get('extra_metadata', {}))

        file_mda['uri'] = os.path.abspath(fmat['filename'])

        file_mda['uid'] = os.path.basename(fmat['filename'])
        file_mda['product'] = fmat['product']
        file_mda['area'] = fmat['area']
        for key in ['productname', 'areaname', 'format']:
            try:
                file_mda[key] = fmat[key]
            except KeyError:
                pass
        for extra_info in [
                'area_coverage_percent', 'area_sunlight_coverage_percent'
        ]:
            try:
                file_mda[extra_info] = fmat[extra_info]
            except KeyError:
                pass

        topic = compose(topic_pattern, fmat)
        return topic, file_mda

    @staticmethod
    def create_dispatch_uri(ditem, fmat):
        """Create a uri from dispatch info."""
        path = compose(ditem['path'], fmat)
        netloc = ditem.get('hostname', '')

        return urlunsplit((ditem.get('scheme', ''), netloc, path, '', ''))

    def send_dispatch_messages(self, fmat, fmat_config, topic, file_mda):
        """Send dispatch messages corresponding to a file."""
        for dispatch_item in fmat_config.get('dispatch', []):
            mda = {
                'file_mda': file_mda,
                'source': fmat_config['filename'],
                'target': self.create_dispatch_uri(dispatch_item, fmat)
            }
            msg = Message(topic, 'dispatch', mda)
            LOG.debug('Sending dispatch order: %s', str(msg))
            self.pub.send(str(msg))

    def __call__(self, job):
        """Call the publisher."""
        mda = job['input_mda'].copy()
        mda.pop('dataset', None)
        mda.pop('collection', None)
        for fmat, fmat_config in plist_iter(
                job['product_list']['product_list'], mda):
            resampled_scene = job['resampled_scenes'].get(fmat['area'], [])
            if product_missing_from_scene(fmat['product'], resampled_scene):
                LOG.debug('Not publishing missing product %s.', str(fmat))
                continue
            try:
                topic, file_mda = self.create_message(fmat, mda)
            except KeyError:
                LOG.debug('Could not create a message for %s.', str(fmat))
                continue
            msg = Message(topic, 'file', file_mda)
            LOG.info('Publishing %s', str(msg))
            self.pub.send(str(msg))
            self.send_dispatch_messages(fmat, fmat_config, topic, file_mda)

    def stop(self):
        """Stop the publisher."""
        if self.pub:
            self.pub.stop()

    def __del__(self):
        """Stop the publisher when last reference is deleted."""
        self.stop()
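
For the dispatch path above, a sketch of what one entry under a format's 'dispatch' key might look like; only the keys follow the code, all values are made up:

fmat_config = {
    'filename': '/data/out/overview_euro.tif',
    'dispatch': [
        {'scheme': 'ftp',                   # consumed by create_dispatch_uri
         'hostname': 'archive.example.org',
         'path': '/incoming/{area}/{product}.tif'},
    ],
}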
Example #12
class EventHandler(ProcessEvent):

    """
    Event handler class for inotify.
     *topic* - topic of the published messages
     *posttroll_port* - port number to publish the messages on
     *filepattern* - filepattern for finding information from the filename
    """

    def __init__(self, topic, instrument, posttroll_port=0, filepattern=None,
                 aliases=None, tbus_orbit=False, history=0, granule_length=0):
        super(EventHandler, self).__init__()

        self._pub = NoisyPublisher("trollstalker", posttroll_port, topic)
        self.pub = self._pub.start()
        self.topic = topic
        self.info = {}
        if filepattern is None:
            filepattern = '{filename}'
        self.file_parser = Parser(filepattern)
        self.instrument = instrument
        self.aliases = aliases
        self.tbus_orbit = tbus_orbit
        self.granule_length = granule_length
        self._deque = deque([], history)

    def stop(self):
        '''Stop publisher.
        '''
        self._pub.stop()

    def __clean__(self):
        '''Clean instance attributes.
        '''
        self.info = {}

    def process_IN_CLOSE_WRITE(self, event):
        """When a file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_CLOSE_WRITE")
        self.process(event)

    def process_IN_CLOSE_NOWRITE(self, event):
        """When a nonwritable file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_CREATE")
        self.process(event)

    def process_IN_MOVED_TO(self, event):
        """When a file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_MOVED_TO")
        self.process(event)

    def process_IN_CREATE(self, event):
        """When a file is created, process the associated event.
        """
        LOGGER.debug("trigger: IN_CREATE")
        self.process(event)

    def process_IN_CLOSE_MODIFY(self, event):
        """When a file is modified and closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_MODIFY")
        self.process(event)

    def process(self, event):
        '''Process the event'''
        # New file created and closed
        if not event.dir:
            LOGGER.debug("processing %s", event.pathname)
            # parse information and create self.info dict{}
            self.parse_file_info(event)
            if len(self.info) > 0:
                # Check if this file has been recently dealt with
                if event.pathname not in self._deque:
                    self._deque.append(event.pathname)
                    message = self.create_message()
                    LOGGER.info("Publishing message %s", str(message))
                    self.pub.send(str(message))
                else:
                    LOGGER.info("Data has been published recently, skipping.")
            self.__clean__()

    def create_message(self):
        """Create broadcasted message
        """
        return Message(self.topic, 'file', self.info)

    def parse_file_info(self, event):
        '''Parse satellite and orbit information from the filename.
        A message is sent if a matching filepattern is found.
        '''
        try:
            LOGGER.debug("filter: %s\t event: %s",
                         self.file_parser.fmt, event.pathname)
            self.info = self.file_parser.parse(
                os.path.basename(event.pathname))
            LOGGER.debug("Extracted: %s", str(self.info))
        except ValueError:
            # Filename didn't match pattern, so empty the info dict
            LOGGER.info("Couldn't extract any usefull information")
            self.info = {}
        else:
            self.info['uri'] = event.pathname
            self.info['uid'] = os.path.basename(event.pathname)
            self.info['sensor'] = self.instrument.split(',')
            LOGGER.debug("self.info['sensor']: " + str(self.info['sensor']))

            if self.tbus_orbit and "orbit_number" in self.info:
                LOGGER.info("Changing orbit number by -1!")
                self.info["orbit_number"] -= 1

            # replace values with corresponding aliases, if any are given
            if self.aliases:
                info = self.info.copy()
                for key in info:
                    if key in self.aliases:
                        self.info['orig_'+key] = self.info[key]
                        self.info[key] = self.aliases[key][str(self.info[key])]

            # add start_time and end_time if not present
            try:
                base_time = self.info["time"]
            except KeyError:
                try:
                    base_time = self.info["nominal_time"]
                except KeyError:
                    base_time = self.info["start_time"]
            if "start_time" not in self.info:
                self.info["start_time"] = base_time
            if "start_date" in self.info:
                self.info["start_time"] = \
                    dt.datetime.combine(self.info["start_date"].date(),
                                        self.info["start_time"].time())
                if "end_date" not in self.info:
                    self.info["end_date"] = self.info["start_date"]
                del self.info["start_date"]
            if "end_date" in self.info:
                self.info["end_time"] = \
                    dt.datetime.combine(self.info["end_date"].date(),
                                        self.info["end_time"].time())
                del self.info["end_date"]
            if "end_time" not in self.info and self.granule_length > 0:
                self.info["end_time"] = base_time + dt.timedelta(seconds=self.granule_length)

            if "end_time" in self.info:
                while self.info["start_time"] > self.info["end_time"]:
                    self.info["end_time"] += dt.timedelta(days=1)
Example #13
class EventHandler(ProcessEvent):
    """
    Event handler class for inotify.
     *topic* - topic of the published messages
     *posttroll_port* - port number to publish the messages on
     *filepattern* - filepattern for finding information from the filename
    """
    def __init__(self,
                 topic,
                 instrument,
                 posttroll_port=0,
                 filepattern=None,
                 aliases=None,
                 tbus_orbit=False,
                 history=0,
                 granule_length=0):
        super(EventHandler, self).__init__()

        self._pub = NoisyPublisher("trollstalker", posttroll_port, topic)
        self.pub = self._pub.start()
        self.topic = topic
        self.info = {}
        if filepattern is None:
            filepattern = '{filename}'
        self.file_parser = Parser(filepattern)
        self.instrument = instrument
        self.aliases = aliases
        self.tbus_orbit = tbus_orbit
        self.granule_length = granule_length
        self._deque = deque([], history)

    def stop(self):
        '''Stop publisher.
        '''
        self._pub.stop()

    def __clean__(self):
        '''Clean instance attributes.
        '''
        self.info = {}

    def process_IN_CLOSE_WRITE(self, event):
        """When a file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_CLOSE_WRITE")
        self.process(event)

    def process_IN_CLOSE_NOWRITE(self, event):
        """When a nonwritable file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_CREATE")
        self.process(event)

    def process_IN_MOVED_TO(self, event):
        """When a file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_MOVED_TO")
        self.process(event)

    def process_IN_CREATE(self, event):
        """When a file is created, process the associated event.
        """
        LOGGER.debug("trigger: IN_CREATE")
        self.process(event)

    def process_IN_CLOSE_MODIFY(self, event):
        """When a file is modified and closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_MODIFY")
        self.process(event)

    def process(self, event):
        '''Process the event'''
        # New file created and closed
        if not event.dir:
            LOGGER.debug("processing %s", event.pathname)
            # parse information and create self.info dict{}
            self.parse_file_info(event)
            if len(self.info) > 0:
                # Check if this file has been recently dealt with
                if event.pathname not in self._deque:
                    self._deque.append(event.pathname)
                    message = self.create_message()
                    LOGGER.info("Publishing message %s", str(message))
                    self.pub.send(str(message))
                else:
                    LOGGER.info("Data has been published recently, skipping.")
            self.__clean__()

    def create_message(self):
        """Create broadcasted message
        """
        return Message(self.topic, 'file', self.info)

    def parse_file_info(self, event):
        '''Parse satellite and orbit information from the filename.
        A message is sent if a matching filepattern is found.
        '''
        try:
            LOGGER.debug("filter: %s\t event: %s", self.file_parser.fmt,
                         event.pathname)
            self.info = self.file_parser.parse(os.path.basename(
                event.pathname))
            LOGGER.debug("Extracted: %s", str(self.info))
        except ValueError:
            # Filename didn't match pattern, so empty the info dict
            LOGGER.info("Couldn't extract any usefull information")
            self.info = {}
        else:
            self.info['uri'] = event.pathname
            self.info['uid'] = os.path.basename(event.pathname)
            self.info['sensor'] = self.instrument.split(',')
            LOGGER.debug("self.info['sensor']: " + str(self.info['sensor']))

            if self.tbus_orbit and "orbit_number" in self.info:
                LOGGER.info("Changing orbit number by -1!")
                self.info["orbit_number"] -= 1

            # replace values with corresponding aliases, if any are given
            if self.aliases:
                info = self.info.copy()
                for key in info:
                    if key in self.aliases:
                        self.info['orig_' + key] = self.info[key]
                        self.info[key] = self.aliases[key][str(self.info[key])]

            # add start_time and end_time if not present
            try:
                base_time = self.info["time"]
            except KeyError:
                try:
                    base_time = self.info["nominal_time"]
                except KeyError:
                    base_time = self.info["start_time"]
            if "start_time" not in self.info:
                self.info["start_time"] = base_time
            if "start_date" in self.info:
                self.info["start_time"] = \
                    dt.datetime.combine(self.info["start_date"].date(),
                                        self.info["start_time"].time())
                if "end_date" not in self.info:
                    self.info["end_date"] = self.info["start_date"]
                del self.info["start_date"]
            if "end_date" in self.info:
                self.info["end_time"] = \
                    dt.datetime.combine(self.info["end_date"].date(),
                                        self.info["end_time"].time())
                del self.info["end_date"]
            if "end_time" not in self.info and self.granule_length > 0:
                self.info["end_time"] = base_time + dt.timedelta(
                    seconds=self.granule_length)

            if "end_time" in self.info:
                while self.info["start_time"] > self.info["end_time"]:
                    self.info["end_time"] += dt.timedelta(days=1)
Example #14
class FilePublisher(AbstractWatchDogProcessor):
    def __init__(self, config):
        self.config = config.copy()
        if isinstance(config["filepattern"], (str, bytes)):
            self.config["filepattern"] = [self.config["filepattern"]]

        self.parsers = [
            Parser(filepattern) for filepattern in self.config["filepattern"]
        ]

        self.aliases = parse_aliases(config)

        self.topic = self.config["topic"]
        self.tbus_orbit = self.config.get("tbus_orbit", False)
        logger.debug("Looking for: %s",
                     str([parser.globify() for parser in self.parsers]))
        AbstractWatchDogProcessor.__init__(
            self, [parser.globify() for parser in self.parsers],
            config.get("watcher", "Observer"))

        self._pub = NoisyPublisher("trollstalker",
                                   int(self.config["posttroll_port"]),
                                   self.config["topic"])
        self.pub = None

        obsolete_keys = [
            "topic", "filepattern", "tbus_orbit", "posttroll_port", "watch",
            "config_item", "configuration_file"
        ]

        for key in list(self.config.keys()):
            if key.startswith("alias_") or key in obsolete_keys:
                del self.config[key]

    def start(self):
        AbstractWatchDogProcessor.start(self)
        self.pub = self._pub.start()

    def stop(self):
        self._pub.stop()
        AbstractWatchDogProcessor.stop(self)

    def process(self, pathname):
        '''Process the event'''
        # New file created and closed
        logger.debug("processing %s", pathname)
        # parse information and create self.info dict{}
        metadata = self.config.copy()
        success = False
        for parser in self.parsers:
            try:
                metadata.update(parser.parse(pathname))
                success = True
                break
            except ValueError:
                pass
        if not success:
            logger.warning("Could not find a matching pattern for %s",
                           pathname)

        metadata['uri'] = pathname
        metadata['uid'] = os.path.basename(pathname)

        if self.tbus_orbit and "orbit_number" in metadata:
            logger.info("Changing orbit number by -1!")
            metadata["orbit_number"] -= 1

        # replace values with corresponding aliases, if any are given
        if self.aliases:
            for key in metadata:
                if key in self.aliases:
                    metadata[key] = self.aliases[key][str(metadata[key])]

        message = Message(self.topic, 'file', metadata)
        logger.info("Publishing message %s" % str(message))
        self.pub.send(str(message))
Example #15
class Chain(Thread):
    """The Chain class."""

    def __init__(self, name, config):
        """Init a chain object."""
        super(Chain, self).__init__()
        self._config = config
        self._name = name
        self.publisher = None
        self.listeners = {}
        self.listener_died_event = Event()
        self.running = True

        # Setup publisher
        try:
            nameservers = self._config["nameservers"]
            if nameservers:
                nameservers = nameservers.split()
            self.publisher = NoisyPublisher(
                "move_it_" + self._name,
                port=self._config["publish_port"],
                nameservers=nameservers)
            self.publisher.start()
        except (KeyError, NameError):
            pass

    def setup_listeners(self, callback, sync_publisher):
        """Set up the listeners."""
        self.callback = callback
        self.sync_publisher = sync_publisher
        try:
            topics = []
            if "topic" in self._config:
                topics.append(self._config["topic"])
            if self._config.get("heartbeat", False):
                topics.append(SERVER_HEARTBEAT_TOPIC)
            for provider in self._config["providers"]:
                if '/' in provider.split(':')[-1]:
                    parts = urlparse(provider)
                    if parts.scheme != '':
                        provider = urlunparse((parts.scheme, parts.netloc,
                                               '', '', '', ''))
                    else:
                        # If there's no scheme, urlparse thinks the
                        # URI is a local file
                        provider = urlunparse(('tcp', parts.path,
                                               '', '', '', ''))
                    topics.append(parts.path)
                LOGGER.debug("Add listener for %s with topic %s",
                             provider, str(topics))
                listener = Listener(
                    provider,
                    topics,
                    callback,
                    sync_publisher=sync_publisher,
                    publisher=self.publisher,
                    die_event=self.listener_died_event,
                    **self._config)
                listener.start()
                self.listeners[provider] = listener
        except Exception as err:
            LOGGER.exception(str(err))
            raise

    def restart_dead_listeners(self):
        """Restart dead listeners."""
        plural = ['', 's']
        for provider in list(self.listeners.keys()):
            if not self.listeners[provider].is_alive():
                cause_of_death = self.listeners[provider].cause_of_death
                death_count = self.listeners[provider].death_count
                while death_count < 3:
                    LOGGER.error("Listener for %s died %d time%s: %s", provider, death_count + 1,
                                 plural[min(death_count, 1)], str(cause_of_death))
                    self.listeners[provider] = self.listeners[provider].restart()
                    time.sleep(.5)
                    if not self.listeners[provider].is_alive():
                        death_count = self.listeners[provider].death_count
                        cause_of_death = self.listeners[provider].cause_of_death
                    else:
                        break
                if death_count >= 3:
                    with suppress(Exception):
                        self.listeners[provider].stop()
                    del self.listeners[provider]
                    LOGGER.critical("Listener for %s switched off: %s", provider, str(cause_of_death))

    def run(self):
        """Monitor the listeners."""
        try:
            while self.running:
                if self.listener_died_event.wait(LISTENER_CHECK_INTERVAL):
                    self.restart_dead_listeners()
                    self.listener_died_event.clear()
        except Exception:
            LOGGER.exception("Chain %s died!", self._name)

    def config_equals(self, other_config):
        """Check that current config is the same as `other_config`."""
        for key, val in other_config.items():
            if ((key not in ["listeners", "publisher"]) and
                ((key not in self._config) or
                    (self._config[key] != val))):
                return False
        return True

    def reset_listeners(self):
        """Reset the listeners."""
        for listener in self.listeners.values():
            listener.stop()
        self.listeners = {}

    def stop(self):
        """Stop the chain."""
        self.running = False
        if self.publisher:
            self.publisher.stop()
        self.reset_listeners()

    def restart(self):
        """Restart the chain, return a new running instance."""
        self.stop()
        new_chain = self.__class__(self._name, self._config)
        new_chain.setup_listeners(self.callback, self.sync_publisher)
        new_chain.start()
        return new_chain
Example #16
class Chain(Thread):
    """The Chain class."""
    def __init__(self, name, config):
        """Init a chain object."""
        super(Chain, self).__init__()
        self._config = config
        self._name = name
        self._np = None
        self.publisher = None
        self.listeners = {}
        self.listener_died_event = Event()
        self.running = True
        self.setup_publisher()

    def setup_publisher(self):
        """Initialize publisher."""
        if self._np is None:
            try:
                nameservers = self._config["nameservers"]
                if nameservers:
                    nameservers = nameservers.split()
                self._np = NoisyPublisher("move_it_" + self._name,
                                          port=self._config["publish_port"],
                                          nameservers=nameservers)
                self.publisher = self._np.start()
            except (KeyError, NameError):
                pass

    def setup_listeners(self, keep_providers=None):
        """Set up the listeners."""
        keep_providers = keep_providers or []
        try:
            topics = []
            if "topic" in self._config:
                topics.append(self._config["topic"])
            if self._config.get("heartbeat", False):
                topics.append(SERVER_HEARTBEAT_TOPIC)
                # Subscribe also to heartbeat messages of other clients
                topics.append(CLIENT_HEARTBEAT_TOPIC_BASE + '_' + self._name)
            for provider in self._config["providers"]:
                if provider in keep_providers and provider in self.listeners:
                    LOGGER.debug(
                        "Not restarting Listener to %s, config not changed.",
                        provider)
                    continue
                if '/' in provider.split(':')[-1]:
                    parts = urlparse(provider)
                    if parts.scheme != '':
                        provider = urlunparse(
                            (parts.scheme, parts.netloc, '', '', '', ''))
                    else:
                        # If there's no scheme, urlparse thinks the
                        # URI is a local file
                        provider = urlunparse(
                            ('tcp', parts.path, '', '', '', ''))
                    topics.append(parts.path)
                LOGGER.debug("Add listener for %s with topic %s", provider,
                             str(topics))
                listener = Listener(provider,
                                    topics,
                                    publisher=self.publisher,
                                    die_event=self.listener_died_event,
                                    **self._config)
                listener.start()
                self.listeners[provider] = listener
        except Exception as err:
            LOGGER.exception(str(err))
            raise

    def restart_dead_listeners(self):
        """Restart dead listeners."""
        plural = ['', 's']
        for provider in list(self.listeners.keys()):
            if not self.listeners[provider].is_alive():
                cause_of_death = self.listeners[provider].cause_of_death
                death_count = self.listeners[provider].death_count
                while death_count < 3:
                    LOGGER.error("Listener for %s died %d time%s: %s",
                                 provider, death_count + 1,
                                 plural[min(death_count, 1)],
                                 str(cause_of_death))
                    self.listeners[provider] = self.listeners[
                        provider].restart()
                    time.sleep(.5)
                    if not self.listeners[provider].is_alive():
                        death_count = self.listeners[provider].death_count
                        cause_of_death = self.listeners[
                            provider].cause_of_death
                    else:
                        break
                if death_count >= 3:
                    with suppress(Exception):
                        self.listeners[provider].stop()
                    del self.listeners[provider]
                    LOGGER.critical("Listener for %s switched off: %s",
                                    provider, str(cause_of_death))

    def run(self):
        """Monitor the listeners."""
        try:
            while self.running:
                if self.listener_died_event.wait(LISTENER_CHECK_INTERVAL):
                    self.restart_dead_listeners()
                    self.listener_died_event.clear()
        except Exception:
            LOGGER.exception("Chain %s died!", self._name)

    def config_equals(self, other_config):
        """Check that current config is the same as `other_config`."""
        for key, val in other_config.items():
            if ((key not in ["listeners", "publisher"]) and
                ((key not in self._config) or (self._config[key] != val))):
                return False
        return True

    def get_unchanged_providers(self, other_config):
        """Get a list of providers that have not changed between this and other config."""
        if self._config["topic"] != other_config["topic"]:
            return []
        return list(
            set(self._config["providers"]).intersection(
                set(other_config["providers"])))

    def publisher_needs_restarting(self, other_config):
        """Check that current config is the same as `other_config`."""
        for key in ["nameservers", "publish_port"]:
            if self._config[key] != other_config[key]:
                return True
        return False

    def refresh(self, new_config):
        """Refresh the chain with new config."""
        publisher_needs_restarting = self.publisher_needs_restarting(
            new_config)
        unchanged_providers = self.get_unchanged_providers(new_config)
        self._config = new_config
        if publisher_needs_restarting:
            self._refresh_publisher()
        self._refresh_listeners(unchanged_providers)
        if not self.running:
            self.start()

    def _refresh_publisher(self):
        self._stop_publisher()
        self.setup_publisher()

    def _refresh_listeners(self, unchanged_providers):
        self.reset_listeners(keep_providers=unchanged_providers)
        self.setup_listeners(keep_providers=unchanged_providers)

    def reset_listeners(self, keep_providers=None):
        """Reset the listeners."""
        keep_providers = keep_providers or []
        kept_listeners = {}
        for key, listener in self.listeners.items():
            if key in keep_providers:
                kept_listeners[key] = listener
                continue
            listener.stop()
        self.listeners = kept_listeners

    def stop(self):
        """Stop the chain."""
        self._stop_publisher()
        self.running = False
        self.reset_listeners()

    def _stop_publisher(self):
        if self._np:
            self._np.stop()
            self._np = None

    def restart(self):
        """Restart the chain, return a new running instance."""
        self.stop()
        new_chain = self.__class__(self._name, self._config)
        new_chain.setup_listeners()
        new_chain.start()
        return new_chain
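
A construction sketch for the Chain above; the keys mirror those the code reads, the values are invented:

config = {
    "providers": ["tcp://sat-server:9010"],
    "publish_port": 0,            # 0 lets the OS pick a port
    "nameservers": "localhost",   # split into a list by setup_publisher
    "topic": "/new/files",
}
chain = Chain("hrpt", config)
chain.setup_listeners()
chain.start()
# ...
chain.stop()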
Example #17
class FilePublisher(object):
    """Publisher for generated files."""

    # todo add support for custom port and nameserver
    def __new__(cls):
        """Create new instance."""
        self = super().__new__(cls)
        LOG.debug('Starting publisher')
        self.pub = NoisyPublisher('l2processor')
        self.pub.start()
        return self

    @staticmethod
    def create_message(fmat, mda):
        """Create a message topic and mda."""
        topic_pattern = fmat["publish_topic"]
        file_mda = mda.copy()

        file_mda['uri'] = os.path.abspath(fmat['filename'])

        file_mda['uid'] = os.path.basename(fmat['filename'])
        file_mda['product'] = fmat['product']
        file_mda['area'] = fmat['area']
        for key in ['productname', 'areaname', 'format']:
            try:
                file_mda[key] = fmat[key]
            except KeyError:
                pass
        for extra_info in ['area_coverage_percent', 'area_sunlight_coverage_percent']:
            try:
                file_mda[extra_info] = fmat[extra_info]
            except KeyError:
                pass

        topic = compose(topic_pattern, fmat)
        return topic, file_mda

    @staticmethod
    def create_dispatch_uri(ditem, fmat):
        """Create a uri from dispatch info."""
        path = compose(ditem['path'], fmat)
        netloc = ditem.get('hostname', '')

        return urlunsplit((ditem.get('scheme', ''), netloc, path, '', ''))

    def send_dispatch_messages(self, fmat, fmat_config, topic, file_mda):
        """Send dispatch messages corresponding to a file."""
        for dispatch_item in fmat_config.get('dispatch', []):
            mda = {
                'file_mda': file_mda,
                'source': fmat_config['filename'],
                'target': self.create_dispatch_uri(dispatch_item, fmat)
                }
            msg = Message(topic, 'dispatch', mda)
            LOG.debug('Sending dispatch order: %s', str(msg))
            self.pub.send(str(msg))

    def __call__(self, job):
        """Call the publisher."""
        try:
            mda = job['input_mda'].copy()
            mda.pop('dataset', None)
            mda.pop('collection', None)
            for fmat, fmat_config in plist_iter(job['product_list']['product_list'], mda):
                try:
                    topic, file_mda = self.create_message(fmat, mda)
                except KeyError:
                    continue
                msg = Message(topic, 'file', file_mda)
                LOG.debug('Publishing %s', str(msg))
                self.pub.send(str(msg))
                self.send_dispatch_messages(fmat, fmat_config, topic, file_mda)
        finally:
            self.pub.stop()

    def __del__(self):
        """Stop the publisher when last reference is deleted."""
        self.pub.stop()
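
As a hedged illustration of the create_message staticmethod above: the values below are invented, but the keys match what the method reads, and the topic composition follows trollsift's compose.

fmat = {'filename': '/tmp/overview_euron1.tif',   # hypothetical file
        'product': 'overview',
        'area': 'euron1',
        'productname': 'overview',
        'publish_topic': '/l2/{productname}/{area}'}
mda = {'platform_name': 'Suomi-NPP'}

topic, file_mda = FilePublisher.create_message(fmat, mda)
# topic    -> '/l2/overview/euron1'
# file_mda -> platform_name plus uri, uid, product, area, productname and
#             any optional coverage keys present in fmat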
Beispiel #18
0
class EndUserNotifier(Thread):
    """The Notifier class - sending mails or text messages to end users upon incoming messages."""
    def __init__(self, configfile, netrcfile=NETRCFILE):
        """Initialize the EndUserNotifier class."""
        super().__init__()
        self.configfile = configfile
        self._netrcfile = netrcfile
        self.options = {}

        config = read_config(self.configfile)
        self._set_options_from_config(config)

        self.host = socket.gethostname()
        LOG.debug("netrc file path = %s", self._netrcfile)
        self.secrets = netrc(self._netrcfile)

        self.smtp_server = self.options.get('smtp_server')
        self.domain = self.options.get('domain')
        self.sender = self.options.get('sender')
        self.subject = self.options.get('subject')

        self.recipients = RecipientDataStruct()
        self._set_recipients()

        self.max_number_of_fires_in_sms = self.options.get(
            'max_number_of_fires_in_sms', 2)
        LOG.debug("Max number of fires in SMS: %d",
                  self.max_number_of_fires_in_sms)

        self.fire_data = self.options.get('fire_data')
        self.unsubscribe_address = self.options.get('unsubscribe_address')
        self.unsubscribe_text = self.options.get('unsubscribe_text')

        if not self.domain:
            raise IOError('Missing domain specification in config!')

        self.input_topic = self.options['subscribe_topics'][0]
        LOG.debug("Input topic: %s", self.input_topic)

        self.output_topic = self.options['publish_topic']

        self.listener = None
        self.publisher = None
        self.loop = False
        self._setup_and_start_communication()

    def _set_recipients(self):
        """Set the recipients lists."""
        self.recipients._set_recipients(
            self.options.get('recipients'),
            self.options.get('recipients_attachment'))
        self.recipients.subject = self.subject

    def _setup_and_start_communication(self):
        """Set up the Posttroll communication and start the publisher."""
        LOG.debug("Input topic: %s", self.input_topic)
        self.listener = ListenerContainer(topics=[self.input_topic])
        self.publisher = NoisyPublisher("end_user_notifier")
        self.publisher.start()
        self.loop = True
        signal.signal(signal.SIGTERM, self.signal_shutdown)

    def _set_options_from_config(self, config):
        """Set the options dictionary from the on-disk configuration, holding all metadata for processing."""
        for item in config:
            self.options[item] = config[item]

        if isinstance(self.options.get('subscribe_topics'), str):
            # Filter out empty items instead of removing them while iterating,
            # which would skip elements.
            subscribe_topics = self.options.get('subscribe_topics').split(',')
            self.options['subscribe_topics'] = [item for item in subscribe_topics
                                                if len(item) > 0]

        if isinstance(self.options.get('publish_topics'), str):
            publish_topics = self.options.get('publish_topics').split(',')
            self.options['publish_topics'] = [item for item in publish_topics
                                              if len(item) > 0]

        unsubscribe = config.get('unsubscribe')
        if unsubscribe:
            for key in unsubscribe:
                self.options['unsubscribe_' + key] = unsubscribe[key]

    def signal_shutdown(self, *args, **kwargs):
        """Shutdown the Notifier process."""
        self.close()

    def run(self):
        """Run the Notifier."""
        while self.loop:
            try:
                msg = self.listener.output_queue.get(timeout=1)
                LOG.debug("Message: %s", str(msg.data))
            except Empty:
                continue
            else:
                if msg.type == 'info':
                    # No fires detected - no notification to send:
                    LOG.info(
                        "Message type info: No fires detected - no notification to send."
                    )
                    continue
                elif msg.type not in ['file', 'collection', 'dataset']:
                    LOG.debug("Message type not supported: %s", str(msg.type))
                    continue

                output_msg = self.notify_end_users(msg)
                if output_msg:
                    LOG.debug("Sending message: %s", str(output_msg))
                    self.publisher.send(str(output_msg))
                else:
                    LOG.debug("No message to send")

    def notify_end_users(self, msg):
        """Send notifications to configured end users (mail and text messages)."""
        LOG.debug("Start sending notifications to configured end users.")

        url = urlparse(msg.data.get('uri'))
        LOG.info('File path: %s', str(url.path))
        filename = url.path

        ffdata = read_geojson_data(filename)
        if not ffdata:
            return None

        platform_name = msg.data.get("platform_name")

        # Create the message(s).
        # Some recipients (typically via e-mail) should have the full message and an attachment
        # Other recipients (typically via SMS) should have several smaller messages and no attachment
        #
        full_message, sub_messages = self.create_message_content(
            ffdata['features'], "\n" + self.unsubscribe_text)

        username, password = self._get_mailserver_login_credentials()
        server = self._start_smtp_server(username, password, self.recipients)

        self._send_notifications_without_attachments(server, self.recipients,
                                                     sub_messages,
                                                     platform_name)
        self._send_notifications_with_attachments(server, self.recipients,
                                                  full_message, filename,
                                                  platform_name)

        return _create_output_message(msg, self.output_topic,
                                      self.recipients.recipients_all)

    def _send_notifications_with_attachments(self, server, recipients,
                                             full_message, filename,
                                             platform_name):
        """Send notifications with attachments."""

        notification = MIMEMultipart()
        notification['From'] = self.sender
        if platform_name:
            notification[
                'Subject'] = recipients.subject + ' Satellit = %s' % platform_name
        else:
            notification['Subject'] = recipients.subject

        if recipients.region_name:
            full_message = recipients.region_name + ":\n" + full_message

        notification.attach(MIMEText(full_message, 'plain', 'UTF-8'))
        LOG.debug("Length of message: %d", len(full_message))

        part = MIMEBase('application', "octet-stream")
        with open(filename, 'rb') as file:
            part.set_payload(file.read())
            encoders.encode_base64(part)
        part.add_header(
            'Content-Disposition',
            'attachment; filename="{}"'.format(Path(filename).name))
        notification.attach(part)

        for recip in recipients.recipients_with_attachment:
            notification['To'] = recip
            LOG.info("Send fire notification to %s", str(recip))
            LOG.debug("Subject: %s", str(recipients.subject))
            txt = notification.as_string()
            server.sendmail(self.sender, recip, txt)
            LOG.debug("Text sent: %s", txt)

        server.quit()

    def _send_notifications_without_attachments(self, server, recipients,
                                                sub_messages, platform_name):
        """Send notifications without attachments."""

        for submsg in sub_messages:
            notification = MIMEMultipart()
            notification['From'] = self.sender
            if platform_name:
                notification[
                    'Subject'] = recipients.subject + ' Satellit = %s' % platform_name
            else:
                notification['Subject'] = recipients.subject

            notification.attach(MIMEText(submsg, 'plain', 'UTF-8'))

            for recip in recipients.recipients_without_attachment:
                notification['To'] = recip
                LOG.info("Send fire notification to %s", str(recip))
                LOG.debug("Subject: %s", str(recipients.subject))
                txt = notification.as_string()
                server.sendmail(self.sender, recip, txt)
                LOG.debug("Text sent: %s", txt)

    def _get_mailserver_login_credentials(self):
        """Get the login credentials for the mail server."""
        host_secrets = self.secrets.authenticators(self.host)
        if host_secrets is None:
            LOG.error("Failed getting authentication secrets for host: %s",
                      self.host)
            raise IOError("Check out the details in the netrc file: %s",
                          self._netrcfile)

        username, _, password = host_secrets

        return username, password

    def _start_smtp_server(self, username, password, recipients):
        """Start the SMTP connection and log in."""
        server = smtplib.SMTP(self.smtp_server)
        server.starttls()
        server.ehlo(self.domain)
        # smtplib's rcpt() takes one address at a time.
        for recipient in recipients.recipients_all:
            server.rcpt(recipient)
        server.login(username, password)

        return server

    def create_message_content(self, gjson_features, unsubscr):
        """Create the full message string and the list of sub-messages."""
        full_msg = ''
        msg_list = []
        outstr = ''
        for idx, firespot in enumerate(gjson_features):
            if idx % self.max_number_of_fires_in_sms == 0 and idx > 0:
                full_msg = full_msg + outstr
                if len(unsubscr) > 0:
                    outstr = outstr + unsubscr

                LOG.debug('%d: Sub message = <%s>', idx, outstr)
                msg_list.append(outstr)
                outstr = ''

            lonlats = firespot['geometry']['coordinates']
            outstr = outstr + '%f N, %f E\n' % (lonlats[1], lonlats[0])
            if ('observation_time' in self.fire_data
                    and 'observation_time' in firespot['properties']):
                timestr = firespot['properties']['observation_time']
                LOG.debug("Time string: %s", str(timestr))
                try:
                    dtobj = datetime.fromisoformat(timestr)
                    # datetime.fromisoformat only exists in Python 3.7+
                except AttributeError:
                    dtobj = datetime.strptime(
                        timestr.split('.')[0], '%Y-%m-%dT%H:%M:%S')

                outstr = outstr + '  %s\n' % dtobj.strftime('%d %b %H:%M')

            for prop in firespot['properties']:
                if prop in self.fire_data and prop not in ['observation_time']:
                    if prop in ['power', 'Power']:
                        outstr = outstr + '  FRP: %7.3f MW\n' % (
                            firespot['properties'][prop])
                    else:
                        outstr = outstr + '  %s: %s\n' % (
                            prop, str(firespot['properties'][prop]))

            LOG.debug("Message length so far: %d", len(outstr))
            LOG.debug("Max number of fires in sub message: %d",
                      self.max_number_of_fires_in_sms)

        if len(outstr) > 0:
            if len(unsubscr) > 0:
                outstr = outstr + unsubscr
            LOG.debug('%d: Sub message = <%s>', idx, outstr)
            msg_list.append(outstr)

        full_msg = full_msg + outstr

        LOG.debug("Full message: <%s>", full_msg)
        LOG.debug("Sub-messages: <%s>", str(msg_list))

        return full_msg, msg_list

    def close(self):
        """Shutdown the Notifier process."""
        LOG.info('Terminating the End User Notifier process.')
        self.loop = False
        try:
            self.listener.stop()
        except Exception:
            LOG.exception("Couldn't stop listener.")
        if self.publisher:
            try:
                self.publisher.stop()
            except Exception:
                LOG.exception("Couldn't stop publisher.")
Beispiel #19
0
class WorldCompositeDaemon(object):

    logger = logging.getLogger(__name__)
    publish_topic = "/global/mosaic/{areaname}"
    nameservers = None
    port = 0
    aliases = None
    broadcast_interval = 2

    def __init__(self, config):
        self.config = config
        self.slots = {}
        # Structure of self.slots is:
        # slots = {datetime(): {composite: {"fnames": [],
        #                                   "num": 0,
        #                                   "timeout": datetime()}}}
        self._parse_settings()
        self._listener = ListenerContainer(topics=config["topics"])
        self._set_message_settings()
        self._publisher = \
            NoisyPublisher("WorldCompositePublisher",
                           port=self.port,
                           aliases=self.aliases,
                           broadcast_interval=self.broadcast_interval,
                           nameservers=self.nameservers)
        self._publisher.start()
        self._loop = False
        if isinstance(config["area_def"], str):
            self.adef = get_area_def(config["area_def"])
        else:
            self.adef = config["area_def"]

    def run(self):
        """Listen to messages and make global composites"""
        self._loop = True

        while self._loop:
            if self._check_timeouts_and_save():
                num = gc.collect()
                self.logger.debug("%d objects garbage collected", num)

            # Get new messages from the listener
            msg = None
            try:
                msg = self._listener.output_queue.get(True, 1)
            except KeyboardInterrupt:
                self._loop = False
                break
            except queue_empty:
                continue

            if msg is not None and msg.type == "file":
                self._handle_message(msg)

        self._listener.stop()
        self._publisher.stop()

    def _set_message_settings(self):
        """Set message settings from config"""
        if "message_settings" not in self.config:
            return

        self.publish_topic = \
            self.config["message_settings"].get("publish_topic",
                                                "/global/mosaic/{areaname}")
        self.nameservers = \
            self.config["message_settings"].get("nameservers", None)
        self.port = self.config["message_settings"].get("port", 0)
        self.aliases = self.config["message_settings"].get("aliases", None)
        self.broadcast_interval = \
            self.config["message_settings"].get("broadcast_interval", 2)

    def _handle_message(self, msg):
        """Insert file from the message to correct time slot and composite"""
        # Check which time should be used as basis for timeout:
        # - "message" = time of message sending
        # - "nominal_time" = time of satellite data, read from message data
        # - "receive" = current time when message is read from queue
        # Default to use slot nominal time
        timeout_epoch = self.config.get("timeout_epoch", "nominal_time")

        self.logger.debug("New message received: %s", str(msg.data))
        fname = msg.data["uri"]
        tslot = msg.data["nominal_time"]
        composite = msg.data["productname"]
        if tslot not in self.slots:
            self.slots[tslot] = {}
            self.logger.debug("Adding new timeslot: %s", str(tslot))
        if composite not in self.slots[tslot]:
            if timeout_epoch == "message":
                epoch = msg.time
            elif timeout_epoch == "receive":
                epoch = dt.datetime.utcnow()
            else:
                epoch = tslot
            self.slots[tslot][composite] = \
                {"fnames": [], "num": 0,
                 "timeout": epoch +
                 dt.timedelta(minutes=self.config["timeout"])}
            self.logger.debug("Adding new composite to slot %s: %s",
                              str(tslot), composite)
        self.logger.debug("Adding file to slot %s/%s: %s",
                          str(tslot), composite, fname)
        self.slots[tslot][composite]["fnames"].append(fname)
        self.slots[tslot][composite]["num"] += 1

    def _check_timeouts_and_save(self):
        """Check timeouts, save completed images, and cleanup slots."""
        # Number of expected images
        num_expected = self.config["num_expected"]

        # Check timeouts and completed composites
        check_time = dt.datetime.utcnow()

        saved = False
        empty_slots = []
        slots = self.slots.copy()
        for slot in slots:
            composites = tuple(slots[slot].keys())
            for composite in composites:
                if (check_time > slots[slot][composite]["timeout"] or
                        slots[slot][composite]["num"] == num_expected):
                    fnames = slots[slot][composite]["fnames"]
                    self._create_global_mosaic(fnames, slot, composite)
                    saved = True

            # Collect empty slots
            if len(slots[slot]) == 0:
                empty_slots.append(slot)

        for slot in empty_slots:
            self.logger.debug("Removing empty time slot: %s",
                              str(slot))
            del self.slots[slot]

        return saved

    def _parse_settings(self):
        """Parse static settings from config"""
        lon_limits = LON_LIMITS.copy()
        try:
            lon_limits.update(self.config["lon_limits"])
        except KeyError:
            pass
        except TypeError:
            lon_limits = None
        self.config["lon_limits"] = lon_limits

        # Get image save options
        try:
            save_kwargs = self.config["save_settings"]
        except KeyError:
            save_kwargs = {}
        self.config["save_settings"] = save_kwargs


    def _create_global_mosaic(self, fnames, slot, composite):
        """Create and save global mosaic."""
        self.logger.info("Building composite %s for slot %s",
                         composite, str(slot))
        scn = Scene()
        file_parts = self._get_fname_parts(slot, composite)
        fname_out = file_parts["uri"]

        img = self._get_existing_image(fname_out)

        self.logger.info("Creating composite")
        scn['img'] = create_world_composite(fnames,
                                            self.adef,
                                            self.config["lon_limits"],
                                            img=img,
                                            logger=self.logger)
        self.logger.info("Saving %s", fname_out)
        scn.save_dataset('img', filename=fname_out,
                         **self.config["save_settings"])
        self._send_message(file_parts)
        del self.slots[slot][composite]

    def _get_fname_parts(self, slot, composite):
        """Get filename part dictionary"""
        file_parts = {'composite': composite,
                      'nominal_time': slot,
                      'areaname': self.adef.area_id}

        fname_out = compose(self.config["out_pattern"],
                            file_parts)
        file_parts['uri'] = fname_out
        file_parts['uid'] = os.path.basename(fname_out)

        return file_parts

    def _get_existing_image(self, fname_out):
        """Read an existing image and return it.  If the image doesn't exist,
        return None"""
        # Check if we already have an image with this filename
        if os.path.exists(fname_out):
            img = read_image(fname_out, self.adef.area_id)
            self.logger.info("Existing image was read: %s", fname_out)
        else:
            img = None

        return img

    def _send_message(self, file_parts):
        """Send a message"""
        msg = Message(compose(self.publish_topic, file_parts),
                      "file", file_parts)
        self.logger.info("Sending message: %s", str(msg))
        self._publisher.send(str(msg))

    def stop(self):
        """Stop"""
        self.logger.info("Stopping WorldCompositor")
        self._listener.stop()
        self._publisher.stop()

    def set_logger(self, logger):
        """Set logger."""
        self.logger = logger
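
A hedged configuration sketch for the daemon above. Only keys the code actually reads are shown, and every value is invented.

config = {
    "topics": ["/regional/mosaic"],          # posttroll topics to listen to
    "num_expected": 5,                       # files needed to complete a slot
    "timeout": 30,                           # minutes before saving anyway
    "timeout_epoch": "nominal_time",         # or "message" / "receive"
    "area_def": "worldeqc3km",               # resolved via get_area_def()
    "out_pattern": "/data/mosaic/{composite}_{nominal_time:%Y%m%d_%H%M}_{areaname}.tif",
    "message_settings": {"publish_topic": "/global/mosaic/{areaname}"},
}

daemon = WorldCompositeDaemon(config)
daemon.run()   # blocks; the loop stops the listener and publisher on exit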
Beispiel #20
0
class Dispatcher(Thread):
    """Class that dispatches files."""

    def __init__(self, config_file, publish_port=None,
                 publish_nameservers=None):
        """Initialize dispatcher class."""
        super().__init__()
        self.config = None
        self.topics = None
        self.listener = None
        self._publish_port = publish_port
        self._publish_nameservers = publish_nameservers
        self.publisher = None
        self.host = socket.gethostname()
        self._create_publisher()
        self.loop = True
        self.config_handler = DispatchConfig(config_file, self.update_config)
        signal.signal(signal.SIGTERM, self.signal_shutdown)

    def _create_publisher(self):
        if self._publish_port is not None:
            self.publisher = NoisyPublisher("dispatcher", port=self._publish_port,
                                            nameservers=self._publish_nameservers)
            self.publisher.start()

    def signal_shutdown(self, *args, **kwargs):
        """Shutdown dispatcher."""
        self.close()

    def update_config(self, new_config):
        """Update configuration and reload listeners."""
        old_config = self.config
        topics = set()
        # Guard against an empty config leaving these names unbound below.
        _client = None
        client_config = None
        try:
            for _client, client_config in new_config.items():
                topics |= set(sum([item['topics'] for item in client_config['dispatch_configs']], []))
            if client_config is not None and self.topics != topics:
                self.config = new_config
                # NB: the listener uses the subscription settings of the
                # last client in the config.
                self._create_listener(client_config, topics)
        except KeyError as err:
            logger.warning('Invalid config for %s, keeping the old one running: %s', _client, str(err))
            self.config = old_config

    def _create_listener(self, client_config, topics):
        if self.listener is not None:
            # FIXME: make sure to get the last messages though
            self.listener.stop()
        addresses = client_config.get('subscribe_addresses', None)
        nameserver = client_config.get('nameserver', 'localhost')
        services = client_config.get('subscribe_services', '')
        self.listener = ListenerContainer(topics=topics,
                                          addresses=addresses,
                                          nameserver=nameserver,
                                          services=services)
        self.topics = topics

    def run(self):
        """Run dispatcher."""
        while self.loop:
            try:
                msg = self.listener.output_queue.get(timeout=1)
            except Empty:
                continue
            if msg.type != 'file':
                continue
            self._dispatch_from_message(msg)

    def _dispatch_from_message(self, msg):
        destinations = self.get_destinations(msg)
        if destinations:
            # Check whether the URL points at another host:
            url = urlparse(msg.data['uri'])
            _check_file_locality(url, self.host)
            success = dispatch(url.path, destinations)
            if self.publisher:
                self._publish(msg, destinations, success)

    def _publish(self, msg, destinations, success):
        """Publish a message.

        The URI is replaced with the URI on the target server.

        """
        for url, _, client in destinations:
            if not success[client]:
                continue
            msg = self._get_new_message(msg, url, client)
            if msg is None:
                continue
            logger.debug('Publishing %s', str(msg))
            self.publisher.send(str(msg))

    def _get_new_message(self, msg, url, client):
        info = self._get_message_info(msg, url)
        topic = self._get_topic(client, info)
        if topic is None:
            return None
        return Message(topic, 'file', info)

    def _get_message_info(self, msg, url):
        info = msg.data.copy()
        info["uri"] = urlsplit(url).path
        return info

    def _get_topic(self, client, info):
        topic = self.config[client].get("publish_topic")
        if topic is None:
            logger.error("Publish topic not configured for '%s'", client)
            return None
        return compose(topic, info)

    def get_destinations(self, msg):
        """Get the destinations for this message."""
        destinations = []
        for client, config in self.config.items():
            for dispatch_config in config['dispatch_configs']:
                destination = self._get_destination(dispatch_config, msg, client)
                if destination is None:
                    continue
                destinations.append(destination)
        return destinations

    def _get_destination(self, dispatch_config, msg, client):
        destination = None
        if _has_correct_topic(dispatch_config, msg):
            if check_conditions(msg, dispatch_config):
                destination = self.create_dest_url(msg, client, dispatch_config)
        return destination

    def create_dest_url(self, msg, client, conf):
        """Create the destination URL and the connection parameters."""
        config = self.config[client].copy()
        _verify_filepattern(config, msg)
        config.update(conf)
        connection_parameters = config.get('connection_parameters')

        host = config['host']

        metadata = _get_metadata_with_aliases(msg, config)

        path = compose(
            os.path.join(config['directory'],
                         config['filepattern']),
            metadata)
        parts = urlsplit(host)
        host_path = urlunsplit((parts.scheme, parts.netloc, path, parts.query, parts.fragment))
        return host_path, connection_parameters, client

    def close(self):
        """Shutdown the dispatcher."""
        logger.info('Terminating dispatcher.')
        self.loop = False
        try:
            self.listener.stop()
        except Exception:
            logger.exception("Couldn't stop listener.")
        if self.publisher:
            try:
                self.publisher.stop()
            except Exception:
                logger.exception("Couldn't stop publisher.")
        try:
            self.config_handler.close()
        except Exception:
            logger.exception("Couldn't stop config handler.")
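
For orientation, here is a hedged sketch of the parsed configuration the Dispatcher consumes; the keys mirror what get_destinations, create_dest_url and _get_topic read above, and every value is invented.

config = {
    'client_a': {
        'host': 'scp://user@destination.host',
        'directory': '/incoming/{platform_name}',
        'filepattern': '{platform_name}_{start_time:%Y%m%d%H%M}.tif',
        'publish_topic': '/dispatched/client_a',
        'dispatch_configs': [
            {'topics': ['/level2/tif']},   # optional conditions omitted
        ],
    },
}
# In normal operation DispatchConfig reads this from disk and hands it to
# update_config(), which collects the topic set {'/level2/tif'} and
# (re)creates the listener. A matching 'file' message then yields a
# destination such as scp://user@destination.host/incoming/<platform>/<name>.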
Beispiel #21
0
class EventHandler(ProcessEvent):
    """
    Event handler class for inotify.
     *topic* - topic of the published messages
     *posttroll_port* - port number to publish the messages on
     *filepattern* - filepattern for finding information from the filename
    """
    def __init__(self,
                 topic,
                 instrument,
                 config_item,
                 posttroll_port=0,
                 filepattern=None,
                 aliases=None,
                 tbus_orbit=False,
                 history=0,
                 granule_length=0,
                 custom_vars=None,
                 nameservers=[],
                 watchManager=None):
        super(EventHandler, self).__init__()

        self._pub = NoisyPublisher("trollstalker_" + config_item,
                                   posttroll_port,
                                   topic,
                                   nameservers=nameservers)
        self.pub = self._pub.start()
        self.topic = topic
        self.info = OrderedDict()
        if filepattern is None:
            filepattern = '{filename}'
        self.file_parser = Parser(filepattern)
        self.instrument = instrument
        self.aliases = aliases
        self.custom_vars = custom_vars
        self.tbus_orbit = tbus_orbit
        self.granule_length = granule_length
        self._deque = deque([], history)
        self._watchManager = watchManager
        self._watched_dirs = dict()

    def stop(self):
        '''Stop publisher.
        '''
        self._pub.stop()

    def __clean__(self):
        '''Clean instance attributes.
        '''
        self.info = OrderedDict()

    def process_IN_CLOSE_WRITE(self, event):
        """When a file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_CLOSE_WRITE")
        self.process(event)

    def process_IN_CLOSE_NOWRITE(self, event):
        """When a nonwritable file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_CLOSE_NOWRITE")
        self.process(event)

    def process_IN_MOVED_TO(self, event):
        """When a file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_MOVED_TO")
        self.process(event)

    def process_IN_CREATE(self, event):
        """When a file is created, process the associated event.
        """
        LOGGER.debug("trigger: IN_CREATE")
        self.process(event)

    def process_IN_CLOSE_MODIFY(self, event):
        """When a file is modified and closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_CLOSE_MODIFY")
        self.process(event)

    def process_IN_DELETE(self, event):
        """On delete."""
        if (event.mask & pyinotify.IN_ISDIR):
            try:
                try:
                    self._watchManager.rm_watch(
                        self._watched_dirs[event.pathname], quiet=False)
                except pyinotify.WatchManagerError:
                    # As the directory is deleted before the watch is removed,
                    # pyinotify will raise an error. This is expected, so just
                    # log it and move on.
                    LOGGER.debug("Removed watch: {}".format(event.pathname))
                finally:
                    del self._watched_dirs[event.pathname]
            except KeyError:
                LOGGER.warning(
                    "Dir {} not watched by inotify. Cannot delete watch.".
                    format(event.pathname))
        return

    def process(self, event):
        '''Process the event'''
        # New file created and closed
        if not event.dir:
            LOGGER.debug("processing %s", event.pathname)
            # parse information and create self.info OrderedDict{}
            self.parse_file_info(event)
            if len(self.info) > 0:
                # Check if this file has been recently dealt with
                if event.pathname not in self._deque:
                    self._deque.append(event.pathname)
                    message = self.create_message()
                    LOGGER.info("Publishing message %s", str(message))
                    self.pub.send(str(message))
                else:
                    LOGGER.info("Data has been published recently, skipping.")
            self.__clean__()
        elif (event.mask & pyinotify.IN_ISDIR):
            tmask = (pyinotify.IN_CLOSE_WRITE | pyinotify.IN_MOVED_TO
                     | pyinotify.IN_CREATE | pyinotify.IN_DELETE)
            try:
                self._watched_dirs.update(
                    self._watchManager.add_watch(event.pathname, tmask))
                LOGGER.debug("Added watch on dir: {}".format(event.pathname))
            except AttributeError:
                LOGGER.error(
                    "No watchmanager given. Cannot add watch on {}".format(
                        event.pathname))

    def create_message(self):
        """Create broadcasted message
        """
        return Message(self.topic, 'file', dict(self.info))

    def parse_file_info(self, event):
        '''Parse satellite and orbit information from the filename.
        A message is sent if a matching filepattern is found.
        '''
        try:
            LOGGER.debug("filter: %s\t event: %s", self.file_parser.fmt,
                         event.pathname)
            pathname_join = os.path.basename(event.pathname)
            # custom_vars defaults to None, so guard the membership test.
            if self.custom_vars and 'origin_inotify_base_dir_skip_levels' in self.custom_vars:
                pathname_list = event.pathname.split('/')
                pathname_join = "/".join(pathname_list[int(
                    self.custom_vars['origin_inotify_base_dir_skip_levels']):])
            else:
                LOGGER.debug(
                    "No origin_inotify_base_dir_skip_levels in self.custom_vars"
                )

            self.info = OrderedDict()
            self.info.update(self.file_parser.parse(pathname_join))
            LOGGER.debug("Extracted: %s", str(self.info))
        except ValueError:
            # Filename didn't match pattern, so empty the info dict
            LOGGER.info("Couldn't extract any usefull information")
            self.info = OrderedDict()
        else:
            self.info['uri'] = event.pathname
            self.info['uid'] = os.path.basename(event.pathname)
            self.info['sensor'] = self.instrument.split(',')
            LOGGER.debug("self.info['sensor']: " + str(self.info['sensor']))

            if self.tbus_orbit and "orbit_number" in self.info:
                LOGGER.info("Changing orbit number by -1!")
                self.info["orbit_number"] -= 1

            # replace values with corresponding aliases, if any are given
            if self.aliases:
                info = self.info.copy()
                for key in info:
                    if key in self.aliases:
                        self.info['orig_' + key] = self.info[key]
                        self.info[key] = self.aliases[key][str(self.info[key])]

            # add start_time and end_time if not present
            try:
                base_time = self.info["time"]
            except KeyError:
                try:
                    base_time = self.info["nominal_time"]
                except KeyError:
                    base_time = self.info["start_time"]
            if "start_time" not in self.info:
                self.info["start_time"] = base_time
            if "start_date" in self.info:
                self.info["start_time"] = \
                    dt.datetime.combine(self.info["start_date"].date(),
                                        self.info["start_time"].time())
                if "end_date" not in self.info:
                    self.info["end_date"] = self.info["start_date"]
                del self.info["start_date"]
            if "end_date" in self.info:
                self.info["end_time"] = \
                    dt.datetime.combine(self.info["end_date"].date(),
                                        self.info["end_time"].time())
                del self.info["end_date"]
            if "end_time" not in self.info and self.granule_length > 0:
                self.info["end_time"] = base_time + \
                    dt.timedelta(seconds=self.granule_length)

            if "end_time" in self.info:
                while self.info["start_time"] > self.info["end_time"]:
                    self.info["end_time"] += dt.timedelta(days=1)

            if self.custom_vars is not None:
                for var_name in self.custom_vars:
                    var_pattern = self.custom_vars[var_name]
                    var_val = None
                    if '%' in var_pattern:
                        var_val = helper_functions.create_aligned_datetime_var(
                            var_pattern, self.info)
                    if var_val is None:
                        var_val = compose(var_pattern, self.info)
                    self.info[var_name] = var_val
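
For context, a minimal sketch of how such a handler is typically wired into pyinotify's event loop; the paths, topic and filepattern are invented, and the constructor arguments follow the signature above.

import pyinotify

mask = (pyinotify.IN_CLOSE_WRITE | pyinotify.IN_MOVED_TO |
        pyinotify.IN_CREATE | pyinotify.IN_DELETE)
wm = pyinotify.WatchManager()
handler = EventHandler("/new/data/viirs", "viirs", "viirs_granules",
                       filepattern="{platform_name}_{start_time:%Y%m%d%H%M}.h5",
                       watchManager=wm)
notifier = pyinotify.Notifier(wm, handler)
wm.add_watch("/data/incoming", mask, rec=True, auto_add=True)
try:
    notifier.loop()   # dispatches events to the process_IN_* methods
finally:
    handler.stop()    # stop the NoisyPublisher cleanly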
Beispiel #22
0
class EventHandler(ProcessEvent):
    """
    Event handler class for inotify.
     *topic* - topic of the published messages
     *posttroll_port* - port number to publish the messages on
     *filepattern* - filepattern for finding information from the filename
    """
    def __init__(self,
                 topic,
                 instrument,
                 posttroll_port=0,
                 filepattern=None,
                 aliases=None,
                 tbus_orbit=False):
        super(EventHandler, self).__init__()

        self._pub = NoisyPublisher("trollstalker", posttroll_port, topic)
        self.pub = self._pub.start()
        self.topic = topic
        self.info = {}
        if filepattern is None:
            filepattern = '{filename}'
        self.file_parser = Parser(filepattern)
        self.instrument = instrument
        self.aliases = aliases
        self.tbus_orbit = tbus_orbit

    def stop(self):
        '''Stop publisher.
        '''
        self._pub.stop()

    def __clean__(self):
        '''Clean instance attributes.
        '''
        self.info = {}

    def process_IN_CLOSE_WRITE(self, event):
        """When a file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_CLOSE_WRITE")
        self.process(event)

    def process_IN_CLOSE_NOWRITE(self, event):
        """When a nonwritable file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_CLOSE_NOWRITE")
        self.process(event)

    def process_IN_MOVED_TO(self, event):
        """When a file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_MOVED_TO")
        self.process(event)

    def process_IN_CREATE(self, event):
        """When a file is created, process the associated event.
        """
        LOGGER.debug("trigger: IN_CREATE")
        self.process(event)

    def process_IN_CLOSE_MODIFY(self, event):
        """When a file is modified and closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_CLOSE_MODIFY")
        self.process(event)

    def process(self, event):
        '''Process the event'''
        # New file created and closed
        if not event.dir:
            LOGGER.debug("processing %s", event.pathname)
            # parse information and create self.info dict{}
            self.parse_file_info(event)
            if len(self.info) > 0:
                message = self.create_message()
                LOGGER.info("Publishing message %s" % str(message))
                self.pub.send(str(message))
            self.__clean__()

    def create_message(self):
        """Create broadcasted message
        """
        return Message(self.topic, 'file', self.info)

    def parse_file_info(self, event):
        '''Parse satellite and orbit information from the filename.
        A message is sent if a matching filepattern is found.
        '''
        try:
            LOGGER.debug("filter: %s\t event: %s", self.file_parser.fmt,
                         event.pathname)
            self.info = self.file_parser.parse(os.path.basename(
                event.pathname))
            LOGGER.debug("Extracted: %s", str(self.info))
        except ValueError:
            # Filename didn't match pattern, so empty the info dict
            LOGGER.info("Couldn't extract any usefull information")
            self.info = {}
        else:
            self.info['uri'] = event.pathname
            self.info['uid'] = os.path.basename(event.pathname)
            self.info['sensor'] = self.instrument.split(',')
            LOGGER.debug("self.info['sensor']: " + str(self.info['sensor']))

            if self.tbus_orbit and "orbit_number" in self.info:
                LOGGER.info("Changing orbit number by -1!")
                self.info["orbit_number"] -= 1

            # replace values with corresponding aliases, if any are given
            if self.aliases:
                for key in self.info:
                    if key in self.aliases:
                        self.info[key] = self.aliases[key][str(self.info[key])]
Beispiel #23
0
class EventHandler(ProcessEvent):

    """
    Event handler class for inotify.
     *topic* - topic of the published messages
     *posttroll_port* - port number to publish the messages on
     *filepattern* - filepattern for finding information from the filename
    """

    def __init__(self, topic, instrument, posttroll_port=0, filepattern=None,
                 aliases=None, tbus_orbit=False):
        super(EventHandler, self).__init__()

        self._pub = NoisyPublisher("trollstalker", posttroll_port, topic)
        self.pub = self._pub.start()
        self.topic = topic
        self.info = {}
        if filepattern is None:
            filepattern = '{filename}'
        self.file_parser = Parser(filepattern)
        self.instrument = instrument
        self.aliases = aliases
        self.tbus_orbit = tbus_orbit

    def stop(self):
        '''Stop publisher.
        '''
        self._pub.stop()

    def __clean__(self):
        '''Clean instance attributes.
        '''
        self.info = {}

    def process_IN_CLOSE_WRITE(self, event):
        """When a file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_CLOSE_WRITE")
        self.process(event)

    def process_IN_CLOSE_NOWRITE(self, event):
        """When a nonwritable file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_CLOSE_NOWRITE")
        self.process(event)

    def process_IN_MOVED_TO(self, event):
        """When a file is closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_MOVED_TO")
        self.process(event)

    def process_IN_CREATE(self, event):
        """When a file is created, process the associated event.
        """
        LOGGER.debug("trigger: IN_CREATE")
        self.process(event)

    def process_IN_CLOSE_MODIFY(self, event):
        """When a file is modified and closed, process the associated event.
        """
        LOGGER.debug("trigger: IN_CLOSE_MODIFY")
        self.process(event)

    def process(self, event):
        '''Process the event'''
        # New file created and closed
        if not event.dir:
            LOGGER.debug("processing %s", event.pathname)
            # parse information and create self.info dict{}
            self.parse_file_info(event)
            if len(self.info) > 0:
                message = self.create_message()
                LOGGER.info("Publishing message %s" % str(message))
                self.pub.send(str(message))
            self.__clean__()

    def create_message(self):
        """Create broadcasted message
        """
        return Message(self.topic, 'file', self.info)

    def parse_file_info(self, event):
        '''Parse satellite and orbit information from the filename.
        A message is sent if a matching filepattern is found.
        '''
        try:
            LOGGER.debug("filter: %s\t event: %s",
                         self.file_parser.fmt, event.pathname)
            self.info = self.file_parser.parse(
                os.path.basename(event.pathname))
            LOGGER.debug("Extracted: %s", str(self.info))
        except ValueError:
            # Filename didn't match pattern, so empty the info dict
            LOGGER.info("Couldn't extract any usefull information")
            self.info = {}
        else:
            self.info['uri'] = event.pathname
            self.info['uid'] = os.path.basename(event.pathname)
            self.info['sensor'] = self.instrument.split(',')
            LOGGER.debug("self.info['sensor']: " + str(self.info['sensor']))

            if self.tbus_orbit and "orbit_number" in self.info:
                LOGGER.info("Changing orbit number by -1!")
                self.info["orbit_number"] -= 1

            # replace values with corresponding aliases, if any are given
            if self.aliases:
                for key in self.info:
                    if key in self.aliases:
                        self.info[key] = self.aliases[key][str(self.info[key])]
Beispiel #24
0
class FilePublisher(AbstractWatchDogProcessor):

    def __init__(self, config):
        self.config = config.copy()
        if isinstance(config["filepattern"], (str, unicode)):
            self.config["filepattern"] = [self.config["filepattern"]]

        self.parsers = [Parser(filepattern)
                        for filepattern in self.config["filepattern"]]

        self.aliases = parse_aliases(config)

        self.topic = self.config["topic"]
        self.tbus_orbit = self.config.get("tbus_orbit", False)
        LOGGER.debug("Looking for: %s", str([parser.globify() for parser in self.parsers]))
        AbstractWatchDogProcessor.__init__(self,
                                           [parser.globify()
                                            for parser in self.parsers],
                                           config.get("watcher",
                                                      "Observer"))

        self._pub = NoisyPublisher("trollstalker",
                                   int(self.config["posttroll_port"]),
                                   self.config["topic"])
        self.pub = None

        obsolete_keys = ["topic", "filepattern", "tbus_orbit",
                         "posttroll_port", "watch", "config_item", "configuration_file"]

        # Iterate over a copy of the keys, since entries are deleted below.
        for key in list(self.config.keys()):
            if key.startswith("alias_") or key in obsolete_keys:
                del self.config[key]

    def start(self):
        AbstractWatchDogProcessor.start(self)
        self.pub = self._pub.start()

    def stop(self):
        self._pub.stop()
        AbstractWatchDogProcessor.stop(self)

    def process(self, pathname):
        '''Process the event'''
        # New file created and closed
        LOGGER.debug("processing %s", pathname)
        # parse information and create self.info dict{}
        metadata = self.config.copy()
        success = False
        for parser in self.parsers:
            try:
                metadata.update(parser.parse(pathname))
                success = True
                break
            except ValueError:
                pass
        # Warn only once, after all parsers have been tried.
        if not success:
            LOGGER.warning("Could not find a matching pattern for %s",
                           pathname)

        metadata['uri'] = pathname
        metadata['uid'] = os.path.basename(pathname)

        if self.tbus_orbit and "orbit_number" in metadata:
            LOGGER.info("Changing orbit number by -1!")
            metadata["orbit_number"] -= 1

        # replace values with corresponding aliases, if any are given
        if self.aliases:
            for key in metadata:
                if key in self.aliases:
                    metadata[key] = self.aliases[key][str(metadata[key])]

        message = Message(self.topic, 'file', metadata)
        LOGGER.info("Publishing message %s" % str(message))
        self.pub.send(str(message))
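
Finally, a hedged usage sketch for this watchdog-based FilePublisher; the config keys are the ones __init__ reads above, and all values are invented.

import time

config = {
    "filepattern": "/data/incoming/{platform_name}_{start_time:%Y%m%d%H%M}.l1b",
    "topic": "/new/data/l1b",
    "posttroll_port": "0",     # parsed with int() above
    "watcher": "Observer",
}

fp = FilePublisher(config)
fp.start()                     # starts the observer and the NoisyPublisher
try:
    while True:
        time.sleep(1)          # let the watchdog thread run
except KeyboardInterrupt:
    fp.stop()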