Example #1
    def _publish(self, metadata):
        # Publish the metadata as a "dataset" message when it carries one,
        # otherwise as a "collection" message.
        if "dataset" in metadata:
            msg = pmessage.Message(self._subject, "dataset", metadata)
        else:
            msg = pmessage.Message(self._subject, "collection", metadata)
        logger.info("Sending: %s", str(msg))
        self._publisher.send(str(msg))
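As a rough usage sketch of the branching above (the subject and metadata values are made up; only the `posttroll.message.Message(subject, type, data)` constructor is taken from the snippet):

from posttroll import message as pmessage

# Hypothetical sample metadata: the presence of a "dataset" key decides
# the message type, mirroring the if/else in _publish() above.
meta = {"dataset": [{"uri": "file1.nc", "uid": "file1.nc"}]}
mtype = "dataset" if "dataset" in meta else "collection"
msg = pmessage.Message("/test/subject", mtype, meta)
print(str(msg))  # the raw string a publisher's send() would transmit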
Example #2
    def terminator(self, metadata):
        """Dummy terminator function.
        """
        sorted_mda = sorted(metadata, key=lambda x: x["start_time"])

        mda = metadata[0].copy()

        mda['end_time'] = sorted_mda[-1]['end_time']
        mda['collection_area_id'] = sorted_mda[-1]['collection_area_id']
        mda['collection'] = []

        is_correct = False
        for meta in sorted_mda:
            new_mda = {}
            if "uri" in meta or 'dataset' in meta:
                is_correct = True
            for key in ['dataset', 'uri', 'uid']:
                if key in meta:
                    new_mda[key] = meta[key]
            # The time span belongs to the granule as a whole, so set it
            # once per granule rather than inside the key loop.
            new_mda['start_time'] = meta['start_time']
            new_mda['end_time'] = meta['end_time']
            mda['collection'].append(new_mda)

        for key in ['dataset', 'uri', 'uid']:
            if key in mda:
                del mda[key]

        if is_correct:
            msg = message.Message("/placeholder", "collection",
                                  mda)
            self.logger.info("Collection ready %s", str(msg))
            self.output_queue.put(msg)
        else:
            self.logger.warning("Malformed metadata, no key: %s", "uri")
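To make the merge concrete, a sketch with two made-up granules; note that this variant keeps `metadata[0]`'s `start_time` (it copies the first unsorted entry without resetting it), unlike Example #9 below, which takes it from the sorted list:

from datetime import datetime

metadata = [
    {"uri": "granule2.nc", "uid": "granule2.nc", "collection_area_id": "euro",
     "start_time": datetime(2021, 1, 1, 12, 5),
     "end_time": datetime(2021, 1, 1, 12, 10)},
    {"uri": "granule1.nc", "uid": "granule1.nc", "collection_area_id": "euro",
     "start_time": datetime(2021, 1, 1, 12, 0),
     "end_time": datetime(2021, 1, 1, 12, 5)},
]
# terminator(metadata) would queue a "collection" message whose data has
# end_time 12:10, collection_area_id "euro", no top-level uri/uid, and a
# 'collection' list with one {uri, uid, start_time, end_time} entry per
# granule, sorted by start_time; start_time stays at granule2's 12:05.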
Example #3
def terminator(metadata):
    """Dummy terminator function.
    """
    sorted_mda = sorted(metadata, key=lambda x: x["start_time"])

    mda = metadata[0].copy()

    subject = "/".join(("", mda["format"], mda["data_processing_level"], ''))

    mda['end_time'] = sorted_mda[-1]['end_time']

    mda['collection'] = []

    for meta in sorted_mda:
        new_mda = {}
        for key in ['dataset', 'uri', 'uid']:
            if key in meta:
                new_mda[key] = meta[key]
        # Set the granule's time span once, outside the key loop.
        new_mda['start_time'] = meta['start_time']
        new_mda['end_time'] = meta['end_time']
        mda['collection'].append(new_mda)

    for key in ['dataset', 'uri', 'uid']:
        if key in mda:
            del mda[key]

    msg = message.Message(subject, "collection", mda)
    logger.info("sending %s", str(msg))
    pub.send(str(msg))
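The subject join in this variant yields a topic with leading and trailing slashes, e.g. (field values made up):

mda = {"format": "HRIT", "data_processing_level": "1b"}
subject = "/".join(("", mda["format"], mda["data_processing_level"], ''))
# subject == "/HRIT/1b/"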
Example #4
    def _publish(self, time_slot, missing_files_check=True):
        """Publish file dataset and reinitialize gatherer."""

        data = self.slots[time_slot]

        # Diagnostic logging about delayed ...
        delayed_files = data['delayed_files']
        if len(delayed_files) > 0:
            file_str = ''
            for key in delayed_files:
                file_str += "%s %f seconds, " % (key, delayed_files[key])
            self.logger.warning("Files received late: %s",
                                file_str.strip(', '))

        if missing_files_check:
            # and missing files
            missing_files = data['all_files'].difference(
                data['received_files'])
            if len(missing_files) > 0:
                self.logger.warning("Missing files: %s",
                                    ', '.join(missing_files))

        # Although we're not publishing a message, generate one anyway
        # for compatibility
        msg = message.Message("/placeholder", "dataset", data['metadata'])
        self.logger.info("Forwarding: %s", str(msg))
        self.output_queue.put(msg)
Example #5
    def _publish(self, time_slot, missing_files_check=True):
        """Publish file dataset and reinitialize gatherer."""

        data = self.slots[time_slot]

        # Diagnostic logging about delayed ...
        delayed_files = data['delayed_files']
        if len(delayed_files) > 0:
            file_str = ''
            for key in delayed_files:
                file_str += "%s %f seconds, " % (key, delayed_files[key])
            self.logger.warning("Files received late: %s",
                                file_str.strip(', '))

        if missing_files_check:
            # and missing files
            missing_files = data['all_files'].difference(
                data['received_files'])
            if len(missing_files) > 0:
                self.logger.warning("Missing files: %s",
                                    ', '.join(missing_files))

        # Remove tags that are not necessary for datasets
        for tag in REMOVE_TAGS:
            try:
                del data['metadata'][tag]
            except KeyError:
                pass

        msg = message.Message(self._subject, "dataset", data['metadata'])
        self.logger.info("Sending: %s", str(msg))
        self._publisher.send(str(msg))
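`REMOVE_TAGS` is a module-level constant not shown in the snippet; a hypothetical placeholder, just to make the deletion loop self-contained (the real module defines its own set):

# Hypothetical: per-file tags that make no sense on dataset-level metadata.
REMOVE_TAGS = {"segment", "path"}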
Example #6
    def __init__(self, name, address, data_type, interval=2, nameservers=None):
        msg = message.Message("/address/%s" % name, "info", {
            "URI": address,
            "service": data_type
        }).encode()
        MessageBroadcaster.__init__(self, msg, broadcast_port, interval,
                                    nameservers)
Example #7
    def _add_existing_files_to_slot(self, slot, fnames, message):
        for fname in fnames:
            meta = {
                "uid": os.path.basename(fname),
                "uri": fname,
                "sensor": message._posttroll_message.data["sensor"]
            }
            msg = self.message_from_posttroll(
                pmessage.Message(message._posttroll_message.subject, "file",
                                 meta))
            slot.add_file(msg)
Example #8
    def publish_collection(self, metadata):
        """Terminate the gathering."""
        subject = self._get_topic(metadata[0])
        mda = _merge_metadata(metadata)

        if mda:
            msg = message.Message(subject, "collection", mda)
            logger.info("sending %s", str(msg))
            self.publisher.send(str(msg))
        else:
            logger.warning("Malformed metadata, no key: %s", "uri")
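`_merge_metadata` is a module-level helper not shown here; judging from the terminator variants above, a compatible sketch could look like the following (hypothetical and simplified, not the project's actual helper):

def _merge_metadata(metadata):
    """Hypothetical sketch: merge granule metadata into one collection dict."""
    sorted_mda = sorted(metadata, key=lambda x: x["start_time"])
    if not any("uri" in m or "dataset" in m for m in sorted_mda):
        return None  # caller logs "Malformed metadata" and skips publishing
    mda = sorted_mda[0].copy()
    mda["end_time"] = sorted_mda[-1]["end_time"]
    mda["collection"] = [
        {key: m[key] for key in ("dataset", "uri", "uid",
                                 "start_time", "end_time") if key in m}
        for m in sorted_mda
    ]
    # Drop the per-granule keys from the collection-level metadata.
    for key in ("dataset", "uri", "uid"):
        mda.pop(key, None)
    return mda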
Example #9
def terminator(metadata, publish_topic=None):
    """Dummy terminator function.
    """
    sorted_mda = sorted(metadata, key=lambda x: x["start_time"])

    mda = metadata[0].copy()

    if publish_topic is not None:
        LOGGER.info("Composing topic.")
        subject = compose(publish_topic, mda)
    else:
        LOGGER.info("Using default topic.")
        subject = "/".join(
            ("", mda["format"], mda["data_processing_level"], ''))

    mda['start_time'] = sorted_mda[0]['start_time']
    mda['end_time'] = sorted_mda[-1]['end_time']
    mda['collection_area_id'] = sorted_mda[-1]['collection_area_id']
    mda['collection'] = []

    is_correct = False
    for meta in sorted_mda:
        new_mda = {}
        if "uri" in meta or 'dataset' in meta:
            is_correct = True
        for key in ['dataset', 'uri', 'uid']:
            if key in meta:
                new_mda[key] = meta[key]
        # Set the granule's time span once, outside the key loop.
        new_mda['start_time'] = meta['start_time']
        new_mda['end_time'] = meta['end_time']
        mda['collection'].append(new_mda)

    for key in ['dataset', 'uri', 'uid']:
        if key in mda:
            del mda[key]

    if is_correct:
        msg = message.Message(subject, "collection", mda)
        LOGGER.info("sending %s", str(msg))
        PUB.send(str(msg))
    else:
        LOGGER.warning("Malformed metadata, no key: %s", "uri")
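The `compose` call is presumably trollsift's pattern composer, which fills a topic template from the metadata; a rough usage sketch (the pattern and metadata values are made up):

from trollsift import compose

mda = {"platform_name": "Metop-B", "format": "EPS",
       "data_processing_level": "1b"}
print(compose("/collection/{platform_name}/{data_processing_level}", mda))
# -> /collection/Metop-B/1b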
Example #10
    def _publish(self):
        """Publish file dataset and reinitialize gatherer."""

        # Diagnostic logging about delayed ...
        if len(self.delayed_files) > 0:
            file_str = ''
            for key in self.delayed_files:
                file_str += "%s %f seconds, " % (key, self.delayed_files[key])
            self.logger.warning("Files received late: %s",
                                file_str.strip(', '))
        # and missing files
        missing_files = self.all_files.difference(self.received_files)
        if len(missing_files) > 0:
            self.logger.warning("Missing files: %s", ', '.join(missing_files))

        msg = message.Message(self._subject, "dataset", self.metadata)
        self.logger.info("Sending: %s", str(msg))
        self._publisher.send(str(msg))

        self._clear_data()
Example #11
    def __init__(self, name, address, interval, nameservers):
        # ``address`` is expected to be a (host, port) tuple so the
        # "%s:%d" formatting below yields e.g. "192.168.0.10:9010".
        msg = message.Message("/address/%s" % name, "info",
                              {"URI": "%s:%d" % address}).encode()
        MessageBroadcaster.__init__(self, msg, broadcast_port, interval,
                                    nameservers)
Example #12
    def test_WorldCompositeDaemon(self):
        """Test WorldCompositeDaemon"""

        # Test incoming message handling and saving

        # Epoch: message sending time
        config = {
            "topics": ["/test"],
            "area_def": ADEF,
            "timeout_epoch": "message",
            "timeout": 45,
            "num_expected": 5,
            "out_pattern": os.path.join(THIS_DIR, "data", "test_out.png")
        }

        comp = gm.WorldCompositeDaemon(config)

        # There should be no slots
        self.assertEqual(len(comp.slots), 0)

        for i in range(len(self.sat_fnames)):
            msg = message.Message(
                "/test", "file", {
                    "uri": self.sat_fnames[i],
                    "nominal_time": self.tslot,
                    "productname": "wv"
                })
            epoch = msg.time
            comp._handle_message(msg)

            # Number of slots
            self.assertEqual(len(comp.slots), 1)

            # Number of composites
            self.assertEqual(len(comp.slots[self.tslot]), 1)

            # Number of files
            self.assertEqual(comp.slots[self.tslot]["wv"]["num"], i + 1)

            # Timeout
            diff = (comp.slots[self.tslot]["wv"]["timeout"] -
                    (epoch + dt.timedelta(minutes=config["timeout"])))
            self.assertAlmostEqual(diff.total_seconds(), 0.0, places=2)

            comp._check_timeouts_and_save()

            # Saving should not happen before all the images are received
            if i < 4:
                self.assertEqual(comp.slots[self.tslot]["wv"]["num"], i + 1)
            else:
                # After fifth image the composite should be saved and
                # all composites and slots removed
                self.assertEqual(len(comp.slots), 0)
                self.assertTrue(os.path.exists(config["out_pattern"]))
                # Remove the file
                os.remove(config["out_pattern"])

        comp.stop()

        # Epoch: file nominal time
        config = {
            "topics": ["/test"],
            "area_def": ADEF,
            "timeout_epoch": "nominal_time",
            "timeout": 45,
            "num_expected": 5,
            "out_pattern": os.path.join(THIS_DIR, "data", "test_out.png")
        }

        comp = gm.WorldCompositeDaemon(config)

        for i in range(len(self.sat_fnames)):
            msg = message.Message(
                "/test", "file", {
                    "uri": self.sat_fnames[i],
                    "nominal_time": self.tslot,
                    "productname": "wv"
                })
            epoch = self.tslot
            comp._handle_message(msg)

            # Number of slots
            self.assertEqual(len(comp.slots), 1)

            # Number of files should be one every time
            self.assertEqual(comp.slots[self.tslot]["wv"]["num"], 1)

            # Timeout
            self.assertEqual(comp.slots[self.tslot]["wv"]["timeout"],
                             (epoch + dt.timedelta(minutes=config["timeout"])))

            # Output file should be present after the first run
            if i > 0:
                self.assertTrue(os.path.exists(config["out_pattern"]))

            comp._check_timeouts_and_save()

            # There shouldn't be any slots now
            self.assertEqual(len(comp.slots), 0)

        # Remove the file
        os.remove(config["out_pattern"])

        # Stop compositor daemon
        comp.stop()