Example #1
0
    def poll(self, timeout: float = -1) -> tuple:
        """ Consume one message from Fink server

        Parameters
        ----------
        timeout: float, optional
            Maximum time in seconds to block waiting for a message.
            Default is -1, i.e. wait indefinitely.

        Returns
        ----------
        (topic, alert, key): tuple(str, dict, str)
            Returns (None, None, None) on timeout.

        Raises
        ----------
        AlertError
            If the Kafka message carries an error.
        """
        msg = self._consumer.poll(timeout)
        if msg is None:
            # Timeout: no message was available
            return None, None, None

        # msg.error() returns None or a KafkaError
        if msg.error():
            error_message = """
            Error: {} topic: {}[{}] at offset: {} with key: {}
            """.format(msg.error(), msg.topic(), msg.partition(), msg.offset(),
                       str(msg.key()))
            raise AlertError(error_message)

        topic = msg.topic()

        # decode the key if it is bytes
        key = msg.key()
        if isinstance(key, bytes):
            key = key.decode('utf8')

        if key is None:
            # backward compatibility: old alerts were written without a key
            key = '1.0_0.4.3'

        # Get the schema: an explicit schema path takes precedence,
        # otherwise the key (guaranteed non-None at this point) selects it.
        if self.schema_path is not None:
            _parsed_schema = _get_alert_schema(schema_path=self.schema_path)
        else:
            _parsed_schema = _get_alert_schema(key=key)

        self._parsed_schema = _parsed_schema
        avro_alert = io.BytesIO(msg.value())
        alert = _decode_avro_alert(avro_alert, _parsed_schema)

        return topic, alert, key
Example #2
0
    def consume(self, num_alerts: int = 1, timeout: float = -1) -> list:
        """ Consume and return a list of messages

        Parameters
        ----------
        num_alerts: int
            Maximum number of messages to return. Default is 1.
        timeout: float
            Maximum time in seconds to block waiting for messages.
            Default is -1, i.e. wait indefinitely.

        Returns
        ----------
        list: [tuple(str, dict, str)]
            List of (topic, alert, key) tuples.
            Returns an empty list on timeout.
        """
        alerts = []
        msg_list = self._consumer.consume(num_alerts, timeout)

        for msg in msg_list:
            topic = msg.topic()

            # decode the key if it is bytes
            key = msg.key()
            if isinstance(key, bytes):
                key = key.decode('utf8')

            if key is None:
                # backward compatibility: old alerts were written without a key
                key = '1.0_0.4.3'

            # Get the schema: an explicit schema path takes precedence,
            # otherwise the key (guaranteed non-None at this point) selects it.
            if self.schema_path is not None:
                _parsed_schema = _get_alert_schema(
                    schema_path=self.schema_path)
            else:
                _parsed_schema = _get_alert_schema(key=key)

            avro_alert = io.BytesIO(msg.value())
            alert = _decode_avro_alert(avro_alert, _parsed_schema)

            alerts.append((topic, alert, key))

        return alerts
Example #3
0
    def poll_and_write(self,
                       outdir: str,
                       timeout: float = -1,
                       overwrite: bool = False) -> tuple:
        """ Consume one message from Fink server, save the alert on disk and
        return (topic, alert, key)

        Parameters
        ----------
        outdir: str
            Folder to store the alert. It must exist.
        timeout: float, optional
            Maximum time in seconds to block waiting for a message.
            Default is -1, i.e. wait indefinitely.
        overwrite: bool, optional
            If True, allow an existing alert to be overwritten.
            Default is False.

        Returns
        ----------
        (topic, alert, key): tuple(str, dict, str)
            Returns (None, None, None) on timeout.

        """
        topic, alert, key = self.poll(timeout)

        if topic is not None:
            # Get the schema: an explicit schema path takes precedence,
            # otherwise the key returned by poll() (never None when a
            # message was received) selects it.
            if self.schema_path is not None:
                _parsed_schema = _get_alert_schema(
                    schema_path=self.schema_path)
            else:
                _parsed_schema = _get_alert_schema(key=key)

            write_alert(alert, _parsed_schema, outdir, overwrite=overwrite)

        return topic, alert, key
Example #4
0
    def __init__(self, topics: list, config: dict, schema=None):
        """Creates an instance of `AlertConsumer`

        Parameters
        ----------
        topics : list of str
            list of topics to subscribe
        config: dict
            Dictionary of configurations. Allowed keys are:
            username: str
                username for API access
            password: str
                password for API access
            group_id: str
                group.id for Kafka consumer
            bootstrap.servers: str, optional
                Kafka servers to connect to
        schema: optional
            Passed as ``schema_path`` to ``_get_alert_schema`` to pre-parse
            the alert schema. Default is None — presumably a path to an Avro
            schema file; verify against ``_get_alert_schema``.
        """
        # Topics to subscribe to
        self._topics = topics
        # Normalise the user-supplied configuration into a Kafka config dict
        self._kafka_config = _get_kafka_config(config)
        # Pre-parse the alert schema from the optional path
        self._parsed_schema = _get_alert_schema(schema_path=schema)
        # Create the Kafka consumer and subscribe to the requested topics
        self._consumer = confluent_kafka.Consumer(self._kafka_config)
        self._consumer.subscribe(self._topics)