Example #1
0
    def emit(self,
             frame,
             compress=False,
             metadata=None,
             snapshot_num=0,
             **kwargs):
        """Emit a frame to the broker, with per-call retries.

        :param frame: frame whose ``metadata`` dict supplies the topic tag
            (key ``'namespace'``) and an ISO-8601 timestamp (key
            ``'timestamp'``); the last 5 characters of the timestamp
            (presumably a timezone designator — TODO confirm) are stripped
            before parsing.
        :param compress: not supported; raises ``NotImplementedError``
            when True.
        :param metadata: unused here; kept for interface compatibility.
            (Default changed from the mutable ``{}`` to ``None`` to avoid
            the shared-mutable-default pitfall; no caller-visible change
            since it is never read.)
        :param snapshot_num: unused here; kept for interface compatibility.
        :return: None
        """
        if compress:
            raise NotImplementedError('Compress not implemented.')

        tag = frame.metadata.get('namespace', '')
        timestamp = frame.metadata.get('timestamp', '')
        # Strip the trailing 5 chars (timezone suffix) before strptime.
        timestamp = time.mktime(
            time.strptime(timestamp[:-5], '%Y-%m-%dT%H:%M:%S'))

        if self.emit_per_line:
            # One retried emit per serialized item.
            for json_item in self.get_json_item(frame):
                call_with_retries(self._emit,
                                  max_retries=self.max_retries,
                                  _args=(tag, timestamp, json_item))
        else:
            # Retried emit of the whole frame at once.
            call_with_retries(self.emit_frame_atonce,
                              max_retries=self.max_retries,
                              _args=(tag, timestamp, frame))
    def __init__(self, url, timeout=1, max_retries=10,
                 emit_per_line=False):
        """Kafka emitter: parse ``kafka://<broker>/<topic>`` and connect.

        :param url: broker URL of the form ``kafka://host:port/topic``.
        :param timeout: forwarded to :class:`BaseEmitter`.
        :param max_retries: retry budget for broker operations.
        :param emit_per_line: forwarded to :class:`BaseEmitter`.
        :raises ValueError: if the url does not split into exactly
            broker and topic.
        """
        BaseEmitter.__init__(self, url,
                             timeout=timeout,
                             max_retries=max_retries,
                             emit_per_line=emit_per_line)

        try:
            # Expected form: kafka://<broker>/<topic>
            broker, topic = url[len('kafka://'):].split('/')
        except (ValueError, KeyError, TypeError) as exc:
            # A malformed url raises ValueError on the tuple unpacking
            # (the original only caught KeyError/TypeError, so that case
            # escaped unlogged). logger.warn is deprecated -> warning.
            logger.warning('Can not parse the url provided.')
            raise exc

        self.client = None
        self.producer = None

        call_with_retries(self.connect_to_broker,
                          max_retries=self.max_retries,
                          _args=(broker, topic))
Example #3
0
    def init(self, url, timeout=1, max_retries=10, emit_format='csv'):
        """Kafka emitter setup: parse ``kafka://<broker>/<topic>`` and connect.

        :param url: broker URL of the form ``kafka://host:port/topic``.
        :param timeout: forwarded to :class:`IEmitter`.
        :param max_retries: retry budget for broker operations.
        :param emit_format: ``'json'`` switches to per-line emission.
        :raises ValueError: if the url does not split into exactly
            broker and topic.
        """
        IEmitter.init(self, url,
                      timeout=timeout,
                      max_retries=max_retries,
                      emit_format=emit_format)

        if emit_format == 'json':
            self.emit_per_line = True

        try:
            # Expected form: kafka://<broker>/<topic>
            broker, topic = url[len('kafka://'):].split('/')
        except (ValueError, KeyError, TypeError) as exc:
            # A malformed url raises ValueError on the tuple unpacking
            # (previously uncaught). logger.warn is deprecated -> warning.
            logger.warning('Can not parse the url provided.')
            raise exc

        self.client = None
        self.producer = None

        call_with_retries(self.connect_to_broker,
                          max_retries=self.max_retries,
                          _args=(broker, topic))
Example #4
0
    def init(self, url, timeout=1, max_retries=5, emit_format='fluentd'):
        """Fluentd emitter setup: parse ``fluentd://IP:PORT`` and connect.

        :param url: engine URL of the form ``fluentd://host:port``.
        :param timeout: stored on the instance (connection timeout).
        :param max_retries: retry budget for engine operations.
        :param emit_format: must be ``'json'``; anything else raises.
        :raises EmitterUnsupportedFormat: for any format other than json.
        :raises ValueError: if the url does not split into host and port.
        """
        self.url = url
        self.timeout = timeout
        self.max_retries = max_retries
        self.emit_per_line = True

        if emit_format != 'json':
            raise EmitterUnsupportedFormat('Not supported: %s' % emit_format)

        try:
            # Assumption: URL for the fluentd engine is of the form
            # fluentd://IP:PORT
            host, port = url[len('fluentd://'):].split(':')
        except (ValueError, KeyError, TypeError) as exc:
            # A malformed url raises ValueError on the tuple unpacking
            # (previously uncaught). logger.warn is deprecated -> warning.
            logger.warning('Can not parse the url provided.')
            raise exc

        self.fluentd_sender = None

        call_with_retries(self.connect_to_fluentd_engine,
                          max_retries=self.max_retries,
                          _args=(host, int(port)))
Example #5
0
    def emit(self, frame, compress=False,
             metadata=None, snapshot_num=0, **kwargs):
        """Format a frame and push it to the producer, with retries.

        :param frame: frame to serialize via ``self.format``.
        :param compress: not supported; raises ``NotImplementedError``
            when True.
        :param metadata: unused here; kept for interface compatibility.
            (Default changed from the mutable ``{}`` to ``None`` to avoid
            the shared-mutable-default pitfall; no caller-visible change
            since it is never read.)
        :param snapshot_num: unused here; kept for interface compatibility.
        :return: None
        """
        iostream = self.format(frame)
        if compress:
            raise NotImplementedError('Compress not implemented.')

        if self.emit_per_line:
            iostream.seek(0)
            for line in iostream.readlines():
                # Pass the line itself as the retried argument; the
                # original passed the (unused) iostream and closed over
                # `line`, which obscured what was actually being sent.
                call_with_retries(lambda ln: self.producer.produce([ln]),
                                  max_retries=self.max_retries,
                                  _args=(line,))
        else:
            # Send the whole buffered payload in a single produce call.
            call_with_retries(
                lambda io: self.producer.produce([io.getvalue()]),
                max_retries=self.max_retries,
                _args=(iostream,))
    def emit(self, iostream, compress=False,
             metadata=None, snapshot_num=0):
        """Push an already-formatted buffer to the producer, with retries.

        :param iostream: a CStringIO used to buffer the formatted features.
        :param compress: not supported; raises ``NotImplementedError``
            when True.
        :param metadata: unused here; kept for interface compatibility.
            (Default changed from the mutable ``{}`` to ``None`` to avoid
            the shared-mutable-default pitfall; no caller-visible change
            since it is never read.)
        :param snapshot_num: unused here; kept for interface compatibility.
        :return: None
        """
        if compress:
            raise NotImplementedError('Compress not implemented.')

        if self.emit_per_line:
            iostream.seek(0)
            for line in iostream.readlines():
                # Pass the line itself as the retried argument; the
                # original passed the (unused) iostream and closed over
                # `line`, which obscured what was actually being sent.
                call_with_retries(lambda ln: self.producer.produce([ln]),
                                  max_retries=self.max_retries,
                                  _args=(line,))
        else:
            # Send the whole buffered payload in a single produce call.
            call_with_retries(
                lambda io: self.producer.produce([io.getvalue()]),
                max_retries=self.max_retries,
                _args=(iostream,))