Example #1
def init_global_fsender(tag, host, port):
    from fluent.sender import FluentSender

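    # Lazily create one module-level FluentSender; assumes the module defines
    # fsender = None and an linfo() logging helper elsewhere.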
    global fsender
    if fsender is None:
        fsender = FluentSender(tag, host, port)
        linfo("init_global_fsender")
Example #2
    def __init__(
            self,
            mongos,
            mongor_ssl=False,
            cursor_timeout=600,  # 10 minutes
            fluentd="localhost:24224",
            logtag="rabidmongoose"):
        self.connections = {}
        self.cursor_timeout = cursor_timeout
        self.mongor_uri = "".join(mongos)
        self.mongor_host = self.mongor_uri[10:].split(":")[0]
        self.mongor_port = int(self.mongor_uri[10:].split(":")[1])
        self.mongor_ssl = mongor_ssl
        LOGGER.debug('INIT - URI: %s, Host: %s, Port: %s' %
                     (self.mongor_uri, self.mongor_host, self.mongor_port))
        if HAS_FLUENT:
            self.sender = FluentSender(host=fluentd.split(":")[0],
                                       port=int(fluentd.split(":")[1]),
                                       tag=logtag)
            LOGGER.addHandler(
                FluentHandler("%s.debug" % (logtag),
                              host=fluentd.split(":")[0],
                              port=int(fluentd.split(":")[1])))
        else:
            self.sender = FakeFluentSender()
Example #3
    def __init__(self, tag, pdir, stream_cfg, send_term,
                 max_send_fail, echo, encoding, lines_on_start,
                 max_between_data):
        """
        Tailer common class initialization

        Args:
            tag: Classification tag for Fluentd
            pdir: Position file directory
            stream_cfg: Log streaming service config (Fluentd / Kinesis)
            send_term: Log transmission time interval
            max_send_fail: Maximum number of retries in case of transmission
                failure
            echo: Whether to save sent messages
            encoding: Original message encoding
            lines_on_start: How many lines of existing log will be resent
                at startup (for debugging)
            max_between_data: When the service is restarted, unsent logs
                smaller than this amount are sent.
        """
        super(BaseTailer, self).__init__()
        self.fsender = self.kclient = None
        self.ksent_seqn = self.ksent_shid = None
        self.linfo("__init__", "max_send_fail: '{}'".format(max_send_fail))

        self.last_get_hinfo = 0
        self.sname, self.saddr = self.get_host_info()
        tag = "{}.{}".format(self.sname.lower() if self.sname is not None else
                             None, tag)
        self.linfo(1, "tag: '{}'".format(tag))
        self.tag = tag
        self.send_term = send_term
        self.last_send_try = 0
        self.last_update = 0
        self.kpk_cnt = 0  # count for kinesis partition key
        self.pdir = pdir

        max_send_fail = max_send_fail if max_send_fail else MAX_SEND_FAIL
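        # Pick the transport based on the stream config type: Fluentd or Kinesis.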
        tstc = type(stream_cfg)
        if tstc == FluentCfg:
            host, port = stream_cfg
            self.fsender = FluentSender(tag, host, port,
                                        max_send_fail=max_send_fail)
        elif tstc == KinesisCfg:
            stream_name, region, access_key, secret_key = stream_cfg
            self.kstream_name = stream_name
            self.ldebug('query_aws_client kinesis {}'.format(region))
            self.kclient = query_aws_client('kinesis', region, access_key,
                                            secret_key)
            self.kagg = aggregator.RecordAggregator()

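        # Remaining bookkeeping: retry counter, optional echo buffer, and sent-position cache.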
        self.send_retry = 0
        self.echo_file = StringIO() if echo else None
        self.cache_sent_pos = {}
        self.encoding = encoding
        self.lines_on_start = lines_on_start if lines_on_start else 0
        self.max_between_data = max_between_data if max_between_data else\
            MAX_BETWEEN_DATA
Example #4
async def test_fluentd_forward_source_events(elasticsearch, http_client,
                                             docker_ip):
    """FluentD forwards "native" records to ElasticSearch."""

    # Grab the current date (we'll need to use it several times and we don't
    # want to get flaky results when we execute the tests around midnight).
    today = datetime.date.today()

    # Clear the index.
    url = urljoin(elasticsearch, 'events-%s' % (today.isoformat(), ))
    async with http_client.delete(url) as resp:
        # May get 404 if we're the first test to run, but we'll get 200 if we
        # successfully delete the index.
        assert resp.status in (200, 404)

    # Post an event with a tag that matches `events.**` rule in `fluent.conf`.
    fluent = FluentSender('events.test', host=docker_ip, port=24224)
    fluent.emit('an-event', {
        'some-field': 'some-value',
    })

    # Wait until the record shows up in search results.
    await asyncio.sleep(datetime.timedelta(seconds=3).total_seconds())

    # Grab the record.
    url = urljoin(
        elasticsearch, 'events-%04d-%02d-%02d/events/_search' % (
            today.year,
            today.month,
            today.day,
        ))
    async with http_client.get(url) as resp:
        assert resp.status == 200
        body = await resp.json()
    assert body['hits']['total'] == 1
    assert body['hits']['hits'][0]['_source'] == {
        'service': 'test',
        'event': 'an-event',
        'some-field': 'some-value',
        '@timestamp': mock.ANY,
    }

    # Grab index stats, check that index exists and that we have our data.
    url = urljoin(elasticsearch, '_cat/indices?v')
    async with http_client.get(url) as resp:
        assert resp.status == 200
        body = await resp.text()
    print('STATS:')
    print(body)
    print('------')
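    # Parse the whitespace-delimited table returned by _cat/indices into per-index dicts.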
    lines = body.split('\n')
    lines = [line.split() for line in lines if line.strip()]
    assert len(lines) >= 2
    stats = [dict(zip(lines[0], line)) for line in lines[1:]]
    stats = {index['index']: index for index in stats}
    index = stats.pop('events-%04d-%02d-%02d' %
                      (today.year, today.month, today.day))
    assert int(index['docs.count']) >= 1
Example #5
    def __init__(self, name, host='localhost', port=24224):
        if FluentSender is None:
            raise Exception(
                "Attempted to use FluentTraceLogger with no Fluent dependency available."
            )
        self.lock = threading.Lock()
        self.thread_local = threading.local()
        self.name = name
        self.sender = FluentSender(self.name, host=host, port=port)
Example #6
    def connect(self, tag=None):
        """
        Connect to a fluentd logger

        """

        if not tag:
            tag = self.tag

        timeout = float(self.timeout)
        port = int(self.port)
        buffer_max = int(self.buffer_max)

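        # FluentSender takes tag, host, port, bufmax and timeout as its positional arguments.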
        self.sender = FluentSender(tag, self.host, port, buffer_max, timeout)
Example #7
def main():

    parser = argparse.ArgumentParser()
    parser.add_argument("-i",
                        "--ident",
                        help="Unique identifier of the honeypot.")
    parser.add_argument("-n", "--hostname", help="Hostname of the honeypot.")
    parser.add_argument("-t", "--type", help="Honeypot type.")
    parser.add_argument("-a", "--address", help="IP address of the honeypot.")
    parser.add_argument("--fluent-host",
                        default="fluentbit",
                        help="Hostname of Fluent Bit server")
    parser.add_argument("--fluent-port",
                        type=int,
                        default=24284,
                        help="Port of Fluent Bit server")
    parser.add_argument("--fluent-app",
                        default="stingar",
                        help="Application name for Fluent Bit server")
    parser.add_argument("--tags", help="Comma separated tags for honeypot.")
    args = parser.parse_args()

    dt = datetime.datetime.now()

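    # Registration record for the honeypot; SENSOR_TOPIC is defined elsewhere in the module.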
    data = {
        "identifier": args.ident,
        "hostname": args.hostname,
        "honeypot": args.type,
        "ip": args.address,
        "created": dt.strftime("%Y/%m/%d %H:%M:%S"),
        "updated": dt.strftime("%Y/%m/%d %H:%M:%S"),
        "tags": args.tags
    }

    sender = FluentSender(args.fluent_app,
                          host=args.fluent_host,
                          port=args.fluent_port)

    sender.emit(SENSOR_TOPIC, data)
    return 0
Example #8
    def _get_sender(self, tag, host, port, bufmax, timeout):
        """
        Get the Fluentd sender for the given arguments.

        Arguments:
        tag     --  The base tag of the log event, representing the
                    application.
        host    --  The host name or IP address of the fluentd server.
        port    --  The port number the fluentd server is listening on.
        bufmax  --  The size of the buffer.
        timeout --  The timeout, in seconds.

        Returns:
        A FluentSender with the given configuration.
        """
        sender = None
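        # Reuse a cached sender when one already exists for this exact configuration.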
        key = "%s%s%d%d%f" % (tag, host, port, bufmax, timeout)
        if key in self._senders:
            sender = self._senders[key]
        else:
            sender = FluentSender(tag, host, port, bufmax, timeout)
            self._senders[key] = sender
        return sender
Example #9
    def __init__(self,
                 logsense_token=None,
                 tag='python',
                 meta={},
                 logsense_host=None,
                 logsense_port=None,
                 verbose=False,
                 nanosecond_precision=False):
        internal_logger = logging.getLogger('logsense.sender')
        self._logsense_token = logsense_token

        if logsense_host:
            self._logsense_host = logsense_host
        else:
            self._logsense_host = getenv('LOGSENSE_HOST', 'logs.logsense.com')

        if logsense_port:
            self._logsense_port = logsense_port
        else:
            self._logsense_port = int(getenv('LOGSENSE_PORT', '32714'))

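        # Without a token the handler is effectively disabled instead of raising.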
        if self._logsense_token is None:
            self._logger = None
            print("LOGSENSE_TOKEN not set - skipping handler")
        else:
            self._verbose = verbose
            self._logger = FluentSender(
                tag,
                host=self._logsense_host,
                ssl_server_hostname=self._logsense_host,
                port=self._logsense_port,
                use_ssl=True,
                verbose=self._verbose)

            self._base_dict = self.update_meta(meta)

        self.nanosecond_precision = nanosecond_precision
Example #10
    def __init__(self, name, host='localhost', port=24224):
        assert FluentSender is not None, FLUENT_IMPORT_MESSAGE
        self.lock = threading.Lock()
        self.thread_local = threading.local()
        self.name = name
        self.sender = FluentSender(self.name, host=host, port=port)
Example #11
    def __init__(self, name, host='localhost', port=24224):
        self.lock = threading.Lock()
        self.thread_local = threading.local()
        self.name = name
        self.sender = FluentSender(self.name, host=host, port=port)
Example #12
def main():
    parser = argparse.ArgumentParser(
        add_help=False,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    parser.add_argument(
        '--help',
        action='help',
        default=argparse.SUPPRESS,
        help=argparse._('show this help message and exit')
    )
    parser.add_argument(
        '-v', '--verbose', action='store_true', help='Display debug messages'
    )

    parser.add_argument('-h', '--host', type=str, default='127.0.0.1',
                        help='Listen host')
    parser.add_argument('-p', '--port', type=int, default=8080,
                        help='Listen port')

    parser.add_argument('-w', '--workers', type=int,
                        default=max(1, multiprocessing.cpu_count() - 1),
                        help='Number of workers')

    parser.add_argument('--sentry-dsn', type=str, help='Sentry DSN')

    parser.add_argument('--redis-host', type=str, default='127.0.0.1',
                        help='Redis host')
    parser.add_argument('--redis-port', type=int, default=6379,
                        help='Redis port')
    parser.add_argument('--redis-password', type=str, default=None,
                        help='Redis password')

    parser.add_argument('--disable-fluent', action='store_true', default=False,
                        help='If set, do not send logs to fluent')
    parser.add_argument('--fluent-host', type=str, default='127.0.0.1',
                        help='Fluentd host')
    parser.add_argument('--fluent-port', type=int, default=24224,
                        help='Fluentd port')

    parser.add_argument('--auth-enabled', action='store_true', default=False,
                        help='Enable authentication')
    parser.add_argument('--api-url', type=str, default='http://127.0.0.1:5000',
                        help='APITaxi URL, used when authentication is enabled to retrieve users')

    args = parser.parse_args()

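    # Optional Sentry error reporting.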
    if args.sentry_dsn:
        sentry_sdk.init(args.sentry_dsn, traces_sample_rate=1.0)

    loglevel = logging.DEBUG if args.verbose else logging.INFO
    logging.config.dictConfig({
        'version': 1,
        'disable_existing_loggers': False,

        'formatters': {
            'default': {
                '()': FormatWithPID,
                'format': '%(asctime)s (pid %(pid)s) %(message)s'
            }
        },
        'handlers': {
            'console': {
                'level': loglevel,
                'class': 'logging.StreamHandler',
                'formatter': 'default',
            }
        },
        'loggers': {
            '': {
                'handlers': ['console'],
                'level': loglevel,
            }
        }
    })

    if not args.auth_enabled:
        logger.warning('Authentication is not enabled')

    api_key = os.getenv('API_KEY')
    if args.auth_enabled and not api_key:
        parser.error('--auth-enabled is set but the API_KEY environment variable is not set')

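    # Send logs to Fluentd unless --disable-fluent is given.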
    if args.disable_fluent:
        fluent = None
    else:
        fluent = FluentSender('geotaxi', host=args.fluent_host, port=args.fluent_port)

    redis = Redis(
        host=args.redis_host,
        port=args.redis_port,
        password=args.redis_password,
        socket_keepalive=True,
    )

    worker = Worker(
        redis,
        fluent=fluent,
        auth_enabled=args.auth_enabled, api_url=args.api_url, api_key=api_key
    )

    run_server(args.workers, args.host, args.port, worker)
Example #13
from fluent.sender import FluentSender

logger = FluentSender("app", host="localhost", port=24224)

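# emit() appends its label to the base tag, so the records arrive as "app.follow" and "app.bof".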
logger.emit("follow", {"from": "userA", "to": "userB"})
logger.emit("bof", dict(beuha="aussi", age=42))

logger.close()