Example #1
def parse_date(timestamp):
    try:
        return common.parse_date(timestamp, DATE_FORMAT)
    except ValueError:
        # Only Python >= 2.6 supports %f in the date string
        timestamp, _ = timestamp.split(',')
        return common.parse_date(timestamp, LEGACY_DATE_FORMAT)
Example #2
def parse_date(timestamp):
    try:
        return common.parse_date(timestamp, DATE_FORMAT)
    except ValueError:
        # Only Python >= 2.6 supports %f in the date string
        timestamp, _ = timestamp.split(',')
        return common.parse_date(timestamp, LEGACY_DATE_FORMAT)
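Both parse_date variants above implement the same fallback: try the full timestamp format first and, if %f is not accepted, drop the millisecond suffix after the comma and retry with a legacy format. Below is a minimal, self-contained sketch of that pattern using datetime.strptime; the concrete format strings are assumptions for illustration, not the module's actual DATE_FORMAT / LEGACY_DATE_FORMAT constants.

from datetime import datetime

# Assumed formats for illustration only; the real constants live in the
# dogstream module and may differ.
DATE_FORMAT = "%Y-%m-%d %H:%M:%S,%f"
LEGACY_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"

def parse_date(timestamp):
    try:
        # Full format, e.g. "2012-05-12 21:05:12,924"
        return datetime.strptime(timestamp, DATE_FORMAT)
    except ValueError:
        # Runtimes without %f support: drop the millisecond suffix and retry
        timestamp, _ = timestamp.split(',')
        return datetime.strptime(timestamp, LEGACY_DATE_FORMAT)

print(parse_date("2012-05-12 21:05:12,924"))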
Example #3
def parse_cassandra(log, line):
    matched = LOG_PATTERN.match(line)
    if matched:
        event = matched.groupdict()

        # Convert the timestamp string into an epoch timestamp
        time_val = event.get('time', None)
        if time_val:
            event['timestamp'] = common.parse_date("%s %s" % (datetime.utcnow().strftime("%Y-%m-%d"), time_val), DATE_FORMAT)
        else:
            event['timestamp'] = common.parse_date(event['timestamp'], DATE_FORMAT)
        del event['time']

        # Process the log priority
        event['alert_type'] = ALERT_TYPES.get(event['priority'], "info")
        if event['alert_type'] in ('error', 'warning'):
            event['auto_priority'] = 1
        else:
            event['auto_priority'] = 0
        del event['priority']

        # Process the aggregation metadata
        event['event_type'] = EVENT_TYPE

        # Process the message
        msg = event['msg']
        if len(msg) > common.MAX_TITLE_LEN:
            event['msg_title'] = msg[0:common.MAX_TITLE_LEN]
            event['msg_text'] = msg
        else:
            event['msg_title'] = msg
        del event['msg']

        return [event]
    else:
        return None
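parse_cassandra depends on module-level names that are not shown on this page: LOG_PATTERN, ALERT_TYPES, EVENT_TYPE, DATE_FORMAT, common and datetime. The regex below is only an illustrative guess, inferred from the named groups the parser reads ('priority', 'timestamp', 'time', 'msg') and from the log lines in Example #5; judging by that test's expected output, the real LOG_PATTERN is more selective, since only the Compacting/Compacted lines produce events.

import re

# Assumed pattern for illustration; not the actual LOG_PATTERN from
# dogstream/cassandra.py. It captures the same named groups the parser uses.
LOG_PATTERN = re.compile(
    r"\s*(?P<priority>INFO|WARN|ERROR|DEBUG)\s+"             # log priority
    r"(?:\[[^\]]+\]\s+)?"                                    # optional "[CompactionExecutor:1632]"
    r"(?:(?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d+)"
    r"|(?P<time>\d{2}:\d{2}:\d{2},\d+))\s+"                  # full timestamp, or time-only (legacy lines)
    r"(?P<msg>.*)"                                           # free-form message
)

line = " INFO [CompactionExecutor:1632] 2012-05-13 13:27:17,685 Compacting large row test_data/series:6c6f677c32 (782001077 bytes) incrementally"
matched = LOG_PATTERN.match(line)
print(matched.groupdict()["timestamp"], matched.groupdict()["msg"])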
Example #4
def parse_date(timestamp):
    return common.parse_date(timestamp, DATE_FORMAT)
Example #5
    def test_cassandra_parser(self):
        from datetime import datetime
        from dogstream import cassandra, common

        log_data = """ INFO [CompactionExecutor:1594] 2012-05-12 21:05:12,924 Saved test_data-Encodings-KeyCache (86400 items) in 85 ms
 INFO [CompactionExecutor:1595] 2012-05-12 21:05:15,144 Saved test_data-Metrics-KeyCache (86400 items) in 96 ms
 INFO [CompactionExecutor:1596] 2012-05-12 21:10:48,058 Compacting [SSTableReader(path='/var/cassandra/data/test_data/series-hc-6528-Data.db'), SSTableReader(path='/var/cassandra/data/test_data/series-hc-6531-Data.db'), SSTableReader(path='/var/cassandra/data/test_data/series-hc-6529-Data.db'), SSTableReader(path='/var/cassandra/data/test_data/series-hc-6530-Data.db')]
 INFO [CompactionExecutor:1596] 2012-05-12 21:10:54,851 Compacted to [/var/cassandra/a-hc-65-Data.db,].  102,079,134 to 101,546,397
 INFO [CompactionExecutor:1598] 2012-05-12 22:05:04,313 Saved test_data-ResourcesMetadata-KeyCache (1 items) in 10 ms
 INFO [CompactionExecutor:1599] 2012-05-12 22:05:14,813 Saved test_data-Encodings-KeyCache (86400 items) in 83 ms
 INFO [CompactionExecutor:1630] 2012-05-13 13:05:44,963 Saved test_data-Metrics-KeyCache (86400 items) in 77 ms
 INFO [CompactionExecutor:1631] 2012-05-13 13:15:01,923 Nothing to compact in data_log.  Use forceUserDefinedCompaction if you wish to force compaction of single sstables (e.g. for tombstone collection)
 INFO [CompactionExecutor:1632] 2012-05-13 13:15:01,927 Compacting [SSTableReader(path='/var/cassandra/data/test_data/series-hc-6527-Data.db'), SSTableReader(path='/var/cassandra/data/test_data/series-hc-6522-Data.db'), SSTableReader(path='/var/cassandra/data/test_data/series-hc-6532-Data.db'), SSTableReader(path='/var/cassandra/data/test_data/series-hc-6517-Data.db')]
 INFO [CompactionExecutor:1632] 2012-05-13 13:27:17,685 Compacting large row test_data/series:6c6f677c32 (782001077 bytes) incrementally
 INFO [CompactionExecutor:34] 2012-05-14 18:00:41,281 Saved test_data-Encodings-KeyCache (86400 items) in 78 ms
 INFO 13:27:17,685 Compacting large row test_data/series:6c6f677c32 (782001077 bytes) incrementally
"""
        alert_type = cassandra.ALERT_TYPES["INFO"]
        event_type = cassandra.EVENT_TYPE
        event_object = EventDefaults.EVENT_OBJECT

        expected_output = {
            "dogstreamEvents":[
            {
                "timestamp": common.parse_date("2012-05-12 21:10:48,058", cassandra.DATE_FORMAT),
                "msg_title": "Compacting [SSTableReader(path='/var/cassandra/data/test_data/series-hc-6528-Data.db'), SSTableReader(path='/var/cassandra/data/test_data/series-hc-6531-Data.db'), SSTableReader(path='/var/cassandra/data/test_data/series-hc-6529-Data.db'), SSTableReader(path='/var/cassandra/data/test_data/series-hc-6530-Data.db')]"[0:common.MAX_TITLE_LEN],
                "msg_text": "Compacting [SSTableReader(path='/var/cassandra/data/test_data/series-hc-6528-Data.db'), SSTableReader(path='/var/cassandra/data/test_data/series-hc-6531-Data.db'), SSTableReader(path='/var/cassandra/data/test_data/series-hc-6529-Data.db'), SSTableReader(path='/var/cassandra/data/test_data/series-hc-6530-Data.db')]",
                "alert_type": alert_type,
                "auto_priority": 0,
                "event_type": event_type,
                "event_object": event_object,
            },  {
                "timestamp": common.parse_date("2012-05-12 21:10:54,851", cassandra.DATE_FORMAT),
                "msg_title": "Compacted to [/var/cassandra/a-hc-65-Data.db,].  102,079,134 to 101,546,397",
                "alert_type": alert_type,
                "auto_priority": 0,
                "event_type": event_type,
                "event_object": event_object,
            },  {
                "timestamp": common.parse_date("2012-05-13 13:15:01,927", cassandra.DATE_FORMAT),
                "msg_title": "Compacting [SSTableReader(path='/var/cassandra/data/test_data/series-hc-6527-Data.db'), SSTableReader(path='/var/cassandra/data/test_data/series-hc-6522-Data.db'), SSTableReader(path='/var/cassandra/data/test_data/series-hc-6532-Data.db'), SSTableReader(path='/var/cassandra/data/test_data/series-hc-6517-Data.db')]"[0:common.MAX_TITLE_LEN],
                "msg_text": "Compacting [SSTableReader(path='/var/cassandra/data/test_data/series-hc-6527-Data.db'), SSTableReader(path='/var/cassandra/data/test_data/series-hc-6522-Data.db'), SSTableReader(path='/var/cassandra/data/test_data/series-hc-6532-Data.db'), SSTableReader(path='/var/cassandra/data/test_data/series-hc-6517-Data.db')]",
                "alert_type": alert_type,
                "event_type": event_type,
                "auto_priority": 0,
                "event_object": event_object,
            },  {
                "timestamp": common.parse_date("2012-05-13 13:27:17,685", cassandra.DATE_FORMAT),
                "msg_title": "Compacting large row test_data/series:6c6f677c32 (782001077 bytes) incrementally",
                "alert_type": alert_type,
                "event_type": event_type,
                "auto_priority": 0,
                "event_object": event_object,
            },  {
                "timestamp": common.parse_date(datetime.utcnow().strftime("%Y-%m-%d") + " 13:27:17,685", cassandra.DATE_FORMAT),
                "msg_title": "Compacting large row test_data/series:6c6f677c32 (782001077 bytes) incrementally",
                "alert_type": alert_type,
                "event_type": event_type,
                "auto_priority": 0,
                "event_object": event_object,
            },
        ]}

        self._write_log(log_data.split("\n"))

        dogstream = Dogstreams.init(self.logger, {'dogstreams': '%s:dogstream.cassandra:parse_cassandra' % self.log_file.name})
        actual_output = dogstream.check(self.config, move_end=False)
        self.assertEquals(expected_output, actual_output)
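The 'dogstreams' value in the test above wires a log file to a custom parser using the form '<path_to_log>:<python_module>:<parser_function>'. A minimal configuration sketch, with a hypothetical log path:

config = {
    'dogstreams': '/var/log/cassandra/system.log:dogstream.cassandra:parse_cassandra'
}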