Example #1
    def setup(self):
        """Setup before each method"""
        self.env = {
            'lambda_region': 'us-east-1',
            'account_id': '123456789012',
            'lambda_function_name': 'test_kinesis_stream',
            'lambda_alias': 'production'
        }
        self.config = load_config('test/unit/conf')
        self.log_metadata = self.config['logs']
Example #2
    def setup(self):
        """Setup before each method"""
        self.env = {
            'lambda_region': 'us-east-1',
            'account_id': '123456789012',
            'lambda_function_name': 'stream_alert_test',
            'lambda_alias': 'production'
        }
        self.config = load_config('test/unit/conf')
        self.log_metadata = self.config['logs']
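Both fixtures above load the same unit-test configuration and expose its 'logs' section. A test method built on either of them might look like the sketch below; the test name and assertions are illustrative assumptions, not taken from the project.

    def test_setup_loads_log_metadata(self):
        """Sketch: the fixture should expose the 'logs' section of the loaded config (hypothetical test)"""
        # load_config() in setup() is expected to return a dict with a 'logs' key
        assert 'logs' in self.config
        # log_metadata is just a handle on that same section
        assert self.log_metadata == self.config['logs']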
Example #3
def handler(event, context):
    """StreamAlert Lambda function handler.

    Loads the configuration for the StreamAlert function which contains:
    available data sources, log formats, parser modes, and sinks.  Classifies
    logs sent into the stream into a parsed type.  Matches records against
    rules.

    Args:
        event: An AWS event mapped to a specific source/entity (kinesis stream or
            an s3 bucket event) containing data emitted to the stream.
        context: An AWS context object which provides metadata on the currently
            executing lambda function.

    Returns:
        None
    """
    logger.debug('Number of Records: %d', len(event.get('Records', [])))

    config = load_config()
    env = load_env(context)
    # process_alerts(event['Records'])
    alerts_to_send = []

    # TODO(jack): Move this into classification
    for record in event.get('Records', []):
        payload = StreamPayload(raw_record=record)
        classifier = StreamClassifier(config=config)
        classifier.map_source(payload)
        # If the kinesis stream or s3 bucket is not in our config,
        # go onto the next record.
        if not payload.valid_source:
            continue

        if payload.service == 's3':
            s3_file_lines = StreamPreParsers.pre_parse_s3(payload.raw_record)
            for line in s3_file_lines:
                data = line.rstrip()
                payload.refresh_record(data)
                classifier.classify_record(payload, data)
                process_alerts(payload, alerts_to_send)

        elif payload.service == 'kinesis':
            data = StreamPreParsers.pre_parse_kinesis(payload.raw_record)
            classifier.classify_record(payload, data)
            process_alerts(payload, alerts_to_send)

    if alerts_to_send:
        if env['lambda_alias'] == 'development':
            logger.info('%s alerts triggered', len(alerts_to_send))
            for alert in alerts_to_send:
                logger.info(alert)
        StreamSink(alerts_to_send, config, env).sink()
    else:
        logger.debug('Valid data, no alerts: %s', payload)
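The handler above consumes the standard AWS Kinesis event structure described in its docstring. A minimal local driver for exercising it might look like the following sketch; the FakeContext stand-in, the record payload, and the stream/function names are assumptions for illustration, while the Records/kinesis/data layout is the standard shape AWS delivers to Lambda.

import base64
import json

# Minimal stand-in for the AWS Lambda context object; load_env() is assumed
# to read identifying fields such as invoked_function_arn from it.
class FakeContext(object):
    invoked_function_arn = ('arn:aws:lambda:us-east-1:123456789012:'
                            'function:test_kinesis_stream:production')
    function_name = 'test_kinesis_stream'

# One record in the shape AWS delivers for a Kinesis-triggered invocation
sample_event = {
    'Records': [{
        'eventSource': 'aws:kinesis',
        'eventSourceARN': ('arn:aws:kinesis:us-east-1:123456789012:'
                           'stream/test_kinesis_stream'),
        'kinesis': {
            # the record body is illustrative; real data depends on the log schema
            'data': base64.b64encode(json.dumps({'key': 'value'}).encode())
        }
    }]
}

handler(sample_event, FakeContext())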
Example #4
    def run(self, event, context):
        """StreamAlert Lambda function handler.

        Loads the configuration for the StreamAlert function which contains:
        available data sources, log formats, parser modes, and sinks.  Classifies
        logs sent into the stream into a parsed type.  Matches records against
        rules.

        Args:
            event: An AWS event mapped to a specific source/entity (kinesis stream or
                an s3 bucket event) containing data emitted to the stream.
            context: An AWS context object which provides metadata on the currently
                executing lambda function.

        Returns:
            None
        """
        logger.debug('Number of Records: %d', len(event.get('Records', [])))

        config = load_config()
        env = load_env(context)

        for record in event.get('Records', []):
            payload = StreamPayload(raw_record=record)
            classifier = StreamClassifier(config=config)
            classifier.map_source(payload)

            # If the kinesis stream or s3 bucket is not in our config,
            # go onto the next record
            if not payload.valid_source:
                continue

            if payload.service == 's3':
                self.s3_process(payload, classifier)
            elif payload.service == 'kinesis':
                self.kinesis_process(payload, classifier)
            else:
                logger.info('Unsupported service: %s', payload.service)

        # Give the user control over handling generated alerts
        if self.return_alerts:
            return self.alerts
        else:
            self.send_alerts(env)
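Example #4 moves the same pipeline into a class method so callers can either collect the alerts (return_alerts) or sink them. A thin Lambda entry point could then delegate to it, as in this sketch; StreamAlertProcessor is a placeholder name for the class that owns run(), not the project's actual class name.

def handler(event, context):
    """Lambda entry point delegating to the class-based processor (sketch)"""
    # return_alerts=False keeps production behavior: alerts are sunk, not returned
    return StreamAlertProcessor(return_alerts=False).run(event, context)

# In tests, the same class can hand the alerts back for assertions:
# alerts = StreamAlertProcessor(return_alerts=True).run(sample_event, FakeContext())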
Example #5
    def setup(self):
        """Setup before each method"""
        # load config
        self.config = load_config('test/unit/conf')
        # load JSON parser class
        self.parser_class = get_parser('json')
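A test on this fixture can at least check that the JSON parser class resolved and that the configuration it will run against is present; the parser's call interface is not shown above, so this sketch (with an assumed test name) stays within what the fixture guarantees.

    def test_setup_resolves_json_parser(self):
        """Sketch: get_parser('json') should yield a parser class and the config should load (hypothetical test)"""
        # get_parser('json') is expected to resolve to a class, not an instance
        assert self.parser_class is not None
        # the unit-test config should carry the 'logs' section used by the other fixtures
        assert 'logs' in self.config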