def __init__(self, context, enable_alert_processor=True):
    """
    Args:
        context: An AWS context object which provides metadata on the
            currently executing lambda function.
        enable_alert_processor (bool): If the user wants to send the alerts
            using their own methods, 'enable_alert_processor' can be set to
            False to suppress sending with the StreamAlert alert processor.
    """
    # Load the config. Validation occurs during load, which will
    # raise exceptions on any ConfigErrors
    config = load_config()

    # Load the environment from the context arn
    self.env = load_env(context)

    # Instantiate the sink here to handle sending the triggered alerts to the
    # alert processor
    self.sinker = StreamSink(self.env)

    # Instantiate a classifier that is used for this run
    self.classifier = StreamClassifier(config=config)

    self.enable_alert_processor = enable_alert_processor
    self._failed_record_count = 0
    self._alerts = []
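# Usage sketch for the initializer above (hypothetical handler; the exact
# run() signature varies across the versions shown in this section). When
# enable_alert_processor is False, triggered alerts still accumulate on the
# instance (self._alerts above) but are not sent to the alert processor.
def handler(event, context):
    # Default: triggered alerts are forwarded via the StreamAlert alert processor
    StreamAlert(context).run(event, context)

    # Suppress forwarding and handle the generated alerts with custom logic
    sa = StreamAlert(context, enable_alert_processor=False)
    sa.run(event, context)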
def setup_class(cls):
    """Setup the class before any methods"""
    patcher = patch('stream_alert.rule_processor.sink.boto3.client')
    cls.boto_mock = patcher.start()
    context = get_mock_context()
    env = load_env(context)
    cls.sinker = StreamSink(env)
def test_load_env_development():
    """Config - Load Development Environment"""
    env = load_env(None)

    assert_equal(env['lambda_alias'], 'development')
    assert_equal(env['lambda_function_name'], 'test_streamalert_rule_processor')
    assert_equal(env['lambda_region'], 'us-east-1')
    assert_equal(env['account_id'], '123456789012')
def test_load_env():
    """Config - Environment Loader"""
    context = get_mock_context()
    env = load_env(context)

    assert_equal(env['lambda_region'], 'us-east-1')
    assert_equal(env['account_id'], '123456789012')
    assert_equal(env['lambda_function_name'],
                 'corp-prefix_prod_streamalert_rule_processor')
    assert_equal(env['lambda_alias'], 'development')
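# The tests above rely on a get_mock_context() helper that is not shown.
# A minimal sketch consistent with the asserted values (the helper's name
# and shape here are assumptions):
from collections import namedtuple

def get_mock_context():
    """Build a fake Lambda context carrying an invoked_function_arn."""
    context = namedtuple('Context', ['invoked_function_arn'])
    arn = ('arn:aws:lambda:us-east-1:123456789012:function:'
           'corp-prefix_prod_streamalert_rule_processor:development')
    return context(arn)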
def test_load_env():
    """Config - Environment Validator"""
    context = namedtuple('Context', ['invoked_function_arn'])
    context.invoked_function_arn = ('arn:aws:lambda:us-east-1:555555555555:'
                                    'function:streamalert_testing:production')
    env = load_env(context)

    assert_equal(env['lambda_region'], 'us-east-1')
    assert_equal(env['account_id'], '555555555555')
    assert_equal(env['lambda_function_name'], 'streamalert_testing')
    assert_equal(env['lambda_alias'], 'production')
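# Together, the three tests above pin down the contract for load_env():
# split the invoked_function_arn of the form
# 'arn:aws:lambda:<region>:<account-id>:function:<name>:<alias>' into its
# fields, and fall back to development defaults when no context is given.
# A minimal sketch satisfying that contract (the real implementation lives
# in the StreamAlert config module and may differ):
def load_env(context):
    """Derive environment metadata from the Lambda context ARN."""
    if context:
        arn = context.invoked_function_arn.split(':')
        return {
            'lambda_region': arn[3],
            'account_id': arn[4],
            'lambda_function_name': arn[6],
            'lambda_alias': arn[7],
        }
    # No context: development defaults, matching test_load_env_development
    return {
        'lambda_region': 'us-east-1',
        'account_id': '123456789012',
        'lambda_function_name': 'test_streamalert_rule_processor',
        'lambda_alias': 'development',
    }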
def __init__(self, context, return_alerts=False):
    """
    Args:
        context: An AWS context object which provides metadata on the
            currently executing lambda function.
        return_alerts (bool): If True, return the list of generated alerts
            from run() so the user can handle sinking alerts to external
            endpoints themselves.
    """
    self.return_alerts = return_alerts
    self.env = load_env(context)
    # Instantiate the sink here to handle sending the triggered alerts to the
    # alert processor
    self.sinker = StreamSink(self.env)
    self.alerts = []
def __init__(self, context, enable_alert_processor=True):
    """
    Args:
        context: An AWS context object which provides metadata on the
            currently executing lambda function.
        enable_alert_processor (bool): If the user wants to send the alerts
            using their own methods, 'enable_alert_processor' can be set to
            False to suppress sending with the StreamAlert alert processor.
    """
    self.env = load_env(context)
    self.enable_alert_processor = enable_alert_processor
    # Instantiate the sink here to handle sending the triggered alerts to the
    # alert processor
    self.sinker = StreamSink(self.env)
    self._failed_record_count = 0
    self._alerts = []
def run(self, event, context):
    """StreamAlert Lambda function handler.

    Loads the configuration for the StreamAlert function which contains:
    available data sources, log formats, parser modes, and sinks. Classifies
    logs sent into the stream into a parsed type. Matches records against
    rules.

    Args:
        event: An AWS event mapped to a specific source/entity (kinesis
            stream or an s3 bucket event) containing data emitted to the
            stream.
        context: An AWS context object which provides metadata on the
            currently executing lambda function.

    Returns:
        list: The generated alerts if self.return_alerts is True,
            otherwise None.
    """
    logger.debug('Number of Records: %d', len(event.get('Records', [])))

    config = load_config()
    env = load_env(context)

    for record in event.get('Records', []):
        payload = StreamPayload(raw_record=record)
        classifier = StreamClassifier(config=config)
        classifier.map_source(payload)

        # If the kinesis stream or s3 bucket is not in our config,
        # go onto the next record
        if not payload.valid_source:
            continue

        if payload.service == 's3':
            self.s3_process(payload, classifier)
        elif payload.service == 'kinesis':
            self.kinesis_process(payload, classifier)
        else:
            logger.info('Unsupported service: %s', payload.service)

    # Return the list of generated alerts if the caller opted to handle
    # sinking themselves
    if self.return_alerts:
        return self.alerts

    # Send alerts to SNS
    self.send_alerts(env, payload)
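# A minimal event of the shape run() iterates over -- a single
# base64-encoded Kinesis record (stream name and record body below are
# illustrative only):
import base64

sample_event = {
    'Records': [
        {
            'eventSource': 'aws:kinesis',
            'eventSourceARN': ('arn:aws:kinesis:us-east-1:123456789012:'
                               'stream/test_stream'),
            'kinesis': {
                'data': base64.b64encode(b'{"name": "value"}').decode()
            }
        }
    ]
}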
def __init__(self, context, enable_alert_processor=True):
    """Initializer

    Args:
        context (dict): An AWS context object which provides metadata on
            the currently executing lambda function.
        enable_alert_processor (bool): If the user wants to send the alerts
            using their own methods, 'enable_alert_processor' can be set to
            False to suppress sending with the StreamAlert alert processor.
    """
    # Load the config. Validation occurs during load, which will
    # raise exceptions on any ConfigErrors
    StreamAlert.config = StreamAlert.config or load_config()

    # Load the environment from the context arn
    self.env = load_env(context)

    # Instantiate the sink here to handle sending the triggered alerts to the
    # alert processor
    self.sinker = StreamSink(self.env)

    # Instantiate a classifier that is used for this run
    self.classifier = StreamClassifier(config=self.config)

    self.enable_alert_processor = enable_alert_processor
    self._failed_record_count = 0
    self._processed_size = 0
    self._alerts = []

    # Create a dictionary to hold parsed payloads by log type.
    # Firehose needs this information to send to its corresponding
    # delivery stream.
    self.categorized_payloads = defaultdict(list)

    # Firehose client initialization
    self.firehose_client = None

    # Create an instance of the StreamRules class that gets cached in the
    # StreamAlert class as an instance property
    self._rule_engine = StreamRules(self.config)
def __init__(self, context):
    """Initializer

    Args:
        context (dict): An AWS context object which provides metadata on
            the currently executing lambda function.
    """
    # Load the config. Validation occurs during load, which will
    # raise exceptions on any ConfigErrors
    StreamAlert.config = StreamAlert.config or load_config()

    # Load the environment from the context arn
    self.env = load_env(context)

    # Instantiate the AlertForwarder here to handle sending the triggered
    # alerts to the alert processor
    self.alert_forwarder = AlertForwarder()

    # Instantiate a classifier that is used for this run
    self.classifier = StreamClassifier(config=self.config)

    self._failed_record_count = 0
    self._processed_record_count = 0
    self._processed_size = 0
    self._alerts = []

    rule_import_paths = [
        item for location in {'rule_locations', 'matcher_locations'}
        for item in self.config['global']['general'][location]
    ]

    # Create an instance of the RulesEngine class that gets cached in the
    # StreamAlert class as an instance property
    self._rules_engine = RulesEngine(self.config, *rule_import_paths)

    # Firehose client attribute
    self._firehose_client = None
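# Shape of the config section that rule_import_paths above is built from
# (directory names here are illustrative):
example_config = {
    'global': {
        'general': {
            'rule_locations': ['rules'],
            'matcher_locations': ['matchers'],
        }
    }
}

rule_import_paths = [
    item for location in {'rule_locations', 'matcher_locations'}
    for item in example_config['global']['general'][location]
]
# Yields ['rules', 'matchers'] (set iteration order is not guaranteed)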
def setup_class(cls):
    """Setup the class before any methods"""
    context = get_mock_context()
    cls.env = load_env(context)
    cls.config = load_config('tests/unit/conf')
def setup_class(cls):
    """Setup the class before any methods"""
    context = get_mock_context()
    cls.env = load_env(context)
def setup(self):  # pylint: disable=attribute-defined-outside-init
    self.forwarder = AlertForwarder(load_env(get_mock_context()))