def remaining_outputs(self):
    """Return the set of outputs which still need to be sent for this alert."""
    if not self.merge_enabled:
        # A non-merged alert must be dispatched to every configured output.
        return self.outputs.difference(self.outputs_sent)
    # This alert will be merged later - only the required outputs are sent now;
    # the rest are handled when the merged alert is dispatched.
    required_now = self.outputs.intersection(resources.get_required_outputs())
    return required_now.difference(self.outputs_sent)
def __init__(self, config, *rule_paths):
    """Set up the RulesEngine, caching a StreamThreatIntel instance.

    Args:
        config (dict): Loaded configuration
        rule_paths: Variable number of locations from which rules are imported
    """
    self._threat_intel = StreamThreatIntel.load_from_config(config)
    self._required_outputs_set = resources.get_required_outputs()
    import_folders(*rule_paths)
    self._load_rule_table(config)
    tables = LookupTables.load_lookup_tables(config)
    if tables:
        # Cache the downloaded S3 table data at the class level
        RulesEngine._LOOKUP_TABLES = tables.download_s3_objects()
def __init__(self, *rule_paths):
    """Lazily initialize the shared engine resources, then import the rules."""
    RulesEngine._config = RulesEngine._config or load_config()
    RulesEngine._threat_intel = (
        RulesEngine._threat_intel or ThreatIntel.load_from_config(self.config)
    )
    # Instantiate the alert forwarder to handle sending alerts to the alert processor
    RulesEngine._alert_forwarder = RulesEngine._alert_forwarder or AlertForwarder()
    # Load the lookup tables, which include logic for refreshing the tables
    RulesEngine._lookup_tables = LookupTables.load_lookup_tables(self.config)
    # If no rule import paths are specified, fall back to the locations in the config
    if not rule_paths:
        general_config = self.config['global']['general']
        rule_paths = [
            path
            for location in {'rule_locations', 'matcher_locations'}
            for path in general_config[location]
        ]
    import_folders(*rule_paths)
    self._in_lambda = 'LAMBDA_RUNTIME_DIR' in env
    self._required_outputs_set = resources.get_required_outputs()
    self._load_rule_table(self.config)
def __init__(self, config, *rule_paths):
    """Create a RulesEngine that caches a StreamThreatIntel instance.

    Args:
        config (dict): Loaded configuration
        rule_paths: Variable number of locations from which rules are imported
    """
    # These two lookups are independent of one another
    self._required_outputs_set = resources.get_required_outputs()
    self._threat_intel = StreamThreatIntel.load_from_config(config)
    # Import the rule modules, then load the rule table
    import_folders(*rule_paths)
    self._load_rule_table(config)
def setup_outputs(self, alert):
    """Helper function to handle any output setup.

    Creates mocked AWS resources (S3 buckets, Firehose streams, Lambda
    functions, SNS topics, SQS queues) and stores mocked credentials for
    third-party output services, covering every output the alert targets.

    Args:
        alert (Alert): The Alert instance containing outputs to be mocked out
    """
    # Patch requests.get and requests.post
    self._setup_api_mocks()
    # Alerts are always sent to the required outputs in addition to their own
    alert_outputs = resources.get_required_outputs()
    alert_outputs.update(alert.outputs)
    for output in alert_outputs:
        try:
            # Outputs are stored as '<service>:<descriptor>' strings
            service, descriptor = output.split(':')
        except ValueError:
            LOGGER_CLI.error(
                'Outputs should be declared in the format <SERVICE>:<DESCRIPTOR>'
            )
            continue
        if service == 'aws-s3':
            bucket = self.outputs_config[service][descriptor]
            client = boto3.client('s3', region_name=self.region)
            try:
                # Check if the bucket exists before creating it
                client.head_bucket(Bucket=bucket)
            except ClientError:
                client.create_bucket(Bucket=bucket)
        elif service == 'aws-firehose':
            stream_name = self.outputs_config[service][descriptor]
            helpers.create_delivery_stream(self.region, stream_name)
        elif service == 'aws-lambda':
            lambda_function = self.outputs_config[service][descriptor]
            parts = lambda_function.split(':')
            # 2 or 8 parts indicates a qualified name/ARN with a trailing
            # qualifier; the function name is then the second-to-last part
            if len(parts) == 2 or len(parts) == 8:
                lambda_function = parts[-2]
            else:
                lambda_function = parts[-1]
            helpers.create_lambda_function(lambda_function, self.region)
        elif service == 'aws-sns':
            topic_name = self.outputs_config[service][descriptor]
            boto3.client(
                'sns', region_name=self.region).create_topic(Name=topic_name)
        elif service == 'aws-sqs':
            queue_name = self.outputs_config[service][descriptor]
            boto3.client(
                'sqs', region_name=self.region).create_queue(QueueName=queue_name)
        elif service == 'carbonblack':
            output_name = '{}/{}'.format(service, descriptor)
            creds = {
                'token': 'e51273c7c8e0fd9fae431cc019ab244112345678',
                'url': 'cb.foo.bar'
            }
            helpers.put_mock_creds(output_name, creds, self.secrets_bucket,
                                   self.region, self.kms_alias)
        elif service == 'komand':
            output_name = '{}/{}'.format(service, descriptor)
            creds = {
                'komand_auth_token': '00000000-0000-0000-0000-000000000000',
                'url': 'komand.foo.bar'
            }
            helpers.put_mock_creds(output_name, creds,
                                   self.secrets_bucket, self.region, self.kms_alias)
        elif service == 'pagerduty':
            output_name = '{}/{}'.format(service, descriptor)
            creds = {'service_key': '247b97499078a015cc6c586bc0a92de6'}
            helpers.put_mock_creds(output_name, creds, self.secrets_bucket,
                                   self.region, self.kms_alias)
        elif service == 'pagerduty-v2':
            output_name = '{}/{}'.format(service, descriptor)
            creds = {'routing_key': '247b97499078a015cc6c586bc0a92de6'}
            helpers.put_mock_creds(output_name, creds, self.secrets_bucket,
                                   self.region, self.kms_alias)
        elif service == 'pagerduty-incident':
            output_name = '{}/{}'.format(service, descriptor)
            creds = {
                'token': '247b97499078a015cc6c586bc0a92de6',
                'service_name': '247b97499078a015cc6c586bc0a92de6',
                'service_id': 'SERVICEID123',
                'escalation_policy': '247b97499078a015cc6c586bc0a92de6',
                'escalation_policy_id': 'POLICYID123',
                'email_from': '*****@*****.**',
                'integration_key': '247b97499078a015cc6c586bc0a92de6'
            }
            # NOTE(review): region is hardcoded to 'us-east-1' here, unlike
            # most other services which use self.region — confirm intentional
            helpers.put_mock_creds(output_name, creds, self.secrets_bucket,
                                   'us-east-1', self.kms_alias)
        elif service == 'phantom':
            output_name = '{}/{}'.format(service, descriptor)
            creds = {
                'ph_auth_token': '6c586bc047b9749a92de29078a015cc6',
                'url': 'phantom.foo.bar'
            }
            helpers.put_mock_creds(output_name, creds, self.secrets_bucket,
                                   self.region, self.kms_alias)
        elif service == 'slack':
            output_name = '{}/{}'.format(service, descriptor)
            creds = {'url': 'https://api.slack.com/web-hook-key'}
            helpers.put_mock_creds(output_name, creds, self.secrets_bucket,
                                   self.region, self.kms_alias)
        elif service == 'jira':
            output_name = '{}/{}'.format(service, descriptor)
            creds = {
                'username': '******',
                'password': '******',
                'url': 'jira.foo.bar',
                'project_key': 'foobar',
                'issue_type': 'Task',
                'aggregate': 'no'
            }
            # NOTE(review): hardcoded 'us-east-1' region — confirm intentional
            helpers.put_mock_creds(output_name, creds, self.secrets_bucket,
                                   'us-east-1', self.kms_alias)
        elif service == 'github':
            output_name = '{}/{}'.format(service, descriptor)
            creds = {
                'username': '******',
                'repository': 'github-user/github-repository',
                'access_token': 'foobar',
                'labels': 'test-label'
            }
            # NOTE(review): hardcoded 'us-east-1' region — confirm intentional
            helpers.put_mock_creds(output_name, creds, self.secrets_bucket,
                                   'us-east-1', self.kms_alias)
def test_get_required_outputs():
    """Shared - Get Required Outputs"""
    outputs = resources.get_required_outputs()
    expected = {'aws-firehose:alerts'}
    # Exactly one required output should exist, and it must be the alerts Firehose
    assert_equal(len(outputs), 1)
    assert_equal(outputs, expected)
def __init__(self, config):
    """Create a StreamRules instance that caches a StreamThreatIntel instance.

    Args:
        config (dict): Loaded configuration
    """
    # These two lookups are independent of one another
    self._required_outputs_set = resources.get_required_outputs()
    self._threat_intel = StreamThreatIntel.load_from_config(config)