def _dispatch(self, alert, descriptor):
    """Upload the alert to the configured S3 bucket as a JSON object.

    Objects are written under an hourly-partitioned, collision-free key:
        alerts/dt=<YYYY-mm-dd-HH>/<service>_<entity>_<rule>_<uuid>.json

    Args:
        alert (Alert): Alert instance which triggered a rule
        descriptor (str): Output descriptor

    Returns:
        bool: True if alert was sent successfully, False otherwise
    """
    bucket = self.config[self.__service__][descriptor]

    # Prefix with alerts to account for generic non-streamalert buckets
    # Produces the following key format:
    #   alerts/dt=2017-01-25-00/kinesis_my-stream_my-rule_uuid.json
    # The uuid suffix keeps keys unique to avoid object overwriting
    object_key = 'alerts/dt={}/{}_{}_{}_{}.json'.format(
        datetime.now().strftime('%Y-%m-%d-%H'),
        alert.source_service,
        alert.source_entity,
        alert.rule_name,
        uuid.uuid4())

    LOGGER.debug('Sending %s to S3 bucket %s with key %s', alert, bucket, object_key)

    boto3.client('s3', region_name=self.region).put_object(
        Body=json.dumps(alert.output_dict()),
        Bucket=bucket,
        Key=object_key)

    return True
def dispatch(self, alert, descriptor):
    """Send alert to Phantom

    Args:
        alert (Alert): Alert instance which triggered a rule
        descriptor (str): Output descriptor

    Returns:
        bool: True if alert was sent successfully, False otherwise
    """
    creds = self._load_creds(descriptor)
    if not creds:
        return self._log_status(False, descriptor)

    headers = {"ph-auth-token": creds['ph_auth_token']}
    container_id = self._setup_container(alert.rule_name,
                                         alert.rule_description,
                                         creds['url'],
                                         headers)

    LOGGER.debug('sending alert to Phantom container with id %s', container_id)

    # Without a container there is nowhere to attach the artifact
    if not container_id:
        return self._log_status(False, descriptor)

    artifact = {
        'cef': alert.record,
        'container_id': container_id,
        'data': alert.output_dict(),
        'name': 'Phantom Artifact',
        'label': 'Alert',
    }
    artifact_url = os.path.join(creds['url'], self.ARTIFACT_ENDPOINT)
    try:
        sent = self._post_request_retry(artifact_url, artifact, headers, False)
    except OutputRequestFailure:
        sent = False

    return self._log_status(sent, descriptor)
def _establish_session(self, username, password):
    """Establish a cookie based Jira session via basic user auth.

    Args:
        username (str): The Jira username used for establishing the session
        password (str): The Jira password used for establishing the session

    Returns:
        str: Header value intended to be passed with every subsequent Jira request
            or False if unsuccessful
    """
    login_url = os.path.join(self._base_url, self.LOGIN_ENDPOINT)
    auth_info = {'username': username, 'password': password}

    try:
        # NOTE(review): verify=False disables TLS certificate validation on the
        # login request (credentials in the body) -- presumably to support
        # self-signed Jira instances. Confirm this is intentional; it exposes
        # the credentials to man-in-the-middle interception.
        resp = self._post_request_retry(login_url,
                                        data=auth_info,
                                        headers=self._get_default_headers(),
                                        verify=False)
    except OutputRequestFailure:
        LOGGER.error("Failed to authenticate to Jira")
        return False

    resp_dict = resp.json()
    if not resp_dict:
        return False

    # Returned value is the session cookie pair: "<session-name>=<session-value>"
    return '{}={}'.format(resp_dict['session']['name'],
                          resp_dict['session']['value'])
def dispatch(self, **kwargs):
    """Send alert to Phantom

    Args:
        **kwargs: consists of any combination of the following items:
            descriptor (str): Service descriptor (ie: slack channel, pd integration)
            rule_name (str): Name of the triggered rule
            alert (dict): Alert relevant to the triggered rule
    """
    creds = self._load_creds(kwargs['descriptor'])
    if not creds:
        return self._log_status(False)

    headers = {"ph-auth-token": creds['ph_auth_token']}
    container_id = self._setup_container(kwargs['rule_name'],
                                         kwargs['alert']['rule_description'],
                                         creds['url'],
                                         headers)

    LOGGER.debug('sending alert to Phantom container with id %s', container_id)

    # No container means the artifact has nowhere to go
    if not container_id:
        return self._log_status(False)

    artifact = {
        'cef': kwargs['alert']['record'],
        'container_id': container_id,
        'data': kwargs['alert'],
        'name': 'Phantom Artifact',
        'label': 'Alert',
    }
    artifact_url = os.path.join(creds['url'], self.ARTIFACT_ENDPOINT)
    try:
        sent = self._post_request_retry(artifact_url, artifact, headers, False)
    except OutputRequestFailure:
        sent = False

    return self._log_status(sent)
def _get_creds_from_s3(self, cred_location, descriptor):
    """Pull the encrypted credential blob for this service and destination from s3

    Args:
        cred_location (str): The tmp path on disk to store the encrypted blob
        descriptor (str): Service destination (ie: slack channel, pd integration)

    Returns:
        bool: True if download of creds from s3 was a success, False otherwise
    """
    try:
        # Ensure the parent directory for the on-disk blob exists
        if not os.path.exists(os.path.dirname(cred_location)):
            os.makedirs(os.path.dirname(cred_location))

        client = boto3.client('s3', region_name=self.region)
        with open(cred_location, 'wb') as cred_output:
            client.download_fileobj(self.secrets_bucket,
                                    self.output_cred_name(descriptor),
                                    cred_output)

        return True
    except ClientError as err:
        LOGGER.exception(
            'credentials for \'%s\' could not be downloaded '
            'from S3: %s', self.output_cred_name(descriptor), err.response)
        # Bug fix: previously fell through and implicitly returned None;
        # the documented contract is a bool, so fail explicitly
        return False
def dispatch(self, **kwargs):
    """Send alert to Github

    Args:
        **kwargs: consists of any combination of the following items:
            descriptor (str): Service descriptor (ie: slack channel, pd integration)
            rule_name (str): Name of the triggered rule
            alert (dict): Alert relevant to the triggered rule
    """
    credentials = self._load_creds(kwargs['descriptor'])
    if not credentials:
        return self._log_status(False)

    # Bug fix: HTTP Basic auth requires "<username>:<token>" base64-encoded.
    # The previous format string had no placeholders, so every request was
    # sent with a literal, invalid Authorization header.
    username_password = '{}:{}'.format(credentials['username'],
                                       credentials['access_token'])
    encoded_credentials = base64.b64encode(username_password)
    headers = {'Authorization': "Basic {}".format(encoded_credentials)}
    url = '{}/repos/{}/issues'.format(credentials['api'],
                                      credentials['repository'])

    title = "StreamAlert: {}".format(kwargs['rule_name'])
    body_template = "### Description\n{}\n\n### Event data\n\n```\n{}\n```"
    body = body_template.format(kwargs['alert']['rule_description'],
                                json.dumps(kwargs['alert']['record'], indent=2))
    issue = {'title': title,
             'body': body,
             'labels': credentials['labels'].split(',')}

    LOGGER.debug('sending alert to Github repository %s', credentials['repository'])

    try:
        success = self._post_request_retry(url, issue, headers)
    except OutputRequestFailure:
        success = False

    return self._log_status(success)
def _create_dispatcher(self, output):
    """Create a dispatcher for the given output.

    Args:
        output (str): Alert output, e.g. "aws-sns:topic-name"

    Returns:
        OutputDispatcher: Based on the output type.
            Returns None if the output is invalid or not defined in the config.
    """
    parts = output.split(':')
    # A valid output is exactly "<service>:<descriptor>"
    if len(parts) != 2:
        LOGGER.error(
            'Improperly formatted output [%s]. Outputs for rules must '
            'be declared with both a service and a descriptor for the '
            'integration (ie: \'slack:my_channel\')', output)
        return None

    service, descriptor = parts
    if service not in self.config or descriptor not in self.config[service]:
        LOGGER.error('The output \'%s\' does not exist!', output)
        return None

    return StreamAlertOutput.create_dispatcher(service,
                                               self.region,
                                               self.account_id,
                                               self.prefix,
                                               self.config)
def _dispatch(self, alert, descriptor):
    """Send alert to Cloudwatch Logger for Lambda

    Args:
        alert (Alert): Alert instance which triggered a rule
        descriptor (str): Output descriptor

    Returns:
        bool: Always True, since the alert is simply logged
    """
    LOGGER.info('New Alert:\n%s', json.dumps(alert.output_dict(), indent=4))

    return True
def _log_status(cls, success):
    """Log the status of sending the alerts

    Args:
        success (bool or dict): Indicates if the dispatching of alerts was successful

    Returns:
        bool: Truthiness of `success`
    """
    if not success:
        LOGGER.error('Failed to send alert to %s', cls.__service__)
        return False

    LOGGER.info('Successfully sent alert to %s', cls.__service__)
    return True
def get_dispatcher(cls, service):
    """Returns the subclass that should handle this particular service

    Args:
        service (str): The service identifier for this output

    Returns:
        OutputDispatcher: Subclass of OutputDispatcher to use for sending
            alerts, or None if the service is not registered
    """
    dispatcher = cls._outputs.get(service)
    if dispatcher is None:
        LOGGER.error('Designated output service [%s] does not exist', service)

    return dispatcher
def _log_status(cls, success, descriptor):
    """Log the status of sending the alerts

    Args:
        success (bool or dict): Indicates if the dispatching of alerts was successful
        descriptor (str): Service descriptor

    Returns:
        bool: Truthiness of `success`, so dispatch() methods can return this
            value directly
    """
    if success:
        LOGGER.info('Successfully sent alert to %s:%s', cls.__service__, descriptor)
    else:
        LOGGER.error('Failed to send alert to %s:%s', cls.__service__, descriptor)

    # Bug fix: previously returned None. Callers rely on the return value
    # (e.g. `return self._log_status(success, descriptor)`), and the sibling
    # single-argument variant already returns bool(success).
    return bool(success)
def _check_http_response(cls, response):
    """Method for checking for a valid HTTP response code

    Args:
        response (requests.Response): Response object from requests, or None
            if the request never completed

    Returns:
        bool: Indicator of whether or not this request was successful
    """
    success = response is not None and (200 <= response.status_code <= 299)
    if not success:
        # Bug fix: guard the error log against a None response; previously
        # `response.content` raised AttributeError when the request failed
        # outright instead of returning a response object.
        content = response.content if response is not None else None
        LOGGER.error('Encountered an error while sending to %s:\n%s',
                     cls.__service__, content)
    return success
def _dispatch(self, alert, descriptor):
    """Send alert to a Lambda function

    The alert gets dumped to a JSON string to be sent to the Lambda function

    Args:
        alert (Alert): Alert instance which triggered a rule
        descriptor (str): Output descriptor

    Returns:
        bool: True if alert was sent successfully, False otherwise
    """
    payload = json.dumps(alert.record, separators=(',', ':'))
    function_name = self.config[self.__service__][descriptor]

    # The configured value may carry an optional qualifier. Acceptable forms
    # are a full ARN, a function name with qualifier, or a bare function name:
    #   'arn:aws:lambda:aws-region:acct-id:function:function-name:prod'
    #   'function-name:prod'
    #   'function-name'
    # A colon-split of length 2 or 8 covers every qualified form.
    parts = function_name.split(':')
    has_qualifier = len(parts) in (2, 8)
    function = parts[-2] if has_qualifier else parts[-1]
    qualifier = parts[-1] if has_qualifier else None

    LOGGER.debug('Sending alert to Lambda function %s', function_name)

    invoke_params = {
        'FunctionName': function,
        'InvocationType': 'Event',
        'Payload': payload,
    }
    # Passing Qualifier='' or Qualifier=None is rejected by the API,
    # so the key is only included when a qualifier was actually supplied
    if qualifier:
        invoke_params['Qualifier'] = qualifier

    boto3.client('lambda', region_name=self.region).invoke(**invoke_params)

    return True
def _kms_decrypt(self, data):
    """Decrypt data with AWS KMS.

    Args:
        data (str): An encrypted ciphertext data blob

    Returns:
        str: Decrypted json string, or None if decryption failed
    """
    try:
        response = boto3.client('kms', region_name=self.region).decrypt(
            CiphertextBlob=data)
    except ClientError as err:
        LOGGER.error('an error occurred during credentials decryption: %s',
                     err.response)
        return None

    return response['Plaintext']
def _load_output_config(config_path='conf/outputs.json'): """Load the outputs configuration file from disk Returns: dict: the output configuration settings """ with open(config_path) as outputs: try: config = json.load(outputs) except ValueError: LOGGER.error('The \'%s\' file could not be loaded into json', config_path) return return config
def dispatch(self, **kwargs):
    """Send alert to a Lambda function

    The alert gets dumped to a JSON string to be sent to the Lambda function

    Args:
        **kwargs: consists of any combination of the following items:
            descriptor (str): Service descriptor (ie: slack channel, pd integration)
            rule_name (str): Name of the triggered rule
            alert (dict): Alert relevant to the triggered rule
    """
    alert_string = json.dumps(kwargs['alert']['record'])
    function_name = self.config[self.__service__][kwargs['descriptor']]

    # The configured value may carry an optional qualifier. Acceptable forms
    # are a full ARN, a function name with qualifier, or a bare function name:
    #   'arn:aws:lambda:aws-region:acct-id:function:function-name:prod'
    #   'function-name:prod'
    #   'function-name'
    # A colon-split of length 2 or 8 covers every qualified form.
    parts = function_name.split(':')
    if len(parts) in (2, 8):
        function, qualifier = parts[-2], parts[-1]
    else:
        function, qualifier = parts[-1], None

    LOGGER.debug('Sending alert to Lambda function %s', function_name)

    client = boto3.client('lambda', region_name=self.region)

    # Passing Qualifier='' or Qualifier=None is rejected by the API, so the
    # argument is only included when a qualifier was actually supplied
    invoke_args = {'FunctionName': function,
                   'InvocationType': 'Event',
                   'Payload': alert_string}
    if qualifier:
        invoke_args['Qualifier'] = qualifier
    resp = client.invoke(**invoke_args)

    return self._log_status(resp, kwargs['descriptor'])
def _dispatch(self, alert, descriptor):
    """Send ban hash command to CarbonBlack

    Only the 'ban' action is supported; the md5 hash to ban is read from
    the alert's 'carbonblack' context.

    Args:
        alert (Alert): Alert instance which triggered a rule
        descriptor (str): Output descriptor

    Returns:
        bool: True if alert was sent successfully, False otherwise
    """
    # Actions are driven entirely by the alert context set by the rule
    if not alert.context:
        LOGGER.error('[%s] Alert must contain context to run actions', self.__service__)
        return False

    creds = self._load_creds(descriptor)
    if not creds:
        return False

    client = CbResponseAPI(**creds)

    # Get md5 hash 'value' from streamalert's rule processor
    action = alert.context.get('carbonblack', {}).get('action')
    if action == 'ban':
        binary_hash = alert.context.get('carbonblack', {}).get('value')
        # The binary should already exist in CarbonBlack
        binary = client.select(Binary, binary_hash)
        # Determine if the binary is currently listed as banned
        if binary.banned:
            # Determine if the banned action is enabled, if true exit early:
            # nothing to do, the ban is already in effect
            if binary.banned.enabled:
                return True
            # If the binary is banned but disabled, re-enable the existing
            # BannedHash entry rather than creating a duplicate
            banned_hash = client.select(BannedHash, binary_hash)
            banned_hash.enabled = True
            banned_hash.save()
        else:
            # Create a new BannedHash object to be saved
            banned_hash = client.create(BannedHash)
            # Begin the banning hash operation
            banned_hash.md5hash = binary.md5
            banned_hash.text = "Banned from StreamAlert"
            banned_hash.enabled = True
            banned_hash.save()

        # Success is judged by the post-save enabled state of the ban
        return banned_hash.enabled is True
    else:
        LOGGER.error('[%s] Action not supported: %s', self.__service__, action)
        return False
def _send_alert(alert_payload, output, dispatcher):
    """Send a single alert to the given output.

    Args:
        alert_payload (dict): Alert relevant to the triggered rule
        output (str): Alert output, e.g. "aws-sns:topic-name"
        dispatcher (OutputDispatcher): Dispatcher that receives the alert

    Returns:
        bool: True if the alert was sent successfully.
    """
    LOGGER.info('Sending alert %s to %s', alert_payload['id'], output)

    descriptor = output.split(':')[1]
    try:
        return dispatcher.dispatch(descriptor=descriptor,
                                   rule_name=alert_payload['rule_name'],
                                   alert=alert_payload)
    except Exception:  # pylint: disable=broad-except
        LOGGER.exception(
            'Exception when sending alert %s to %s. Alert:\n%s',
            alert_payload['id'], output, json.dumps(alert_payload, indent=2))
        return False
def _send_alert(alert, output, dispatcher):
    """Send a single alert to the given output.

    Args:
        alert (Alert): Alert to be sent
        output (str): Alert output, e.g. "aws-sns:topic-name"
        dispatcher (OutputDispatcher): Dispatcher to receive the alert

    Returns:
        bool: True if the alert was sent successfully.
    """
    LOGGER.info('Sending %s to %s', alert, output)

    descriptor = output.split(':')[1]
    try:
        return dispatcher.dispatch(alert, descriptor)
    except Exception:  # pylint: disable=broad-except
        LOGGER.exception('Exception when sending %s to %s. Alert:\n%s',
                         alert, output, repr(alert))
        return False
def dispatch(self, **kwargs):
    """Send alert to Komand

    Args:
        **kwargs: consists of any combination of the following items:
            descriptor (str): Service descriptor (ie: slack channel, pd integration)
            alert (dict): Alert relevant to the triggered rule
    """
    descriptor = kwargs['descriptor']
    creds = self._load_creds(descriptor)
    if not creds:
        return self._log_status(False, descriptor)

    headers = {'Authorization': creds['komand_auth_token']}

    LOGGER.debug('sending alert to Komand')

    resp = self._post_request(creds['url'], {'data': kwargs['alert']}, headers, False)

    return self._log_status(self._check_http_response(resp), descriptor)
def dispatch(self, **kwargs):
    """Send alert to an S3 bucket

    Organizes alert into the following folder structure:
        service/entity/rule_name/datetime.json
    The alert gets dumped to a JSON string

    Args:
        **kwargs: consists of any combination of the following items:
            descriptor (str): Service descriptor (ie: slack channel, pd integration)
            rule_name (str): Name of the triggered rule
            alert (dict): Alert relevant to the triggered rule
    """
    alert = kwargs['alert']
    service = alert['source_service']
    entity = alert['source_entity']

    current_date = datetime.now()

    # Bug fix: copy the alert before re-serializing the record. Previously
    # `s3_alert = alert` aliased the caller's dict, so the original alert's
    # 'record' was clobbered with a JSON string as a side effect.
    s3_alert = dict(alert)
    # JSON dump the alert to retain a consistent alerts schema across log types.
    # This will get replaced by a UUID which references a record in a
    # different table in the future.
    s3_alert['record'] = json.dumps(s3_alert['record'])
    alert_string = json.dumps(s3_alert)

    bucket = self.config[self.__service__][kwargs['descriptor']]

    # Prefix with alerts to account for generic non-streamalert buckets
    # Produces the following key format:
    #   alerts/dt=2017-01-25-00/kinesis_my-stream_my-rule_uuid.json
    # Keys need to be unique to avoid object overwriting
    key = 'alerts/dt={}/{}_{}_{}_{}.json'.format(
        current_date.strftime('%Y-%m-%d-%H'),
        service,
        entity,
        alert['rule_name'],
        uuid.uuid4())

    LOGGER.debug('Sending alert to S3 bucket %s with key %s', bucket, key)

    client = boto3.client('s3', region_name=self.region)
    resp = client.put_object(Body=alert_string, Bucket=bucket, Key=key)

    return self._log_status(resp, kwargs['descriptor'])
def _get_existing_issue(self, issue_summary, project_key):
    """Find an existing Jira issue based on the issue summary

    Args:
        issue_summary (str): The Jira issue summary
        project_key (str): The Jira project to search

    Returns:
        int: ID of the found issue or False if existing issue does not exist
    """
    jql = 'summary ~ "{}" and project="{}"'.format(issue_summary, project_key)
    resp = self._search_jira(jql, fields=['id', 'summary'], max_results=1)

    try:
        return int(resp[0]['id'])
    except (IndexError, KeyError):
        # No matching issue (or malformed search result) -- treat as absent
        LOGGER.debug('Existing Jira issue not found')
        return False
def dispatch(self, alert, descriptor):
    """Send alert to Github

    Args:
        alert (Alert): Alert instance which triggered a rule
        descriptor (str): Output descriptor

    Returns:
        bool: True if alert was sent successfully, False otherwise
    """
    credentials = self._load_creds(descriptor)
    if not credentials:
        return self._log_status(False, descriptor)

    # Bug fix: HTTP Basic auth requires "<username>:<token>" base64-encoded.
    # The previous format string had no placeholders, so every request was
    # sent with a literal, invalid Authorization header.
    username_password = '{}:{}'.format(credentials['username'],
                                       credentials['access_token'])
    encoded_credentials = base64.b64encode(username_password)
    headers = {'Authorization': "Basic {}".format(encoded_credentials)}
    url = '{}/repos/{}/issues'.format(credentials['api'],
                                      credentials['repository'])

    title = "StreamAlert: {}".format(alert.rule_name)
    body_template = "### Description\n{}\n\n### Event data\n\n```\n{}\n```"
    body = body_template.format(
        alert.rule_description,
        json.dumps(alert.record, indent=2, sort_keys=True))
    issue = {
        'title': title,
        'body': body,
        'labels': credentials['labels'].split(',')
    }

    LOGGER.debug('sending alert to Github repository %s',
                 credentials['repository'])

    try:
        success = self._post_request_retry(url, issue, headers)
    except OutputRequestFailure:
        success = False

    return self._log_status(success, descriptor)
def _dispatch(self, alert, descriptor):
    """Send alert to Komand

    Args:
        alert (Alert): Alert instance which triggered a rule
        descriptor (str): Output descriptor

    Returns:
        bool: True if alert was sent successfully, False otherwise
    """
    creds = self._load_creds(descriptor)
    if not creds:
        return False

    LOGGER.debug('sending alert to Komand')

    resp = self._post_request(creds['url'],
                              {'data': alert.output_dict()},
                              {'Authorization': creds['komand_auth_token']},
                              False)

    return self._check_http_response(resp)
def _item_verify(self, item_str, item_key, item_type, get_id=True):
    """Method to verify the existance of an item with the API

    Args:
        item_str (str): Service to query about in the API
        item_key (str): Endpoint/key to be extracted from search results
        item_type (str): Type of item reference to be returned
        get_id (boolean): Whether to generate a dict with result and reference

    Returns:
        dict: JSON object be used in the API call, containing the item id
            and the item reference, True if it just exists or False if it fails
    """
    item_url = self._get_endpoint(self._base_url, item_key)
    item_id = self._check_exists(item_str, item_url, item_key, get_id)

    if not item_id:
        LOGGER.info('%s not found in %s, %s', item_str, item_key, self.__service__)
        return False

    return {'id': item_id, 'type': item_type} if get_id else item_id
def run(self, event): """Run the alert processor! Args: event (dict): Lambda invocation event containing at least the rule name and alert ID. Returns: dict: Maps output (str) to whether it sent successfully (bool). An empty dict is returned if the Alert was improperly formatted. """ # Grab the alert record from Dynamo (if needed). if set(event) == {'AlertID', 'RuleName'}: LOGGER.info('Retrieving %s from alerts table', event) alert_record = self.alerts_table.get_alert_record(event['RuleName'], event['AlertID']) if not alert_record: LOGGER.error('%s does not exist in the alerts table', event) return {} else: alert_record = event # Convert record to an Alert instance. try: alert = Alert.create_from_dynamo_record(alert_record) except AlertCreationError: LOGGER.exception('Invalid alert %s', event) return {} # Remove normalization key from the record. # TODO: Consider including this in at least some outputs, e.g. default Athena firehose if NORMALIZATION_KEY in alert.record: del alert.record[NORMALIZATION_KEY] result = self._send_to_outputs(alert) self._update_table(alert, result) return result
def _format_attachments(cls, alert, header_text):
    """Format the message to be sent to slack.

    Args:
        alert (Alert): Alert relevant to the triggered rule
        header_text (str): A formatted rule header to be included with each
            attachment as fallback text (to be shown on mobile, etc)

    Yields:
        dict: A dictionary with the formatted attachment to be sent to Slack
            as the text
    """
    messages = list(cls._split_attachment_text(alert.record))
    total = len(messages)

    for index, message in enumerate(messages, start=1):
        if total > 1:
            title = 'Record (Part {} of {}):'.format(index, total)
        else:
            title = 'Record:'

        pretext = ''
        # Only print the rule description on the first attachment
        if index == 1:
            pretext = '*Rule Description:*\n{}\n'.format(alert.rule_description)

        yield {
            'fallback': header_text,
            'color': '#b22222',
            'pretext': pretext,
            'title': title,
            'text': message,
            'mrkdwn_in': ['text', 'pretext']
        }

        # Cap the number of attachments; anything past the limit is dropped
        if index == cls.MAX_ATTACHMENTS:
            LOGGER.warning('%s: %d-part message truncated to %d parts',
                           alert, total, cls.MAX_ATTACHMENTS)
            break
def dispatch(self, alert, output):
    """Send alerts to the given service.

    This wraps the protected subclass method of _dispatch to aid in usability

    Args:
        alert (Alert): Alert instance which triggered a rule
        output (str): Full output spec, e.g. "slack:my_channel"

    Returns:
        bool: True if alert was sent successfully, False otherwise
    """
    LOGGER.info('Sending %s to %s', alert, output)
    descriptor = output.split(':')[1]

    was_sent = False
    try:
        was_sent = bool(self._dispatch(alert, descriptor))
    except Exception:  # pylint: disable=broad-except
        # Any dispatcher failure is contained here so one bad output
        # cannot break delivery to the rest
        LOGGER.exception('Exception when sending %s to %s. Alert:\n%s',
                         alert, output, repr(alert))

    self._log_status(was_sent, descriptor)
    return was_sent
def dispatch(self, **kwargs):
    """Send alert to a Kinesis Firehose Delivery Stream

    Keyword Args:
        descriptor (str): Service descriptor (ie: slack channel, pd integration)
        rule_name (str): Name of the triggered rule
        alert (dict): Alert relevant to the triggered rule

    Returns:
        bool: Indicates a successful or failed dispatch of the alert
    """
    # Retries PutRecord with fibonacci backoff on any boto3 ClientError
    @backoff.on_exception(backoff.fibo,
                          ClientError,
                          max_tries=self.MAX_BACKOFF_ATTEMPTS,
                          jitter=backoff.full_jitter,
                          on_backoff=backoff_handler,
                          on_success=success_handler,
                          on_giveup=giveup_handler)
    def _firehose_request_wrapper(json_alert, delivery_stream):
        """Make the PutRecord request to Kinesis Firehose with backoff

        Args:
            json_alert (str): The JSON dumped alert body
            delivery_stream (str): The Firehose Delivery Stream to send to

        Returns:
            dict: Firehose response in the format below
                {'RecordId': 'string'}
        """
        return self.__aws_client__.put_record(DeliveryStreamName=delivery_stream,
                                              Record={'Data': json_alert})

    # Lazily create the boto3 client once and cache it for later dispatches
    if self.__aws_client__ is None:
        self.__aws_client__ = boto3.client('firehose', region_name=self.region)

    # Trailing newline delimits individual records within the delivery stream
    json_alert = json.dumps(kwargs['alert'], separators=(',', ':')) + '\n'
    # Bail early rather than let Firehose reject an oversized record
    if len(json_alert) > self.MAX_RECORD_SIZE:
        LOGGER.error('Alert too large to send to Firehose: \n%s...', json_alert[0:1000])
        return False

    delivery_stream = self.config[self.__service__][kwargs['descriptor']]
    LOGGER.info('Sending alert [%s] to aws-firehose:%s', kwargs['rule_name'], delivery_stream)

    resp = _firehose_request_wrapper(json_alert, delivery_stream)

    # A RecordId in the response confirms Firehose accepted the record
    if resp.get('RecordId'):
        LOGGER.info('Alert [%s] successfully sent to aws-firehose:%s with RecordId:%s',
                    kwargs['rule_name'],
                    delivery_stream,
                    resp['RecordId'])

    return self._log_status(resp, kwargs['descriptor'])
def dispatch(self, **kwargs):
    """Send alert to Jira

    Args:
        **kwargs: consists of any combination of the following items:
            descriptor (str): Service descriptor (ie: slack channel, pd integration)
            rule_name (str): Name of the triggered rule
            alert (dict): Alert relevant to the triggered rule
    """
    creds = self._load_creds(kwargs['descriptor'])
    if not creds:
        return self._log_status(False, kwargs['descriptor'])

    issue_id = None
    comment_id = None
    issue_summary = 'StreamAlert {}'.format(kwargs['rule_name'])
    # Wrap the alert body in a Jira {code:JSON} block for readable rendering
    alert_body = '{{code:JSON}}{}{{code}}'.format(json.dumps(kwargs['alert']))
    self._base_url = creds['url']
    self._auth_cookie = self._establish_session(creds['username'], creds['password'])

    # Validate successful authentication
    if not self._auth_cookie:
        return self._log_status(False, kwargs['descriptor'])

    # If aggregation is enabled, attempt to add alert to an existing issue. If a
    # failure occurs in this block, creation of a new Jira issue will be attempted.
    if creds.get('aggregate', '').lower() == 'yes':
        issue_id = self._get_existing_issue(issue_summary, creds['project_key'])
        if issue_id:
            comment_id = self._create_comment(issue_id, alert_body)
            if comment_id:
                LOGGER.debug('Sending alert to an existing Jira issue %s with comment %s',
                             issue_id,
                             comment_id)
                return self._log_status(True, kwargs['descriptor'])
            else:
                LOGGER.error('Encountered an error when adding alert to existing '
                             'Jira issue %s. Attempting to create new Jira issue.',
                             issue_id)

    # Create a new Jira issue (aggregation disabled, no existing issue found,
    # or commenting on the existing issue failed)
    issue_id = self._create_issue(issue_summary,
                                  creds['project_key'],
                                  creds['issue_type'],
                                  alert_body)
    if issue_id:
        LOGGER.debug('Sending alert to a new Jira issue %s', issue_id)

    return self._log_status(issue_id or comment_id, kwargs['descriptor'])