def dispatch(self, **kwargs): """Send alert to Github Args: **kwargs: consists of any combination of the following items: descriptor (str): Service descriptor (ie: slack channel, pd integration) rule_name (str): Name of the triggered rule alert (dict): Alert relevant to the triggered rule """ credentials = self._load_creds(kwargs['descriptor']) if not credentials: return self._log_status(False) username_password = "******".format(credentials['username'], credentials['access_token']) encoded_credentials = base64.b64encode(username_password) headers = {'Authorization': "Basic {}".format(encoded_credentials)} url = '{}/repos/{}/issues'.format(credentials['api'], credentials['repository']) title = "StreamAlert: {}".format(kwargs['rule_name']) body_template = "### Description\n{}\n\n### Event data\n\n```\n{}\n```" body = body_template.format(kwargs['alert']['rule_description'], json.dumps(kwargs['alert']['record'], indent=2)) issue = {'title': title, 'body': body, 'labels': credentials['labels'].split(',')} LOGGER.debug('sending alert to Github repository %s', credentials['repository']) try: success = self._post_request_retry(url, issue, headers) except OutputRequestFailure: success = False return self._log_status(success)
def dispatch(self, **kwargs): """Send alert to Phantom Args: **kwargs: consists of any combination of the following items: descriptor (str): Service descriptor (ie: slack channel, pd integration) rule_name (str): Name of the triggered rule alert (dict): Alert relevant to the triggered rule """ creds = self._load_creds(kwargs['descriptor']) if not creds: return self._log_status(False) headers = {"ph-auth-token": creds['ph_auth_token']} rule_desc = kwargs['alert']['rule_description'] container_id = self._setup_container(kwargs['rule_name'], rule_desc, creds['url'], headers) LOGGER.debug('sending alert to Phantom container with id %s', container_id) success = False if container_id: artifact = {'cef': kwargs['alert']['record'], 'container_id': container_id, 'data': kwargs['alert'], 'name': 'Phantom Artifact', 'label': 'Alert'} artifact_url = os.path.join(creds['url'], self.ARTIFACT_ENDPOINT) try: success = self._post_request_retry(artifact_url, artifact, headers, False) except OutputRequestFailure: success = False return self._log_status(success)
def dispatch(self, alert, descriptor): """Send alert to Phantom Args: alert (Alert): Alert instance which triggered a rule descriptor (str): Output descriptor Returns: bool: True if alert was sent successfully, False otherwise """ creds = self._load_creds(descriptor) if not creds: return self._log_status(False, descriptor) headers = {"ph-auth-token": creds['ph_auth_token']} container_id = self._setup_container( alert.rule_name, alert.rule_description, creds['url'], headers) LOGGER.debug('sending alert to Phantom container with id %s', container_id) success = False if container_id: artifact = {'cef': alert.record, 'container_id': container_id, 'data': alert.output_dict(), 'name': 'Phantom Artifact', 'label': 'Alert'} artifact_url = os.path.join(creds['url'], self.ARTIFACT_ENDPOINT) try: success = self._post_request_retry(artifact_url, artifact, headers, False) except OutputRequestFailure: success = False return self._log_status(success, descriptor)
def _dispatch(self, alert, descriptor): """Send alert to an S3 bucket Organizes alert into the following folder structure: service/entity/rule_name/datetime.json The alert gets dumped to a JSON string Args: alert (Alert): Alert instance which triggered a rule descriptor (str): Output descriptor Returns: bool: True if alert was sent successfully, False otherwise """ bucket = self.config[self.__service__][descriptor] # Prefix with alerts to account for generic non-streamalert buckets # Produces the following key format: # alerts/dt=2017-01-25-00/kinesis_my-stream_my-rule_uuid.json # Keys need to be unique to avoid object overwriting key = 'alerts/dt={}/{}_{}_{}_{}.json'.format( datetime.now().strftime('%Y-%m-%d-%H'), alert.source_service, alert.source_entity, alert.rule_name, uuid.uuid4()) LOGGER.debug('Sending %s to S3 bucket %s with key %s', alert, bucket, key) client = boto3.client('s3', region_name=self.region) client.put_object(Body=json.dumps(alert.output_dict()), Bucket=bucket, Key=key) return True
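# Illustrative sketch, not part of the dispatcher above: the S3 key format produced for a
# single alert, using assumed values for the alert's source service, entity, and rule name.
from datetime import datetime
import uuid

example_key = 'alerts/dt={}/{}_{}_{}_{}.json'.format(
    datetime(2017, 1, 25, 0).strftime('%Y-%m-%d-%H'),  # -> '2017-01-25-00'
    'kinesis',     # alert.source_service (assumed)
    'my-stream',   # alert.source_entity (assumed)
    'my-rule',     # alert.rule_name (assumed)
    uuid.uuid4())  # random suffix keeps each object key unique
# example_key == 'alerts/dt=2017-01-25-00/kinesis_my-stream_my-rule_<uuid>.json'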
def dispatch(self, **kwargs): """Send alert to Jira Args: **kwargs: consists of any combination of the following items: descriptor (str): Service descriptor (ie: slack channel, pd integration) rule_name (str): Name of the triggered rule alert (dict): Alert relevant to the triggered rule """ creds = self._load_creds(kwargs['descriptor']) if not creds: return self._log_status(False, kwargs['descriptor']) issue_id = None comment_id = None issue_summary = 'StreamAlert {}'.format(kwargs['rule_name']) alert_body = '{{code:JSON}}{}{{code}}'.format( json.dumps(kwargs['alert'])) self._base_url = creds['url'] self._auth_cookie = self._establish_session(creds['username'], creds['password']) # Validate successful authentication if not self._auth_cookie: return self._log_status(False, kwargs['descriptor']) # If aggregation is enabled, attempt to add alert to an existing issue. If a # failure occurs in this block, creation of a new Jira issue will be attempted. if creds.get('aggregate', '').lower() == 'yes': issue_id = self._get_existing_issue(issue_summary, creds['project_key']) if issue_id: comment_id = self._create_comment(issue_id, alert_body) if comment_id: LOGGER.debug( 'Sending alert to an existing Jira issue %s with comment %s', issue_id, comment_id) return self._log_status(True, kwargs['descriptor']) else: LOGGER.error( 'Encountered an error when adding alert to existing ' 'Jira issue %s. Attempting to create new Jira issue.', issue_id) # Create a new Jira issue issue_id = self._create_issue(issue_summary, creds['project_key'], creds['issue_type'], alert_body) if issue_id: LOGGER.debug('Sending alert to a new Jira issue %s', issue_id) return self._log_status(issue_id or comment_id, kwargs['descriptor'])
def dispatch(self, alert, descriptor): """Send alert to Jira Args: alert (Alert): Alert instance which triggered a rule descriptor (str): Output descriptor Returns: bool: True if alert was sent successfully, False otherwise """ creds = self._load_creds(descriptor) if not creds: return self._log_status(False, descriptor) issue_id = None comment_id = None issue_summary = 'StreamAlert {}'.format(alert.rule_name) alert_body = '{{code:JSON}}{}{{code}}'.format( json.dumps(alert.output_dict(), sort_keys=True)) self._base_url = creds['url'] self._auth_cookie = self._establish_session(creds['username'], creds['password']) # Validate successful authentication if not self._auth_cookie: return self._log_status(False, descriptor) # If aggregation is enabled, attempt to add alert to an existing issue. If a # failure occurs in this block, creation of a new Jira issue will be attempted. if creds.get('aggregate', '').lower() == 'yes': issue_id = self._get_existing_issue(issue_summary, creds['project_key']) if issue_id: comment_id = self._create_comment(issue_id, alert_body) if comment_id: LOGGER.debug('Sending alert to an existing Jira issue %s with comment %s', issue_id, comment_id) return self._log_status(True, descriptor) else: LOGGER.error('Encountered an error when adding alert to existing ' 'Jira issue %s. Attempting to create new Jira issue.', issue_id) # Create a new Jira issue issue_id = self._create_issue(issue_summary, creds['project_key'], creds['issue_type'], alert_body) if issue_id: LOGGER.debug('Sending alert to a new Jira issue %s', issue_id) return self._log_status(issue_id or comment_id, descriptor)
def _dispatch(self, alert, descriptor): """Send alert to a Lambda function The alert gets dumped to a JSON string to be sent to the Lambda function Args: alert (Alert): Alert instance which triggered a rule descriptor (str): Output descriptor Returns: bool: True if alert was sent successfully, False otherwise """ alert_string = json.dumps(alert.record, separators=(',', ':')) function_name = self.config[self.__service__][descriptor] # Check to see if there is an optional qualifier included here # Acceptable values for the output configuration are the full ARN, # a function name followed by a qualifier, or just a function name: # 'arn:aws:lambda:aws-region:acct-id:function:function-name:prod' # 'function-name:prod' # 'function-name' # Checking the length of the list for 2 or 8 should account for all # times a qualifier is provided. parts = function_name.split(':') if len(parts) == 2 or len(parts) == 8: function = parts[-2] qualifier = parts[-1] else: function = parts[-1] qualifier = None LOGGER.debug('Sending alert to Lambda function %s', function_name) client = boto3.client('lambda', region_name=self.region) invoke_params = { 'FunctionName': function, 'InvocationType': 'Event', 'Payload': alert_string } # Use the qualifier if it's available. Passing an empty qualifier in # with `Qualifier=''` or `Qualifier=None` does not work if qualifier: invoke_params['Qualifier'] = qualifier client.invoke(**invoke_params) return True
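# Illustrative sketch, not part of the dispatcher above: how the qualifier parsing behaves
# for the three accepted output configuration formats. The helper name and sample values
# are assumptions introduced only for this demonstration.
def _split_function_and_qualifier(function_name):
    parts = function_name.split(':')
    if len(parts) == 2 or len(parts) == 8:
        return parts[-2], parts[-1]
    return parts[-1], None

assert _split_function_and_qualifier('function-name') == ('function-name', None)
assert _split_function_and_qualifier('function-name:prod') == ('function-name', 'prod')
assert _split_function_and_qualifier(
    'arn:aws:lambda:aws-region:acct-id:function:function-name:prod') == ('function-name', 'prod')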
def dispatch(self, **kwargs): """Send alert to a Lambda function The alert gets dumped to a JSON string to be sent to the Lambda function Args: **kwargs: consists of any combination of the following items: descriptor (str): Service descriptor (ie: slack channel, pd integration) rule_name (str): Name of the triggered rule alert (dict): Alert relevant to the triggered rule """ alert = kwargs['alert'] alert_string = json.dumps(alert['record']) function_name = self.config[self.__service__][kwargs['descriptor']] # Check to see if there is an optional qualifier included here # Acceptable values for the output configuration are the full ARN, # a function name followed by a qualifier, or just a function name: # 'arn:aws:lambda:aws-region:acct-id:function:function-name:prod' # 'function-name:prod' # 'function-name' # Checking the length of the list for 2 or 8 should account for all # times a qualifier is provided. parts = function_name.split(':') if len(parts) == 2 or len(parts) == 8: function = parts[-2] qualifier = parts[-1] else: function = parts[-1] qualifier = None LOGGER.debug('Sending alert to Lambda function %s', function_name) client = boto3.client('lambda', region_name=self.region) # Use the qualifier if it's available. Passing an empty qualifier in # with `Qualifier=''` or `Qualifier=None` does not work and thus we # have to perform different calls to client.invoke(). if qualifier: resp = client.invoke(FunctionName=function, InvocationType='Event', Payload=alert_string, Qualifier=qualifier) else: resp = client.invoke(FunctionName=function, InvocationType='Event', Payload=alert_string) return self._log_status(resp, kwargs['descriptor'])
def dispatch(self, **kwargs): """Send alert to Komand Args: **kwargs: consists of any combination of the following items: descriptor (str): Service descriptor (ie: slack channel, pd integration) alert (dict): Alert relevant to the triggered rule """ creds = self._load_creds(kwargs['descriptor']) if not creds: return self._log_status(False, kwargs['descriptor']) headers = {'Authorization': creds['komand_auth_token']} LOGGER.debug('sending alert to Komand') resp = self._post_request(creds['url'], {'data': kwargs['alert']}, headers, False) success = self._check_http_response(resp) return self._log_status(success, kwargs['descriptor'])
def dispatch(self, **kwargs): """Send alert to an S3 bucket Organizes alert into the following folder structure: service/entity/rule_name/datetime.json The alert gets dumped to a JSON string Args: **kwargs: consists of any combination of the following items: descriptor (str): Service descriptor (ie: slack channel, pd integration) rule_name (str): Name of the triggered rule alert (dict): Alert relevant to the triggered rule """ alert = kwargs['alert'] service = alert['source_service'] entity = alert['source_entity'] current_date = datetime.now() s3_alert = alert # JSON dump the alert to retain a consistent alerts schema across log types. # This will get replaced by a UUID which references a record in a # different table in the future. s3_alert['record'] = json.dumps(s3_alert['record']) alert_string = json.dumps(s3_alert) bucket = self.config[self.__service__][kwargs['descriptor']] # Prefix with alerts to account for generic non-streamalert buckets # Produces the following key format: # alerts/dt=2017-01-25-00/kinesis_my-stream_my-rule_uuid.json # Keys need to be unique to avoid object overwriting key = 'alerts/dt={}/{}_{}_{}_{}.json'.format( current_date.strftime('%Y-%m-%d-%H'), service, entity, alert['rule_name'], uuid.uuid4()) LOGGER.debug('Sending alert to S3 bucket %s with key %s', bucket, key) client = boto3.client('s3', region_name=self.region) resp = client.put_object(Body=alert_string, Bucket=bucket, Key=key) return self._log_status(resp, kwargs['descriptor'])
def _get_existing_issue(self, issue_summary, project_key):
    """Find an existing Jira issue based on the issue summary

    Args:
        issue_summary (str): The Jira issue summary
        project_key (str): The Jira project to search

    Returns:
        int: ID of the found issue, or False if no matching issue exists
    """
    jql = 'summary ~ "{}" and project="{}"'.format(issue_summary, project_key)
    resp = self._search_jira(jql, fields=['id', 'summary'], max_results=1)
    jira_id = False

    try:
        jira_id = int(resp[0]['id'])
    except (IndexError, KeyError):
        LOGGER.debug('Existing Jira issue not found')

    return jira_id
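# Illustrative sketch, not part of the helper above: the JQL string passed to _search_jira
# for an assumed issue summary and project key.
issue_summary = 'StreamAlert example_rule'   # assumed value
project_key = 'SEC'                          # assumed value
jql = 'summary ~ "{}" and project="{}"'.format(issue_summary, project_key)
# jql == 'summary ~ "StreamAlert example_rule" and project="SEC"'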
def dispatch(self, alert, descriptor): """Send alert to Github Args: alert (Alert): Alert instance which triggered a rule descriptor (str): Output descriptor Returns: bool: True if alert was sent successfully, False otherwise """ credentials = self._load_creds(descriptor) if not credentials: return self._log_status(False, descriptor) username_password = "******".format(credentials['username'], credentials['access_token']) encoded_credentials = base64.b64encode(username_password) headers = {'Authorization': "Basic {}".format(encoded_credentials)} url = '{}/repos/{}/issues'.format(credentials['api'], credentials['repository']) title = "StreamAlert: {}".format(alert.rule_name) body_template = "### Description\n{}\n\n### Event data\n\n```\n{}\n```" body = body_template.format( alert.rule_description, json.dumps(alert.record, indent=2, sort_keys=True)) issue = { 'title': title, 'body': body, 'labels': credentials['labels'].split(',') } LOGGER.debug('sending alert to Github repository %s', credentials['repository']) try: success = self._post_request_retry(url, issue, headers) except OutputRequestFailure: success = False return self._log_status(success, descriptor)
def _dispatch(self, alert, descriptor): """Send alert to Komand Args: alert (Alert): Alert instance which triggered a rule descriptor (str): Output descriptor Returns: bool: True if alert was sent successfully, False otherwise """ creds = self._load_creds(descriptor) if not creds: return False headers = {'Authorization': creds['komand_auth_token']} LOGGER.debug('sending alert to Komand') resp = self._post_request(creds['url'], {'data': alert.output_dict()}, headers, False) return self._check_http_response(resp)
def run(alert, region, function_name, config):
    """Send an Alert to its described outputs.

    Args:
        alert (dict): dictionary representing an alert with the following structure:
            {
                'record': record,
                'rule_name': rule.rule_name,
                'rule_description': rule.rule_function.__doc__,
                'log_source': str(payload.log_source),
                'log_type': payload.type,
                'outputs': rule.outputs,
                'source_service': payload.service,
                'source_entity': payload.entity
            }
        region (str): The AWS region of the currently executing Lambda function
        function_name (str): The name of the lambda function
        config (dict): The loaded configuration for outputs from conf/outputs.json

    Yields:
        (bool, str): Dispatch status and name of the output to the handler
    """
    if not validate_alert(alert):
        LOGGER.error('Invalid alert format:\n%s', json.dumps(alert, indent=2))
        return

    LOGGER.debug('Sending alert to outputs:\n%s', json.dumps(alert, indent=2))

    # strip out unnecessary keys and sort
    alert = _sort_dict(alert)

    outputs = alert['outputs']
    # Get the output configuration for this rule and send the alert to each
    for output in set(outputs):
        try:
            service, descriptor = output.split(':')
        except ValueError:
            LOGGER.error('Improperly formatted output [%s]. Outputs for rules must '
                         'be declared with both a service and a descriptor for the '
                         'integration (ie: \'slack:my_channel\')', output)
            continue

        if service not in config or descriptor not in config[service]:
            LOGGER.error('The output \'%s\' does not exist!', output)
            continue

        # Retrieve the proper class to handle dispatching the alerts of this service
        output_dispatcher = get_output_dispatcher(service, region, function_name, config)

        if not output_dispatcher:
            continue

        LOGGER.debug('Sending alert to %s:%s', service, descriptor)

        sent = False
        try:
            sent = output_dispatcher.dispatch(descriptor=descriptor,
                                              rule_name=alert['rule_name'],
                                              alert=alert)
        except Exception as err:  # pylint: disable=broad-except
            LOGGER.exception('An error occurred while sending alert '
                             'to %s:%s: %s. alert:\n%s', service, descriptor,
                             err, json.dumps(alert, indent=2))

        # Yield back the result to the handler
        yield sent, output
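# Illustrative sketch, not part of run() above: how a rule's output string is split into
# the service and descriptor used to look up the dispatcher. The value is an assumed example.
output = 'slack:my_channel'
service, descriptor = output.split(':')
# service == 'slack', descriptor == 'my_channel'
# A string without a colon (e.g. 'slack') raises ValueError, which run() logs and skips.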