def _get_object(self):
    """Given an S3 record, download and parse the data.

    Reads the region, bucket, key and object size out of ``self.raw_record``
    and hands them to ``self._download_object``.

    Returns:
        str: Path to the downloaded s3 object, or None if the download failed.
    """
    # S3 event notifications URL-encode the bucket and key names
    # (ie - %26 --> &), so decode them before requesting the object.
    # NOTE: the previous `lambda (data): unquote(data).decode('utf-8')` used
    # Python 2-only tuple-parameter syntax (a SyntaxError under Python 3);
    # urllib.parse.unquote already returns a decoded str, so call it directly.
    region = self.raw_record['awsRegion']
    bucket = unquote(self.raw_record['s3']['bucket']['name'])
    key = unquote(self.raw_record['s3']['object']['key'])
    self.s3_object_size = int(self.raw_record['s3']['object']['size'])

    LOGGER.debug(
        'Pre-parsing record from S3. Bucket: %s, Key: %s, Size: %d',
        bucket, key, self.s3_object_size)

    try:
        return self._download_object(region, bucket, key)
    except IOError:
        # Best-effort: log the failure and fall through to an implicit None
        # so the caller can decide how to handle a missing object.
        LOGGER.exception(
            '[S3Payload] The following error occurred while downloading')
        return
def sink(self, alerts):
    """Sink triggered alerts from the StreamRules engine.

    Args:
        alerts (list): a list of dictionaries representating json alerts

    Sends a message to the alert processor with the following JSON format:
        {
            "record": record,
            "metadata": {
                "rule_name": rule.rule_name,
                "rule_description": rule.rule_function.__doc__,
                "log": str(payload.log_source),
                "outputs": rule.outputs,
                "type": payload.type,
                "source": {
                    "service": payload.service,
                    "entity": payload.entity
                }
            }
        }
    """
    for alert in alerts:
        try:
            data = json.dumps(alert, default=lambda o: o.__dict__)
        except AttributeError as err:
            # NOTE: `err.message` was removed in Python 3 -- format the
            # exception object itself instead.
            LOGGER.error(
                'An error occurred while dumping alert to JSON: %s '
                'Alert: %s', err, alert)
            continue

        try:
            # Async ('Event') invoke of the alert processor's production alias.
            response = self.client_lambda.invoke(
                FunctionName=self.function,
                InvocationType='Event',
                Payload=data,
                Qualifier='production')
        except ClientError as err:
            LOGGER.exception(
                'An error occurred while sending alert to '
                '\'%s:production\'. Error is: %s. Alert: %s',
                self.function, err.response, data)
            continue

        # An 'Event' invocation is accepted with HTTP 202, not 200.
        if response['ResponseMetadata']['HTTPStatusCode'] != 202:
            LOGGER.error('Failed to send alert to \'%s\': %s',
                         self.function, data)
            continue

        # Keep development invocations quiet in the logs.
        if self.env['lambda_alias'] != 'development':
            LOGGER.info(
                'Sent alert to \'%s\' with Lambda request ID \'%s\'',
                self.function, response['ResponseMetadata']['RequestId'])
def send_alerts(self, alerts):
    """Send alerts to the Alert Processor and to the alerts Dynamo table.

    Args:
        alerts (list): A list of dictionaries representing json alerts.
    """
    try:
        self._send_to_dynamo(alerts)
    except ClientError:
        # batch_writer() already retries transient errors on its own, so a
        # ClientError surfacing here is almost certainly unrecoverable.
        # Record both an exception log entry and a failure metric.
        LOGGER.exception('Error saving alerts to Dynamo')
        MetricLogger.log_metric(
            FUNCTION_NAME, MetricLogger.FAILED_DYNAMO_WRITES, 1)
def send_alerts(self, alerts):
    """Send alerts to the Dynamo table.

    Args:
        alerts (list): A list of Alert instances to save to Dynamo.
    """
    try:
        self._table.add_alerts(alerts)
        LOGGER.info('Successfully sent %d alert(s) to dynamo:%s',
                    len(alerts), self._table.name)
    except ClientError:
        # add_alerts() retries transient errors internally, so a ClientError
        # reaching this point is likely unrecoverable; log the exception and
        # bump the failure metric so the problem is visible.
        LOGGER.exception('Error saving alerts to Dynamo')
        MetricLogger.log_metric(
            FUNCTION_NAME, MetricLogger.FAILED_DYNAMO_WRITES, 1)
def process_rule(cls, record, rule):
    """Process rule functions on a given record

    Args:
        record (dict): Parsed payload of any type
        rule (func): Rule function to process the record

    Returns:
        (bool): The return function of the rule, or False if it raised
    """
    try:
        return rule.rule_function(record)
    except Exception:  # pylint: disable=broad-except
        # A rule must never take down the whole engine; treat any
        # exception as a non-match and surface it in the logs.
        LOGGER.exception('Encountered error with rule: %s',
                         rule.rule_function.__name__)
        return False