def test_multiple_records(self):
    """Verify that results are returned for multiple records."""
    # Add two different files to mock S3.
    self._mock_s3_client.buckets[MOCK_S3_BUCKET_NAME]['KEY2'] = ('Evilicious', {})
    self._mock_s3_client.buckets[MOCK_S3_BUCKET_NAME]['KEY3'] = ('', {
        'observed_path': 'win32'
    })
    self._test_event['S3Objects'] = ['KEY2', 'KEY3']

    result = main.analyze_lambda_handler(self._test_event, TEST_CONTEXT)

    # Verify return value.
    expected = {
        'S3:{}:KEY2'.format(MOCK_S3_BUCKET_NAME): {
            'FileInfo': {
                'ComputedMD5': hashlib.md5('Evilicious'.encode('utf-8')).hexdigest(),
                'ComputedSHA256': hashlib.sha256('Evilicious'.encode('utf-8')).hexdigest(),
                'ReportedMD5': '',
                'S3Location': 'S3:{}:KEY2'.format(MOCK_S3_BUCKET_NAME),
                'SamplePath': ''
            },
            'NumMatchedRules': 1,
            'MatchedRules': {
                'Rule1': {
                    'MatchedStrings': ['$evil_string'],
                    'Meta': {
                        'author': 'Austin Byers',
                        'description': ('A helpful description about why this rule matches '
                                        'dastardly evil files.')
                    },
                    'RuleFile': 'evil_check.yar',
                    'RuleName': 'contains_evil',
                    'RuleTags': ['mock_rule', 'has_meta']
                }
            }
        },
        'S3:{}:KEY3'.format(MOCK_S3_BUCKET_NAME): {
            'FileInfo': {
                'ComputedMD5': hashlib.md5(''.encode('utf-8')).hexdigest(),
                'ComputedSHA256': hashlib.sha256(''.encode('utf-8')).hexdigest(),
                'ReportedMD5': '',
                'S3Location': 'S3:{}:KEY3'.format(MOCK_S3_BUCKET_NAME),
                'SamplePath': 'win32'
            },
            'NumMatchedRules': 1,
            'MatchedRules': {
                'Rule1': {
                    'MatchedStrings': [],
                    'Meta': {},
                    'RuleFile': 'externals.yar',
                    'RuleName': 'filename_contains_win32',
                    'RuleTags': ['mock_rule']
                }
            }
        }
    }
    self.assertEqual(expected, result)

    # Verify that the return value can be encoded as JSON.
    json.dumps(result)

    # Verify the CloudWatch metrics.
    expected_metrics = {
        'AnalyzedBinaries': 2,
        'MatchedBinaries': 2,
        'YaraRules': 3,
        'LambdaVersion': 1
    }
    for metric in self._mock_cloudwatch_client.metric_data['BinaryAlert']:
        if metric['MetricName'] in expected_metrics:
            self.assertEqual(expected_metrics[metric['MetricName']], metric['Value'])
def test_new_matching_file_added(self):
    """Verify return value, Dynamo update, and SNS alert when a new file matches a YARA rule."""
    md5 = hashlib.md5(MOCK_FILE_CONTENTS.encode('utf-8')).hexdigest()
    sha = hashlib.sha256(MOCK_FILE_CONTENTS.encode('utf-8')).hexdigest()
    result = main.analyze_lambda_handler(self._test_event, TEST_CONTEXT)

    # Verify return value.
    s3_id = 'S3:{}:{}'.format(MOCK_S3_BUCKET_NAME, MOCK_S3_OBJECT_KEY)
    expected = {
        s3_id: {
            'FileInfo': {
                'ComputedMD5': md5,
                'ComputedSHA256': sha,
                'ReportedMD5': MOCK_FILE_METADATA['reported_md5'],
                'S3Location': s3_id,
                'SamplePath': MOCK_FILE_METADATA['observed_path']
            },
            'NumMatchedRules': 2,
            'MatchedRules': {
                'Rule1': {
                    'MatchedStrings': ['$evil_string'],
                    'Meta': {
                        'author': 'Austin Byers',
                        'description': ('A helpful description about why this rule matches '
                                        'dastardly evil files.')
                    },
                    'RuleFile': 'evil_check.yar',
                    'RuleName': 'contains_evil',
                    'RuleTags': ['mock_rule', 'has_meta']
                },
                'Rule2': {
                    'MatchedStrings': [],
                    'Meta': {},
                    'RuleFile': 'externals.yar',
                    'RuleName': 'extension_is_exe',
                    'RuleTags': ['mock_rule']
                }
            }
        }
    }
    self.assertEqual(expected, result)

    # Verify that the return value can be encoded as JSON.
    json.dumps(result)

    # Verify that a new entry was made to Dynamo with all of the expected data.
    key_value_dict = self._mock_dynamo_table.items[(sha, str(LAMBDA_VERSION))].key_value_dict
    for expected in [md5, MOCK_S3_OBJECT_KEY, 'evil_check.yar:contains_evil']:
        self.assertIn(expected, str(key_value_dict.values()))

    # Verify that an alert was published to SNS.
    alert = self._mock_sns_client.topics[MOCK_SNS_TOPIC_ARN][0]['Message']
    for data in [md5, sha, 'evil_check.yar', 'externals.yar', s3_id]:
        self.assertIn(data, alert)

    # Verify that the SQS receipts were deleted.
    self.assertEqual([], self._mock_sqs_client.queues[MOCK_SQS_URL])

    # Verify that the correct metrics were published to CloudWatch.
    expected_metrics = {
        'AnalyzedBinaries': 1,
        'MatchedBinaries': 1,
        'YaraRules': 3,
        'LambdaVersion': 1
    }
    for metric in self._mock_cloudwatch_client.metric_data['BinaryAlert']:
        if metric['MetricName'] in expected_metrics:
            self.assertEqual(expected_metrics[metric['MetricName']], metric['Value'])

    # Verify that the downloaded file was removed from /tmp.
    self.assertEqual([], os.listdir('/tmp'))
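
# NOTE: A minimal refactoring sketch, not part of the original tests. Both test cases above
# repeat the same CloudWatch metric assertion loop, so a shared helper such as the hypothetical
# _verify_cloudwatch_metrics below could replace those loops. It assumes the same mock client
# attribute (self._mock_cloudwatch_client.metric_data) and 'BinaryAlert' namespace used above.
def _verify_cloudwatch_metrics(self, expected_metrics):
    """Assert that each expected metric name was published with its expected value."""
    for metric in self._mock_cloudwatch_client.metric_data['BinaryAlert']:
        if metric['MetricName'] in expected_metrics:
            self.assertEqual(expected_metrics[metric['MetricName']], metric['Value'])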