def test_analyze_no_matches(self):
    """An analyzer built from an empty ruleset reports no matches."""
    # Build a second analyzer whose ruleset file contains no rules at all.
    yara_mocks.save_test_yara_rules('./empty.yara.rules', empty_rules_file=True)
    analyzer_without_rules = yara_analyzer.YaraAnalyzer('./empty.yara.rules')
    # Scanning any target through it must come back empty.
    self.assertEqual([], analyzer_without_rules.analyze('/target.exe'))
def setUp(self):
    """Create a fresh YaraAnalyzer and a fake target file for each test."""
    self.setUpPyfakefs()
    # Save the full test ruleset onto the fake filesystem, then load it.
    rules_path = './all.yara.rules'
    yara_mocks.save_test_yara_rules(rules_path)
    self._analyzer = yara_analyzer.YaraAnalyzer(rules_path)
    # The analyzer needs something to scan: write a benign target file.
    self.fs.CreateFile(
        './target.exe',
        contents='This is definitely not an evil file. ^_^\n')
def setUp(self): """Before each test, create the mock environment.""" # Show all differences on assertion failures, even for large dictionaries. self.maxDiff = None # pylint: disable=invalid-name # Set up the fake filesystem. self.setUpPyfakefs() os.mkdir('/tmp') os.makedirs(os.path.dirname(main.COMPILED_RULES_FILEPATH)) yara_mocks.save_test_yara_rules(main.COMPILED_RULES_FILEPATH) # Mock cloudwatch client. self._mock_cloudwatch_client = boto3_mocks.MockCloudwatchCient() # Create a mock Dynamo table. self._mock_dynamo_client = boto3_mocks.MockDynamoDBClient( MOCK_DYNAMO_TABLE_NAME, HASH_KEY, RANGE_KEY) self._mock_dynamo_table = self._mock_dynamo_client.tables[ MOCK_DYNAMO_TABLE_NAME] os.environ['YARA_MATCHES_DYNAMO_TABLE_NAME'] = MOCK_DYNAMO_TABLE_NAME # Create a mock S3 bucket and "upload" a file to it. self._mock_s3_client = boto3_mocks.MockS3Client( MOCK_S3_BUCKET_NAME, MOCK_S3_OBJECT_KEY, MOCK_FILE_CONTENTS, MOCK_FILE_METADATA) os.environ['S3_BUCKET_NAME'] = MOCK_S3_BUCKET_NAME # Create mock SNS topic. self._mock_sns_client = boto3_mocks.MockSNSClient() os.environ['YARA_ALERTS_SNS_TOPIC_ARN'] = MOCK_SNS_TOPIC_ARN # Create mock SQS queue. self._mock_sqs_client = boto3_mocks.MockSQSClient( MOCK_SQS_URL, MOCK_SQS_RECEIPTS) os.environ['SQS_QUEUE_URL'] = MOCK_SQS_URL # Enable the boto3 mocks. self._real_boto3_client = boto3.client boto3.client = mock.MagicMock(side_effect=self._boto3_client_mock) # Create test event. self._test_event = { 'S3Objects': [MOCK_S3_OBJECT_KEY], 'SQSReceipts': MOCK_SQS_RECEIPTS }
def setUp(self): """Before each test, create the mock environment.""" # Show all differences on assertion failures, even for large dictionaries. self.maxDiff = None # pylint: disable=invalid-name # Set up the fake filesystem. self.setUpPyfakefs() os.makedirs(os.path.dirname(COMPILED_RULES_FILEPATH)) os.makedirs(tempfile.gettempdir()) yara_mocks.save_test_yara_rules(COMPILED_RULES_FILEPATH) # Set environment variables. os.environ['S3_BUCKET_NAME'] = MOCK_S3_BUCKET_NAME os.environ['SQS_QUEUE_URL'] = MOCK_SQS_URL os.environ['YARA_MATCHES_DYNAMO_TABLE_NAME'] = MOCK_DYNAMO_TABLE_NAME os.environ['YARA_ALERTS_SNS_TOPIC_ARN'] = MOCK_SNS_TOPIC_ARN # Create test event. self._test_event = { # Two objects, which match different YARA rules. 'S3Objects': [urllib.parse.quote_plus(GOOD_S3_OBJECT_KEY), EVIL_S3_OBJECT_KEY], 'SQSReceipts': MOCK_SQS_RECEIPTS } # Import the module under test (now that YARA is mocked out). with mock.patch('boto3.client'), mock.patch('boto3.resource'): from lambda_functions.analyzer import main self.main = main # Reset each boto3 resource (sometimes necessary depending on import order). self.main.analyzer_aws_lib.CLOUDWATCH = mock.MagicMock() self.main.analyzer_aws_lib.DYNAMODB = mock.MagicMock() self.main.analyzer_aws_lib.S3 = mock.MagicMock() self.main.analyzer_aws_lib.SNS = mock.MagicMock() self.main.analyzer_aws_lib.SQS = mock.MagicMock() # Mock S3 Object self.main.analyzer_aws_lib.S3.Object = MockS3Object