def setUp(self):
    """Before each test, create the mock environment."""
    # Show every difference on assertion failures, even for large dictionaries.
    self.maxDiff = None  # pylint: disable=invalid-name

    # Set up the fake filesystem and place the compiled YARA rules in it.
    self.setUpPyfakefs()
    os.makedirs(os.path.dirname(COMPILED_RULES_FILEPATH))
    yara_mocks.save_test_yara_rules(COMPILED_RULES_FILEPATH)

    def _sqs_record(object_key):
        """Return one SQS record whose body is an S3 event for object_key."""
        s3_event = {
            'Records': [{
                's3': {
                    'bucket': {'name': MOCK_S3_BUCKET_NAME},
                    'object': {'key': urllib.parse.quote_plus(object_key)}
                }
            }]
        }
        return {'body': json.dumps(s3_event)}

    # Create test event: one SQS record per S3 object key.
    self._test_event = {
        'Records': [
            _sqs_record(GOOD_S3_OBJECT_KEY),
            _sqs_record(EVIL_S3_OBJECT_KEY)
        ]
    }

    # Import the module under test (now that YARA is mocked out).
    with mock.patch('boto3.client'), mock.patch('boto3.resource'), \
            mock.patch.object(yara_analyzer.yara, 'load',
                              side_effect=yara_mocks.mock_yara_load):
        from lambda_functions.analyzer import main
        self.main = main

    # Reset each boto3 resource (sometimes necessary depending on import order).
    for resource_name in ('CLOUDWATCH', 'DYNAMODB', 'S3', 'SNS'):
        setattr(self.main.analyzer_aws_lib, resource_name, mock.MagicMock())

    # Mock S3 Object
    self.main.analyzer_aws_lib.S3.Object = MockS3Object
def test_analyze_no_matches(self):
    """Analyze returns empty list if no matches."""
    # Build a second YaraAnalyzer backed by a ruleset with no rules.
    empty_rules_path = './empty.yara.rules'
    yara_mocks.save_test_yara_rules(empty_rules_path, empty_rules_file=True)
    with mock.patch.object(
            yara_analyzer.yara, 'load', side_effect=yara_mocks.mock_yara_load):
        empty_analyzer = yara_analyzer.YaraAnalyzer(empty_rules_path)
    # With no rules loaded, scanning any file yields no matches.
    self.assertEqual([], empty_analyzer.analyze('/target.exe'))
def setUp(self):
    """For each test, build a new YaraAnalyzer."""
    self.setUpPyfakefs()

    # Save the full test ruleset and load it with yara.load mocked out.
    rules_path = './all.yara.rules'
    yara_mocks.save_test_yara_rules(rules_path)
    with mock.patch.object(
            yara_analyzer.yara, 'load', side_effect=yara_mocks.mock_yara_load):
        self._analyzer = yara_analyzer.YaraAnalyzer(rules_path)

    # Write a benign target file into the fake filesystem.
    self.fs.CreateFile(
        './target.exe', contents='This is definitely not an evil file. ^_^\n')
def setUp(self):
    """Before each test, create the mock environment."""
    # Show all differences on assertion failures, even for large dictionaries.
    self.maxDiff = None  # pylint: disable=invalid-name

    # Capture the real temp directory BEFORE pyfakefs replaces the filesystem.
    # tempfile.gettempdir() caches its result on first call, so it returns
    # this same path even after the fake filesystem is active.
    real_tempdir = tempfile.gettempdir()

    # Set up the fake filesystem.
    self.setUpPyfakefs()
    os.makedirs(os.path.dirname(COMPILED_RULES_FILEPATH))
    # Re-create the temp directory inside the fake filesystem. exist_ok makes
    # this idempotent: the original unconditional makedirs() would raise if
    # pyfakefs had already created the path, and its follow-up
    # `if not os.path.exists(tempfile.gettempdir())` re-check was dead code
    # because gettempdir() is cached.
    os.makedirs(real_tempdir, exist_ok=True)
    yara_mocks.save_test_yara_rules(COMPILED_RULES_FILEPATH)

    # Set environment variables.
    os.environ['S3_BUCKET_NAME'] = MOCK_S3_BUCKET_NAME
    os.environ['SQS_QUEUE_URL'] = MOCK_SQS_URL
    os.environ['YARA_MATCHES_DYNAMO_TABLE_NAME'] = MOCK_DYNAMO_TABLE_NAME
    os.environ['YARA_ALERTS_SNS_TOPIC_ARN'] = MOCK_SNS_TOPIC_ARN

    # Create test event.
    self._test_event = {
        # Two objects, which match different YARA rules.
        # NOTE(review): only the first key is percent-encoded — presumably
        # deliberate, to cover both encoded and raw key forms; confirm.
        'S3Objects': [urllib.parse.quote_plus(GOOD_S3_OBJECT_KEY), EVIL_S3_OBJECT_KEY],
        'SQSReceipts': MOCK_SQS_RECEIPTS
    }

    # Import the module under test (now that YARA is mocked out).
    with mock.patch('boto3.client'), mock.patch('boto3.resource'), \
            mock.patch.object(yara_analyzer.yara, 'load',
                              side_effect=yara_mocks.mock_yara_load):
        from lambda_functions.analyzer import main
        self.main = main

    # Reset each boto3 resource (sometimes necessary depending on import order).
    self.main.analyzer_aws_lib.CLOUDWATCH = mock.MagicMock()
    self.main.analyzer_aws_lib.DYNAMODB = mock.MagicMock()
    self.main.analyzer_aws_lib.S3 = mock.MagicMock()
    self.main.analyzer_aws_lib.SNS = mock.MagicMock()
    self.main.analyzer_aws_lib.SQS = mock.MagicMock()

    # Mock S3 Object
    self.main.analyzer_aws_lib.S3.Object = MockS3Object