def test_download_default_report(self, fake_session):
    """Verify the downloader resolves the configured default report name."""
    # actual test
    credential = fake_arn(service='iam', generate_account_id=True)
    downloader = AWSReportDownloader(self.fake_customer_name,
                                     credential,
                                     self.fake_bucket_name)
    self.assertEqual(downloader.report_name, self.fake_report_name)
def test_missing_report_name(self, fake_session):
    """Test downloading a report with an invalid report name."""
    credential = fake_arn(service='iam', generate_account_id=True)
    # A nonexistent report name must be rejected at construction time.
    with self.assertRaises(MasuProviderError):
        AWSReportDownloader(self.fake_customer_name,
                            credential,
                            's3_bucket',
                            'wrongreport')
def test_download_default_report_no_report_found(self, fake_session, fake_report_list):
    """Verify construction fails when no report can be found for the account."""
    credential = fake_arn(service='iam', generate_account_id=True)
    with self.assertRaises(MasuProviderError):
        AWSReportDownloader(self.fake_customer_name,
                            credential,
                            self.fake_bucket_name)
def test_get_report_context_for_date_should_not_download(
        self, mock_session, mock_manifest, mock_check):
    """Test that no data is returned when we don't want to process."""
    current_month = DateAccessor().today().replace(day=1, second=1, microsecond=1)
    credential = fake_arn(service='iam', generate_account_id=True)
    downloader = AWSReportDownloader(self.fake_customer_name,
                                     credential,
                                     self.fake_bucket_name)
    # Shape the manifest like one AWS would return for the current month.
    mock_manifest.return_value = {
        'assemblyId': '1234',
        'Compression': downloader.report.get('Compression'),
        'reportKeys': ['file1', 'file2'],
        'billingPeriod': {
            'start': current_month.strftime(downloader.manifest_date_format)
        }
    }
    # The should-process check says no, so an empty context is expected.
    mock_check.return_value = False
    result = downloader.get_report_context_for_date(current_month)
    self.assertEqual(result, {})
def test_parse_arn_with_region_and_account(self):
    """Assert successful account ID parsing from a well-formed ARN."""
    mock_account_id = fake_aws_account_id()
    mock_arn = fake_arn(account_id=mock_account_id, region='test-region-1')
    arn_object = utils.AwsArn(mock_arn)
    partition = arn_object.partition
    service = arn_object.service
    region = arn_object.region
    account_id = arn_object.account_id
    resource_type = arn_object.resource_type
    resource_separator = arn_object.resource_separator
    resource = arn_object.resource
    # Every component of a fully-specified ARN should be populated.
    for component in (partition, service, region, account_id,
                      resource_type, resource_separator, resource):
        self.assertIsNotNone(component)
    # Reassembling the parsed pieces must reproduce the original ARN.
    reconstructed_arn = ('arn:' + partition + ':' + service + ':' + region +
                        ':' + account_id + ':' + resource_type +
                        resource_separator + resource)
    self.assertEqual(mock_account_id, account_id)
    self.assertEqual(mock_arn, reconstructed_arn)
def setUp(self):
    """Set up each test with a fake account id, ARN, and column map."""
    super().setUp()
    self.account_id = fake_aws_account_id()
    self.arn = fake_arn(account_id=self.account_id, region=REGION, service='iam')
    # Cache the report column map from the common reporting schema.
    with ReportingCommonDBAccessor() as accessor:
        self.column_map = accessor.column_map
def setUpClass(cls):
    """Set up class-level fixtures shared by every test in the class."""
    cls.fake_customer_name = CUSTOMER_NAME
    cls.fake_report_name = REPORT
    cls.fake_bucket_name = BUCKET
    cls.fake_bucket_prefix = PREFIX
    cls.selected_region = REGION
    cls.auth_credential = fake_arn(service='iam', generate_account_id=True)
    cls.manifest_accessor = ReportManifestDBAccessor()
def test_parse_arn_without_region_or_account(self):
    """Assert successful ARN parsing without a region or an account id.

    Uses assertIsNone (identity check) rather than assertEqual(x, None),
    which depends on __eq__ and gives poorer failure messages.
    """
    mock_arn = fake_arn()
    arn_object = utils.AwsArn(mock_arn)
    self.assertIsNone(arn_object.region)
    self.assertIsNone(arn_object.account_id)
def test_get_report_exception(self, fake_downloader):
    """Verify a download failure propagates out of _get_report_files."""
    credential = fake_arn(service='iam', generate_account_id=True)
    with self.assertRaises(Exception):
        _get_report_files(customer_name=self.fake.word(),
                          authentication=credential,
                          provider_type='AWS',
                          report_name=self.fake.word(),
                          billing_source=self.fake.word())
def test_get_report_update_status(self, fake_downloader, fake_status):
    """Test that status is updated when downloading is complete."""
    credential = fake_arn(service='iam', generate_account_id=True)
    _get_report_files(customer_name=self.fake.word(),
                      authentication=credential,
                      provider_type='AWS',
                      report_name=self.fake.word(),
                      provider_uuid=self.aws_test_provider_uuid,
                      billing_source=self.fake.word())
    # A successful download must flip the provider status to READY.
    fake_status.assert_called_with(ProviderStatusCode.READY)
def test_get_report(self, fake_downloader):
    """Verify _get_report_files returns a non-empty list of report files."""
    credential = fake_arn(service='iam', generate_account_id=True)
    files = _get_report_files(customer_name=self.fake.word(),
                              authentication=credential,
                              provider_type='AWS',
                              report_name=self.fake.word(),
                              billing_source=self.fake.word())
    self.assertIsInstance(files, list)
    self.assertGreater(len(files), 0)
def test_parse_arn_with_slash_separator(self):
    """Assert successful ARN parsing with a slash separator."""
    arn_object = utils.AwsArn(fake_arn(resource_separator='/'))
    self.assertIsNotNone(arn_object.resource_type)
    self.assertEqual(arn_object.resource_separator, '/')
    self.assertIsNotNone(arn_object.resource)
def test_download_file_raise_nofile_err(self, fake_session):
    """Verify a missing S3 key surfaces as AWSReportDownloaderNoFileError."""
    error_response = {'Error': {'Code': 'NoSuchKey'}}
    s3_client = Mock()
    s3_client.get_object.side_effect = ClientError(error_response, 'masu-test')
    credential = fake_arn(service='iam', generate_account_id=True)
    downloader = AWSReportDownloader(self.fake_customer_name,
                                     credential,
                                     self.fake_bucket_name)
    downloader.s3_client = s3_client
    with self.assertRaises(AWSReportDownloaderNoFileError):
        downloader.download_file(self.fake.file_path())
def setUp(self):
    """Build a random batch of fake provider account records."""
    # NOTE(review): range(1, randint(5, 20)) yields 4-19 accounts, not
    # 5-20 -- presumably intentional for "several accounts", but confirm.
    self.mock_accounts = [
        {
            'authentication': fake_arn(service='iam', generate_account_id=True),
            'billing_source': self.fake.word(),
            'customer_name': self.fake.word(),
            'provider_type': 'AWS',
            'schema_name': self.fake.word()
        }
        for _ in range(1, random.randint(5, 20))
    ]
def test_disk_status_logging_no_dir(self, fake_downloader):
    """Test task for logging when temp directory does not exist."""
    logging.disable(logging.NOTSET)
    shutil.rmtree(Config.TMP_DIR, ignore_errors=True)
    credential = fake_arn(service='iam', generate_account_id=True)
    # NOTE(review): 'avaiable' is misspelled; presumably it mirrors a typo
    # in the production log message -- confirm before changing either side.
    expected = 'INFO:masu.processor._tasks.download:Unable to find avaiable disk space. {} does not exist'.format(Config.TMP_DIR)
    with self.assertLogs('masu.processor._tasks.download', level='INFO') as logger:
        _get_report_files(customer_name=self.fake.word(),
                          authentication=credential,
                          provider_type='AWS',
                          report_name=self.fake.word(),
                          billing_source=self.fake.word())
        self.assertIn(expected, logger.output)
def test_check_size_fail_nosize(self, fake_session):
    """Verify _check_size raises when S3 returns no ContentLength."""
    s3_client = Mock()
    s3_client.get_object.return_value = {}
    credential = fake_arn(service='iam', generate_account_id=True)
    downloader = AWSReportDownloader(self.fake_customer_name,
                                     credential,
                                     self.fake_bucket_name)
    downloader.s3_client = s3_client
    fake_key = self.fake.file_path(
        depth=random.randint(1, 5),
        extension=random.choice(['json', 'csv.gz']))
    with self.assertRaises(AWSReportDownloaderError):
        downloader._check_size(fake_key, check_inflate=False)
def test_get_report_exception_update_status(self, fake_downloader, fake_status):
    """Test that status is updated when an exception is raised."""
    credential = fake_arn(service='iam', generate_account_id=True)
    try:
        _get_report_files(customer_name=self.fake.word(),
                          authentication=credential,
                          provider_type='AWS',
                          report_name=self.fake.word(),
                          provider_uuid=self.aws_test_provider_uuid,
                          billing_source=self.fake.word())
    except ReportDownloaderError:
        # The failure itself is expected; we only care that status was set.
        pass
    fake_status.assert_called()
def test_disk_status_logging_no_dir(self, fake_downloader):
    """Test task for logging when temp directory does not exist."""
    logging.disable(logging.NOTSET)
    # Point the temp dir at a path that cannot exist.
    Config.TMP_DIR = '/this/path/does/not/exist'
    credential = fake_arn(service='iam', generate_account_id=True)
    expected = 'INFO:masu.processor._tasks.download:Unable to find' + \
        f' available disk space. {Config.TMP_DIR} does not exist'
    with self.assertLogs('masu.processor._tasks.download', level='INFO') as logger:
        _get_report_files(customer_name=self.fake.word(),
                          authentication=credential,
                          provider_type='AWS',
                          report_name=self.fake.word(),
                          provider_uuid=self.aws_test_provider_uuid,
                          billing_source=self.fake.word())
        self.assertIn(expected, logger.output)
def test_download_file_check_size_fail(self, fake_session, fake_shutil):
    """Verify download_file raises when the size check fails."""
    s3_client = Mock()
    s3_client.get_object.return_value = {
        'ContentLength': 123456,
        'Body': io.BytesIO(b'\xd2\x02\x96I')
    }
    # shutil.disk_usage tuple: (total, used, free) -- free is small enough
    # that the size check on the .gz key presumably fails; confirm against
    # AWSReportDownloader._check_size.
    fake_shutil.disk_usage.return_value = (10, 10, 1234567)
    credential = fake_arn(service='iam', generate_account_id=True)
    downloader = AWSReportDownloader(self.fake_customer_name,
                                     credential,
                                     self.fake_bucket_name)
    downloader.s3_client = s3_client
    fake_key = self.fake.file_path(depth=random.randint(1, 5),
                                   extension='csv.gz')
    with self.assertRaises(AWSReportDownloaderError):
        downloader.download_file(fake_key)
def test_check_size_inflate_success(self, fake_session, fake_shutil):
    """Verify _check_size succeeds when the inflated file fits on disk."""
    s3_client = Mock()
    s3_client.get_object.return_value = {
        'ContentLength': 123456,
        'Body': io.BytesIO(b'\xd2\x02\x96I')
    }
    # Report 4 GiB free -- ample room for the estimated inflated size.
    fake_shutil.disk_usage.return_value = (10, 10, 4096 * 1024 * 1024)
    credential = fake_arn(service='iam', generate_account_id=True)
    downloader = AWSReportDownloader(self.fake_customer_name,
                                     credential,
                                     self.fake_bucket_name)
    downloader.s3_client = s3_client
    fake_key = self.fake.file_path(depth=random.randint(1, 5),
                                   extension='csv.gz')
    self.assertTrue(downloader._check_size(fake_key, check_inflate=True))
def setUp(self, fake_session):
    """Set up each test with a data directory and a report downloader."""
    os.makedirs(DATA_DIR, exist_ok=True)
    self.fake_customer_name = CUSTOMER_NAME
    self.fake_report_name = REPORT
    self.fake_bucket_name = BUCKET
    self.fake_bucket_prefix = PREFIX
    self.selected_region = REGION
    credential = fake_arn(service='iam', generate_account_id=True)
    self.report_downloader = AWSReportDownloader(
        customer_name=self.fake_customer_name,
        auth_credential=credential,
        bucket=self.fake_bucket_name,
        report_name=self.fake_report_name)
def setUpClass(cls):
    """Set up the class."""
    super().setUpClass()
    cls.fake = faker.Faker()
    # One compressed and one uncompressed report fixture.
    cls.fake_reports = [
        {'file': cls.fake.word(), 'compression': 'GZIP'},
        {'file': cls.fake.word(), 'compression': 'PLAIN'},
    ]
    cls.fake_account = fake_arn(service='iam', generate_account_id=True)
    cls.today = datetime.today()
    cls.yesterday = datetime.today() - timedelta(days=1)
def test_check_size_fail_nospace(self, fake_session, fake_shutil):
    """Verify _check_size returns False when free disk space is exhausted."""
    s3_client = Mock()
    s3_client.get_object.return_value = {
        'ContentLength': 123456,
        'Body': Mock()
    }
    # Only 10 bytes free -- far too small for the 123456-byte object.
    fake_shutil.disk_usage.return_value = (10, 10, 10)
    credential = fake_arn(service='iam', generate_account_id=True)
    downloader = AWSReportDownloader(self.fake_customer_name,
                                     credential,
                                     self.fake_bucket_name)
    downloader.s3_client = s3_client
    fake_key = self.fake.file_path(
        depth=random.randint(1, 5),
        extension=random.choice(['json', 'csv.gz']))
    self.assertFalse(downloader._check_size(fake_key, check_inflate=False))
def test_get_report_with_override(self, fake_accessor, fake_report_files):
    """Test _get_report_files on non-initial load with override set.

    The original version restored the Config globals only on the success
    path; a failing assertion left INGEST_OVERRIDE=True leaking into other
    tests. The restore now lives in a finally block.
    """
    Config.INGEST_OVERRIDE = True
    Config.INITIAL_INGEST_NUM_MONTHS = 5
    initial_month_qty = Config.INITIAL_INGEST_NUM_MONTHS
    account = fake_arn(service='iam', generate_account_id=True)
    try:
        with patch.object(ReportDownloader, 'get_reports') as download_call:
            _get_report_files(customer_name=self.fake.word(),
                              authentication=account,
                              provider_type='AWS',
                              report_name=self.fake.word(),
                              provider_uuid=self.aws_test_provider_uuid,
                              billing_source=self.fake.word())
            # With the override on, the full initial month count is requested.
            download_call.assert_called_with(initial_month_qty)
    finally:
        # Always restore global config so state never leaks into other tests.
        Config.INGEST_OVERRIDE = False
        Config.INITIAL_INGEST_NUM_MONTHS = 2
def test_download_default_report(self, fake_session):
    """Download the default report against a moto-mocked S3 bucket."""
    report_start = self.fake.date_time().replace(day=1)
    report_end = report_start + relativedelta(months=+1)
    report_range = '{}-{}'.format(report_start.strftime('%Y%m%d'),
                                  report_end.strftime('%Y%m%d'))

    # mocked report file definition
    fake_report_file = '{}/{}/{}/{}/{}.csv'.format(self.fake_bucket_prefix,
                                                   self.fake_report_name,
                                                   report_range,
                                                   uuid.uuid4(),
                                                   'mocked-report-file')

    # mocked Manifest definition
    fake_object = '{}/{}/{}/{}-Manifest.json'.format(
        self.fake_bucket_prefix, self.fake_report_name, report_range,
        self.fake_report_name)
    fake_object_body = {'reportKeys': [fake_report_file]}

    # Moto setup
    conn = boto3.resource('s3', region_name=self.selected_region)
    conn.create_bucket(Bucket=self.fake_bucket_name)

    # push mocked manifest into Moto env and verify the round-trip
    conn.Object(self.fake_bucket_name, fake_object).put(Body=json.dumps(fake_object_body))
    stored = conn.Object(self.fake_bucket_name, fake_object).get()
    self.assertEqual(fake_object_body, json.load(stored['Body']))

    # push mocked csv into Moto env and verify the round-trip
    fake_csv_body = ','.join(self.fake.words(random.randint(5, 10)))
    conn.Object(self.fake_bucket_name, fake_report_file).put(Body=fake_csv_body)
    stored = conn.Object(self.fake_bucket_name, fake_report_file).get()
    self.assertEqual(fake_csv_body, str(stored['Body'].read(), 'utf-8'))

    # actual test
    credential = fake_arn(service='iam', generate_account_id=True)
    downloader = AWSReportDownloader(self.fake_customer_name,
                                     credential,
                                     self.fake_bucket_name)
    self.assertEqual(downloader.report_name, self.fake_report_name)
def setUp(self):
    """Set up a local-bucket downloader backed by extracted test data.

    The original left the TarFile handle open (resource leak); it is now
    closed via a context manager.
    """
    os.makedirs(DATA_DIR, exist_ok=True)
    self.fake_customer_name = CUSTOMER_NAME
    self.fake_report_name = 'koku-local'
    # A fresh temp directory stands in for the S3 bucket.
    self.fake_bucket_name = tempfile.mkdtemp()
    self.fake_bucket_prefix = PREFIX
    self.selected_region = REGION
    self.fake_auth_credential = fake_arn(service='iam', generate_account_id=True)
    # Populate the fake bucket with the canned report data.
    with TarFile.open('./tests/data/test_local_bucket.tar.gz') as mytar:
        mytar.extractall(path=self.fake_bucket_name)
    self.report_downloader = LocalReportDownloader(
        **{
            'customer_name': self.fake_customer_name,
            'auth_credential': self.fake_auth_credential,
            'bucket': self.fake_bucket_name
        })
def test_disk_status_logging(self, fake_downloader):
    """Test task for logging when temp directory exists."""
    logging.disable(logging.NOTSET)
    os.makedirs(Config.TMP_DIR, exist_ok=True)
    credential = fake_arn(service='iam', generate_account_id=True)
    # NOTE(review): 'Avaiable' is misspelled; presumably it mirrors a typo
    # in the production log message -- confirm before changing either side.
    expected = 'INFO:masu.processor._tasks.download:Avaiable disk space'
    with self.assertLogs('masu.processor._tasks.download', level='INFO') as logger:
        _get_report_files(customer_name=self.fake.word(),
                          authentication=credential,
                          provider_type='AWS',
                          report_name=self.fake.word(),
                          billing_source=self.fake.word())
    self.assertTrue(any(expected in line for line in logger.output))
    shutil.rmtree(Config.TMP_DIR, ignore_errors=True)
def setUp(self):
    """Create a fake IAM ARN credential for each test."""
    super().setUp()
    # Generated ARN includes a random account id (generate_account_id=True).
    self.fake_creds = fake_arn(service='iam', generate_account_id=True)
def setUp(self):
    """Build a fake account id and a matching IAM ARN for each test."""
    self.account_id = fake_aws_account_id()
    # ARN embeds the same account id so tests can assert round-trip parsing.
    self.arn = fake_arn(account_id=self.account_id, region=REGION, service='iam')