def setUp(self):
    """Set up each test."""
    super().setUp()
    # Unpack the canned local-bucket fixture into a scratch directory.
    self.fake_bucket_name = tempfile.mkdtemp()
    bucket_tar = TarFile.open("./koku/masu/test/data/test_local_bucket.tar.gz")
    bucket_tar.extractall(path=self.fake_bucket_name)
    os.makedirs(DATA_DIR, exist_ok=True)
    self.credentials = {"role_arn": self.fake_auth_credential}
    self.data_source = {"bucket": self.fake_bucket_name}
    # Generic downloader wrapper pointed at the AWS-local provider type.
    self.report_downloader = ReportDownloader(
        customer_name=self.fake_customer_name,
        credentials=self.credentials,
        data_source=self.data_source,
        provider_type=Provider.PROVIDER_AWS_LOCAL,
        provider_uuid=self.aws_provider_uuid,
    )
    # Concrete AWS-local downloader, for tests that need it directly.
    self.aws_local_report_downloader = AWSLocalReportDownloader(
        customer_name=self.fake_customer_name,
        credentials=self.credentials,
        data_source=self.data_source,
        provider_uuid=self.aws_provider_uuid,
    )
def test_get_manifest_context_for_date(self, mock_manifest, mock_delete):
    """Test that the manifest is read."""
    current_month = DateAccessor().today().replace(day=1, second=1, microsecond=1)
    downloader = AWSLocalReportDownloader(
        self.fake_customer_name,
        self.credentials,
        self.data_source,
        provider_uuid=self.aws_provider_uuid,
    )
    expected_assembly_id = "1234"
    expected_compression = "GZIP"
    # Fake manifest payload the downloader is expected to parse.
    fake_manifest = {
        "assemblyId": expected_assembly_id,
        "Compression": expected_compression,
        "reportKeys": ["file1", "file2"],
        "billingPeriod": {"start": current_month.strftime(downloader.manifest_date_format)},
    }
    mock_manifest.return_value = ("", fake_manifest, DateAccessor().today())
    result = downloader.get_manifest_context_for_date(current_month)
    self.assertEqual(result.get("assembly_id"), expected_assembly_id)
    self.assertEqual(result.get("compression"), expected_compression)
    self.assertIsNotNone(result.get("files"))
def setUp(self):
    """Set up each test."""
    super().setUp()
    # Unpack the canned local-bucket fixture into a scratch directory.
    self.fake_bucket_name = tempfile.mkdtemp()
    bucket_tar = TarFile.open("./koku/masu/test/data/test_local_bucket.tar.gz")
    bucket_tar.extractall(path=self.fake_bucket_name)
    os.makedirs(DATA_DIR, exist_ok=True)
    # Minimal Celery-task stand-in; only the request id is ever read.
    self.mock_task = Mock(request=Mock(id=str(self.fake.uuid4()), return_value={}))
    self.report_downloader = ReportDownloader(
        task=self.mock_task,
        customer_name=self.fake_customer_name,
        access_credential=self.fake_auth_credential,
        report_source=self.fake_bucket_name,
        provider_type=Provider.PROVIDER_AWS_LOCAL,
        provider_uuid=self.aws_provider_uuid,
    )
    self.aws_local_report_downloader = AWSLocalReportDownloader(
        task=self.mock_task,
        customer_name=self.fake_customer_name,
        auth_credential=self.fake_auth_credential,
        bucket=self.fake_bucket_name,
        provider_uuid=self.aws_provider_uuid,
    )
def setUp(self):
    """Set up each test."""
    super().setUp()
    # Unpack the canned local-bucket fixture into a scratch directory.
    self.fake_bucket_name = tempfile.mkdtemp()
    bucket_tar = TarFile.open('./koku/masu/test/data/test_local_bucket.tar.gz')
    bucket_tar.extractall(path=self.fake_bucket_name)
    os.makedirs(DATA_DIR, exist_ok=True)
    # Minimal Celery-task stand-in; only the request id is ever read.
    self.mock_task = Mock(request=Mock(id=str(self.fake.uuid4()), return_value={}))
    self.report_downloader = ReportDownloader(
        task=self.mock_task,
        customer_name=self.fake_customer_name,
        access_credential=self.fake_auth_credential,
        report_source=self.fake_bucket_name,
        provider_type='AWS-local',
        provider_uuid=self.aws_provider_uuid,
    )
    self.aws_local_report_downloader = AWSLocalReportDownloader(
        task=self.mock_task,
        customer_name=self.fake_customer_name,
        auth_credential=self.fake_auth_credential,
        bucket=self.fake_bucket_name,
        provider_uuid=self.aws_provider_uuid,
    )
def test_get_manifest_context_for_date_no_manifest(self, mock_manifest, mock_delete):
    """Test that the manifest is read."""
    current_month = DateAccessor().today().replace(day=1, second=1, microsecond=1)
    auth_credential = fake_arn(service="iam", generate_account_id=True)
    downloader = AWSLocalReportDownloader(
        self.fake_customer_name,
        auth_credential,
        self.fake_bucket_name,
        provider_uuid=self.aws_provider_uuid,
    )
    # An empty report-key list means there is no usable manifest for the month.
    mock_manifest.return_value = ("", {"reportKeys": []})
    self.assertEqual(downloader.get_manifest_context_for_date(current_month), {})
def test_get_manifest_context_for_date_no_manifest(self, mock_manifest, mock_delete):
    """Test that the manifest is read."""
    current_month = DateAccessor().today().replace(day=1, second=1, microsecond=1)
    downloader = AWSLocalReportDownloader(
        self.fake_customer_name,
        self.credentials,
        self.data_source,
        provider_uuid=self.aws_provider_uuid,
    )
    # An empty report-key list means there is no usable manifest for the month.
    mock_manifest.return_value = ("", {"reportKeys": []}, DateAccessor().today())
    self.assertEqual(downloader.get_manifest_context_for_date(current_month), {})
def test_report_name_provided(self):
    """Test initializer when report_name is provided."""
    # An explicit report name should win over auto-discovery from the bucket.
    report_downloader = AWSLocalReportDownloader(
        customer_name=self.fake_customer_name,
        auth_credential=self.fake_auth_credential,
        bucket=self.fake_bucket_name,
        report_name="awesome-report",
    )
    self.assertEqual(report_downloader.report_name, "awesome-report")
def test_report_name_provided(self):
    """Test initializer when report_name is provided."""
    # An explicit report name should win over auto-discovery from the bucket.
    report_downloader = AWSLocalReportDownloader(
        customer_name=self.fake_customer_name,
        credentials=self.credentials,
        data_source=self.data_source,
        report_name="awesome-report",
    )
    self.assertEqual(report_downloader.report_name, "awesome-report")
def test_report_name_provided(self):
    """Test initializer when report_name is provided."""
    # An explicit report name should win over auto-discovery from the bucket.
    report_downloader = AWSLocalReportDownloader(
        customer_name=self.fake_customer_name,
        auth_credential=self.fake_auth_credential,
        bucket=self.fake_bucket_name,
        report_name='awesome-report',
    )
    self.assertEqual(report_downloader.report_name, 'awesome-report')
def test_extract_names_no_prefix(self):
    """Test to extract the report and prefix names from a bucket with no prefix."""
    report_downloader = AWSLocalReportDownloader(
        customer_name=self.fake_customer_name,
        auth_credential=self.fake_auth_credential,
        bucket=self.fake_bucket_name,
    )
    # With no prefix directory the fixture's report name is discovered directly.
    self.assertEqual(report_downloader.report_name, self.fake_report_name)
    self.assertIsNone(report_downloader.report_prefix)
def test_extract_names_no_prefix(self):
    """Test to extract the report and prefix names from a bucket with no prefix."""
    report_downloader = AWSLocalReportDownloader(
        customer_name=self.fake_customer_name,
        credentials=self.credentials,
        data_source=self.data_source,
    )
    # With no prefix directory the fixture's report name is discovered directly.
    self.assertEqual(report_downloader.report_name, self.fake_report_name)
    self.assertIsNone(report_downloader.report_prefix)
def _set_downloader(self):
    """
    Create the report downloader object.

    Downloader is specific to the provider's cloud service.

    Args:
        None

    Returns:
        (Object) : Some object that is a child of CURAccountsInterface

    """
    # Keyword arguments shared by every downloader class.
    common_kwargs = {
        "customer_name": self.customer_name,
        "auth_credential": self.credential,
        "report_name": self.report_name,
        "provider_id": self.provider_id,
    }
    # Providers whose report source is passed as a ``bucket``.
    bucket_downloaders = {
        AMAZON_WEB_SERVICES: AWSReportDownloader,
        AWS_LOCAL_SERVICE_PROVIDER: AWSLocalReportDownloader,
        OPENSHIFT_CONTAINER_PLATFORM: OCPReportDownloader,
    }
    # Providers whose report source is passed as a ``billing_source``.
    billing_downloaders = {
        AZURE: AzureReportDownloader,
        AZURE_LOCAL_SERVICE_PROVIDER: AzureLocalReportDownloader,
        GCP: GCPReportDownloader,
    }
    if self.provider_type in bucket_downloaders:
        return bucket_downloaders[self.provider_type](bucket=self.cur_source, **common_kwargs)
    if self.provider_type in billing_downloaders:
        return billing_downloaders[self.provider_type](billing_source=self.cur_source, **common_kwargs)
    # Unknown provider type: no downloader can be built.
    return None
def test_extract_names_no_prefix(self):
    """Test to extract the report and prefix names from a bucket with no prefix."""
    report_downloader = AWSLocalReportDownloader(
        task=self.mock_task,
        customer_name=self.fake_customer_name,
        auth_credential=self.fake_auth_credential,
        bucket=self.fake_bucket_name,
    )
    # With no prefix directory the fixture's report name is discovered directly.
    self.assertEqual(report_downloader.report_name, self.fake_report_name)
    self.assertIsNone(report_downloader.report_prefix)
def test_extract_names_with_incomplete_path(self):
    """Test to extract the report and prefix from a path where a CUR hasn't been generated yet."""
    # An empty bucket directory has no report layout to discover.
    empty_bucket = tempfile.mkdtemp()
    report_downloader = AWSLocalReportDownloader(
        customer_name=self.fake_customer_name,
        auth_credential=self.fake_auth_credential,
        bucket=empty_bucket,
    )
    self.assertIsNone(report_downloader.report_name)
    self.assertIsNone(report_downloader.report_prefix)
    shutil.rmtree(empty_bucket)
def test_extract_names_with_prefix(self):
    """Test to extract the report and prefix names from a bucket with prefix."""
    bucket = tempfile.mkdtemp()
    expected_report = "report-name"
    expected_prefix = "prefix-name"
    # Build a prefix/report/date-range layout inside the bucket.
    report_path = f"{bucket}/{expected_prefix}/{expected_report}/20180801-20180901/"
    os.makedirs(report_path)
    report_downloader = AWSLocalReportDownloader(
        customer_name=self.fake_customer_name,
        auth_credential=self.fake_auth_credential,
        bucket=bucket,
    )
    self.assertEqual(report_downloader.report_name, expected_report)
    self.assertEqual(report_downloader.report_prefix, expected_prefix)
    shutil.rmtree(report_path)
def test_extract_names_with_prefix(self):
    """Test to extract the report and prefix names from a bucket with prefix."""
    bucket = tempfile.mkdtemp()
    expected_report = 'report-name'
    expected_prefix = 'prefix-name'
    # Build a prefix/report/date-range layout inside the bucket.
    report_path = '{}/{}/{}/20180801-20180901/'.format(bucket, expected_prefix, expected_report)
    os.makedirs(report_path)
    report_downloader = AWSLocalReportDownloader(
        customer_name=self.fake_customer_name,
        auth_credential=self.fake_auth_credential,
        bucket=bucket,
    )
    self.assertEqual(report_downloader.report_name, expected_report)
    self.assertEqual(report_downloader.report_prefix, expected_prefix)
    shutil.rmtree(report_path)
def setUp(self):
    """Set up each test."""
    # Unpack the canned local-bucket fixture into a scratch directory.
    self.fake_bucket_name = tempfile.mkdtemp()
    bucket_tar = TarFile.open('./tests/data/test_local_bucket.tar.gz')
    bucket_tar.extractall(path=self.fake_bucket_name)
    os.makedirs(DATA_DIR, exist_ok=True)
    # Positional args: customer, credential, bucket, provider type, provider id.
    self.report_downloader = ReportDownloader(
        self.fake_customer_name,
        self.fake_auth_credential,
        self.fake_bucket_name,
        'AWS-local',
        1,
    )
    self.aws_local_report_downloader = AWSLocalReportDownloader(
        customer_name=self.fake_customer_name,
        auth_credential=self.fake_auth_credential,
        bucket=self.fake_bucket_name,
        provider_id=1,
    )
def test_extract_names_with_bad_path(self):
    """Test to extract the report and prefix names from a bad path."""
    bucket = tempfile.mkdtemp()
    expected_report = "report-name"
    expected_prefix = "prefix-name"
    # The date segment is malformed, so discovery should find nothing.
    bad_path = f"{bucket}/{expected_prefix}/{expected_report}/20180801-aaaaaaa/"
    os.makedirs(bad_path)
    report_downloader = AWSLocalReportDownloader(
        task=self.mock_task,
        customer_name=self.fake_customer_name,
        auth_credential=self.fake_auth_credential,
        bucket=bucket,
    )
    self.assertIsNone(report_downloader.report_name)
    self.assertIsNone(report_downloader.report_prefix)
    shutil.rmtree(bad_path)
def test_extract_names_with_bad_path(self):
    """Test to extract the report and prefix names from a bad path."""
    bucket = tempfile.mkdtemp()
    expected_report = 'report-name'
    expected_prefix = 'prefix-name'
    # The date segment is malformed, so discovery should find nothing.
    bad_path = '{}/{}/{}/20180801-aaaaaaa/'.format(bucket, expected_prefix, expected_report)
    os.makedirs(bad_path)
    report_downloader = AWSLocalReportDownloader(
        task=self.mock_task,
        customer_name=self.fake_customer_name,
        auth_credential=self.fake_auth_credential,
        bucket=bucket,
    )
    self.assertIsNone(report_downloader.report_name)
    self.assertIsNone(report_downloader.report_prefix)
    shutil.rmtree(bad_path)
def test_extract_names_no_prefix(self, _):
    """Test to extract the report and prefix names from a bucket with no prefix."""
    # Stub all parquet-processor collaborators so name extraction runs
    # without touching the filesystem or S3; one flat `with` replaces
    # the deep nesting.
    with patch("masu.processor.parquet.parquet_report_processor.Path"), patch(
        "masu.processor.parquet.parquet_report_processor.pd"
    ), patch("masu.processor.parquet.parquet_report_processor.open"), patch(
        "masu.processor.parquet.parquet_report_processor.copy_data_to_s3_bucket"
    ), patch(
        "masu.processor.parquet.parquet_report_processor.ParquetReportProcessor.create_parquet_table"
    ):
        report_downloader = AWSLocalReportDownloader(
            customer_name=self.fake_customer_name,
            credentials=self.credentials,
            data_source=self.data_source,
        )
        self.assertEqual(report_downloader.report_name, self.fake_report_name)
        self.assertIsNone(report_downloader.report_prefix)
def _set_downloader(self):
    """
    Create the report downloader object.

    Downloader is specific to the provider's cloud service.

    Args:
        None

    Returns:
        (Object) : Some object that is a child of CURAccountsInterface

    """
    # Keyword arguments shared by every downloader class.
    common_kwargs = {
        "customer_name": self.customer_name,
        "auth_credential": self.credential,
        "report_name": self.report_name,
        "provider_uuid": self.provider_uuid,
        "request_id": self.request_id,
        "account": self.account,
    }
    # Providers whose report source is passed as a ``bucket``.
    bucket_downloaders = {
        Provider.PROVIDER_AWS: AWSReportDownloader,
        Provider.PROVIDER_AWS_LOCAL: AWSLocalReportDownloader,
        Provider.PROVIDER_OCP: OCPReportDownloader,
    }
    # Providers whose report source is passed as a ``billing_source``.
    billing_downloaders = {
        Provider.PROVIDER_AZURE: AzureReportDownloader,
        Provider.PROVIDER_AZURE_LOCAL: AzureLocalReportDownloader,
        Provider.PROVIDER_GCP: GCPReportDownloader,
    }
    if self.provider_type in bucket_downloaders:
        return bucket_downloaders[self.provider_type](bucket=self.cur_source, **common_kwargs)
    if self.provider_type in billing_downloaders:
        return billing_downloaders[self.provider_type](billing_source=self.cur_source, **common_kwargs)
    # Unknown provider type: no downloader can be built.
    return None
class AWSLocalReportDownloaderTest(MasuTestCase):
    """Test Cases for the Local Report Downloader."""

    fake = Faker()

    @classmethod
    def setUpClass(cls):
        """Set up class variables."""
        super().setUpClass()
        cls.fake_customer_name = CUSTOMER_NAME
        cls.fake_report_name = "koku-local"
        cls.fake_bucket_prefix = PREFIX
        cls.selected_region = REGION
        cls.fake_auth_credential = fake_arn(service="iam", generate_account_id=True)
        cls.manifest_accessor = ReportManifestDBAccessor()

    def setUp(self):
        """Set up each test."""
        super().setUp()
        # Unpack the canned local-bucket fixture into a scratch directory.
        self.fake_bucket_name = tempfile.mkdtemp()
        bucket_tar = TarFile.open("./koku/masu/test/data/test_local_bucket.tar.gz")
        bucket_tar.extractall(path=self.fake_bucket_name)
        os.makedirs(DATA_DIR, exist_ok=True)
        # Minimal Celery-task stand-in; only the request id is ever read.
        self.mock_task = Mock(request=Mock(id=str(self.fake.uuid4()), return_value={}))
        self.report_downloader = ReportDownloader(
            task=self.mock_task,
            customer_name=self.fake_customer_name,
            access_credential=self.fake_auth_credential,
            report_source=self.fake_bucket_name,
            provider_type=Provider.PROVIDER_AWS_LOCAL,
            provider_uuid=self.aws_provider_uuid,
        )
        self.aws_local_report_downloader = AWSLocalReportDownloader(
            task=self.mock_task,
            customer_name=self.fake_customer_name,
            auth_credential=self.fake_auth_credential,
            bucket=self.fake_bucket_name,
            provider_uuid=self.aws_provider_uuid,
        )

    def tearDown(self):
        """Remove test generated data."""
        shutil.rmtree(DATA_DIR, ignore_errors=True)
        shutil.rmtree(self.fake_bucket_name)

    def test_download_bucket(self):
        """Test to verify that basic report downloading works."""
        test_report_date = datetime(year=2018, month=8, day=7)
        # Pin "today" so the downloader resolves the fixture's billing month.
        with patch.object(DateAccessor, "today", return_value=test_report_date):
            self.report_downloader.download_report(test_report_date)
        expected_path = "{}/{}/{}".format(DATA_DIR, self.fake_customer_name, "aws-local")
        self.assertTrue(os.path.isdir(expected_path))

    def test_report_name_provided(self):
        """Test initializer when report_name is provided."""
        # An explicit report name should win over auto-discovery.
        report_downloader = AWSLocalReportDownloader(
            task=self.mock_task,
            customer_name=self.fake_customer_name,
            auth_credential=self.fake_auth_credential,
            bucket=self.fake_bucket_name,
            report_name="awesome-report",
        )
        self.assertEqual(report_downloader.report_name, "awesome-report")

    def test_extract_names_no_prefix(self):
        """Test to extract the report and prefix names from a bucket with no prefix."""
        report_downloader = AWSLocalReportDownloader(
            task=self.mock_task,
            customer_name=self.fake_customer_name,
            auth_credential=self.fake_auth_credential,
            bucket=self.fake_bucket_name,
        )
        self.assertEqual(report_downloader.report_name, self.fake_report_name)
        self.assertIsNone(report_downloader.report_prefix)

    def test_download_bucket_with_prefix(self):
        """Test to verify that basic report downloading works."""
        prefixed_bucket = tempfile.mkdtemp()
        prefix_tar = TarFile.open("./koku/masu/test/data/test_local_bucket_prefix.tar.gz")
        prefix_tar.extractall(prefixed_bucket)
        test_report_date = datetime(year=2018, month=8, day=7)
        with patch.object(DateAccessor, "today", return_value=test_report_date):
            report_downloader = ReportDownloader(
                self.mock_task,
                self.fake_customer_name,
                self.fake_auth_credential,
                prefixed_bucket,
                Provider.PROVIDER_AWS_LOCAL,
                self.aws_provider_uuid,
            )
            # Names from test report .gz file
            report_downloader.download_report(test_report_date)
        expected_path = "{}/{}/{}".format(DATA_DIR, self.fake_customer_name, "aws-local")
        self.assertTrue(os.path.isdir(expected_path))
        shutil.rmtree(prefixed_bucket)

    def test_extract_names_with_prefix(self):
        """Test to extract the report and prefix names from a bucket with prefix."""
        bucket = tempfile.mkdtemp()
        expected_report = "report-name"
        expected_prefix = "prefix-name"
        report_path = f"{bucket}/{expected_prefix}/{expected_report}/20180801-20180901/"
        os.makedirs(report_path)
        report_downloader = AWSLocalReportDownloader(
            task=self.mock_task,
            customer_name=self.fake_customer_name,
            auth_credential=self.fake_auth_credential,
            bucket=bucket,
        )
        self.assertEqual(report_downloader.report_name, expected_report)
        self.assertEqual(report_downloader.report_prefix, expected_prefix)
        shutil.rmtree(report_path)

    def test_extract_names_with_bad_path(self):
        """Test to extract the report and prefix names from a bad path."""
        bucket = tempfile.mkdtemp()
        expected_report = "report-name"
        expected_prefix = "prefix-name"
        # The date segment is malformed, so discovery should find nothing.
        bad_path = f"{bucket}/{expected_prefix}/{expected_report}/20180801-aaaaaaa/"
        os.makedirs(bad_path)
        report_downloader = AWSLocalReportDownloader(
            task=self.mock_task,
            customer_name=self.fake_customer_name,
            auth_credential=self.fake_auth_credential,
            bucket=bucket,
        )
        self.assertIsNone(report_downloader.report_name)
        self.assertIsNone(report_downloader.report_prefix)
        shutil.rmtree(bad_path)

    def test_extract_names_with_incomplete_path(self):
        """Test to extract the report and prefix from a path where a CUR hasn't been generated yet."""
        empty_bucket = tempfile.mkdtemp()
        report_downloader = AWSLocalReportDownloader(
            task=self.mock_task,
            customer_name=self.fake_customer_name,
            auth_credential=self.fake_auth_credential,
            bucket=empty_bucket,
        )
        self.assertIsNone(report_downloader.report_name)
        self.assertIsNone(report_downloader.report_prefix)
        shutil.rmtree(empty_bucket)

    def test_download_missing_month(self):
        """Test to verify that downloading a non-existant month throws proper exception."""
        prefixed_bucket = tempfile.mkdtemp()
        prefix_tar = TarFile.open("./koku/masu/test/data/test_local_bucket_prefix.tar.gz")
        prefix_tar.extractall(prefixed_bucket)
        # July has no report in the fixture, so nothing should be written.
        test_report_date = datetime(year=2018, month=7, day=7)
        with patch.object(DateAccessor, "today", return_value=test_report_date):
            report_downloader = ReportDownloader(
                self.mock_task,
                self.fake_customer_name,
                self.fake_auth_credential,
                prefixed_bucket,
                Provider.PROVIDER_AWS_LOCAL,
                1,
            )
            # Names from test report .gz file
            report_downloader.download_report(test_report_date)
        expected_path = "{}/{}/{}".format(DATA_DIR, self.fake_customer_name, "aws-local")
        self.assertFalse(os.path.isdir(expected_path))

    def test_delete_manifest_file_warning(self):
        """Test that an INFO is logged when removing a manifest file that does not exist."""
        with self.assertLogs(
            logger="masu.external.downloader.aws_local.aws_local_report_downloader", level="INFO"
        ) as logs:
            # Disable log suppression
            logging.disable(logging.NOTSET)
            self.aws_local_report_downloader._remove_manifest_file("None")
            self.assertTrue(
                logs.output[0].startswith("INFO:"),
                msg="The log is expected to start with 'INFO:' but instead was: " + logs.output[0],
            )
            self.assertTrue(
                "Could not delete manifest file at" in logs.output[0],
                msg="""The log message is expected to contain 'Could not delete manifest file at' but instead was: """
                + logs.output[0],
            )
            # Re-enable log suppression
            logging.disable(logging.CRITICAL)
class AWSLocalReportDownloaderTest(MasuTestCase):
    """Test Cases for the Local Report Downloader."""

    fake = Faker()

    @classmethod
    def setUpClass(cls):
        """Set up class variables."""
        super().setUpClass()
        cls.fake_customer_name = CUSTOMER_NAME
        cls.fake_report_name = "koku-local"
        cls.fake_bucket_prefix = PREFIX
        cls.selected_region = REGION
        cls.fake_auth_credential = fake_arn(service="iam", generate_account_id=True)
        cls.manifest_accessor = ReportManifestDBAccessor()

    def setUp(self):
        """Set up each test."""
        super().setUp()
        # Unpack the canned local-bucket fixture into a scratch directory.
        self.fake_bucket_name = tempfile.mkdtemp()
        mytar = TarFile.open("./koku/masu/test/data/test_local_bucket.tar.gz")
        mytar.extractall(path=self.fake_bucket_name)
        os.makedirs(DATA_DIR, exist_ok=True)
        self.credentials = {"role_arn": self.fake_auth_credential}
        self.data_source = {"bucket": self.fake_bucket_name}
        self.report_downloader = ReportDownloader(
            customer_name=self.fake_customer_name,
            credentials=self.credentials,
            data_source=self.data_source,
            provider_type=Provider.PROVIDER_AWS_LOCAL,
            provider_uuid=self.aws_provider_uuid,
        )
        self.aws_local_report_downloader = AWSLocalReportDownloader(
            customer_name=self.fake_customer_name,
            credentials=self.credentials,
            data_source=self.data_source,
            provider_uuid=self.aws_provider_uuid,
        )

    def tearDown(self):
        """Remove test generated data."""
        shutil.rmtree(DATA_DIR, ignore_errors=True)
        shutil.rmtree(self.fake_bucket_name)

    @patch("masu.processor.parquet.parquet_report_processor.settings", ENABLE_S3_ARCHIVING=True)
    def test_download_bucket(self, _):
        """Test to verify that basic report downloading works."""
        # Stub all parquet-processor collaborators so the download path runs
        # without touching S3 or pandas; one flat `with` replaces the nesting.
        with patch("masu.processor.parquet.parquet_report_processor.Path"), patch(
            "masu.processor.parquet.parquet_report_processor.pd"
        ), patch("masu.processor.parquet.parquet_report_processor.open"), patch(
            "masu.processor.parquet.parquet_report_processor.copy_data_to_s3_bucket"
        ), patch(
            "masu.processor.parquet.parquet_report_processor.ParquetReportProcessor.create_parquet_table"
        ):
            test_report_date = datetime(year=2018, month=8, day=7)
            with patch.object(DateAccessor, "today", return_value=test_report_date):
                report_context = {
                    "date": test_report_date.date(),
                    "manifest_id": 1,
                    # BUGFIX: key was misspelled "comporession", so the
                    # compression value was silently ignored downstream.
                    "compression": "GZIP",
                    "current_file": "./koku/masu/test/data/test_local_bucket.tar.gz",
                }
                self.report_downloader.download_report(report_context)
        expected_path = "{}/{}/{}".format(DATA_DIR, self.fake_customer_name, "aws-local")
        self.assertTrue(os.path.isdir(expected_path))

    def test_report_name_provided(self):
        """Test initializer when report_name is provided."""
        report_downloader = AWSLocalReportDownloader(
            customer_name=self.fake_customer_name,
            credentials=self.credentials,
            data_source=self.data_source,
            report_name="awesome-report",
        )
        self.assertEqual(report_downloader.report_name, "awesome-report")

    @patch("masu.processor.parquet.parquet_report_processor.settings", ENABLE_S3_ARCHIVING=True)
    def test_extract_names_no_prefix(self, _):
        """Test to extract the report and prefix names from a bucket with no prefix."""
        with patch("masu.processor.parquet.parquet_report_processor.Path"), patch(
            "masu.processor.parquet.parquet_report_processor.pd"
        ), patch("masu.processor.parquet.parquet_report_processor.open"), patch(
            "masu.processor.parquet.parquet_report_processor.copy_data_to_s3_bucket"
        ), patch(
            "masu.processor.parquet.parquet_report_processor.ParquetReportProcessor.create_parquet_table"
        ):
            report_downloader = AWSLocalReportDownloader(
                customer_name=self.fake_customer_name,
                credentials=self.credentials,
                data_source=self.data_source,
            )
            self.assertEqual(report_downloader.report_name, self.fake_report_name)
            self.assertIsNone(report_downloader.report_prefix)

    def test_download_bucket_with_prefix(self):
        """Test to verify that basic report downloading works."""
        fake_bucket = tempfile.mkdtemp()
        mytar = TarFile.open("./koku/masu/test/data/test_local_bucket_prefix.tar.gz")
        mytar.extractall(fake_bucket)
        test_report_date = datetime(year=2018, month=8, day=7)
        fake_data_source = {"bucket": fake_bucket}
        with patch.object(DateAccessor, "today", return_value=test_report_date):
            report_downloader = ReportDownloader(
                self.fake_customer_name,
                self.credentials,
                fake_data_source,
                Provider.PROVIDER_AWS_LOCAL,
                self.aws_provider_uuid,
            )
            # Names from test report .gz file
            report_context = {
                "date": test_report_date.date(),
                "manifest_id": 1,
                # BUGFIX: key was misspelled "comporession" (see above).
                "compression": "GZIP",
                "current_file": "./koku/masu/test/data/test_local_bucket.tar.gz",
            }
            report_downloader.download_report(report_context)
        expected_path = "{}/{}/{}".format(DATA_DIR, self.fake_customer_name, "aws-local")
        self.assertTrue(os.path.isdir(expected_path))
        shutil.rmtree(fake_bucket)

    def test_extract_names_with_prefix(self):
        """Test to extract the report and prefix names from a bucket with prefix."""
        bucket = tempfile.mkdtemp()
        report_name = "report-name"
        prefix_name = "prefix-name"
        full_path = f"{bucket}/{prefix_name}/{report_name}/20180801-20180901/"
        os.makedirs(full_path)
        report_downloader = AWSLocalReportDownloader(
            customer_name=self.fake_customer_name,
            credentials=self.credentials,
            data_source={"bucket": bucket},
        )
        self.assertEqual(report_downloader.report_name, report_name)
        self.assertEqual(report_downloader.report_prefix, prefix_name)
        shutil.rmtree(full_path)

    def test_extract_names_with_bad_path(self):
        """Test to extract the report and prefix names from a bad path."""
        bucket = tempfile.mkdtemp()
        report_name = "report-name"
        prefix_name = "prefix-name"
        # Malformed date segment: discovery should find nothing.
        full_path = f"{bucket}/{prefix_name}/{report_name}/20180801-aaaaaaa/"
        os.makedirs(full_path)
        report_downloader = AWSLocalReportDownloader(
            customer_name=self.fake_customer_name,
            credentials=self.credentials,
            data_source={"bucket": bucket},
        )
        self.assertIsNone(report_downloader.report_name)
        self.assertIsNone(report_downloader.report_prefix)
        shutil.rmtree(full_path)

    def test_extract_names_with_incomplete_path(self):
        """Test to extract the report and prefix from a path where a CUR hasn't been generated yet."""
        bucket = tempfile.mkdtemp()
        report_downloader = AWSLocalReportDownloader(
            customer_name=self.fake_customer_name,
            credentials=self.credentials,
            data_source={"bucket": bucket},
        )
        self.assertIsNone(report_downloader.report_name)
        self.assertIsNone(report_downloader.report_prefix)
        shutil.rmtree(bucket)

    def test_delete_manifest_file_warning(self):
        """Test that an INFO is logged when removing a manifest file that does not exist."""
        with self.assertLogs(
            logger="masu.external.downloader.aws_local.aws_local_report_downloader", level="INFO"
        ) as captured_logs:
            # Disable log suppression
            logging.disable(logging.NOTSET)
            self.aws_local_report_downloader._remove_manifest_file("None")
            self.assertTrue(
                captured_logs.output[0].startswith("INFO:"),
                msg="The log is expected to start with 'INFO:' but instead was: " + captured_logs.output[0],
            )
            self.assertTrue(
                "Could not delete manifest file at" in captured_logs.output[0],
                msg="""The log message is expected to contain 'Could not delete manifest file at' but instead was: """
                + captured_logs.output[0],
            )
            # Re-enable log suppression
            logging.disable(logging.CRITICAL)

    @patch(
        "masu.external.downloader.aws_local.aws_local_report_downloader.AWSLocalReportDownloader._remove_manifest_file"
    )
    @patch(
        "masu.external.downloader.aws_local.aws_local_report_downloader.AWSLocalReportDownloader._get_manifest"
    )
    def test_get_manifest_context_for_date(self, mock_manifest, mock_delete):
        """Test that the manifest is read."""
        current_month = DateAccessor().today().replace(day=1, second=1, microsecond=1)
        downloader = AWSLocalReportDownloader(
            self.fake_customer_name,
            self.credentials,
            self.data_source,
            provider_uuid=self.aws_provider_uuid,
        )
        start_str = current_month.strftime(downloader.manifest_date_format)
        assembly_id = "1234"
        compression = "GZIP"
        report_keys = ["file1", "file2"]
        mock_manifest.return_value = (
            "",
            {
                "assemblyId": assembly_id,
                "Compression": compression,
                "reportKeys": report_keys,
                "billingPeriod": {"start": start_str},
            },
            DateAccessor().today(),
        )
        result = downloader.get_manifest_context_for_date(current_month)
        self.assertEqual(result.get("assembly_id"), assembly_id)
        self.assertEqual(result.get("compression"), compression)
        self.assertIsNotNone(result.get("files"))

    @patch(
        "masu.external.downloader.aws_local.aws_local_report_downloader.AWSLocalReportDownloader._remove_manifest_file"
    )
    @patch(
        "masu.external.downloader.aws_local.aws_local_report_downloader.AWSLocalReportDownloader._get_manifest"
    )
    def test_get_manifest_context_for_date_no_manifest(self, mock_manifest, mock_delete):
        """Test that the manifest is read."""
        current_month = DateAccessor().today().replace(day=1, second=1, microsecond=1)
        downloader = AWSLocalReportDownloader(
            self.fake_customer_name,
            self.credentials,
            self.data_source,
            provider_uuid=self.aws_provider_uuid,
        )
        # An empty report-key list means there is no usable manifest.
        mock_manifest.return_value = ("", {"reportKeys": []}, DateAccessor().today())
        result = downloader.get_manifest_context_for_date(current_month)
        self.assertEqual(result, {})