def load_azure_data(self, customer, static_data_file, credentials=None, data_source=None):
    """Generate Azure report data with nise and ingest it for the given customer.

    Fake credentials and billing data-source values are supplied when the
    caller does not provide them.
    """
    source_type = Provider.PROVIDER_AZURE_LOCAL
    # nise expects the upstream provider name, without the "-local" suffix.
    nise_source_type = source_type.replace("-local", "")
    azure_report_name = "Test"
    if credentials is None:
        credentials = {
            "subscription_id": "11111111-1111-1111-1111-11111111",
            "tenant_id": "22222222-2222-2222-2222-22222222",
            "client_id": "33333333-3333-3333-3333-33333333",
            "client_secret": "MyPassW0rd!",
        }
    if data_source is None:
        data_source = {"resource_group": "resourcegroup1", "storage_account": "storageaccount1"}
    # Keep automatic ingestion off while the provider row is created.
    with patch.object(settings, "AUTO_DATA_INGEST", False):
        provider = baker.make(
            "Provider",
            type=source_type,
            authentication__credentials=credentials,
            customer=customer,
            billing_source__data_source=data_source,
        )
    template, static_data_path = self.prepare_template(source_type, static_data_file)
    nise_options = {
        "static_report_file": static_data_path,
        "azure_report_name": azure_report_name,
        "azure_container_name": self.nise_data_path,
    }
    base_path = f"{self.nise_data_path}/{azure_report_name}"
    for start_date, end_date, bill_date in self.dates:
        manifest = baker.make(
            "CostUsageReportManifest",
            _fill_optional=True,
            provider=provider,
            billing_period_start_datetime=bill_date,
        )
        # Render the static report definition for this billing window.
        with open(static_data_path, "w") as rendered:
            rendered.write(template.render(start_date=start_date, end_date=end_date))
        run(nise_source_type.lower(), nise_options)
        report_path = self.build_report_path(source_type, bill_date, base_path)
        for report in os.scandir(report_path):
            # Skip sub-directories and the manifest; process report files only.
            if os.path.isdir(report) or "manifest" in report.name.lower():
                continue
            self.process_report(report, "PLAIN", source_type, provider, manifest)
        with patch("masu.processor.tasks.chain"), patch.object(settings, "AUTO_DATA_INGEST", False):
            update_summary_tables(
                self.schema, source_type, provider.uuid, start_date, end_date, manifest_id=manifest.id
            )
    update_cost_model_costs(self.schema, provider.uuid, self.dh.last_month_start, self.dh.today)
    refresh_materialized_views(self.schema, source_type)
    shutil.rmtree(base_path, ignore_errors=True)
def load_openshift_data(self, customer, static_data_file, cluster_id):
    """Generate OpenShift report data with nise and ingest it for the customer."""
    source_type = Provider.PROVIDER_OCP
    # Keep automatic ingestion off while the provider row is created.
    with override_settings(AUTO_DATA_INGEST=False):
        provider = baker.make(
            "Provider",
            type=source_type,
            authentication__provider_resource_name=cluster_id,
            billing_source__bucket="",
            customer=customer,
        )
    template, static_data_path = self.prepare_template(source_type, static_data_file)
    nise_options = {
        "static_report_file": static_data_path,
        "insights_upload": self.nise_data_path,
        "ocp_cluster_id": cluster_id,
    }
    base_path = f"{self.nise_data_path}/{cluster_id}"
    for start_date, end_date, bill_date in self.dates:
        manifest = baker.make(
            "CostUsageReportManifest",
            _fill_optional=True,
            provider=provider,
            billing_period_start_datetime=bill_date,
            num_processed_files=0,
            num_total_files=3,
        )
        # Render the static report definition for this billing window.
        with open(static_data_path, "w") as rendered:
            rendered.write(template.render(start_date=start_date, end_date=end_date))
        run(source_type.lower(), nise_options)
        report_path = self.build_report_path(source_type, bill_date, base_path)
        # Flatten the dated report sub-directory into the base path.
        for entry in os.scandir(report_path):
            shutil.move(entry.path, f"{base_path}/{entry.name}")
        # Materialize the listing first so processing cannot race the scan.
        for report in [entry.path for entry in os.scandir(base_path)]:
            if os.path.isdir(report) or "manifest" in report.lower():
                continue
            self.process_report(report, "PLAIN", source_type, provider, manifest)
        with patch("masu.processor.tasks.chain"):
            update_summary_tables(
                self.schema, source_type, provider.uuid, start_date, end_date, manifest_id=manifest.id
            )
    update_cost_model_costs(self.schema, provider.uuid, self.dh.last_month_start, self.dh.today)
    refresh_materialized_views(self.schema, source_type)
    shutil.rmtree(report_path, ignore_errors=True)
def load_aws_data(self, customer, static_data_file, account_id=None, provider_resource_name=None):
    """Load AWS data into the database.

    Generates AWS cost and usage reports with nise for each date range in
    ``self.dates``, processes the gzipped CSVs one directory level deep, and
    runs the summary/cost-model/materialized-view updates.

    Args:
        customer: customer model instance the provider is created for.
        static_data_file: nise static report template to render.
        account_id: usage account id rendered into the report (fake default).
        provider_resource_name: role ARN for authentication (fake default).
    """
    provider_type = Provider.PROVIDER_AWS_LOCAL
    if account_id is None:
        # NOTE(review): this default has 13 digits while real AWS account ids
        # (and the ARN below) have 12 -- kept as-is for backward compatibility.
        account_id = "9999999999999"
    if provider_resource_name is None:
        provider_resource_name = "arn:aws:iam::999999999999:role/CostManagement"
    # nise expects the upstream provider name, without the "-local" suffix.
    nise_provider_type = provider_type.replace("-local", "")
    report_name = "Test"
    # Keep automatic ingestion off while the provider row is created.
    with patch.object(settings, "AUTO_DATA_INGEST", False):
        provider = baker.make(
            "Provider",
            type=provider_type,
            authentication__provider_resource_name=provider_resource_name,
            customer=customer,
            billing_source__bucket="test-bucket",
        )
    template, static_data_path = self.prepare_template(provider_type, static_data_file)
    options = {
        "static_report_file": static_data_path,
        "aws_report_name": report_name,
        "aws_bucket_name": self.nise_data_path,
    }
    base_path = f"{self.nise_data_path}/{report_name}"
    with schema_context(self.schema):
        baker.make("AWSAccountAlias", account_id=account_id, account_alias="Test Account")
    for start_date, end_date, bill_date in self.dates:
        manifest = baker.make(
            "CostUsageReportManifest",
            _fill_optional=True,
            provider=provider,
            billing_period_start_datetime=bill_date,
        )
        # Render the static report definition for this billing window.
        with open(static_data_path, "w") as f:
            f.write(template.render(start_date=start_date, end_date=end_date, account_id=account_id))
        run(nise_provider_type.lower(), options)
        report_path = self.build_report_path(provider_type, bill_date, base_path)
        # Bug fix: the inner loop previously rebound the outer loop variable
        # ``report``, shadowing the DirEntry; use a distinct name instead.
        for report_dir in os.scandir(report_path):
            # nise nests AWS reports one directory deep; only descend into
            # sub-directories.
            if not os.path.isdir(report_dir):
                continue
            for report in [f.path for f in os.scandir(f"{report_path}/{report_dir.name}")]:
                if os.path.isdir(report):
                    continue
                elif "manifest" in report.lower():
                    continue
                self.process_report(report, "GZIP", provider_type, provider, manifest)
        with patch("masu.processor.tasks.chain"), patch.object(settings, "AUTO_DATA_INGEST", False):
            update_summary_tables(
                self.schema, provider_type, provider.uuid, start_date, end_date, manifest_id=manifest.id
            )
    update_cost_model_costs(self.schema, provider.uuid, self.dh.last_month_start, self.dh.today)
    refresh_materialized_views(self.schema, provider_type)
    shutil.rmtree(base_path, ignore_errors=True)
def test_update_summary_tables_ocp(self, mock_cost_model, mock_charge_info, mock_view, mock_chain):
    """Test that the summary table task runs."""
    infra_rates = {
        "cpu_core_usage_per_hour": 1.5,
        "memory_gb_usage_per_hour": 2.5,
        "storage_gb_usage_per_month": 0.5,
    }
    # Configure the cost model mock returned by the context manager.
    cost_model = mock_cost_model.return_value.__enter__.return_value
    cost_model.infrastructure_rates = infra_rates
    cost_model.supplementary_rates = {}
    cost_model.markup = {}

    ocp_provider_type = Provider.PROVIDER_OCP
    ocp_uuid = self.ocp_test_provider_uuid
    daily_table = OCP_REPORT_TABLE_MAP["line_item_daily"]
    start_date = DateHelper().last_month_start
    end_date = DateHelper().last_month_end

    # Clear existing daily line items so growth can be asserted afterwards.
    with schema_context(self.schema):
        daily_qs = self.ocp_accessor._get_db_obj_query(daily_table)
        daily_qs.delete()
        count_before = daily_qs.count()
    self.assertEqual(count_before, 0)

    update_summary_tables(self.schema, ocp_provider_type, ocp_uuid, start_date, end_date)
    with schema_context(self.schema):
        self.assertNotEqual(daily_qs.count(), count_before)

    update_cost_model_costs(
        schema_name=self.schema, provider_uuid=ocp_uuid, start_date=start_date, end_date=end_date
    )

    summary_table = OCP_REPORT_TABLE_MAP["line_item_daily_summary"]
    with ProviderDBAccessor(ocp_uuid) as provider_accessor:
        provider_obj = provider_accessor.get_provider()
    usage_periods = self.ocp_accessor.get_usage_period_query_by_provider(provider_obj.uuid)
    with schema_context(self.schema):
        cluster_id = usage_periods.first().cluster_id
        # Pod summary rows should carry the mocked infrastructure rates.
        pod_items = self.ocp_accessor._get_db_obj_query(summary_table).filter(
            usage_start__gte=start_date, usage_start__lte=end_date, cluster_id=cluster_id, data_source="Pod"
        )
        for entry in pod_items:
            self.assertNotEqual(entry.infrastructure_usage_cost.get("cpu"), 0)
            self.assertNotEqual(entry.infrastructure_usage_cost.get("memory"), 0)

        storage_daily_table = OCP_REPORT_TABLE_MAP["storage_line_item_daily"]
        for entry in self.ocp_accessor._get_db_obj_query(storage_daily_table).filter(cluster_id=cluster_id):
            self.assertIsNotNone(entry.volume_request_storage_byte_seconds)
            self.assertIsNotNone(entry.persistentvolumeclaim_usage_byte_seconds)

        storage_summary_table = OCP_REPORT_TABLE_MAP["line_item_daily_summary"]
        storage_items = self.ocp_accessor._get_db_obj_query(storage_summary_table).filter(
            cluster_id=cluster_id, data_source="Storage"
        )
        for entry in storage_items:
            self.assertIsNotNone(entry.volume_request_storage_gigabyte_months)
            self.assertIsNotNone(entry.persistentvolumeclaim_usage_gigabyte_months)
    mock_chain.return_value.apply_async.assert_called()
def load_openshift_data(self, cluster_id, on_cloud=False):
    """Load OpenShift data for tests."""
    provider_type = Provider.PROVIDER_OCP
    provider = self.create_provider(provider_type, {"cluster_id": cluster_id}, {}, cluster_id)
    # On-cloud clusters get their cost from the cloud bill, not a cost model.
    if not on_cloud:
        self.create_cost_model(provider)
    report_periods = []
    for start_date, end_date, bill_date in self.dates:
        LOG.info(f"load ocp data for start: {start_date}, end: {end_date}")
        self.create_manifest(provider, bill_date)
        report_period = self.create_bill(
            provider_type, provider, bill_date, cluster_id=cluster_id, cluster_alias=cluster_id
        )
        report_periods.append(report_period)
        with schema_context(self.schema):
            for offset in range((end_date - start_date).days + 1):
                # Randomized infrastructure costs only apply on-cloud.
                infra_raw_cost = random.random() * 100 if on_cloud else None
                project_infra_raw_cost = infra_raw_cost * random.random() if on_cloud else None
                usage_day = start_date + timedelta(offset)
                shared_kwargs = {
                    "report_period": report_period,
                    "cluster_id": cluster_id,
                    "cluster_alias": cluster_id,
                    "usage_start": usage_day,
                    "usage_end": usage_day,
                    "source_uuid": provider.uuid,
                    "infrastructure_raw_cost": infra_raw_cost,
                    "infrastructure_project_raw_cost": project_infra_raw_cost,
                }
                # Storage data_source
                baker.make_recipe("api.report.test.util.ocp_usage_storage", **shared_kwargs)
                # Pod data_source
                baker.make_recipe("api.report.test.util.ocp_usage_pod", **shared_kwargs)
    report_period_ids = [period.id for period in report_periods]
    with OCPReportDBAccessor(self.schema) as accessor:
        accessor.populate_pod_label_summary_table(report_period_ids, self.first_start_date, self.last_end_date)
        accessor.populate_volume_label_summary_table(report_period_ids, self.first_start_date, self.last_end_date)
        accessor.update_line_item_daily_summary_with_enabled_tags(
            self.first_start_date, self.last_end_date, report_period_ids
        )
        accessor.populate_ui_summary_tables(self.first_start_date, self.last_end_date, provider.uuid)
    update_cost_model_costs(
        self.schema, provider.uuid, self.first_start_date, self.last_end_date, tracing_id="12345", synchronous=True
    )
    return provider, report_periods