Example #1
File: utils.py  Project: tohjustin/koku
    def load_gcp_data(self, customer):
        """Load GCP data into the database."""
        provider_type = Provider.PROVIDER_GCP_LOCAL
        credentials = {"project_id": "test_project_id"}
        data_source = {"table_id": "test_table_id", "dataset": "test_dataset"}
        with patch.object(settings, "AUTO_DATA_INGEST", False):
            provider = baker.make(
                "Provider",
                type=provider_type,
                authentication__credentials=credentials,
                billing_source__data_source=data_source,
                customer=customer,
            )

        for start_date, end_date, bill_date in self.dates:
            manifest = baker.make(
                "CostUsageReportManifest",
                _fill_optional=True,
                provider=provider,
                billing_period_start_datetime=bill_date,
            )
            with patch("masu.processor.tasks.chain"), patch.object(settings, "AUTO_DATA_INGEST", False):
                update_summary_tables(
                    self.schema, provider_type, provider.uuid, start_date, end_date, manifest_id=manifest.id
                )
        update_cost_model_costs.s(
            self.schema, provider.uuid, self.dh.last_month_start, self.dh.today, synchronous=True
        ).apply()
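
Every example in this listing ultimately calls update_summary_tables from masu.processor.tasks with the same positional arguments: tenant schema, provider type, provider UUID, and a start/end date, optionally followed by manifest_id. Below is a minimal sketch of that calling convention in isolation; it assumes a provider and manifest already exist, and the schema name, UUID, dates, and manifest id are placeholders rather than fixtures taken from koku.

from datetime import date
from unittest.mock import patch

from masu.processor.tasks import update_summary_tables

# Placeholder inputs -- in the examples above these come from test fixtures
# (self.schema, provider.uuid, self.dates, manifest.id).
schema = "acct10001"
provider_type = "GCP-local"  # e.g. Provider.PROVIDER_GCP_LOCAL; the "-local" variants appear throughout this listing
provider_uuid = "00000000-0000-0000-0000-000000000000"
manifest_id = 1

# Patching masu.processor.tasks.chain, as the loaders above do, prevents any
# follow-on Celery tasks from being dispatched when the task is called directly.
with patch("masu.processor.tasks.chain"):
    update_summary_tables(
        schema,
        provider_type,
        provider_uuid,
        date(2021, 1, 1),   # start_date
        date(2021, 1, 31),  # end_date
        manifest_id=manifest_id,
    )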
Example #2
File: utils.py  Project: tohjustin/koku
    def load_azure_data(self, customer, static_data_file, credentials=None, data_source=None):
        """Load Azure data into the database."""
        provider_type = Provider.PROVIDER_AZURE_LOCAL
        nise_provider_type = provider_type.replace("-local", "")
        report_name = "Test"

        if credentials is None:
            credentials = {
                "subscription_id": "11111111-1111-1111-1111-11111111",
                "tenant_id": "22222222-2222-2222-2222-22222222",
                "client_id": "33333333-3333-3333-3333-33333333",
                "client_secret": "MyPassW0rd!",
            }
        if data_source is None:
            data_source = {"resource_group": "resourcegroup1", "storage_account": "storageaccount1"}

        with patch.object(settings, "AUTO_DATA_INGEST", False):
            provider = baker.make(
                "Provider",
                type=provider_type,
                authentication__credentials=credentials,
                billing_source__data_source=data_source,
                customer=customer,
            )
        template, static_data_path = self.prepare_template(provider_type, static_data_file)
        options = {
            "static_report_file": static_data_path,
            "azure_report_name": report_name,
            "azure_container_name": self.nise_data_path,
        }
        base_path = f"{self.nise_data_path}/{report_name}"

        for start_date, end_date, bill_date in self.dates:
            manifest = baker.make(
                "CostUsageReportManifest",
                _fill_optional=True,
                provider=provider,
                billing_period_start_datetime=bill_date,
            )
            with open(static_data_path, "w") as f:
                f.write(template.render(start_date=start_date, end_date=end_date))

            run(nise_provider_type.lower(), options)

            report_path = self.build_report_path(provider_type, bill_date, base_path)
            for report in os.scandir(report_path):
                if os.path.isdir(report):
                    continue
                elif "manifest" in report.name.lower():
                    continue
                self.process_report(report, "PLAIN", provider_type, provider, manifest)
            with patch("masu.processor.tasks.chain"), patch.object(settings, "AUTO_DATA_INGEST", False):
                update_summary_tables(
                    self.schema, provider_type, provider.uuid, start_date, end_date, manifest_id=manifest.id
                )
        update_cost_model_costs.s(
            self.schema, provider.uuid, self.dh.last_month_start, self.dh.today, synchronous=True
        ).apply()
        refresh_materialized_views.s(self.schema, provider_type, provider_uuid=provider.uuid, synchronous=True).apply()
        shutil.rmtree(base_path, ignore_errors=True)
Example #3
    def test_update_summary_tables_aws(self, mock_charge_info, mock_views, mock_chain):
        """Test that the summary table task runs."""
        provider = Provider.PROVIDER_AWS
        provider_aws_uuid = self.aws_provider_uuid

        daily_table_name = AWS_CUR_TABLE_MAP["line_item_daily"]
        summary_table_name = AWS_CUR_TABLE_MAP["line_item_daily_summary"]
        start_date = self.start_date.replace(day=1) + relativedelta.relativedelta(months=-1)

        with schema_context(self.schema):
            daily_query = self.aws_accessor._get_db_obj_query(daily_table_name)
            summary_query = self.aws_accessor._get_db_obj_query(summary_table_name)

            initial_daily_count = daily_query.count()
            initial_summary_count = summary_query.count()

        self.assertEqual(initial_daily_count, 0)
        self.assertEqual(initial_summary_count, 0)

        update_summary_tables(self.schema, provider, provider_aws_uuid, start_date)

        with schema_context(self.schema):
            self.assertNotEqual(daily_query.count(), initial_daily_count)
            self.assertNotEqual(summary_query.count(), initial_summary_count)

        mock_chain.return_value.apply_async.assert_called()
Example #4
    def test_update_summary_tables(self):
        """Test that the summary table task runs."""
        daily_table_name = AWS_CUR_TABLE_MAP['line_item_daily']
        summary_table_name = AWS_CUR_TABLE_MAP['line_item_daily_summary']
        agg_table_name = AWS_CUR_TABLE_MAP['line_item_aggregates']
        start_date = datetime.utcnow()
        # First day of the previous month; relativedelta avoids the January case
        # where month - 1 would be an invalid month of 0.
        start_date = start_date.replace(day=1) + relativedelta.relativedelta(months=-1)

        daily_query = self.accessor._get_db_obj_query(daily_table_name)
        summary_query = self.accessor._get_db_obj_query(summary_table_name)
        agg_query = self.accessor._get_db_obj_query(agg_table_name)

        initial_daily_count = daily_query.count()
        initial_summary_count = summary_query.count()
        initial_agg_count = agg_query.count()

        self.assertEqual(initial_daily_count, 0)
        self.assertEqual(initial_summary_count, 0)
        self.assertEqual(initial_agg_count, 0)

        update_summary_tables('testcustomer', start_date)

        self.assertNotEqual(daily_query.count(), initial_daily_count)
        self.assertNotEqual(summary_query.count(), initial_summary_count)
        self.assertNotEqual(agg_query.count(), initial_agg_count)
Example #5
    def load_aws_data(self, customer, static_data_file, account_id=None, role_arn=None):
        """Load AWS data into the database."""
        provider_type = Provider.PROVIDER_AWS_LOCAL
        if account_id is None:
            account_id = "9999999999999"
        if role_arn is None:
            role_arn = "arn:aws:iam::999999999999:role/CostManagement"
        nise_provider_type = provider_type.replace("-local", "")
        report_name = "Test"
        credentials = {"role_arn": role_arn}
        data_source = {"bucket": "test-bucket"}
        with patch.object(settings, "AUTO_DATA_INGEST", False):
            provider = baker.make(
                "Provider",
                type=provider_type,
                authentication__credentials=credentials,
                billing_source__data_source=data_source,
                customer=customer,
            )
        template, static_data_path = self.prepare_template(provider_type, static_data_file)
        options = {
            "static_report_file": static_data_path,
            "aws_report_name": report_name,
            "aws_bucket_name": self.nise_data_path,
        }
        base_path = f"{self.nise_data_path}/{report_name}"

        with schema_context(self.schema):
            baker.make("AWSAccountAlias", account_id=account_id, account_alias="Test Account")

        for start_date, end_date, bill_date in self.dates:
            manifest = baker.make(
                "CostUsageReportManifest",
                _fill_optional=True,
                provider=provider,
                billing_period_start_datetime=bill_date,
            )
            with open(static_data_path, "w") as f:
                f.write(template.render(start_date=start_date, end_date=end_date, account_id=account_id))

            run(nise_provider_type.lower(), options)

            report_path = self.build_report_path(provider_type, bill_date, base_path)
            for report_dir in os.scandir(report_path):
                if os.path.isdir(report_dir):
                    # Reports are nested one directory deep; skip sub-directories and manifests.
                    for report in [f.path for f in os.scandir(report_dir.path)]:
                        if os.path.isdir(report):
                            continue
                        elif "manifest" in report.lower():
                            continue
                        self.process_report(report, "GZIP", provider_type, provider, manifest)
            with patch("masu.processor.tasks.chain"), patch.object(settings, "AUTO_DATA_INGEST", False):
                update_summary_tables(
                    self.schema, provider_type, provider.uuid, start_date, end_date, manifest_id=manifest.id
                )
        update_cost_model_costs.s(
            self.schema, provider.uuid, self.dh.last_month_start, self.dh.today, synchronous=True
        ).apply()
        refresh_materialized_views.s(self.schema, provider_type, provider_uuid=provider.uuid, synchronous=True).apply()
        shutil.rmtree(base_path, ignore_errors=True)
Example #6
    def test_update_summary_tables_aws_end_date(self, mock_charge_info):
        """Test that the summary table task respects a date range."""
        provider = 'AWS'
        provider_aws_uuid = self.aws_provider_uuid
        ce_table_name = AWS_CUR_TABLE_MAP['cost_entry']
        daily_table_name = AWS_CUR_TABLE_MAP['line_item_daily']
        summary_table_name = AWS_CUR_TABLE_MAP['line_item_daily_summary']

        start_date = self.start_date.replace(
            day=1, hour=0, minute=0, second=0, microsecond=0
        ) + relativedelta.relativedelta(months=-1)

        end_date = start_date + timedelta(days=10)
        end_date = end_date.replace(hour=23, minute=59, second=59)

        daily_table = getattr(self.aws_accessor.report_schema, daily_table_name)
        summary_table = getattr(self.aws_accessor.report_schema, summary_table_name)
        ce_table = getattr(self.aws_accessor.report_schema, ce_table_name)

        with schema_context(self.schema):
            ce_start_date = ce_table.objects\
                .filter(interval_start__gte=start_date)\
                .aggregate(Min('interval_start'))['interval_start__min']
            ce_end_date = ce_table.objects\
                .filter(interval_start__lte=end_date)\
                .aggregate(Max('interval_start'))['interval_start__max']

        # The summary tables will only include dates where there is data
        expected_start_date = max(start_date, ce_start_date)
        expected_start_date = expected_start_date.replace(
            hour=0, minute=0, second=0, microsecond=0
        )
        expected_end_date = min(end_date, ce_end_date)
        expected_end_date = expected_end_date.replace(
            hour=0, minute=0, second=0, microsecond=0
        )

        update_summary_tables(
            self.schema, provider, provider_aws_uuid, start_date, end_date
        )

        with schema_context(self.schema):
            daily_entry = daily_table.objects.all().aggregate(
                Min('usage_start'), Max('usage_end')
            )
            result_start_date = daily_entry['usage_start__min']
            result_end_date = daily_entry['usage_end__max']

        self.assertEqual(result_start_date, expected_start_date)
        self.assertEqual(result_end_date, expected_end_date)

        with schema_context(self.schema):
            summary_entry = summary_table.objects.all().aggregate(
                Min('usage_start'), Max('usage_end')
            )
            result_start_date = summary_entry['usage_start__min']
            result_end_date = summary_entry['usage_end__max']

        self.assertEqual(result_start_date, expected_start_date)
        self.assertEqual(result_end_date, expected_end_date)
Example #7
    def load_gcp_data(self, customer, static_data_file):
        """Load GCP data into the database."""
        provider_type = Provider.PROVIDER_GCP_LOCAL
        nise_provider_type = provider_type.replace("-local", "")
        credentials = {"project_id": "test_project_id"}
        data_source = {"table_id": "test_table_id", "dataset": "test_dataset"}
        with patch.object(settings, "AUTO_DATA_INGEST", False):
            provider = baker.make(
                "Provider",
                type=provider_type,
                authentication__credentials=credentials,
                billing_source__data_source=data_source,
                customer=customer,
            )
        etag = uuid4()
        template, static_data_path = self.prepare_template(
            provider_type, static_data_file)
        options = {
            "static_report_file": static_data_path,
            "gcp_bucket_name": self.nise_data_path,
            "gcp_etag": etag
        }
        base_path = f"{self.nise_data_path}"
        for start_date, end_date, bill_date in self.dates:
            manifest = baker.make(
                "CostUsageReportManifest",
                _fill_optional=True,
                provider=provider,
                billing_period_start_datetime=bill_date,
            )
            with open(static_data_path, "w") as f:
                f.write(
                    template.render(start_date=start_date, end_date=end_date))

            run(nise_provider_type.lower(), options)

            report_path = f"{base_path}/{etag}"
            for report in os.scandir(report_path):
                if os.path.isdir(report):
                    continue
                self.process_report(report, "PLAIN", provider_type, provider,
                                    manifest)
            with patch("masu.processor.tasks.chain"), patch.object(
                    settings, "AUTO_DATA_INGEST", False):
                update_summary_tables(self.schema,
                                      provider_type,
                                      provider.uuid,
                                      start_date,
                                      end_date,
                                      manifest_id=manifest.id)
        update_cost_model_costs.s(self.schema,
                                  provider.uuid,
                                  self.dh.last_month_start,
                                  self.dh.today,
                                  synchronous=True).apply()
        refresh_materialized_views.s(self.schema,
                                     provider_type,
                                     provider_uuid=provider.uuid,
                                     synchronous=True).apply()
        shutil.rmtree(base_path, ignore_errors=True)
Example #8
    def test_update_summary_tables_aws_end_date(self, mock_charge_info):
        """Test that the summary table task respects a date range."""
        provider = Provider.PROVIDER_AWS_LOCAL
        provider_aws_uuid = self.aws_provider_uuid
        ce_table_name = AWS_CUR_TABLE_MAP["cost_entry"]
        daily_table_name = AWS_CUR_TABLE_MAP["line_item_daily"]
        summary_table_name = AWS_CUR_TABLE_MAP["line_item_daily_summary"]

        start_date = DateHelper().last_month_start

        end_date = DateHelper().last_month_end

        daily_table = getattr(self.aws_accessor.report_schema,
                              daily_table_name)
        summary_table = getattr(self.aws_accessor.report_schema,
                                summary_table_name)
        ce_table = getattr(self.aws_accessor.report_schema, ce_table_name)
        with schema_context(self.schema):
            daily_table.objects.all().delete()
            summary_table.objects.all().delete()
            ce_start_date = ce_table.objects.filter(
                interval_start__gte=start_date.date()).aggregate(
                    Min("interval_start"))["interval_start__min"]
            ce_end_date = ce_table.objects.filter(
                interval_start__lte=end_date.date()).aggregate(
                    Max("interval_start"))["interval_start__max"]

        # The summary tables will only include dates where there is data
        expected_start_date = max(start_date, ce_start_date)
        expected_start_date = expected_start_date.replace(hour=0,
                                                          minute=0,
                                                          second=0,
                                                          microsecond=0)
        expected_end_date = min(end_date, ce_end_date)
        expected_end_date = expected_end_date.replace(hour=0,
                                                      minute=0,
                                                      second=0,
                                                      microsecond=0)

        update_summary_tables(self.schema, provider, provider_aws_uuid,
                              start_date, end_date)

        with schema_context(self.schema):
            daily_entry = daily_table.objects.all().aggregate(
                Min("usage_start"), Max("usage_end"))
            result_start_date = daily_entry["usage_start__min"]
            result_end_date = daily_entry["usage_end__max"]

        self.assertEqual(result_start_date, expected_start_date.date())
        self.assertEqual(result_end_date, expected_end_date.date())

        with schema_context(self.schema):
            summary_entry = summary_table.objects.all().aggregate(
                Min("usage_start"), Max("usage_end"))
            result_start_date = summary_entry["usage_start__min"]
            result_end_date = summary_entry["usage_end__max"]

        self.assertEqual(result_start_date, expected_start_date.date())
        self.assertEqual(result_end_date, expected_end_date.date())
Example #9
File: utils.py  Project: ebpetway/koku
    def load_openshift_data(self, customer, static_data_file, cluster_id):
        """Load OpenShift data into the database."""
        provider_type = Provider.PROVIDER_OCP
        with override_settings(AUTO_DATA_INGEST=False):
            provider = baker.make(
                "Provider",
                type=provider_type,
                authentication__provider_resource_name=cluster_id,
                billing_source__bucket="",
                customer=customer,
            )
        template, static_data_path = self.prepare_template(
            provider_type, static_data_file)
        options = {
            "static_report_file": static_data_path,
            "insights_upload": self.nise_data_path,
            "ocp_cluster_id": cluster_id,
        }
        base_path = f"{self.nise_data_path}/{cluster_id}"

        for start_date, end_date, bill_date in self.dates:
            manifest = baker.make(
                "CostUsageReportManifest",
                _fill_optional=True,
                provider=provider,
                billing_period_start_datetime=bill_date,
                num_processed_files=0,
                num_total_files=3,
            )
            with open(static_data_path, "w") as f:
                f.write(
                    template.render(start_date=start_date, end_date=end_date))

            run(provider_type.lower(), options)

            report_path = self.build_report_path(provider_type, bill_date,
                                                 base_path)
            for report in os.scandir(report_path):
                shutil.move(report.path, f"{base_path}/{report.name}")
            for report in [f.path for f in os.scandir(base_path)]:
                if os.path.isdir(report):
                    continue
                elif "manifest" in report.lower():
                    continue
                self.process_report(report, "PLAIN", provider_type, provider,
                                    manifest)
            with patch("masu.processor.tasks.chain"):
                update_summary_tables(self.schema,
                                      provider_type,
                                      provider.uuid,
                                      start_date,
                                      end_date,
                                      manifest_id=manifest.id)
        update_cost_model_costs(self.schema, provider.uuid,
                                self.dh.last_month_start, self.dh.today)
        refresh_materialized_views(self.schema, provider_type)
        shutil.rmtree(report_path, ignore_errors=True)
Example #10
    def test_update_summary_tables_aws_end_date(self, mock_charge_info):
        """Test that the summary table task respects a date range."""
        provider = 'AWS'
        provider_aws_uuid = self.aws_test_provider_uuid
        ce_table_name = AWS_CUR_TABLE_MAP['cost_entry']
        daily_table_name = AWS_CUR_TABLE_MAP['line_item_daily']
        summary_table_name = AWS_CUR_TABLE_MAP['line_item_daily_summary']

        start_date = self.start_date.replace(day=1,
                                             hour=0,
                                             minute=0,
                                             second=0,
                                             microsecond=0) + relativedelta.relativedelta(months=-1)

        end_date = start_date + timedelta(days=10)
        end_date = end_date.replace(hour=23, minute=59, second=59)

        daily_table = getattr(self.aws_accessor.report_schema, daily_table_name)
        summary_table = getattr(self.aws_accessor.report_schema, summary_table_name)
        ce_table = getattr(self.aws_accessor.report_schema, ce_table_name)

        ce_start_date = self.aws_accessor._session\
            .query(func.min(ce_table.interval_start))\
            .filter(ce_table.interval_start >= start_date).first()[0]

        ce_end_date = self.aws_accessor._session\
            .query(func.max(ce_table.interval_start))\
            .filter(ce_table.interval_start <= end_date).first()[0]

        # The summary tables will only include dates where there is data
        expected_start_date = max(start_date, ce_start_date)
        expected_start_date = expected_start_date.replace(hour=0, minute=0,
                                                          second=0,
                                                          microsecond=0)
        expected_end_date = min(end_date, ce_end_date)
        expected_end_date = expected_end_date.replace(hour=0, minute=0,
                                                      second=0, microsecond=0)

        update_summary_tables(self.schema_name, provider, provider_aws_uuid, start_date, end_date)

        result_start_date, result_end_date = self.aws_accessor._session.query(
            func.min(daily_table.usage_start),
            func.max(daily_table.usage_end)
        ).first()

        self.assertEqual(result_start_date, expected_start_date)
        self.assertEqual(result_end_date, expected_end_date)

        result_start_date, result_end_date = self.aws_accessor._session.query(
            func.min(summary_table.usage_start),
            func.max(summary_table.usage_end)
        ).first()

        self.assertEqual(result_start_date, expected_start_date)
        self.assertEqual(result_end_date, expected_end_date)
Example #11
    def test_update_summary_tables_ocp(self, mock_cpu_rate, mock_mem_rate, mock_charge_info):
        """Test that the summary table task runs."""
        mem_rate = {'tiered_rate': [{'value': '1.5', 'unit': 'USD'}]}
        cpu_rate = {'tiered_rate': [{'value': '2.5', 'unit': 'USD'}]}

        mock_cpu_rate.return_value = cpu_rate
        mock_mem_rate.return_value = mem_rate

        provider = 'OCP'
        provider_ocp_uuid = self.ocp_test_provider_uuid

        daily_table_name = OCP_REPORT_TABLE_MAP['line_item_daily']
        start_date = self.start_date.replace(day=1) + relativedelta.relativedelta(months=-1)

        daily_query = self.ocp_accessor._get_db_obj_query(daily_table_name)

        initial_daily_count = daily_query.count()

        self.assertEqual(initial_daily_count, 0)
        update_summary_tables(self.schema_name, provider, provider_ocp_uuid, start_date)

        self.assertNotEqual(daily_query.count(), initial_daily_count)

        update_charge_info(schema_name=self.test_schema, provider_uuid=provider_ocp_uuid)

        table_name = OCP_REPORT_TABLE_MAP['line_item_daily_summary']
        with ProviderDBAccessor(provider_ocp_uuid) as provider_accessor:
            provider_obj = provider_accessor.get_provider()

        usage_period_qry = self.ocp_accessor.get_usage_period_query_by_provider(provider_obj.id)
        cluster_id = usage_period_qry.first().cluster_id

        items = self.ocp_accessor._get_db_obj_query(table_name).filter_by(cluster_id=cluster_id)
        for item in items:
            self.assertIsNotNone(item.pod_charge_memory_gigabyte_hours)
            self.assertIsNotNone(item.pod_charge_cpu_core_hours)

        storage_daily_name = OCP_REPORT_TABLE_MAP['storage_line_item_daily']
        items = self.ocp_accessor._get_db_obj_query(storage_daily_name).filter_by(cluster_id=cluster_id)
        for item in items:
            self.assertIsNotNone(item.volume_request_storage_byte_seconds)
            self.assertIsNotNone(item.persistentvolumeclaim_usage_byte_seconds)

        storage_summary_name = OCP_REPORT_TABLE_MAP['storage_line_item_daily_summary']
        items = self.ocp_accessor._get_db_obj_query(storage_summary_name).filter_by(cluster_id=cluster_id)
        for item in items:
            self.assertIsNotNone(item.volume_request_storage_gigabyte_months)
            self.assertIsNotNone(item.persistentvolumeclaim_usage_gigabyte_months)
Example #12
    def test_update_summary_tables_end_date(self):
        """Test that the summary table task respects a date range."""
        ce_table_name = AWS_CUR_TABLE_MAP['cost_entry']
        daily_table_name = AWS_CUR_TABLE_MAP['line_item_daily']
        summary_table_name = AWS_CUR_TABLE_MAP['line_item_daily_summary']

        start_date = datetime.utcnow()
        # First day of the previous month; stepping back with timedelta avoids an
        # invalid month=0 when run in January.
        start_date = (start_date.replace(day=1) - timedelta(days=1)).replace(day=1).date()
        end_date = start_date + timedelta(days=10)

        daily_table = getattr(self.accessor.report_schema, daily_table_name)
        summary_table = getattr(self.accessor.report_schema, summary_table_name)
        ce_table = getattr(self.accessor.report_schema, ce_table_name)

        ce_start_date = self.accessor._session\
            .query(func.min(ce_table.interval_start))\
            .filter(func.date(ce_table.interval_start) >= start_date).first()[0]

        ce_end_date = self.accessor._session\
            .query(func.max(ce_table.interval_start))\
            .filter(func.date(ce_table.interval_start) <= end_date).first()[0]

        # The summary tables will only include dates where there is data
        expected_start_date = max(start_date, ce_start_date.date())
        expected_end_date = min(end_date, ce_end_date.date())

        update_summary_tables('testcustomer', start_date, end_date)

        result_start_date, result_end_date = self.accessor._session.query(
            func.min(daily_table.usage_start),
            func.max(daily_table.usage_start)
        ).first()

        self.assertEqual(result_start_date, expected_start_date)
        self.assertEqual(result_end_date, expected_end_date)

        result_start_date, result_end_date = self.accessor._session.query(
            func.min(summary_table.usage_start),
            func.max(summary_table.usage_start)
        ).first()

        self.assertEqual(result_start_date, expected_start_date)
        self.assertEqual(result_end_date, expected_end_date)
Example #13
    def test_update_summary_tables_ocp_end_date(self, mock_cpu_rate, mock_mem_rate, mock_charge_info):
        """Test that the summary table task respects a date range."""
        mock_cpu_rate.return_value = 1.5
        mock_mem_rate.return_value = 2.5
        provider = Provider.PROVIDER_OCP
        provider_ocp_uuid = self.ocp_test_provider_uuid
        ce_table_name = OCP_REPORT_TABLE_MAP["report"]
        daily_table_name = OCP_REPORT_TABLE_MAP["line_item_daily"]

        start_date = self.start_date.replace(
            day=1, hour=0, minute=0, second=0, microsecond=0
        ) + relativedelta.relativedelta(months=-1)

        end_date = start_date + timedelta(days=10)
        end_date = end_date.replace(hour=23, minute=59, second=59)

        daily_table = getattr(self.ocp_accessor.report_schema, daily_table_name)
        ce_table = getattr(self.ocp_accessor.report_schema, ce_table_name)

        with schema_context(self.schema):
            ce_start_date = ce_table.objects.filter(interval_start__gte=start_date).aggregate(Min("interval_start"))[
                "interval_start__min"
            ]

            ce_end_date = ce_table.objects.filter(interval_start__lte=end_date).aggregate(Max("interval_start"))[
                "interval_start__max"
            ]

        # The summary tables will only include dates where there is data
        expected_start_date = max(start_date, ce_start_date)
        expected_start_date = expected_start_date.replace(hour=0, minute=0, second=0, microsecond=0)
        expected_end_date = min(end_date, ce_end_date)
        expected_end_date = expected_end_date.replace(hour=0, minute=0, second=0, microsecond=0)

        update_summary_tables(self.schema, provider, provider_ocp_uuid, start_date, end_date)
        with schema_context(self.schema):
            daily_entry = daily_table.objects.all().aggregate(Min("usage_start"), Max("usage_end"))
            result_start_date = daily_entry["usage_start__min"]
            result_end_date = daily_entry["usage_end__max"]

        self.assertEqual(result_start_date, expected_start_date)
        self.assertEqual(result_end_date, expected_end_date)
Example #14
    def test_update_summary_tables_ocp_end_date(self, mock_cpu_rate,
                                                mock_mem_rate,
                                                mock_charge_info):
        """Test that the summary table task respects a date range."""
        mock_cpu_rate.return_value = 1.5
        mock_mem_rate.return_value = 2.5
        provider = Provider.PROVIDER_OCP
        provider_ocp_uuid = self.ocp_test_provider_uuid
        ce_table_name = OCP_REPORT_TABLE_MAP["report"]
        daily_table_name = OCP_REPORT_TABLE_MAP["line_item_daily"]

        start_date = DateHelper().last_month_start
        end_date = DateHelper().last_month_end
        daily_table = getattr(self.ocp_accessor.report_schema,
                              daily_table_name)
        ce_table = getattr(self.ocp_accessor.report_schema, ce_table_name)

        with schema_context(self.schema):
            daily_table.objects.all().delete()
            ce_start_date = ce_table.objects.filter(
                interval_start__gte=start_date.date()).aggregate(
                    Min("interval_start"))["interval_start__min"]

            ce_end_date = ce_table.objects.filter(
                interval_start__lte=end_date.date()).aggregate(
                    Max("interval_start"))["interval_start__max"]

        # The summary tables will only include dates where there is data
        expected_start_date = max(start_date, ce_start_date)
        expected_end_date = min(end_date, ce_end_date)

        update_summary_tables(self.schema, provider, provider_ocp_uuid,
                              start_date, end_date)
        with schema_context(self.schema):
            daily_entry = daily_table.objects.all().aggregate(
                Min("usage_start"), Max("usage_end"))
            result_start_date = daily_entry["usage_start__min"]
            result_end_date = daily_entry["usage_end__max"]

        self.assertEqual(result_start_date, expected_start_date.date())
        self.assertEqual(result_end_date, expected_end_date.date())
Example #15
    def test_update_summary_tables_aws(self, mock_charge_info):
        """Test that the summary table task runs."""
        provider = 'AWS'
        provider_aws_uuid = self.aws_test_provider_uuid

        daily_table_name = AWS_CUR_TABLE_MAP['line_item_daily']
        summary_table_name = AWS_CUR_TABLE_MAP['line_item_daily_summary']
        start_date = self.start_date.replace(day=1) + relativedelta.relativedelta(months=-1)

        daily_query = self.aws_accessor._get_db_obj_query(daily_table_name)
        summary_query = self.aws_accessor._get_db_obj_query(summary_table_name)

        initial_daily_count = daily_query.count()
        initial_summary_count = summary_query.count()

        self.assertEqual(initial_daily_count, 0)
        self.assertEqual(initial_summary_count, 0)

        update_summary_tables(self.schema_name, provider, provider_aws_uuid, start_date)

        self.assertNotEqual(daily_query.count(), initial_daily_count)
        self.assertNotEqual(summary_query.count(), initial_summary_count)
Example #16
    def test_update_summary_tables_ocp(self, mock_markup, mock_rate_map, mock_charge_info, mock_view, mock_chain):
        """Test that the summary table task runs."""
        markup = {}
        mem_rate = {"tiered_rates": [{"value": "1.5", "unit": "USD"}]}
        cpu_rate = {"tiered_rates": [{"value": "2.5", "unit": "USD"}]}
        rate_metric_map = {"cpu_core_usage_per_hour": cpu_rate, "memory_gb_usage_per_hour": mem_rate}

        mock_markup.return_value = markup
        mock_rate_map.return_value = rate_metric_map

        provider = Provider.PROVIDER_OCP
        provider_ocp_uuid = self.ocp_test_provider_uuid

        daily_table_name = OCP_REPORT_TABLE_MAP["line_item_daily"]
        start_date = self.start_date.replace(
            day=1, hour=0, minute=0, second=0, microsecond=0
        ) + relativedelta.relativedelta(months=-1)
        end_date = start_date + timedelta(days=10)

        with schema_context(self.schema):
            daily_query = self.ocp_accessor._get_db_obj_query(daily_table_name)

            initial_daily_count = daily_query.count()

        self.assertEqual(initial_daily_count, 0)
        update_summary_tables(self.schema, provider, provider_ocp_uuid, start_date, end_date)

        with schema_context(self.schema):
            self.assertNotEqual(daily_query.count(), initial_daily_count)

        update_charge_info(
            schema_name=self.schema, provider_uuid=provider_ocp_uuid, start_date=start_date, end_date=end_date
        )

        table_name = OCP_REPORT_TABLE_MAP["line_item_daily_summary"]
        with ProviderDBAccessor(provider_ocp_uuid) as provider_accessor:
            provider_obj = provider_accessor.get_provider()

        usage_period_qry = self.ocp_accessor.get_usage_period_query_by_provider(provider_obj.uuid)
        with schema_context(self.schema):
            cluster_id = usage_period_qry.first().cluster_id

            items = self.ocp_accessor._get_db_obj_query(table_name).filter(cluster_id=cluster_id)
            for item in items:
                self.assertIsNotNone(item.pod_charge_memory_gigabyte_hours)
                self.assertIsNotNone(item.pod_charge_cpu_core_hours)

            storage_daily_name = OCP_REPORT_TABLE_MAP["storage_line_item_daily"]

            items = self.ocp_accessor._get_db_obj_query(storage_daily_name).filter(cluster_id=cluster_id)
            for item in items:
                self.assertIsNotNone(item.volume_request_storage_byte_seconds)
                self.assertIsNotNone(item.persistentvolumeclaim_usage_byte_seconds)

            storage_summary_name = OCP_REPORT_TABLE_MAP["line_item_daily_summary"]
            items = self.ocp_accessor._get_db_obj_query(storage_summary_name).filter(
                cluster_id=cluster_id, data_source="Storage"
            )
            for item in items:
                self.assertIsNotNone(item.volume_request_storage_gigabyte_months)
                self.assertIsNotNone(item.persistentvolumeclaim_usage_gigabyte_months)

        mock_chain.return_value.apply_async.assert_called()
Example #17
    def test_update_summary_tables_ocp(self, mock_markup, mock_rate_map,
                                       mock_charge_info, mock_cost_summary):
        """Test that the summary table task runs."""
        markup = {}
        mem_rate = {'tiered_rates': [{'value': '1.5', 'unit': 'USD'}]}
        cpu_rate = {'tiered_rates': [{'value': '2.5', 'unit': 'USD'}]}
        rate_metric_map = {
            'cpu_core_usage_per_hour': cpu_rate,
            'memory_gb_usage_per_hour': mem_rate
        }

        mock_markup.return_value = markup
        mock_rate_map.return_value = rate_metric_map

        provider = 'OCP'
        provider_ocp_uuid = self.ocp_test_provider_uuid

        daily_table_name = OCP_REPORT_TABLE_MAP['line_item_daily']
        start_date = self.start_date.replace(
            day=1) + relativedelta.relativedelta(months=-1)

        with schema_context(self.schema):
            daily_query = self.ocp_accessor._get_db_obj_query(daily_table_name)

            initial_daily_count = daily_query.count()

        self.assertEqual(initial_daily_count, 0)
        update_summary_tables(self.schema, provider, provider_ocp_uuid,
                              start_date)

        with schema_context(self.schema):
            self.assertNotEqual(daily_query.count(), initial_daily_count)

        update_charge_info(schema_name=self.schema,
                           provider_uuid=provider_ocp_uuid)

        table_name = OCP_REPORT_TABLE_MAP['line_item_daily_summary']
        with ProviderDBAccessor(provider_ocp_uuid) as provider_accessor:
            provider_obj = provider_accessor.get_provider()

        usage_period_qry = self.ocp_accessor.get_usage_period_query_by_provider(
            provider_obj.id)
        with schema_context(self.schema):
            cluster_id = usage_period_qry.first().cluster_id

            items = self.ocp_accessor._get_db_obj_query(table_name).filter(
                cluster_id=cluster_id)
            for item in items:
                self.assertIsNotNone(item.pod_charge_memory_gigabyte_hours)
                self.assertIsNotNone(item.pod_charge_cpu_core_hours)

            storage_daily_name = OCP_REPORT_TABLE_MAP[
                'storage_line_item_daily']

            items = self.ocp_accessor._get_db_obj_query(
                storage_daily_name).filter(cluster_id=cluster_id)
            for item in items:
                self.assertIsNotNone(item.volume_request_storage_byte_seconds)
                self.assertIsNotNone(
                    item.persistentvolumeclaim_usage_byte_seconds)

            storage_summary_name = OCP_REPORT_TABLE_MAP[
                'line_item_daily_summary']
            items = self.ocp_accessor._get_db_obj_query(
                storage_summary_name).filter(cluster_id=cluster_id,
                                             data_source='Storage')
            for item in items:
                self.assertIsNotNone(
                    item.volume_request_storage_gigabyte_months)
                self.assertIsNotNone(
                    item.persistentvolumeclaim_usage_gigabyte_months)

        mock_charge_info.apply_async.assert_called()
        mock_cost_summary.si.assert_called()
Example #18
    def test_update_summary_tables_ocp(self, mock_cost_model, mock_charge_info,
                                       mock_view, mock_chain):
        """Test that the summary table task runs."""
        infrastructure_rates = {
            "cpu_core_usage_per_hour": 1.5,
            "memory_gb_usage_per_hour": 2.5,
            "storage_gb_usage_per_month": 0.5,
        }
        markup = {}

        mock_cost_model.return_value.__enter__.return_value.infrastructure_rates = infrastructure_rates
        mock_cost_model.return_value.__enter__.return_value.supplementary_rates = {}
        mock_cost_model.return_value.__enter__.return_value.markup = markup

        provider = Provider.PROVIDER_OCP
        provider_ocp_uuid = self.ocp_test_provider_uuid

        daily_table_name = OCP_REPORT_TABLE_MAP["line_item_daily"]
        start_date = DateHelper().last_month_start
        end_date = DateHelper().last_month_end

        with schema_context(self.schema):
            daily_query = self.ocp_accessor._get_db_obj_query(daily_table_name)
            daily_query.delete()

            initial_daily_count = daily_query.count()

        self.assertEqual(initial_daily_count, 0)
        update_summary_tables(self.schema, provider, provider_ocp_uuid,
                              start_date, end_date)

        with schema_context(self.schema):
            self.assertNotEqual(daily_query.count(), initial_daily_count)

        update_cost_model_costs(schema_name=self.schema,
                                provider_uuid=provider_ocp_uuid,
                                start_date=start_date,
                                end_date=end_date)

        table_name = OCP_REPORT_TABLE_MAP["line_item_daily_summary"]
        with ProviderDBAccessor(provider_ocp_uuid) as provider_accessor:
            provider_obj = provider_accessor.get_provider()

        usage_period_qry = self.ocp_accessor.get_usage_period_query_by_provider(
            provider_obj.uuid)
        with schema_context(self.schema):
            cluster_id = usage_period_qry.first().cluster_id

            items = self.ocp_accessor._get_db_obj_query(table_name).filter(
                usage_start__gte=start_date,
                usage_start__lte=end_date,
                cluster_id=cluster_id,
                data_source="Pod")
            for item in items:
                self.assertNotEqual(item.infrastructure_usage_cost.get("cpu"),
                                    0)
                self.assertNotEqual(
                    item.infrastructure_usage_cost.get("memory"), 0)

            storage_daily_name = OCP_REPORT_TABLE_MAP[
                "storage_line_item_daily"]

            items = self.ocp_accessor._get_db_obj_query(
                storage_daily_name).filter(cluster_id=cluster_id)
            for item in items:
                self.assertIsNotNone(item.volume_request_storage_byte_seconds)
                self.assertIsNotNone(
                    item.persistentvolumeclaim_usage_byte_seconds)

            storage_summary_name = OCP_REPORT_TABLE_MAP[
                "line_item_daily_summary"]
            items = self.ocp_accessor._get_db_obj_query(
                storage_summary_name).filter(cluster_id=cluster_id,
                                             data_source="Storage")
            for item in items:
                self.assertIsNotNone(
                    item.volume_request_storage_gigabyte_months)
                self.assertIsNotNone(
                    item.persistentvolumeclaim_usage_gigabyte_months)

        mock_chain.return_value.apply_async.assert_called()
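
After summarization, the load_* examples finish by applying two more tasks eagerly. Assuming update_cost_model_costs and refresh_materialized_views live alongside update_summary_tables in masu.processor.tasks (the patch targets above point at that module), a minimal sketch of that tail step with placeholder inputs looks like this:

from datetime import date

from masu.processor.tasks import refresh_materialized_views, update_cost_model_costs

# Placeholders -- in the loaders above these come from self.schema, provider.uuid,
# and DateHelper (self.dh.last_month_start / self.dh.today).
schema = "acct10001"
provider_type = "AWS-local"
provider_uuid = "00000000-0000-0000-0000-000000000000"
start_date = date(2021, 1, 1)
end_date = date(2021, 1, 31)

# .s(...).apply() builds a Celery signature and runs it eagerly in-process,
# mirroring the tail of load_aws_data / load_azure_data / load_gcp_data above.
update_cost_model_costs.s(schema, provider_uuid, start_date, end_date, synchronous=True).apply()
refresh_materialized_views.s(schema, provider_type, provider_uuid=provider_uuid, synchronous=True).apply()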