Example #1
class OCPUtilTests(MasuTestCase):
    """Test the OCP utility functions."""
    def setUp(self):
        super().setUp()
        self.common_accessor = ReportingCommonDBAccessor()
        self.column_map = self.common_accessor.column_map
        self.accessor = OCPReportDBAccessor(schema=self.test_schema,
                                            column_map=self.column_map)
        self.provider_accessor = ProviderDBAccessor(
            provider_uuid=self.ocp_test_provider_uuid)
        self.report_schema = self.accessor.report_schema
        self.creator = ReportObjectCreator(self.accessor, self.column_map,
                                           self.report_schema.column_types)
        self.all_tables = list(OCP_REPORT_TABLE_MAP.values())

        self.provider_id = self.provider_accessor.get_provider().id
        reporting_period = self.creator.create_ocp_report_period(
            provider_id=self.provider_id)
        report = self.creator.create_ocp_report(
            reporting_period, reporting_period.report_period_start)
        self.creator.create_ocp_usage_line_item(reporting_period, report)
        self.creator.create_ocp_storage_line_item(reporting_period, report)

    def tearDown(self):
        """Return the database to a pre-test state."""
        self.accessor._session.rollback()
        for table_name in self.all_tables:
            tables = self.accessor._get_db_obj_query(table_name).all()
            for table in tables:
                self.accessor._session.delete(table)
        self.accessor.commit()
        self.accessor.close_session()
        self.provider_accessor.close_session()
        self.common_accessor.close_session()

    def test_get_cluster_id_from_provider(self):
        """Test that the cluster ID is returned from OCP provider."""
        cluster_id = utils.get_cluster_id_from_provider(
            self.ocp_test_provider_uuid)
        self.assertIsNotNone(cluster_id)

    def test_get_cluster_id_from_non_ocp_provider(self):
        """Test that None is returned when getting cluster ID on non-OCP provider."""
        cluster_id = utils.get_cluster_id_from_provider(
            self.aws_test_provider_uuid)
        self.assertIsNone(cluster_id)

    def test_get_provider_uuid_from_cluster_id(self):
        """Test that the provider uuid is returned for a cluster ID."""
        cluster_id = self.ocp_provider_resource_name
        provider_uuid = utils.get_provider_uuid_from_cluster_id(cluster_id)
        try:
            UUID(provider_uuid)
        except ValueError:
            self.fail('{} is not a valid uuid.'.format(str(provider_uuid)))

    def test_get_provider_uuid_from_invalid_cluster_id(self):
        """Test that the provider uuid is not returned for an invalid cluster ID."""
        cluster_id = 'bad_cluster_id'
        provider_uuid = utils.get_provider_uuid_from_cluster_id(cluster_id)
        self.assertIsNone(provider_uuid)

    def test_poll_ingest_override_for_provider(self):
        """Test that OCP polling override returns True if insights local path exists."""
        fake_dir = tempfile.mkdtemp()
        with patch.object(Config, 'INSIGHTS_LOCAL_REPORT_DIR', fake_dir):
            cluster_id = utils.get_cluster_id_from_provider(
                self.ocp_test_provider_uuid)
            expected_path = '{}/{}/'.format(Config.INSIGHTS_LOCAL_REPORT_DIR,
                                            cluster_id)
            os.makedirs(expected_path, exist_ok=True)
            self.assertTrue(
                utils.poll_ingest_override_for_provider(
                    self.ocp_test_provider_uuid))
        shutil.rmtree(fake_dir)
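
The temp-directory plus patch.object pattern in test_poll_ingest_override_for_provider can be reduced to a self-contained sketch. FakeConfig and poll_override_exists below are illustrative stand-ins, not the real masu Config or OCP utilities; the point is only that patching the class attribute for the duration of the with block makes the path check hit the temporary directory. Wrapping the cleanup in try/finally (or addCleanup) also keeps the temporary directory from leaking if an assertion fails before shutil.rmtree runs.

# Self-contained sketch of the temp-dir + patch.object pattern from
# test_poll_ingest_override_for_provider. FakeConfig and poll_override_exists
# are illustrative stand-ins, not the real masu Config or OCP utilities.
import os
import shutil
import tempfile
import unittest
from unittest.mock import patch


class FakeConfig:
    INSIGHTS_LOCAL_REPORT_DIR = "/var/tmp/insights_local"


def poll_override_exists(cluster_id):
    """Return True when a local report directory exists for the cluster."""
    return os.path.isdir(os.path.join(FakeConfig.INSIGHTS_LOCAL_REPORT_DIR, cluster_id))


class PollOverrideTest(unittest.TestCase):
    def test_override_true_when_dir_exists(self):
        fake_dir = tempfile.mkdtemp()
        try:
            # Patch the class attribute only for the duration of the with block.
            with patch.object(FakeConfig, "INSIGHTS_LOCAL_REPORT_DIR", fake_dir):
                os.makedirs(os.path.join(fake_dir, "my-cluster"), exist_ok=True)
                self.assertTrue(poll_override_exists("my-cluster"))
        finally:
            shutil.rmtree(fake_dir)


if __name__ == "__main__":
    unittest.main()
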
Example #2
class TestUpdateSummaryTablesTask(MasuTestCase):
    """Test cases for Processor summary table Celery tasks."""

    @classmethod
    def setUpClass(cls):
        """Set up for the class."""
        super().setUpClass()
        cls.aws_tables = list(AWS_CUR_TABLE_MAP.values())
        cls.ocp_tables = list(OCP_REPORT_TABLE_MAP.values())
        cls.all_tables = list(AWS_CUR_TABLE_MAP.values()) + list(OCP_REPORT_TABLE_MAP.values())
        with ReportingCommonDBAccessor() as report_common_db:
            cls.column_map = report_common_db.column_map

        cls.creator = ReportObjectCreator(cls.schema, cls.column_map)

    def setUp(self):
        """Set up each test."""
        super().setUp()
        self.aws_accessor = AWSReportDBAccessor(schema=self.schema, column_map=self.column_map)
        self.ocp_accessor = OCPReportDBAccessor(schema=self.schema, column_map=self.column_map)

        # Populate some line item data so that the summary tables
        # have something to pull from
        self.start_date = DateAccessor().today_with_timezone("UTC").replace(day=1)
        last_month = self.start_date - relativedelta.relativedelta(months=1)

        for cost_entry_date in (self.start_date, last_month):
            bill = self.creator.create_cost_entry_bill(provider_uuid=self.aws_provider_uuid, bill_date=cost_entry_date)
            cost_entry = self.creator.create_cost_entry(bill, cost_entry_date)
            for family in ["Storage", "Compute Instance", "Database Storage", "Database Instance"]:
                product = self.creator.create_cost_entry_product(family)
                pricing = self.creator.create_cost_entry_pricing()
                reservation = self.creator.create_cost_entry_reservation()
                self.creator.create_cost_entry_line_item(bill, cost_entry, product, pricing, reservation)
        provider_ocp_uuid = self.ocp_test_provider_uuid

        with ProviderDBAccessor(provider_uuid=provider_ocp_uuid) as provider_accessor:
            provider_uuid = provider_accessor.get_provider().uuid

        cluster_id = self.ocp_provider_resource_name
        for period_date in (self.start_date, last_month):
            period = self.creator.create_ocp_report_period(
                provider_uuid=provider_uuid, period_date=period_date, cluster_id=cluster_id
            )
            report = self.creator.create_ocp_report(period, period_date)
            for _ in range(25):
                self.creator.create_ocp_usage_line_item(period, report)

    @patch("masu.processor.tasks.chain")
    @patch("masu.processor.tasks.refresh_materialized_views")
    @patch("masu.processor.tasks.update_charge_info")
    def test_update_summary_tables_aws(self, mock_charge_info, mock_views, mock_chain):
        """Test that the summary table task runs."""
        provider = Provider.PROVIDER_AWS
        provider_aws_uuid = self.aws_provider_uuid

        daily_table_name = AWS_CUR_TABLE_MAP["line_item_daily"]
        summary_table_name = AWS_CUR_TABLE_MAP["line_item_daily_summary"]
        start_date = self.start_date.replace(day=1) + relativedelta.relativedelta(months=-1)

        with schema_context(self.schema):
            daily_query = self.aws_accessor._get_db_obj_query(daily_table_name)
            summary_query = self.aws_accessor._get_db_obj_query(summary_table_name)

            initial_daily_count = daily_query.count()
            initial_summary_count = summary_query.count()

        self.assertEqual(initial_daily_count, 0)
        self.assertEqual(initial_summary_count, 0)

        update_summary_tables(self.schema, provider, provider_aws_uuid, start_date)

        with schema_context(self.schema):
            self.assertNotEqual(daily_query.count(), initial_daily_count)
            self.assertNotEqual(summary_query.count(), initial_summary_count)

        mock_chain.return_value.apply_async.assert_called()

    @patch("masu.processor.tasks.update_charge_info")
    def test_update_summary_tables_aws_end_date(self, mock_charge_info):
        """Test that the summary table task respects a date range."""
        provider = Provider.PROVIDER_AWS
        provider_aws_uuid = self.aws_provider_uuid
        ce_table_name = AWS_CUR_TABLE_MAP["cost_entry"]
        daily_table_name = AWS_CUR_TABLE_MAP["line_item_daily"]
        summary_table_name = AWS_CUR_TABLE_MAP["line_item_daily_summary"]

        start_date = self.start_date.replace(
            day=1, hour=0, minute=0, second=0, microsecond=0
        ) + relativedelta.relativedelta(months=-1)

        end_date = start_date + timedelta(days=10)
        end_date = end_date.replace(hour=23, minute=59, second=59)

        daily_table = getattr(self.aws_accessor.report_schema, daily_table_name)
        summary_table = getattr(self.aws_accessor.report_schema, summary_table_name)
        ce_table = getattr(self.aws_accessor.report_schema, ce_table_name)

        with schema_context(self.schema):
            ce_start_date = ce_table.objects.filter(interval_start__gte=start_date).aggregate(Min("interval_start"))[
                "interval_start__min"
            ]
            ce_end_date = ce_table.objects.filter(interval_start__lte=end_date).aggregate(Max("interval_start"))[
                "interval_start__max"
            ]

        # The summary tables will only include dates where there is data
        expected_start_date = max(start_date, ce_start_date)
        expected_start_date = expected_start_date.replace(hour=0, minute=0, second=0, microsecond=0)
        expected_end_date = min(end_date, ce_end_date)
        expected_end_date = expected_end_date.replace(hour=0, minute=0, second=0, microsecond=0)

        update_summary_tables(self.schema, provider, provider_aws_uuid, start_date, end_date)

        with schema_context(self.schema):
            daily_entry = daily_table.objects.all().aggregate(Min("usage_start"), Max("usage_end"))
            result_start_date = daily_entry["usage_start__min"]
            result_end_date = daily_entry["usage_end__max"]

        self.assertEqual(result_start_date, expected_start_date)
        self.assertEqual(result_end_date, expected_end_date)

        with schema_context(self.schema):
            summary_entry = summary_table.objects.all().aggregate(Min("usage_start"), Max("usage_end"))
            result_start_date = summary_entry["usage_start__min"]
            result_end_date = summary_entry["usage_end__max"]

        self.assertEqual(result_start_date, expected_start_date)
        self.assertEqual(result_end_date, expected_end_date)

    @patch("masu.processor.tasks.chain")
    @patch("masu.processor.tasks.refresh_materialized_views")
    @patch("masu.processor.tasks.update_charge_info")
    @patch("masu.database.cost_model_db_accessor.CostModelDBAccessor._make_rate_by_metric_map")
    @patch("masu.database.cost_model_db_accessor.CostModelDBAccessor.get_markup")
    def test_update_summary_tables_ocp(self, mock_markup, mock_rate_map, mock_charge_info, mock_view, mock_chain):
        """Test that the summary table task runs."""
        markup = {}
        mem_rate = {"tiered_rates": [{"value": "1.5", "unit": "USD"}]}
        cpu_rate = {"tiered_rates": [{"value": "2.5", "unit": "USD"}]}
        rate_metric_map = {"cpu_core_usage_per_hour": cpu_rate, "memory_gb_usage_per_hour": mem_rate}

        mock_markup.return_value = markup
        mock_rate_map.return_value = rate_metric_map

        provider = Provider.PROVIDER_OCP
        provider_ocp_uuid = self.ocp_test_provider_uuid

        daily_table_name = OCP_REPORT_TABLE_MAP["line_item_daily"]
        start_date = self.start_date.replace(
            day=1, hour=0, minute=0, second=0, microsecond=0
        ) + relativedelta.relativedelta(months=-1)
        end_date = start_date + timedelta(days=10)

        with schema_context(self.schema):
            daily_query = self.ocp_accessor._get_db_obj_query(daily_table_name)

            initial_daily_count = daily_query.count()

        self.assertEqual(initial_daily_count, 0)
        update_summary_tables(self.schema, provider, provider_ocp_uuid, start_date, end_date)

        with schema_context(self.schema):
            self.assertNotEqual(daily_query.count(), initial_daily_count)

        update_charge_info(
            schema_name=self.schema, provider_uuid=provider_ocp_uuid, start_date=start_date, end_date=end_date
        )

        table_name = OCP_REPORT_TABLE_MAP["line_item_daily_summary"]
        with ProviderDBAccessor(provider_ocp_uuid) as provider_accessor:
            provider_obj = provider_accessor.get_provider()

        usage_period_qry = self.ocp_accessor.get_usage_period_query_by_provider(provider_obj.uuid)
        with schema_context(self.schema):
            cluster_id = usage_period_qry.first().cluster_id

            items = self.ocp_accessor._get_db_obj_query(table_name).filter(cluster_id=cluster_id)
            for item in items:
                self.assertIsNotNone(item.pod_charge_memory_gigabyte_hours)
                self.assertIsNotNone(item.pod_charge_cpu_core_hours)

            storage_daily_name = OCP_REPORT_TABLE_MAP["storage_line_item_daily"]

            items = self.ocp_accessor._get_db_obj_query(storage_daily_name).filter(cluster_id=cluster_id)
            for item in items:
                self.assertIsNotNone(item.volume_request_storage_byte_seconds)
                self.assertIsNotNone(item.persistentvolumeclaim_usage_byte_seconds)

            storage_summary_name = OCP_REPORT_TABLE_MAP["line_item_daily_summary"]
            items = self.ocp_accessor._get_db_obj_query(storage_summary_name).filter(
                cluster_id=cluster_id, data_source="Storage"
            )
            for item in items:
                self.assertIsNotNone(item.volume_request_storage_gigabyte_months)
                self.assertIsNotNone(item.persistentvolumeclaim_usage_gigabyte_months)

        mock_chain.return_value.apply_async.assert_called()

    @patch("masu.processor.tasks.update_charge_info")
    @patch("masu.database.cost_model_db_accessor.CostModelDBAccessor.get_memory_gb_usage_per_hour_rates")
    @patch("masu.database.cost_model_db_accessor.CostModelDBAccessor.get_cpu_core_usage_per_hour_rates")
    def test_update_summary_tables_ocp_end_date(self, mock_cpu_rate, mock_mem_rate, mock_charge_info):
        """Test that the summary table task respects a date range."""
        mock_cpu_rate.return_value = 1.5
        mock_mem_rate.return_value = 2.5
        provider = Provider.PROVIDER_OCP
        provider_ocp_uuid = self.ocp_test_provider_uuid
        ce_table_name = OCP_REPORT_TABLE_MAP["report"]
        daily_table_name = OCP_REPORT_TABLE_MAP["line_item_daily"]

        start_date = self.start_date.replace(
            day=1, hour=0, minute=0, second=0, microsecond=0
        ) + relativedelta.relativedelta(months=-1)

        end_date = start_date + timedelta(days=10)
        end_date = end_date.replace(hour=23, minute=59, second=59)

        daily_table = getattr(self.ocp_accessor.report_schema, daily_table_name)
        ce_table = getattr(self.ocp_accessor.report_schema, ce_table_name)

        with schema_context(self.schema):
            ce_start_date = ce_table.objects.filter(interval_start__gte=start_date).aggregate(Min("interval_start"))[
                "interval_start__min"
            ]

            ce_end_date = ce_table.objects.filter(interval_start__lte=end_date).aggregate(Max("interval_start"))[
                "interval_start__max"
            ]

        # The summary tables will only include dates where there is data
        expected_start_date = max(start_date, ce_start_date)
        expected_start_date = expected_start_date.replace(hour=0, minute=0, second=0, microsecond=0)
        expected_end_date = min(end_date, ce_end_date)
        expected_end_date = expected_end_date.replace(hour=0, minute=0, second=0, microsecond=0)

        update_summary_tables(self.schema, provider, provider_ocp_uuid, start_date, end_date)
        with schema_context(self.schema):
            daily_entry = daily_table.objects.all().aggregate(Min("usage_start"), Max("usage_end"))
            result_start_date = daily_entry["usage_start__min"]
            result_end_date = daily_entry["usage_end__max"]

        self.assertEqual(result_start_date, expected_start_date)
        self.assertEqual(result_end_date, expected_end_date)

    @patch("masu.processor.tasks.update_summary_tables")
    def test_get_report_data_for_all_providers(self, mock_update):
        """Test GET report_data endpoint with provider_uuid=*."""
        start_date = date.today()
        update_all_summary_tables(start_date)

        mock_update.delay.assert_called_with(ANY, ANY, ANY, str(start_date), ANY)

    def test_refresh_materialized_views(self):
        """Test that materialized views are refreshed."""
        manifest_dict = {
            "assembly_id": "12345",
            "billing_period_start_datetime": DateAccessor().today_with_timezone("UTC"),
            "num_total_files": 2,
            "provider_uuid": self.aws_provider_uuid,
            "task": "170653c0-3e66-4b7e-a764-336496d7ca5a",
        }
        fake_aws = FakeAWSCostData(self.aws_provider)
        generator = AWSReportDataGenerator(self.tenant)
        generator.add_data_to_tenant(fake_aws)

        with ReportManifestDBAccessor() as manifest_accessor:
            manifest = manifest_accessor.add(**manifest_dict)
            manifest.save()

        refresh_materialized_views(self.schema, Provider.PROVIDER_AWS, manifest_id=manifest.id)

        views_to_check = [view for view in AWS_MATERIALIZED_VIEWS if "Cost" in view._meta.db_table]

        with schema_context(self.schema):
            for view in views_to_check:
                self.assertNotEqual(view.objects.count(), 0)

        with ReportManifestDBAccessor() as manifest_accessor:
            manifest = manifest_accessor.get_manifest_by_id(manifest.id)
            self.assertIsNotNone(manifest.manifest_completed_datetime)

    def test_vacuum_schema(self):
        """Test that the vacuum schema task runs."""
        logging.disable(logging.NOTSET)
        expected = "INFO:masu.processor.tasks:VACUUM"
        with self.assertLogs("masu.processor.tasks", level="INFO") as logger:
            vacuum_schema(self.schema)
            self.assertIn(expected, logger.output)
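
A pattern worth calling out in these tests: stacked @patch decorators hand their mocks to the test method bottom-up, which is why test_update_summary_tables_aws receives mock_charge_info (the decorator closest to the function) before mock_views and mock_chain. The minimal sketch below, patching two stdlib stand-in targets rather than anything from masu, shows the ordering on its own.

# Minimal sketch of the stacked-@patch ordering used throughout these tests:
# the decorator closest to the function is injected first, so the mock argument
# list reads bottom-up. The patched targets are stdlib stand-ins, not masu code.
import os
import unittest
from unittest.mock import patch


class PatchOrderingTest(unittest.TestCase):
    @patch("os.path.isdir")    # outermost decorator -> injected last  -> mock_isdir
    @patch("os.path.exists")   # closest to the def  -> injected first -> mock_exists
    def test_patch_order(self, mock_exists, mock_isdir):
        mock_exists.return_value = True
        mock_isdir.return_value = False
        self.assertTrue(os.path.exists("/anywhere"))
        self.assertFalse(os.path.isdir("/anywhere"))
        mock_exists.assert_called_once_with("/anywhere")
        mock_isdir.assert_called_once_with("/anywhere")


if __name__ == "__main__":
    unittest.main()
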
Example #3
class TestUpdateSummaryTablesTask(MasuTestCase):
    """Test cases for Processor summary table Celery tasks."""
    @classmethod
    def setUpClass(cls):
        """Set up for the class."""
        super().setUpClass()
        cls.aws_tables = list(AWS_CUR_TABLE_MAP.values())
        cls.ocp_tables = list(OCP_REPORT_TABLE_MAP.values())
        cls.all_tables = list(AWS_CUR_TABLE_MAP.values()) + list(
            OCP_REPORT_TABLE_MAP.values())

        cls.creator = ReportObjectCreator(cls.schema)

    def setUp(self):
        """Set up each test."""
        super().setUp()
        self.aws_accessor = AWSReportDBAccessor(schema=self.schema)
        self.ocp_accessor = OCPReportDBAccessor(schema=self.schema)

        # Populate some line item data so that the summary tables
        # have something to pull from
        self.start_date = DateHelper().today.replace(day=1)

    @patch("masu.processor.tasks.chain")
    @patch("masu.processor.tasks.refresh_materialized_views")
    @patch("masu.processor.tasks.update_cost_model_costs")
    def test_update_summary_tables_aws(self, mock_charge_info, mock_views,
                                       mock_chain):
        """Test that the summary table task runs."""
        provider = Provider.PROVIDER_AWS
        provider_aws_uuid = self.aws_provider_uuid

        daily_table_name = AWS_CUR_TABLE_MAP["line_item_daily"]
        summary_table_name = AWS_CUR_TABLE_MAP["line_item_daily_summary"]
        start_date = self.start_date.replace(
            day=1) + relativedelta.relativedelta(months=-1)

        with schema_context(self.schema):
            daily_query = self.aws_accessor._get_db_obj_query(daily_table_name)
            summary_query = self.aws_accessor._get_db_obj_query(
                summary_table_name)
            daily_query.delete()
            summary_query.delete()

            initial_daily_count = daily_query.count()
            initial_summary_count = summary_query.count()

        self.assertEqual(initial_daily_count, 0)
        self.assertEqual(initial_summary_count, 0)

        update_summary_tables(self.schema, provider, provider_aws_uuid,
                              start_date)

        with schema_context(self.schema):
            self.assertNotEqual(daily_query.count(), initial_daily_count)
            self.assertNotEqual(summary_query.count(), initial_summary_count)

        mock_chain.return_value.apply_async.assert_called()

    @patch("masu.processor.tasks.update_cost_model_costs")
    def test_update_summary_tables_aws_end_date(self, mock_charge_info):
        """Test that the summary table task respects a date range."""
        provider = Provider.PROVIDER_AWS_LOCAL
        provider_aws_uuid = self.aws_provider_uuid
        ce_table_name = AWS_CUR_TABLE_MAP["cost_entry"]
        daily_table_name = AWS_CUR_TABLE_MAP["line_item_daily"]
        summary_table_name = AWS_CUR_TABLE_MAP["line_item_daily_summary"]

        start_date = DateHelper().last_month_start

        end_date = DateHelper().last_month_end

        daily_table = getattr(self.aws_accessor.report_schema,
                              daily_table_name)
        summary_table = getattr(self.aws_accessor.report_schema,
                                summary_table_name)
        ce_table = getattr(self.aws_accessor.report_schema, ce_table_name)
        with schema_context(self.schema):
            daily_table.objects.all().delete()
            summary_table.objects.all().delete()
            ce_start_date = ce_table.objects.filter(
                interval_start__gte=start_date.date()).aggregate(
                    Min("interval_start"))["interval_start__min"]
            ce_end_date = ce_table.objects.filter(
                interval_start__lte=end_date.date()).aggregate(
                    Max("interval_start"))["interval_start__max"]

        # The summary tables will only include dates where there is data
        expected_start_date = max(start_date, ce_start_date)
        expected_start_date = expected_start_date.replace(hour=0,
                                                          minute=0,
                                                          second=0,
                                                          microsecond=0)
        expected_end_date = min(end_date, ce_end_date)
        expected_end_date = expected_end_date.replace(hour=0,
                                                      minute=0,
                                                      second=0,
                                                      microsecond=0)

        update_summary_tables(self.schema, provider, provider_aws_uuid,
                              start_date, end_date)

        with schema_context(self.schema):
            daily_entry = daily_table.objects.all().aggregate(
                Min("usage_start"), Max("usage_end"))
            result_start_date = daily_entry["usage_start__min"]
            result_end_date = daily_entry["usage_end__max"]

        self.assertEqual(result_start_date, expected_start_date.date())
        self.assertEqual(result_end_date, expected_end_date.date())

        with schema_context(self.schema):
            summary_entry = summary_table.objects.all().aggregate(
                Min("usage_start"), Max("usage_end"))
            result_start_date = summary_entry["usage_start__min"]
            result_end_date = summary_entry["usage_end__max"]

        self.assertEqual(result_start_date, expected_start_date.date())
        self.assertEqual(result_end_date, expected_end_date.date())

    @patch("masu.processor.tasks.chain")
    @patch("masu.processor.tasks.refresh_materialized_views")
    @patch("masu.processor.tasks.update_cost_model_costs")
    @patch("masu.processor.ocp.ocp_cost_model_cost_updater.CostModelDBAccessor"
           )
    def test_update_summary_tables_ocp(self, mock_cost_model, mock_charge_info,
                                       mock_view, mock_chain):
        """Test that the summary table task runs."""
        infrastructure_rates = {
            "cpu_core_usage_per_hour": 1.5,
            "memory_gb_usage_per_hour": 2.5,
            "storage_gb_usage_per_month": 0.5,
        }
        markup = {}

        mock_cost_model.return_value.__enter__.return_value.infrastructure_rates = infrastructure_rates
        mock_cost_model.return_value.__enter__.return_value.supplementary_rates = {}
        mock_cost_model.return_value.__enter__.return_value.markup = markup

        provider = Provider.PROVIDER_OCP
        provider_ocp_uuid = self.ocp_test_provider_uuid

        daily_table_name = OCP_REPORT_TABLE_MAP["line_item_daily"]
        start_date = DateHelper().last_month_start
        end_date = DateHelper().last_month_end

        with schema_context(self.schema):
            daily_query = self.ocp_accessor._get_db_obj_query(daily_table_name)
            daily_query.delete()

            initial_daily_count = daily_query.count()

        self.assertEqual(initial_daily_count, 0)
        update_summary_tables(self.schema, provider, provider_ocp_uuid,
                              start_date, end_date)

        with schema_context(self.schema):
            self.assertNotEqual(daily_query.count(), initial_daily_count)

        update_cost_model_costs(schema_name=self.schema,
                                provider_uuid=provider_ocp_uuid,
                                start_date=start_date,
                                end_date=end_date)

        table_name = OCP_REPORT_TABLE_MAP["line_item_daily_summary"]
        with ProviderDBAccessor(provider_ocp_uuid) as provider_accessor:
            provider_obj = provider_accessor.get_provider()

        usage_period_qry = self.ocp_accessor.get_usage_period_query_by_provider(
            provider_obj.uuid)
        with schema_context(self.schema):
            cluster_id = usage_period_qry.first().cluster_id

            items = self.ocp_accessor._get_db_obj_query(table_name).filter(
                usage_start__gte=start_date,
                usage_start__lte=end_date,
                cluster_id=cluster_id,
                data_source="Pod")
            for item in items:
                self.assertNotEqual(item.infrastructure_usage_cost.get("cpu"),
                                    0)
                self.assertNotEqual(
                    item.infrastructure_usage_cost.get("memory"), 0)

            storage_daily_name = OCP_REPORT_TABLE_MAP[
                "storage_line_item_daily"]

            items = self.ocp_accessor._get_db_obj_query(
                storage_daily_name).filter(cluster_id=cluster_id)
            for item in items:
                self.assertIsNotNone(item.volume_request_storage_byte_seconds)
                self.assertIsNotNone(
                    item.persistentvolumeclaim_usage_byte_seconds)

            storage_summary_name = OCP_REPORT_TABLE_MAP[
                "line_item_daily_summary"]
            items = self.ocp_accessor._get_db_obj_query(
                storage_summary_name).filter(cluster_id=cluster_id,
                                             data_source="Storage")
            for item in items:
                self.assertIsNotNone(
                    item.volume_request_storage_gigabyte_months)
                self.assertIsNotNone(
                    item.persistentvolumeclaim_usage_gigabyte_months)

        mock_chain.return_value.apply_async.assert_called()

    @patch("masu.processor.tasks.update_cost_model_costs")
    @patch(
        "masu.database.cost_model_db_accessor.CostModelDBAccessor.get_memory_gb_usage_per_hour_rates"
    )
    @patch(
        "masu.database.cost_model_db_accessor.CostModelDBAccessor.get_cpu_core_usage_per_hour_rates"
    )
    def test_update_summary_tables_ocp_end_date(self, mock_cpu_rate,
                                                mock_mem_rate,
                                                mock_charge_info):
        """Test that the summary table task respects a date range."""
        mock_cpu_rate.return_value = 1.5
        mock_mem_rate.return_value = 2.5
        provider = Provider.PROVIDER_OCP
        provider_ocp_uuid = self.ocp_test_provider_uuid
        ce_table_name = OCP_REPORT_TABLE_MAP["report"]
        daily_table_name = OCP_REPORT_TABLE_MAP["line_item_daily"]

        start_date = DateHelper().last_month_start
        end_date = DateHelper().last_month_end
        daily_table = getattr(self.ocp_accessor.report_schema,
                              daily_table_name)
        ce_table = getattr(self.ocp_accessor.report_schema, ce_table_name)

        with schema_context(self.schema):
            daily_table.objects.all().delete()
            ce_start_date = ce_table.objects.filter(
                interval_start__gte=start_date.date()).aggregate(
                    Min("interval_start"))["interval_start__min"]

            ce_end_date = ce_table.objects.filter(
                interval_start__lte=end_date.date()).aggregate(
                    Max("interval_start"))["interval_start__max"]

        # The summary tables will only include dates where there is data
        expected_start_date = max(start_date, ce_start_date)
        expected_end_date = min(end_date, ce_end_date)

        update_summary_tables(self.schema, provider, provider_ocp_uuid,
                              start_date, end_date)
        with schema_context(self.schema):
            daily_entry = daily_table.objects.all().aggregate(
                Min("usage_start"), Max("usage_end"))
            result_start_date = daily_entry["usage_start__min"]
            result_end_date = daily_entry["usage_end__max"]

        self.assertEqual(result_start_date, expected_start_date.date())
        self.assertEqual(result_end_date, expected_end_date.date())

    @patch("masu.processor.tasks.update_summary_tables")
    def test_get_report_data_for_all_providers(self, mock_update):
        """Test GET report_data endpoint with provider_uuid=*."""
        start_date = date.today()
        update_all_summary_tables(start_date)

        mock_update.delay.assert_called_with(ANY, ANY, ANY, str(start_date),
                                             ANY)

    def test_refresh_materialized_views(self):
        """Test that materialized views are refreshed."""
        manifest_dict = {
            "assembly_id": "12345",
            "billing_period_start_datetime": DateHelper().today,
            "num_total_files": 2,
            "provider_uuid": self.aws_provider_uuid,
            "task": "170653c0-3e66-4b7e-a764-336496d7ca5a",
        }

        with ReportManifestDBAccessor() as manifest_accessor:
            manifest = manifest_accessor.add(**manifest_dict)
            manifest.save()

        refresh_materialized_views(self.schema,
                                   Provider.PROVIDER_AWS,
                                   manifest_id=manifest.id)

        views_to_check = [
            view for view in AWS_MATERIALIZED_VIEWS
            if "Cost" in view._meta.db_table
        ]

        with schema_context(self.schema):
            for view in views_to_check:
                self.assertNotEqual(view.objects.count(), 0)

        with ReportManifestDBAccessor() as manifest_accessor:
            manifest = manifest_accessor.get_manifest_by_id(manifest.id)
            self.assertIsNotNone(manifest.manifest_completed_datetime)

    @patch("masu.processor.tasks.connection")
    def test_vacuum_schema(self, mock_conn):
        """Test that the vacuum schema task runs."""
        logging.disable(logging.NOTSET)
        mock_conn.cursor.return_value.__enter__.return_value.fetchall.return_value = [
            ("table", )
        ]
        expected = "INFO:masu.processor.tasks:VACUUM ANALYZE acct10001.table"
        with self.assertLogs("masu.processor.tasks", level="INFO") as logger:
            vacuum_schema(self.schema)
            self.assertIn(expected, logger.output)

    @patch("masu.processor.tasks.connection")
    def test_autovacuum_tune_schema_default_table(self, mock_conn):
        """Test that the autovacuum tuning runs."""
        logging.disable(logging.NOTSET)

        # Make sure that the AUTOVACUUM_TUNING environment variable is unset!
        if "AUTOVACUUM_TUNING" in os.environ:
            del os.environ["AUTOVACUUM_TUNING"]

        mock_conn.cursor.return_value.__enter__.return_value.fetchall.return_value = [
            ("cost_model", 20000000, {})
        ]
        expected = (
            "INFO:masu.processor.tasks:ALTER TABLE acct10001.cost_model set (autovacuum_vacuum_scale_factor = 0.01);"
        )
        with self.assertLogs("masu.processor.tasks", level="INFO") as logger:
            autovacuum_tune_schema(self.schema)
            self.assertIn(expected, logger.output)

        mock_conn.cursor.return_value.__enter__.return_value.fetchall.return_value = [
            ("cost_model", 2000000, {})
        ]
        expected = (
            "INFO:masu.processor.tasks:ALTER TABLE acct10001.cost_model set (autovacuum_vacuum_scale_factor = 0.02);"
        )
        with self.assertLogs("masu.processor.tasks", level="INFO") as logger:
            autovacuum_tune_schema(self.schema)
            self.assertIn(expected, logger.output)

        mock_conn.cursor.return_value.__enter__.return_value.fetchall.return_value = [
            ("cost_model", 200000, {})
        ]
        expected = (
            "INFO:masu.processor.tasks:ALTER TABLE acct10001.cost_model set (autovacuum_vacuum_scale_factor = 0.05);"
        )
        with self.assertLogs("masu.processor.tasks", level="INFO") as logger:
            autovacuum_tune_schema(self.schema)
            self.assertIn(expected, logger.output)

        mock_conn.cursor.return_value.__enter__.return_value.fetchall.return_value = [
            ("cost_model", 200000, {
                "autovacuum_vacuum_scale_factor": Decimal("0.05")
            })
        ]
        expected = "INFO:masu.processor.tasks:Altered autovacuum_vacuum_scale_factor on 0 tables"
        with self.assertLogs("masu.processor.tasks", level="INFO") as logger:
            autovacuum_tune_schema(self.schema)
            self.assertIn(expected, logger.output)

        mock_conn.cursor.return_value.__enter__.return_value.fetchall.return_value = [
            ("cost_model", 20000, {
                "autovacuum_vacuum_scale_factor": Decimal("0.02")
            })
        ]
        expected = "INFO:masu.processor.tasks:ALTER TABLE acct10001.cost_model reset (autovacuum_vacuum_scale_factor);"
        with self.assertLogs("masu.processor.tasks", level="INFO") as logger:
            autovacuum_tune_schema(self.schema)
            self.assertIn(expected, logger.output)

    @patch("masu.processor.tasks.connection")
    def test_autovacuum_tune_schema_custom_table(self, mock_conn):
        """Test that the autovacuum tuning runs."""
        logging.disable(logging.NOTSET)
        scale_table = [(10000000, "0.0001"), (1000000, "0.004"),
                       (100000, "0.011")]
        os.environ["AUTOVACUUM_TUNING"] = json.dumps(scale_table)

        mock_conn.cursor.return_value.__enter__.return_value.fetchall.return_value = [
            ("cost_model", 20000000, {})
        ]
        expected = (
            "INFO:masu.processor.tasks:ALTER TABLE acct10001.cost_model set (autovacuum_vacuum_scale_factor = 0.0001);"
        )
        with self.assertLogs("masu.processor.tasks", level="INFO") as logger:
            autovacuum_tune_schema(self.schema)
            self.assertIn(expected, logger.output)

        mock_conn.cursor.return_value.__enter__.return_value.fetchall.return_value = [
            ("cost_model", 2000000, {})
        ]
        expected = (
            "INFO:masu.processor.tasks:ALTER TABLE acct10001.cost_model set (autovacuum_vacuum_scale_factor = 0.004);"
        )
        with self.assertLogs("masu.processor.tasks", level="INFO") as logger:
            autovacuum_tune_schema(self.schema)
            self.assertIn(expected, logger.output)

        mock_conn.cursor.return_value.__enter__.return_value.fetchall.return_value = [
            ("cost_model", 200000, {})
        ]
        expected = (
            "INFO:masu.processor.tasks:ALTER TABLE acct10001.cost_model set (autovacuum_vacuum_scale_factor = 0.011);"
        )
        with self.assertLogs("masu.processor.tasks", level="INFO") as logger:
            autovacuum_tune_schema(self.schema)
            self.assertIn(expected, logger.output)

        mock_conn.cursor.return_value.__enter__.return_value.fetchall.return_value = [
            ("cost_model", 200000, {
                "autovacuum_vacuum_scale_factor": Decimal("0.011")
            })
        ]
        expected = "INFO:masu.processor.tasks:Altered autovacuum_vacuum_scale_factor on 0 tables"
        with self.assertLogs("masu.processor.tasks", level="INFO") as logger:
            autovacuum_tune_schema(self.schema)
            self.assertIn(expected, logger.output)

        mock_conn.cursor.return_value.__enter__.return_value.fetchall.return_value = [
            ("cost_model", 20000, {
                "autovacuum_vacuum_scale_factor": Decimal("0.004")
            })
        ]
        expected = "INFO:masu.processor.tasks:ALTER TABLE acct10001.cost_model reset (autovacuum_vacuum_scale_factor);"
        with self.assertLogs("masu.processor.tasks", level="INFO") as logger:
            autovacuum_tune_schema(self.schema)
            self.assertIn(expected, logger.output)

        del os.environ["AUTOVACUUM_TUNING"]

    @patch("masu.processor.tasks.connection")
    def test_autovacuum_tune_schema_manual_setting(self, mock_conn):
        """Test that the autovacuum tuning runs."""
        logging.disable(logging.NOTSET)

        # Make sure that the AUTOVACUUM_TUNING environment variable is unset!
        if "AUTOVACUUM_TUNING" in os.environ:
            del os.environ["AUTOVACUUM_TUNING"]

        mock_conn.cursor.return_value.__enter__.return_value.fetchall.return_value = [
            ("cost_model", 200000, {
                "autovacuum_vacuum_scale_factor": Decimal("0.04")
            })
        ]
        expected = "INFO:masu.processor.tasks:Altered autovacuum_vacuum_scale_factor on 0 tables"
        with self.assertLogs("masu.processor.tasks", level="INFO") as logger:
            autovacuum_tune_schema(self.schema)
            self.assertIn(expected, logger.output)

        mock_conn.cursor.return_value.__enter__.return_value.fetchall.return_value = [
            ("cost_model", 200000, {
                "autovacuum_vacuum_scale_factor": Decimal("0.06")
            })
        ]
        expected = (
            "INFO:masu.processor.tasks:ALTER TABLE acct10001.cost_model set (autovacuum_vacuum_scale_factor = 0.05);"
        )
        with self.assertLogs("masu.processor.tasks", level="INFO") as logger:
            autovacuum_tune_schema(self.schema)
            self.assertIn(expected, logger.output)

    @patch("masu.processor.tasks.connection")
    def test_autovacuum_tune_schema_invalid_setting(self, mock_conn):
        """Test that the autovacuum tuning runs."""
        logging.disable(logging.NOTSET)

        # Make sure that the AUTOVACUUM_TUNING environment variable is unset!
        if "AUTOVACUUM_TUNING" in os.environ:
            del os.environ["AUTOVACUUM_TUNING"]

        # This invalid setting should be treated as though there was no setting
        mock_conn.cursor.return_value.__enter__.return_value.fetchall.return_value = [
            ("cost_model", 20000000, {
                "autovacuum_vacuum_scale_factor": ""
            })
        ]
        expected = (
            "INFO:masu.processor.tasks:ALTER TABLE acct10001.cost_model set (autovacuum_vacuum_scale_factor = 0.01);"
        )
        with self.assertLogs("masu.processor.tasks", level="INFO") as logger:
            autovacuum_tune_schema(self.schema)
            self.assertIn(expected, logger.output)

    def test_autovacuum_tune_schedule(self):
        """Test that the autovacuum tuning task is scheduled an hour before the vacuum task."""
        vh = next(
            iter(koku_celery.app.conf.beat_schedule["vacuum-schemas"]
                 ["schedule"].hour))
        avh = next(
            iter(koku_celery.app.conf.beat_schedule["autovacuum-tune-schemas"]
                 ["schedule"].hour))
        self.assertTrue(avh == (23 if vh == 0 else (vh - 1)))
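
The autovacuum_tune_schema cases in this example pin down a threshold lookup: with no AUTOVACUUM_TUNING set, tables with at least 10M, 1M, or 100k live rows get scale factors of 0.01, 0.02, or 0.05 respectively, smaller tables have the setting reset, and the environment variable can supply a JSON list of (threshold, scale_factor) pairs to override the defaults. The sketch below reconstructs just that lookup from the expected log lines in test_autovacuum_tune_schema_default_table and test_autovacuum_tune_schema_custom_table; it is an illustration of the rule the tests assert, not the masu.processor.tasks implementation.

# Illustrative reconstruction of the threshold lookup implied by the autovacuum
# tests above; not the actual masu.processor.tasks implementation.
import json
import os

# Default thresholds match test_autovacuum_tune_schema_default_table:
# 10M+ rows -> 0.01, 1M+ -> 0.02, 100k+ -> 0.05.
DEFAULT_SCALE_TABLE = [(10_000_000, "0.01"), (1_000_000, "0.02"), (100_000, "0.05")]


def pick_scale_factor(row_count, env=os.environ):
    """Return the autovacuum_vacuum_scale_factor to set, or None for small tables."""
    raw = env.get("AUTOVACUUM_TUNING")
    scale_table = [tuple(entry) for entry in json.loads(raw)] if raw else DEFAULT_SCALE_TABLE
    # Scan thresholds from largest to smallest; the first threshold the table meets wins.
    for threshold, scale in sorted(scale_table, reverse=True):
        if row_count >= threshold:
            return scale
    return None


# The custom scale table from test_autovacuum_tune_schema_custom_table.
custom_env = {"AUTOVACUUM_TUNING": json.dumps([(10_000_000, "0.0001"), (1_000_000, "0.004"), (100_000, "0.011")])}
assert pick_scale_factor(20_000_000) == "0.01"                   # default table, largest bucket
assert pick_scale_factor(2_000_000, env=custom_env) == "0.004"   # custom scale table
assert pick_scale_factor(20_000) is None                         # below every threshold -> reset
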
Example #4
class TestUpdateSummaryTablesTask(MasuTestCase):
    """Test cases for Processor summary table Celery tasks."""

    @classmethod
    def setUpClass(cls):
        """Setup for the class."""
        super().setUpClass()
        cls.aws_tables = list(AWS_CUR_TABLE_MAP.values())
        cls.ocp_tables = list(OCP_REPORT_TABLE_MAP.values())
        cls.all_tables = list(AWS_CUR_TABLE_MAP.values()) + \
            list(OCP_REPORT_TABLE_MAP.values())
        report_common_db = ReportingCommonDBAccessor()
        cls.column_map = report_common_db.column_map
        report_common_db.close_session()

    @classmethod
    def tearDownClass(cls):
        """Tear down the test class."""
        super().tearDownClass()

    def setUp(self):
        """Set up each test."""
        super().setUp()
        self.schema_name = self.test_schema
        self.aws_accessor = AWSReportDBAccessor(schema=self.schema_name,
                                                column_map=self.column_map)
        self.ocp_accessor = OCPReportDBAccessor(schema=self.schema_name,
                                                column_map=self.column_map)

        self.creator = ReportObjectCreator(
            self.aws_accessor,
            self.column_map,
            self.aws_accessor.report_schema.column_types
        )

        # Populate some line item data so that the summary tables
        # have something to pull from
        self.start_date = DateAccessor().today_with_timezone('UTC').replace(day=1)
        last_month = self.start_date - relativedelta.relativedelta(months=1)

        for cost_entry_date in (self.start_date, last_month):
            bill = self.creator.create_cost_entry_bill(cost_entry_date)
            cost_entry = self.creator.create_cost_entry(bill, cost_entry_date)
            for family in ['Storage', 'Compute Instance', 'Database Storage',
                           'Database Instance']:
                product = self.creator.create_cost_entry_product(family)
                pricing = self.creator.create_cost_entry_pricing()
                reservation = self.creator.create_cost_entry_reservation()
                self.creator.create_cost_entry_line_item(
                    bill,
                    cost_entry,
                    product,
                    pricing,
                    reservation
                )
        provider_ocp_uuid = self.ocp_test_provider_uuid

        with ProviderDBAccessor(provider_uuid=provider_ocp_uuid) as provider_accessor:
            provider_id = provider_accessor.get_provider().id

        cluster_id = self.ocp_provider_resource_name
        for period_date in (self.start_date, last_month):
            period = self.creator.create_ocp_report_period(period_date, provider_id=provider_id,
                                                           cluster_id=cluster_id)
            report = self.creator.create_ocp_report(period, period_date)
            for _ in range(25):
                self.creator.create_ocp_usage_line_item(period, report)

    def tearDown(self):
        """Return the database to a pre-test state."""
        for table_name in self.aws_tables:
            tables = self.aws_accessor._get_db_obj_query(table_name).all()
            for table in tables:
                self.aws_accessor._session.delete(table)
        self.aws_accessor.commit()
        for table_name in self.ocp_tables:
            tables = self.ocp_accessor._get_db_obj_query(table_name).all()
            for table in tables:
                self.ocp_accessor._session.delete(table)
        self.ocp_accessor.commit()

        self.aws_accessor._session.rollback()
        self.aws_accessor.close_connections()
        self.aws_accessor.close_session()
        self.ocp_accessor.close_connections()
        self.ocp_accessor.close_session()
        super().tearDown()

    @patch('masu.processor.tasks.update_charge_info')
    def test_update_summary_tables_aws(self, mock_charge_info):
        """Test that the summary table task runs."""
        provider = 'AWS'
        provider_aws_uuid = self.aws_test_provider_uuid

        daily_table_name = AWS_CUR_TABLE_MAP['line_item_daily']
        summary_table_name = AWS_CUR_TABLE_MAP['line_item_daily_summary']
        start_date = self.start_date.replace(day=1) + relativedelta.relativedelta(months=-1)

        daily_query = self.aws_accessor._get_db_obj_query(daily_table_name)
        summary_query = self.aws_accessor._get_db_obj_query(summary_table_name)

        initial_daily_count = daily_query.count()
        initial_summary_count = summary_query.count()

        self.assertEqual(initial_daily_count, 0)
        self.assertEqual(initial_summary_count, 0)

        update_summary_tables(self.schema_name, provider, provider_aws_uuid, start_date)

        self.assertNotEqual(daily_query.count(), initial_daily_count)
        self.assertNotEqual(summary_query.count(), initial_summary_count)

    @patch('masu.processor.tasks.update_charge_info')
    def test_update_summary_tables_aws_end_date(self, mock_charge_info):
        """Test that the summary table task respects a date range."""
        provider = 'AWS'
        provider_aws_uuid = self.aws_test_provider_uuid
        ce_table_name = AWS_CUR_TABLE_MAP['cost_entry']
        daily_table_name = AWS_CUR_TABLE_MAP['line_item_daily']
        summary_table_name = AWS_CUR_TABLE_MAP['line_item_daily_summary']

        start_date = self.start_date.replace(day=1,
                                             hour=0,
                                             minute=0,
                                             second=0,
                                             microsecond=0) + relativedelta.relativedelta(months=-1)

        end_date = start_date + timedelta(days=10)
        end_date = end_date.replace(hour=23, minute=59, second=59)

        daily_table = getattr(self.aws_accessor.report_schema, daily_table_name)
        summary_table = getattr(self.aws_accessor.report_schema, summary_table_name)
        ce_table = getattr(self.aws_accessor.report_schema, ce_table_name)

        ce_start_date = self.aws_accessor._session\
            .query(func.min(ce_table.interval_start))\
            .filter(ce_table.interval_start >= start_date).first()[0]

        ce_end_date = self.aws_accessor._session\
            .query(func.max(ce_table.interval_start))\
            .filter(ce_table.interval_start <= end_date).first()[0]

        # The summary tables will only include dates where there is data
        expected_start_date = max(start_date, ce_start_date)
        expected_start_date = expected_start_date.replace(hour=0, minute=0,
                                                          second=0,
                                                          microsecond=0)
        expected_end_date = min(end_date, ce_end_date)
        expected_end_date = expected_end_date.replace(hour=0, minute=0,
                                                      second=0, microsecond=0)

        update_summary_tables(self.schema_name, provider, provider_aws_uuid, start_date, end_date)

        result_start_date, result_end_date = self.aws_accessor._session.query(
            func.min(daily_table.usage_start),
            func.max(daily_table.usage_end)
        ).first()

        self.assertEqual(result_start_date, expected_start_date)
        self.assertEqual(result_end_date, expected_end_date)

        result_start_date, result_end_date = self.aws_accessor._session.query(
            func.min(summary_table.usage_start),
            func.max(summary_table.usage_end)
        ).first()

        self.assertEqual(result_start_date, expected_start_date)
        self.assertEqual(result_end_date, expected_end_date)

    @patch('masu.processor.tasks.update_charge_info')
    @patch('masu.database.ocp_rate_db_accessor.OCPRateDBAccessor.get_memory_gb_usage_per_hour_rates')
    @patch('masu.database.ocp_rate_db_accessor.OCPRateDBAccessor.get_cpu_core_usage_per_hour_rates')
    def test_update_summary_tables_ocp(self, mock_cpu_rate, mock_mem_rate, mock_charge_info):
        """Test that the summary table task runs."""
        mem_rate = {'tiered_rate': [{'value': '1.5', 'unit': 'USD'}]}
        cpu_rate = {'tiered_rate': [{'value': '2.5', 'unit': 'USD'}]}

        mock_cpu_rate.return_value = cpu_rate
        mock_mem_rate.return_value = mem_rate

        provider = 'OCP'
        provider_ocp_uuid = self.ocp_test_provider_uuid

        daily_table_name = OCP_REPORT_TABLE_MAP['line_item_daily']
        start_date = self.start_date.replace(day=1) + relativedelta.relativedelta(months=-1)

        daily_query = self.ocp_accessor._get_db_obj_query(daily_table_name)

        initial_daily_count = daily_query.count()

        self.assertEqual(initial_daily_count, 0)
        update_summary_tables(self.schema_name, provider, provider_ocp_uuid, start_date)

        self.assertNotEqual(daily_query.count(), initial_daily_count)

        update_charge_info(schema_name=self.test_schema, provider_uuid=provider_ocp_uuid)

        table_name = OCP_REPORT_TABLE_MAP['line_item_daily_summary']
        with ProviderDBAccessor(provider_ocp_uuid) as provider_accessor:
            provider_obj = provider_accessor.get_provider()

        usage_period_qry = self.ocp_accessor.get_usage_period_query_by_provider(provider_obj.id)
        cluster_id = usage_period_qry.first().cluster_id

        items = self.ocp_accessor._get_db_obj_query(table_name).filter_by(cluster_id=cluster_id)
        for item in items:
            self.assertIsNotNone(item.pod_charge_memory_gigabyte_hours)
            self.assertIsNotNone(item.pod_charge_cpu_core_hours)

        storage_daily_name = OCP_REPORT_TABLE_MAP['storage_line_item_daily']
        items = self.ocp_accessor._get_db_obj_query(storage_daily_name).filter_by(cluster_id=cluster_id)
        for item in items:
            self.assertIsNotNone(item.volume_request_storage_byte_seconds)
            self.assertIsNotNone(item.persistentvolumeclaim_usage_byte_seconds)

        storage_summary_name = OCP_REPORT_TABLE_MAP['storage_line_item_daily_summary']
        items = self.ocp_accessor._get_db_obj_query(storage_summary_name).filter_by(cluster_id=cluster_id)
        for item in items:
            self.assertIsNotNone(item.volume_request_storage_gigabyte_months)
            self.assertIsNotNone(item.persistentvolumeclaim_usage_gigabyte_months)

    @patch('masu.processor.tasks.update_charge_info')
    @patch('masu.database.ocp_rate_db_accessor.OCPRateDBAccessor.get_memory_gb_usage_per_hour_rates')
    @patch('masu.database.ocp_rate_db_accessor.OCPRateDBAccessor.get_cpu_core_usage_per_hour_rates')
    def test_update_summary_tables_ocp_end_date(self, mock_cpu_rate, mock_mem_rate, mock_charge_info):
        """Test that the summary table task respects a date range."""
        mock_cpu_rate.return_value = 1.5
        mock_mem_rate.return_value = 2.5
        provider = 'OCP'
        provider_ocp_uuid = self.ocp_test_provider_uuid
        ce_table_name = OCP_REPORT_TABLE_MAP['report']
        daily_table_name = OCP_REPORT_TABLE_MAP['line_item_daily']

        start_date = self.start_date.replace(day=1,
                                             hour=0, minute=0, second=0,
                                             microsecond=0) + relativedelta.relativedelta(months=-1)

        end_date = start_date + timedelta(days=10)
        end_date = end_date.replace(hour=23, minute=59, second=59)

        daily_table = getattr(self.ocp_accessor.report_schema, daily_table_name)
        ce_table = getattr(self.ocp_accessor.report_schema, ce_table_name)

        ce_start_date = self.ocp_accessor._session\
            .query(func.min(ce_table.interval_start))\
            .filter(ce_table.interval_start >= start_date).first()[0]

        ce_end_date = self.ocp_accessor._session\
            .query(func.max(ce_table.interval_start))\
            .filter(ce_table.interval_start <= end_date).first()[0]

        # The summary tables will only include dates where there is data
        expected_start_date = max(start_date, ce_start_date)
        expected_start_date = expected_start_date.replace(hour=0, minute=0,
                                                          second=0,
                                                          microsecond=0)
        expected_end_date = min(end_date, ce_end_date)
        expected_end_date = expected_end_date.replace(hour=0, minute=0,
                                                      second=0, microsecond=0)

        update_summary_tables(self.schema_name, provider, provider_ocp_uuid, start_date, end_date)
        result_start_date, result_end_date = self.ocp_accessor._session.query(
            func.min(daily_table.usage_start),
            func.max(daily_table.usage_end)
        ).first()

        self.assertEqual(result_start_date, expected_start_date)
        self.assertEqual(result_end_date, expected_end_date)

    def test_update_charge_info_aws(self):
        """Test that update_charge_info is not called for AWS."""
        update_charge_info(schema_name=self.test_schema,
                           provider_uuid=self.aws_test_provider_uuid)
        # FIXME: no asserts on test

    @patch('masu.processor.tasks.update_summary_tables')
    def test_get_report_data_for_all_providers(self, mock_update):
        """Test GET report_data endpoint with provider_uuid=*."""
        start_date = date.today()
        update_all_summary_tables(start_date)

        mock_update.delay.assert_called_with(
            ANY, ANY, ANY, str(start_date), ANY)
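
test_get_report_data_for_all_providers patches update_summary_tables and then only checks that the stringified start date shows up in the .delay() call; unittest.mock.ANY compares equal to everything else. The standalone sketch below shows that matcher in isolation, with a MagicMock standing in for the patched Celery task rather than the real masu task.

# Standalone sketch of the mock.ANY pattern from
# test_get_report_data_for_all_providers; fake_task is a MagicMock standing in
# for the patched Celery task, not masu.processor.tasks.update_summary_tables.
from datetime import date
from unittest.mock import ANY, MagicMock

fake_task = MagicMock()
start_date = date.today()

# Pretend the code under test queued one async run per provider.
fake_task.delay("acct10001", "AWS", "some-provider-uuid", str(start_date), None)

# ANY compares equal to anything, so only the stringified date is actually pinned down.
fake_task.delay.assert_called_with(ANY, ANY, ANY, str(start_date), ANY)
print("assertion passed")
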
Example #5
class TestUpdateSummaryTablesTask(MasuTestCase):
    """Test cases for Processor summary table Celery tasks."""
    @classmethod
    def setUpClass(cls):
        """Setup for the class."""
        super().setUpClass()
        cls.aws_tables = list(AWS_CUR_TABLE_MAP.values())
        cls.ocp_tables = list(OCP_REPORT_TABLE_MAP.values())
        cls.all_tables = list(AWS_CUR_TABLE_MAP.values()) + list(
            OCP_REPORT_TABLE_MAP.values())
        with ReportingCommonDBAccessor() as report_common_db:
            cls.column_map = report_common_db.column_map

        cls.creator = ReportObjectCreator(cls.schema, cls.column_map)

    def setUp(self):
        """Set up each test."""
        super().setUp()
        self.aws_accessor = AWSReportDBAccessor(schema=self.schema,
                                                column_map=self.column_map)
        self.ocp_accessor = OCPReportDBAccessor(schema=self.schema,
                                                column_map=self.column_map)

        # Populate some line item data so that the summary tables
        # have something to pull from
        self.start_date = DateAccessor().today_with_timezone('UTC').replace(
            day=1)
        last_month = self.start_date - relativedelta.relativedelta(months=1)

        for cost_entry_date in (self.start_date, last_month):
            bill = self.creator.create_cost_entry_bill(
                provider_id=self.aws_provider.id, bill_date=cost_entry_date)
            cost_entry = self.creator.create_cost_entry(bill, cost_entry_date)
            for family in [
                    'Storage',
                    'Compute Instance',
                    'Database Storage',
                    'Database Instance',
            ]:
                product = self.creator.create_cost_entry_product(family)
                pricing = self.creator.create_cost_entry_pricing()
                reservation = self.creator.create_cost_entry_reservation()
                self.creator.create_cost_entry_line_item(
                    bill, cost_entry, product, pricing, reservation)
        provider_ocp_uuid = self.ocp_test_provider_uuid

        with ProviderDBAccessor(
                provider_uuid=provider_ocp_uuid) as provider_accessor:
            provider_id = provider_accessor.get_provider().id

        cluster_id = self.ocp_provider_resource_name
        for period_date in (self.start_date, last_month):
            period = self.creator.create_ocp_report_period(
                period_date, provider_id=provider_id, cluster_id=cluster_id)
            report = self.creator.create_ocp_report(period, period_date)
            for _ in range(25):
                self.creator.create_ocp_usage_line_item(period, report)

    @patch('masu.processor.tasks.update_cost_summary_table')
    @patch('masu.processor.tasks.update_charge_info')
    def test_update_summary_tables_aws(self, mock_charge_info,
                                       mock_cost_summary):
        """Test that the summary table task runs."""
        provider = 'AWS'
        provider_aws_uuid = self.aws_test_provider_uuid

        daily_table_name = AWS_CUR_TABLE_MAP['line_item_daily']
        summary_table_name = AWS_CUR_TABLE_MAP['line_item_daily_summary']
        start_date = self.start_date.replace(
            day=1) + relativedelta.relativedelta(months=-1)

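        # Both tables should be empty before the summary task runs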
        with schema_context(self.schema):
            daily_query = self.aws_accessor._get_db_obj_query(daily_table_name)
            summary_query = self.aws_accessor._get_db_obj_query(
                summary_table_name)

            initial_daily_count = daily_query.count()
            initial_summary_count = summary_query.count()

        self.assertEqual(initial_daily_count, 0)
        self.assertEqual(initial_summary_count, 0)

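        # Run the AWS summary task; both tables should gain rows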
        update_summary_tables(self.schema, provider, provider_aws_uuid,
                              start_date)

        with schema_context(self.schema):
            self.assertNotEqual(daily_query.count(), initial_daily_count)
            self.assertNotEqual(summary_query.count(), initial_summary_count)

        mock_charge_info.apply_async.assert_called()
        mock_cost_summary.si.assert_called()

    @patch('masu.processor.tasks.update_charge_info')
    def test_update_summary_tables_aws_end_date(self, mock_charge_info):
        """Test that the summary table task respects a date range."""
        provider = 'AWS'
        provider_aws_uuid = self.aws_test_provider_uuid
        ce_table_name = AWS_CUR_TABLE_MAP['cost_entry']
        daily_table_name = AWS_CUR_TABLE_MAP['line_item_daily']
        summary_table_name = AWS_CUR_TABLE_MAP['line_item_daily_summary']

        start_date = self.start_date.replace(
            day=1, hour=0, minute=0, second=0,
            microsecond=0) + relativedelta.relativedelta(months=-1)

        end_date = start_date + timedelta(days=10)
        end_date = end_date.replace(hour=23, minute=59, second=59)

        daily_table = getattr(self.aws_accessor.report_schema,
                              daily_table_name)
        summary_table = getattr(self.aws_accessor.report_schema,
                                summary_table_name)
        ce_table = getattr(self.aws_accessor.report_schema, ce_table_name)

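        # Find the actual cost entry bounds within the requested range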
        with schema_context(self.schema):
            ce_start_date = ce_table.objects\
                .filter(interval_start__gte=start_date)\
                .aggregate(Min('interval_start'))['interval_start__min']
            ce_end_date = ce_table.objects\
                .filter(interval_start__lte=end_date)\
                .aggregate(Max('interval_start'))['interval_start__max']

        # The summary tables will only include dates where there is data
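        # Expected bounds are truncated to midnight to match daily granularity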
        expected_start_date = max(start_date, ce_start_date)
        expected_start_date = expected_start_date.replace(hour=0,
                                                          minute=0,
                                                          second=0,
                                                          microsecond=0)
        expected_end_date = min(end_date, ce_end_date)
        expected_end_date = expected_end_date.replace(hour=0,
                                                      minute=0,
                                                      second=0,
                                                      microsecond=0)

        update_summary_tables(self.schema, provider, provider_aws_uuid,
                              start_date, end_date)

        with schema_context(self.schema):
            daily_entry = daily_table.objects.all().aggregate(
                Min('usage_start'), Max('usage_end'))
            result_start_date = daily_entry['usage_start__min']
            result_end_date = daily_entry['usage_end__max']

        self.assertEqual(result_start_date, expected_start_date)
        self.assertEqual(result_end_date, expected_end_date)

        with schema_context(self.schema):
            summary_entry = summary_table.objects.all().aggregate(
                Min('usage_start'), Max('usage_end'))
            result_start_date = summary_entry['usage_start__min']
            result_end_date = summary_entry['usage_end__max']

        self.assertEqual(result_start_date, expected_start_date)
        self.assertEqual(result_end_date, expected_end_date)

    @patch('masu.processor.tasks.update_cost_summary_table')
    @patch('masu.processor.tasks.update_charge_info')
    @patch(
        'masu.database.cost_model_db_accessor.CostModelDBAccessor._make_rate_by_metric_map'
    )
    @patch(
        'masu.database.cost_model_db_accessor.CostModelDBAccessor.get_markup')
    def test_update_summary_tables_ocp(self, mock_markup, mock_rate_map,
                                       mock_charge_info, mock_cost_summary):
        """Test that the summary table task runs."""
        markup = {}
        mem_rate = {'tiered_rates': [{'value': '1.5', 'unit': 'USD'}]}
        cpu_rate = {'tiered_rates': [{'value': '2.5', 'unit': 'USD'}]}
        rate_metric_map = {
            'cpu_core_usage_per_hour': cpu_rate,
            'memory_gb_usage_per_hour': mem_rate
        }

        mock_markup.return_value = markup
        mock_rate_map.return_value = rate_metric_map

        provider = 'OCP'
        provider_ocp_uuid = self.ocp_test_provider_uuid

        daily_table_name = OCP_REPORT_TABLE_MAP['line_item_daily']
        start_date = self.start_date.replace(
            day=1) + relativedelta.relativedelta(months=-1)

        with schema_context(self.schema):
            daily_query = self.ocp_accessor._get_db_obj_query(daily_table_name)

            initial_daily_count = daily_query.count()

        self.assertEqual(initial_daily_count, 0)
        update_summary_tables(self.schema, provider, provider_ocp_uuid,
                              start_date)

        with schema_context(self.schema):
            self.assertNotEqual(daily_query.count(), initial_daily_count)

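        # Apply the stubbed rates to the summarized OCP line items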
        update_charge_info(schema_name=self.schema,
                           provider_uuid=provider_ocp_uuid)

        table_name = OCP_REPORT_TABLE_MAP['line_item_daily_summary']
        with ProviderDBAccessor(provider_ocp_uuid) as provider_accessor:
            provider_obj = provider_accessor.get_provider()

        usage_period_qry = self.ocp_accessor.get_usage_period_query_by_provider(
            provider_obj.id)
        with schema_context(self.schema):
            cluster_id = usage_period_qry.first().cluster_id

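            # Pod charge fields should be populated after update_charge_info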
            items = self.ocp_accessor._get_db_obj_query(table_name).filter(
                cluster_id=cluster_id)
            for item in items:
                self.assertIsNotNone(item.pod_charge_memory_gigabyte_hours)
                self.assertIsNotNone(item.pod_charge_cpu_core_hours)

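            # Storage daily and summary rows should also be populated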
            storage_daily_name = OCP_REPORT_TABLE_MAP[
                'storage_line_item_daily']

            items = self.ocp_accessor._get_db_obj_query(
                storage_daily_name).filter(cluster_id=cluster_id)
            for item in items:
                self.assertIsNotNone(item.volume_request_storage_byte_seconds)
                self.assertIsNotNone(
                    item.persistentvolumeclaim_usage_byte_seconds)

            storage_summary_name = OCP_REPORT_TABLE_MAP[
                'line_item_daily_summary']
            items = self.ocp_accessor._get_db_obj_query(
                storage_summary_name).filter(cluster_id=cluster_id,
                                             data_source='Storage')
            for item in items:
                self.assertIsNotNone(
                    item.volume_request_storage_gigabyte_months)
                self.assertIsNotNone(
                    item.persistentvolumeclaim_usage_gigabyte_months)

        mock_charge_info.apply_async.assert_called()
        mock_cost_summary.si.assert_called()

    @patch('masu.processor.tasks.update_charge_info')
    @patch(
        'masu.database.cost_model_db_accessor.CostModelDBAccessor.get_memory_gb_usage_per_hour_rates'
    )
    @patch(
        'masu.database.cost_model_db_accessor.CostModelDBAccessor.get_cpu_core_usage_per_hour_rates'
    )
    def test_update_summary_tables_ocp_end_date(self, mock_cpu_rate,
                                                mock_mem_rate,
                                                mock_charge_info):
        """Test that the summary table task respects a date range."""
        mock_cpu_rate.return_value = 1.5
        mock_mem_rate.return_value = 2.5
        provider = 'OCP'
        provider_ocp_uuid = self.ocp_test_provider_uuid
        ce_table_name = OCP_REPORT_TABLE_MAP['report']
        daily_table_name = OCP_REPORT_TABLE_MAP['line_item_daily']

        start_date = self.start_date.replace(
            day=1, hour=0, minute=0, second=0,
            microsecond=0) + relativedelta.relativedelta(months=-1)

        end_date = start_date + timedelta(days=10)
        end_date = end_date.replace(hour=23, minute=59, second=59)

        daily_table = getattr(self.ocp_accessor.report_schema,
                              daily_table_name)
        ce_table = getattr(self.ocp_accessor.report_schema, ce_table_name)

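        # Find the actual report interval bounds within the requested range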
        with schema_context(self.schema):
            ce_start_date = ce_table.objects\
                .filter(interval_start__gte=start_date)\
                .aggregate(Min('interval_start'))['interval_start__min']

            ce_end_date = ce_table.objects\
                .filter(interval_start__lte=end_date)\
                .aggregate(Max('interval_start'))['interval_start__max']

        # The summary tables will only include dates where there is data
        expected_start_date = max(start_date, ce_start_date)
        expected_start_date = expected_start_date.replace(hour=0,
                                                          minute=0,
                                                          second=0,
                                                          microsecond=0)
        expected_end_date = min(end_date, ce_end_date)
        expected_end_date = expected_end_date.replace(hour=0,
                                                      minute=0,
                                                      second=0,
                                                      microsecond=0)

        update_summary_tables(self.schema, provider, provider_ocp_uuid,
                              start_date, end_date)
        with schema_context(self.schema):
            daily_entry = daily_table.objects.all().aggregate(
                Min('usage_start'), Max('usage_end'))
            result_start_date = daily_entry['usage_start__min']
            result_end_date = daily_entry['usage_end__max']

        self.assertEqual(result_start_date, expected_start_date)
        self.assertEqual(result_end_date, expected_end_date)

    @patch('masu.processor.tasks.update_summary_tables')
    def test_get_report_data_for_all_providers(self, mock_update):
        """Test GET report_data endpoint with provider_uuid=*."""
        start_date = date.today()
        update_all_summary_tables(start_date)

        mock_update.delay.assert_called_with(ANY, ANY, ANY, str(start_date),
                                             ANY)

    @patch(
        'masu.database.ocp_report_db_accessor.OCPReportDBAccessor.populate_cost_summary_table'
    )
    def test_update_cost_summary_table(self, mock_update):
        """Tests that the updater updates the cost summary table."""
        provider_ocp_uuid = self.ocp_test_provider_uuid
        start_date = self.start_date.replace(
            day=1, hour=0, minute=0, second=0,
            microsecond=0) + relativedelta.relativedelta(months=-1)

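        # The task should call the patched populate_cost_summary_table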
        update_cost_summary_table(self.schema,
                                  provider_ocp_uuid,
                                  start_date=start_date)

        mock_update.assert_called()