Beispiel #1
0
    def test_execute_query_with_group_by_tag_and_limit(self):
        """Test that data is grouped by tag key and limited.

        Requests the OpenShift CPU report grouped by the ``app_label`` tag
        with ``filter[limit]=2`` and checks that the non-'Other' entries are
        ordered by descending usage (the API's default ordering).
        """
        data_generator = OCPReportDataGenerator(self.tenant, dated_tags=False)
        data_generator.add_data_to_tenant()
        group_by_key = 'app_label'

        url = reverse('reports-openshift-cpu')
        client = APIClient()
        params = {
            'filter[resolution]': 'monthly',
            'filter[time_scope_value]': '-2',
            'filter[time_scope_units]': 'month',
            f'group_by[tag:{group_by_key}]': '*',
            'filter[limit]': 2
        }
        url = url + '?' + urlencode(params, quote_via=quote_plus)
        response = client.get(url, **self.headers)
        self.assertEqual(response.status_code, 200)

        data = response.json()
        data = data.get('data', [])
        # default ordered by usage
        previous_tag_usage = data[0].get('app_labels', [])[0].get('values', [{}])[0].get('usage', {}).get('value', 0)
        for entry in data[0].get('app_labels', []):
            current_tag_usage = entry.get('values', [{}])[0].get('usage', {}).get('value', 0)
            if 'Other' not in entry.get('app_label'):
                # assertLessEqual reports both operands on failure, unlike
                # the original assertTrue(a <= b).
                self.assertLessEqual(current_tag_usage, previous_tag_usage)
                previous_tag_usage = current_tag_usage
Beispiel #2
0
    def test_add_current_month_deltas_no_previous_data_wo_query_data(self):
        """Test that current month deltas are calculated with no previous month data.

        With a two-month time scope but only current-month data present, the
        delta value should be Decimal(0) and the percent undefined (None).
        """
        OCPReportDataGenerator(self.tenant, self.provider).remove_data_from_tenant()
        OCPReportDataGenerator(
            self.tenant, self.provider, current_month_only=True
        ).add_data_to_tenant()

        url = '?filter[time_scope_value]=-2&filter[resolution]=monthly&filter[time_scope_units]=month&filter[limit]=1&delta=usage__request'  # noqa: E501
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)

        q_table = handler._mapper.provider_map.get('tables').get('query')
        with tenant_context(self.tenant):
            query = q_table.objects.filter(handler.query_filter)
            query_data = query.annotate(**handler.annotations)
            query_group_by = ['date'] + handler._get_group_by()

            # was: annotations = annotations = handler.report_annotations
            # (redundant double assignment); unused query_order_by
            # scaffolding also removed.
            annotations = handler.report_annotations
            query_data = query_data.values(*query_group_by).annotate(**annotations)

            aggregates = handler._mapper.report_type_map.get('aggregates')
            metric_sum = query.aggregate(**aggregates)
            # Coerce missing/None aggregate sums to Decimal(0) so the delta
            # arithmetic inside add_current_month_deltas is well-defined.
            query_sum = {
                key: metric_sum.get(key) if metric_sum.get(key) else Decimal(0)
                for key in aggregates
            }

            result = handler.add_current_month_deltas(query_data, query_sum)

            self.assertEqual(result, query_data)
            self.assertEqual(handler.query_delta['value'], Decimal(0))
            self.assertIsNone(handler.query_delta['percent'])
    def test_provider_statistics_negative_case(self):
        """Test that the provider statistics method returns None for tenant misalignment."""
        # Create Provider
        # NOTE(review): type='AWS' (not OCP) appears to be the deliberate
        # "misalignment" — an AWS provider paired with OCP report data.
        # The docstring says stats should be None, yet the assertions below
        # check populated stats; confirm the intended contract.
        provider_authentication = ProviderAuthentication.objects.create(provider_resource_name='cluster_id_1001')
        provider = Provider.objects.create(name='ocpprovidername',
                                           type='AWS',
                                           created_by=self.user,
                                           customer=self.customer,
                                           authentication=provider_authentication,)

        # Seed OCP report data tied to the provider's UUID.
        data_generator = OCPReportDataGenerator(self.tenant, provider)
        data_generator.add_data_to_tenant(**{'provider_uuid': provider.uuid})

        provider_uuid = provider.uuid
        manager = ProviderManager(provider_uuid)

        stats = manager.provider_statistics(self.tenant)

        # Stats are keyed by billing-period start date (string form).
        self.assertIn(str(data_generator.dh.this_month_start.date()), stats.keys())
        self.assertIn(str(data_generator.dh.last_month_start.date()), stats.keys())

        for key, value in stats.items():
            key_date_obj = parser.parse(key)
            # Each value is a list of per-manifest stat dicts; inspect one.
            value_data = value.pop()

            self.assertIsNotNone(value_data.get('assembly_id'))
            self.assertIsNotNone(value_data.get('files_processed'))
            self.assertEqual(value_data.get('billing_period_start'), key_date_obj.date())
            # Processing timestamps must fall after the period start.
            self.assertGreater(parser.parse(value_data.get('last_process_start_date')), key_date_obj)
            self.assertGreater(parser.parse(value_data.get('last_process_complete_date')), key_date_obj)
            # Summary/derived-cost timestamps are expected to be unset here.
            self.assertIsNone(value_data.get('summary_data_creation_datetime'))
            self.assertIsNone(value_data.get('summary_data_updated_datetime'))
            self.assertIsNone(value_data.get('derived_cost_datetime'))
Beispiel #4
0
 def setUp(self):
     """Create an OCP provider and seed the tenant with OCP report data."""
     super().setUp()
     _, self.provider = create_generic_provider(Provider.PROVIDER_OCP, self.headers)
     self.data_generator = OCPReportDataGenerator(self.tenant, self.provider)
     self.data_generator.add_data_to_tenant()
Beispiel #5
0
 def setUp(self):
     """Seed the tenant with OCP data and persist the test user."""
     super().setUp()
     self.data_generator = OCPReportDataGenerator(self.tenant)
     self.data_generator.add_data_to_tenant()
     serializer = UserSerializer(
         data=self.user_data, context=self.request_context
     )
     # is_valid(raise_exception=True) raises on invalid data, so save()
     # only runs with a validated serializer.
     if serializer.is_valid(raise_exception=True):
         serializer.save()
Beispiel #6
0
 def add_ocp_data_to_tenant(self):
     """Populate the tenant with OCP data mirroring this generator's cluster config."""
     assert self.cluster_id, "method must be called after add_data_to_tenant"
     self.ocp_generator = OCPReportDataGenerator(
         self.tenant, self.provider, self.current_month_only
     )
     # Reuse the cluster/namespace/node layout already generated for this
     # tenant so the OCP rows line up with the existing data.
     self.ocp_generator.add_data_to_tenant(
         cluster_id=self.cluster_id,
         cluster_alias=self.cluster_alias,
         namespaces=self.namespaces,
         nodes=self.nodes,
     )
Beispiel #7
0
    def test_ocp_cpu_query_group_by_cluster(self):
        """Test that group by cluster includes cluster and cluster_alias.

        Each value row in a cluster group must carry both the 'cluster' key
        and the 'cluster_alias' key, and both must be non-None.
        """
        for _ in range(1, 5):
            OCPReportDataGenerator(self.tenant).add_data_to_tenant()

        # '?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[cluster]=*'
        params = {
            'filter': {
                'resolution': 'monthly',
                'time_scope_value': -1,
                'time_scope_units': 'month',
                'limit': 3
            },
            'group_by': {
                'cluster': ['*']
            }
        }
        query_params = FakeQueryParameters(params,
                                           report_type='cpu',
                                           tenant=self.tenant)
        handler = OCPReportQueryHandler(query_params.mock_qp)

        query_data = handler.execute_query()
        for data in query_data.get('data'):
            self.assertIn('clusters', data)
            for cluster_data in data.get('clusters'):
                self.assertIn('cluster', cluster_data)
                self.assertIn('values', cluster_data)
                for cluster_value in cluster_data.get('values'):
                    self.assertIn('cluster', cluster_value)
                    self.assertIn('cluster_alias', cluster_value)
                    # was: assertIsNotNone('cluster', cluster_value) — that
                    # asserted the literal string 'cluster' is not None
                    # (always true, cluster_value became the failure msg).
                    # Assert on the actual dict values instead.
                    self.assertIsNotNone(cluster_value.get('cluster'))
                    self.assertIsNotNone(cluster_value.get('cluster_alias'))
Beispiel #8
0
    def test_add_current_month_deltas_no_previous_data_w_query_data(self):
        """Test that current month deltas are calculated with no previous data for field two.

        The delta field 'usage__foo' has no previous-month counterpart, so a
        delta value is produced but the percent is undefined (None).
        """
        OCPReportDataGenerator(self.tenant).remove_data_from_tenant()
        OCPReportDataGenerator(self.tenant,
                               current_month_only=True).add_data_to_tenant()

        query_params = {
            'filter': {
                'resolution': 'monthly',
                'time_scope_value': -1,
                'limit': 1
            },
        }
        query_string = '?filter[resolution]=monthly&' + \
                       'filter[time_scope_value]=-1&' + \
                       'filter[limit]=1'

        handler = OCPReportQueryHandler(query_params, query_string,
                                        self.tenant, **{'report_type': 'cpu'})
        handler._delta = 'usage__foo'

        q_table = handler._mapper.provider_map.get('tables').get('query')
        with tenant_context(self.tenant):
            query = q_table.objects.filter(handler.query_filter)
            query_data = query.annotate(**handler.annotations)
            query_group_by = ['date'] + handler._get_group_by()

            # was: annotations = annotations = handler.report_annotations
            # (redundant double assignment); unused query_order_by
            # scaffolding also removed.
            annotations = handler.report_annotations
            query_data = query_data.values(*query_group_by).annotate(**annotations)

            aggregates = handler._mapper.report_type_map.get('aggregates')
            metric_sum = query.aggregate(**aggregates)
            # Coerce missing/None aggregate sums to Decimal(0) so the delta
            # arithmetic inside add_current_month_deltas is well-defined.
            query_sum = {
                key: metric_sum.get(key) if metric_sum.get(key) else Decimal(0)
                for key in aggregates
            }

            result = handler.add_current_month_deltas(query_data, query_sum)

            self.assertEqual(result, query_data)
            self.assertIsNotNone(handler.query_delta['value'])
            self.assertIsNone(handler.query_delta['percent'])
Beispiel #9
0
    def test_add_current_month_deltas_no_previous_data_wo_query_data(self):
        """Test that current month deltas are calculated with no previous month data.

        With delta=usage__request, the delta value is usage - request and the
        percent is usage / request * 100.
        """
        OCPReportDataGenerator(self.tenant).remove_data_from_tenant()
        OCPReportDataGenerator(self.tenant, current_month_only=True).add_data_to_tenant()

        query_params = {'filter': {'resolution': 'monthly',
                                   'time_scope_value': -2,
                                   'limit': 1},
                        }

        handler = OCPReportQueryHandler(
            query_params,
            None,
            self.tenant,
            **{'report_type': 'cpu'}
        )
        handler._delta = 'usage__request'

        q_table = handler._mapper.query_table
        with tenant_context(self.tenant):
            query = q_table.objects.filter(handler.query_filter)
            query_data = query.annotate(**handler.annotations)
            query_group_by = ['date'] + handler._get_group_by()

            # was: annotations = annotations = handler.report_annotations
            # (redundant double assignment); unused query_order_by
            # scaffolding also removed.
            annotations = handler.report_annotations
            query_data = query_data.values(*query_group_by).annotate(**annotations)

            aggregates = handler._mapper.report_type_map.get('aggregates')
            metric_sum = query.aggregate(**aggregates)
            # Coerce missing/None aggregate sums to Decimal(0) so the delta
            # arithmetic inside add_current_month_deltas is well-defined.
            query_sum = {key: metric_sum.get(key) if metric_sum.get(key) else Decimal(0) for key in aggregates}

            result = handler.add_current_month_deltas(query_data, query_sum)

            # Generated data has a non-zero request sum, so the percent
            # division is safe here.
            expected_delta = query_sum.get('usage', 0) - query_sum.get('request', 0)
            expected_percent = query_sum.get('usage', 0) / query_sum.get('request', 0) * 100

            self.assertEqual(result, query_data)
            self.assertEqual(handler.query_delta['value'], expected_delta)
            self.assertEqual(handler.query_delta['percent'], expected_percent)
Beispiel #10
0
    def test_filter_by_infrastructure_ocp(self):
        """Test that filter by infrastructure for ocp not on aws."""
        data_generator = OCPReportDataGenerator(self.tenant, current_month_only=True)
        data_generator.add_data_to_tenant()

        query_params = {
            'filter': {
                'resolution': 'monthly',
                'time_scope_value': -1,
                'time_scope_units': 'month',
                'infrastructures': ['AWS'],
            },
        }
        query_string = (
            '?filter[resolution]=monthly&'
            'filter[time_scope_value]=-1&'
            'filter[time_scope_units]=month&'
            'filter[infrastructures]=aws'
        )

        handler = OCPReportQueryHandler(
            query_params, query_string, self.tenant, **{'report_type': 'cpu'}
        )
        query_data = handler.execute_query()

        # A stand-alone OCP cluster has no AWS infrastructure, so every
        # usage/request value should come back as zero.
        for entry in query_data.get('data', []):
            for value in entry.get('values', []):
                self.assertEqual(value.get('usage').get('value'), 0)
                self.assertEqual(value.get('request').get('value'), 0)
        data_generator.remove_data_from_tenant()
Beispiel #11
0
    def test_ocp_infrastructure_type(self):
        """Test that the provider infrastructure returns Unknown when running stand alone."""
        provider_authentication = ProviderAuthentication.objects.create(
            provider_resource_name='cluster_id_1001'
        )
        provider = Provider.objects.create(
            name='ocpprovidername',
            type=Provider.PROVIDER_OCP,
            created_by=self.user,
            customer=self.customer,
            authentication=provider_authentication,
        )
        ocp_aws_data_generator = OCPAWSReportDataGenerator(self.tenant, provider, current_month_only=True)
        data_generator = OCPReportDataGenerator(self.tenant, provider, current_month_only=True)
        data_generator.add_data_to_tenant()
        ocp_aws_data_generator.create_ocp_provider(data_generator.cluster_id, data_generator.cluster_alias)

        # With no backing cloud infrastructure, the name resolves to Unknown.
        manager = ProviderManager(ocp_aws_data_generator.provider_uuid)
        self.assertEqual(manager.get_infrastructure_name(self.tenant), 'Unknown')

        data_generator.remove_data_from_tenant()
        ocp_aws_data_generator.remove_data_from_tenant()
Beispiel #12
0
    def test_get_cluster_capacity_monthly_resolution_group_by_cluster(self):
        """Test that cluster capacity returns capacity by cluster.

        Recomputes per-cluster and total capacity directly from the query
        table and compares against the handler's execute_query() output.
        """
        # Add data for a second cluster
        OCPReportDataGenerator(self.tenant).add_data_to_tenant()

        query_params = {
            'filter': {
                'resolution': 'monthly',
                'time_scope_value': -1,
                'time_scope_units': 'month'
            },
            'group_by': {
                'cluster': ['*']
            },
        }
        query_string = '?filter[resolution]=monthly&' + \
                       'filter[time_scope_value]=-1&' + \
                       'filter[time_scope_units]=month&' + \
                       'group_by[cluster]=*'

        handler = OCPReportQueryHandler(query_params, query_string,
                                        self.tenant, **{'report_type': 'cpu'})

        query_data = handler.execute_query()

        capacity_by_cluster = defaultdict(Decimal)
        total_capacity = Decimal(0)
        query_filter = handler.query_filter
        query_group_by = ['usage_start', 'cluster_id']
        annotations = {'capacity': Max('cluster_capacity_cpu_core_hours')}
        # was: list(annotations.keys())[0] — avoid building a throwaway list
        cap_key = next(iter(annotations))

        q_table = handler._mapper.provider_map.get('tables').get('query')
        query = q_table.objects.filter(query_filter)

        with tenant_context(self.tenant):
            # Expected capacity: max per (day, cluster), summed per cluster.
            cap_data = query.values(*query_group_by).annotate(**annotations)
            for entry in cap_data:
                cluster_id = entry.get('cluster_id', '')
                capacity_by_cluster[cluster_id] += entry.get(cap_key, 0)
                total_capacity += entry.get(cap_key, 0)

        for entry in query_data.get('data', []):
            for cluster in entry.get('clusters', []):
                cluster_name = cluster.get('cluster', '')
                capacity = cluster.get('values')[0].get('capacity',
                                                        {}).get('value')
                self.assertEqual(capacity, capacity_by_cluster[cluster_name])

        self.assertEqual(
            query_data.get('total', {}).get('capacity', {}).get('value'),
            total_capacity)
Beispiel #13
0
    def test_add_current_month_deltas_no_previous_data_w_query_data(self):
        """Test that current month deltas are calculated with no previous data for field two.

        The delta field "usage__foo" has no previous-month counterpart, so a
        delta value is produced but the percent is undefined (None).
        """
        OCPReportDataGenerator(self.tenant,
                               self.provider).remove_data_from_tenant()
        OCPReportDataGenerator(self.tenant,
                               self.provider,
                               current_month_only=True).add_data_to_tenant()

        url = "?filter[time_scope_value]=-1&filter[resolution]=monthly&filter[limit]=1"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        handler._delta = "usage__foo"

        q_table = handler._mapper.provider_map.get("tables").get("query")
        with tenant_context(self.tenant):
            query = q_table.objects.filter(handler.query_filter)
            query_data = query.annotate(**handler.annotations)
            query_group_by = ["date"] + handler._get_group_by()

            # was: annotations = annotations = handler.report_annotations
            # (redundant double assignment); unused query_order_by
            # scaffolding also removed.
            annotations = handler.report_annotations
            query_data = query_data.values(*query_group_by).annotate(**annotations)

            aggregates = handler._mapper.report_type_map.get("aggregates")
            metric_sum = query.aggregate(**aggregates)
            # Coerce missing/None aggregate sums to Decimal(0) so the delta
            # arithmetic inside add_current_month_deltas is well-defined.
            query_sum = {
                key: metric_sum.get(key) if metric_sum.get(key) else Decimal(0)
                for key in aggregates
            }

            result = handler.add_current_month_deltas(query_data, query_sum)

            self.assertEqual(result, query_data)
            self.assertIsNotNone(handler.query_delta["value"])
            self.assertIsNone(handler.query_delta["percent"])
Beispiel #14
0
    def test_provider_statistics(self):
        """Test that the provider statistics method returns report stats."""
        # Create a provider together with its authentication record.
        provider_authentication = ProviderAuthentication.objects.create(provider_resource_name="cluster_id_1001")
        provider = Provider.objects.create(
            name="ocpprovidername",
            type=Provider.PROVIDER_OCP,
            created_by=self.user,
            customer=self.customer,
            authentication=provider_authentication,
        )

        generator = OCPReportDataGenerator(self.tenant, provider)
        generator.add_data_to_tenant(provider_uuid=provider.uuid)

        stats = ProviderManager(provider.uuid).provider_statistics(self.tenant)

        # Stats are keyed by billing-period start date for both months.
        self.assertIn(str(generator.dh.this_month_start.date()), stats.keys())
        self.assertIn(str(generator.dh.last_month_start.date()), stats.keys())

        for key, value in stats.items():
            period_start = parser.parse(key)
            entry = value.pop()

            self.assertIsNotNone(entry.get("assembly_id"))
            self.assertIsNotNone(entry.get("files_processed"))
            self.assertEqual(entry.get("billing_period_start"), period_start.date())
            # Every processing/summary timestamp must fall after the
            # billing-period start.
            for field in (
                "last_process_start_date",
                "last_process_complete_date",
                "last_manifest_complete_date",
                "summary_data_creation_datetime",
                "summary_data_updated_datetime",
                "derived_cost_datetime",
            ):
                self.assertGreater(parser.parse(entry.get(field)), period_start)
Beispiel #15
0
    def setUp(self):
        """Build commonly-used date filters and seed OCP report data."""
        super().setUp()
        self.dh = DateHelper()

        # usage_start filters relative to today / the current month.
        self.this_month_filter = {'usage_start__gte': self.dh.this_month_start}
        self.ten_day_filter = {'usage_start__gte': self.dh.n_days_ago(self.dh.today, 9)}
        self.thirty_day_filter = {'usage_start__gte': self.dh.n_days_ago(self.dh.today, 29)}
        self.last_month_filter = {
            'usage_start__gte': self.dh.last_month_start,
            'usage_end__lte': self.dh.last_month_end,
        }
        OCPReportDataGenerator(self.tenant).add_data_to_tenant()
Beispiel #16
0
    def setUp(self):
        """Create an OCP provider, build date filters, and seed report data."""
        super().setUp()
        self.dh = DateHelper()

        _, self.provider = create_generic_provider(Provider.PROVIDER_OCP, self.headers)

        # usage_start filters relative to today / the current month.
        self.this_month_filter = {'usage_start__gte': self.dh.this_month_start}
        self.ten_day_filter = {'usage_start__gte': self.dh.n_days_ago(self.dh.today, 9)}
        self.thirty_day_filter = {'usage_start__gte': self.dh.n_days_ago(self.dh.today, 29)}
        self.last_month_filter = {
            'usage_start__gte': self.dh.last_month_start,
            'usage_end__lte': self.dh.last_month_end,
        }
        OCPReportDataGenerator(self.tenant, self.provider).add_data_to_tenant()
    def test_ocp_infrastructure_type(self):
        """Test that the provider infrastructure returns Unknown when running stand alone."""
        ocp_aws_generator = OCPAWSReportDataGenerator(self.tenant, current_month_only=True)
        ocp_generator = OCPReportDataGenerator(self.tenant, current_month_only=True)
        ocp_generator.add_data_to_tenant()
        ocp_aws_generator.create_ocp_provider(ocp_generator.cluster_id, ocp_generator.cluster_alias)

        # With no backing cloud infrastructure, the name resolves to Unknown.
        manager = ProviderManager(ocp_aws_generator.provider_uuid)
        self.assertEqual(manager.get_infrastructure_name(self.tenant), 'Unknown')

        ocp_generator.remove_data_from_tenant()
        ocp_aws_generator.remove_data_from_tenant()
Beispiel #18
0
    def test_ocp_cpu_query_group_by_cluster(self):
        """Test that group by cluster includes cluster and cluster_alias.

        Each value row in a cluster group must carry both the 'cluster' key
        and the 'cluster_alias' key, and both must be non-None.
        """
        for _ in range(1, 5):
            OCPReportDataGenerator(self.tenant, self.provider).add_data_to_tenant()

        url = '?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&filter[limit]=3&group_by[cluster]=*'  # noqa: E501
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)

        query_data = handler.execute_query()
        for data in query_data.get('data'):
            self.assertIn('clusters', data)
            for cluster_data in data.get('clusters'):
                self.assertIn('cluster', cluster_data)
                self.assertIn('values', cluster_data)
                for cluster_value in cluster_data.get('values'):
                    self.assertIn('cluster', cluster_value)
                    self.assertIn('cluster_alias', cluster_value)
                    # was: assertIsNotNone('cluster', cluster_value) — that
                    # asserted the literal string 'cluster' is not None
                    # (always true, cluster_value became the failure msg).
                    # Assert on the actual dict values instead.
                    self.assertIsNotNone(cluster_value.get('cluster'))
                    self.assertIsNotNone(cluster_value.get('cluster_alias'))
Beispiel #19
0
    def test_filter_by_infrastructure_ocp(self):
        """Test that filter by infrastructure for ocp not on aws."""
        generator = OCPReportDataGenerator(self.tenant, self.provider, current_month_only=True)
        generator.add_data_to_tenant()

        url = '?filter[resolution]=monthly&filter[time_scope_value]=-1&filter[time_scope_units]=month&filter[infrastructures]=aws'  # noqa: E501
        handler = OCPReportQueryHandler(self.mocked_query_params(url, OCPCpuView))
        query_data = handler.execute_query()

        # A stand-alone OCP cluster has no AWS infrastructure, so every
        # usage/request value should come back as zero.
        for entry in query_data.get('data', []):
            for value in entry.get('values', []):
                self.assertEqual(value.get('usage').get('value'), 0)
                self.assertEqual(value.get('request').get('value'), 0)
        generator.remove_data_from_tenant()
Beispiel #20
0
    def test_get_cluster_capacity_monthly_resolution_group_by_cluster(self):
        """Test that cluster capacity returns capacity by cluster.

        Recomputes per-cluster and total capacity directly from the query
        table and compares against the handler's execute_query() output.
        """
        # Add data for a second cluster
        OCPReportDataGenerator(self.tenant, self.provider).add_data_to_tenant()

        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[cluster]=*"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        query_data = handler.execute_query()

        capacity_by_cluster = defaultdict(Decimal)
        total_capacity = Decimal(0)
        query_filter = handler.query_filter
        query_group_by = ["usage_start", "cluster_id"]
        annotations = {"capacity": Max("cluster_capacity_cpu_core_hours")}
        # was: list(annotations.keys())[0] — avoid building a throwaway list
        cap_key = next(iter(annotations))

        q_table = handler._mapper.provider_map.get("tables").get("query")
        query = q_table.objects.filter(query_filter)

        with tenant_context(self.tenant):
            # Expected capacity: max per (day, cluster), summed per cluster.
            cap_data = query.values(*query_group_by).annotate(**annotations)
            for entry in cap_data:
                cluster_id = entry.get("cluster_id", "")
                capacity_by_cluster[cluster_id] += entry.get(cap_key, 0)
                total_capacity += entry.get(cap_key, 0)

        for entry in query_data.get("data", []):
            for cluster in entry.get("clusters", []):
                cluster_name = cluster.get("cluster", "")
                capacity = cluster.get("values")[0].get("capacity",
                                                        {}).get("value")
                self.assertEqual(capacity, capacity_by_cluster[cluster_name])

        self.assertEqual(
            query_data.get("total", {}).get("capacity", {}).get("value"),
            total_capacity)
    def test_provider_statistics_no_report_data(self):
        """Test that the provider statistics method returns no report stats with no report data."""
        # Create a provider with no report data behind it.
        provider_authentication = ProviderAuthentication.objects.create(provider_resource_name='cluster_id_1001')
        provider = Provider.objects.create(
            name='ocpprovidername',
            type='OCP',
            created_by=self.user,
            customer=self.customer,
            authentication=provider_authentication,
        )

        # Strip any generated data so statistics have nothing to report.
        generator = OCPReportDataGenerator(self.tenant, provider)
        generator.remove_data_from_reporting_common()
        generator.remove_data_from_tenant()

        manager = ProviderManager(provider.uuid)
        self.assertEqual(manager.provider_statistics(self.tenant), {})
Beispiel #22
0
    def test_filter_by_infrastructure_ocp(self):
        """Test that filter by infrastructure for ocp not on aws."""
        generator = OCPReportDataGenerator(self.tenant, self.provider, current_month_only=True)
        generator.add_data_to_tenant()

        url = "?filter[resolution]=monthly&filter[time_scope_value]=-1&filter[time_scope_units]=month&filter[infrastructures]=aws"  # noqa: E501
        handler = OCPReportQueryHandler(self.mocked_query_params(url, OCPCpuView))
        query_data = handler.execute_query()

        # check that returned list is not empty
        self.assertTrue(query_data.get("data"))
        # A stand-alone OCP cluster has no AWS infrastructure, so every
        # usage/request value should come back as zero.
        for entry in query_data.get("data"):
            for value in entry.get("values"):
                self.assertEqual(value.get("usage").get("value"), 0)
                self.assertEqual(value.get("request").get("value"), 0)
        generator.remove_data_from_tenant()
Beispiel #23
0
 def setUp(self):
     """Seed the tenant with OCP report data before each test."""
     super().setUp()
     generator = OCPReportDataGenerator(self.tenant)
     generator.add_data_to_tenant()
Beispiel #24
0
class OCPAzureReportDataGenerator:
    """Populate the database with OCP on Azure report data."""

    def __init__(self, tenant, provider, current_month_only=False, config=None):
        """Set up the class.

        Args:
            tenant (Tenant): tenant whose schema receives the generated rows.
            provider (Provider): provider the generated rows are attributed to.
            current_month_only (bool): when True, generate dates only within
                the current month; otherwise last month and this month.
            config (FakeAzureConfig): optional pre-built fake Azure config.
        """
        # prevent future whammy:
        assert isinstance(tenant, Tenant), "not a Tenant type"
        assert isinstance(provider, Provider), "not a Provider type"
        assert isinstance(current_month_only, bool), "not a bool type"
        if config:
            assert isinstance(config, FakeAzureConfig), "not a FakeAzureConfig type"

        self.tenant = tenant
        self.provider = provider
        self.current_month_only = current_month_only
        self.config = config if config else FakeAzureConfig()
        self.fake = Faker()
        self.dh = DateHelper()
        self.provider_uuid = provider.uuid
        # Set by add_ocp_data_to_tenant(); tracked so remove_data_from_tenant()
        # can also clean up the OCP-side data.
        self.ocp_generator = None

        # generate a list of dicts with unique keys.
        self.period_ranges, self.report_ranges = self.report_period_and_range()

    def report_period_and_range(self):
        """Return the report period and range.

        Returns:
            tuple: (list of (start, end) period tuples,
                    list of report-date lists, one list per period)
        """
        period = []
        ranges = []
        if self.current_month_only:
            # Generate up to 10 days of data, clamped so we never reach back
            # before the first of the month.
            report_days = 10
            diff_from_first = self.dh.today - self.dh.this_month_start
            if diff_from_first.days < 10:
                report_days = 1 + diff_from_first.days
                period = [(self.dh.this_month_start, self.dh.this_month_end)]
                ranges = [list(self.dh.this_month_start + relativedelta(days=i) for i in range(report_days))]
            else:
                period = [(self.dh.this_month_start, self.dh.this_month_end)]
                ranges = [list(self.dh.today - relativedelta(days=i) for i in range(report_days))]

        else:
            # Two report periods: last month and this month.
            period = [
                (self.dh.last_month_start, self.dh.last_month_end),
                (self.dh.this_month_start, self.dh.this_month_end),
            ]

            one_month_ago = self.dh.today - relativedelta(months=1)
            diff_from_first = self.dh.today - self.dh.this_month_start
            if diff_from_first.days < 10:
                report_days = 1 + diff_from_first.days
                ranges = [
                    list(self.dh.last_month_start + relativedelta(days=i) for i in range(report_days)),
                    list(self.dh.this_month_start + relativedelta(days=i) for i in range(report_days)),
                ]
            else:
                ranges = [
                    list(one_month_ago - relativedelta(days=i) for i in range(10)),
                    list(self.dh.today - relativedelta(days=i) for i in range(10)),
                ]
        return (period, ranges)

    def remove_data_from_tenant(self):
        """Remove the added data, including any OCP-side data."""
        if self.ocp_generator:
            self.ocp_generator.remove_data_from_tenant()
        with tenant_context(self.tenant):
            for table in (OCPAzureCostLineItemDailySummary, OCPAzureCostLineItemProjectDailySummary):
                table.objects.all().delete()

    def add_ocp_data_to_tenant(self):
        """Populate tenant with OCP data matching the Azure data's topology."""
        assert self.cluster_id, "method must be called after add_data_to_tenant"
        self.ocp_generator = OCPReportDataGenerator(self.tenant, self.provider, self.current_month_only)
        ocp_config = {
            "cluster_id": self.cluster_id,
            "cluster_alias": self.cluster_alias,
            "namespaces": self.namespaces,
            "nodes": self.nodes,
        }
        self.ocp_generator.add_data_to_tenant(**ocp_config)

    def add_data_to_tenant(self, fixed_fields=None, service_name=None):
        """Populate tenant with data.

        Args:
            fixed_fields (list): FakeAzureConfig field names to keep constant
                across generated line items.
            service_name (str): force a specific Azure service name.
        """
        words = list({self.fake.word() for _ in range(10)})

        # Random topology used for both the Azure rows and, via
        # add_ocp_data_to_tenant(), the OCP rows.
        self.cluster_id = random.choice(words)
        self.cluster_alias = random.choice(words)
        self.namespaces = random.sample(words, k=2)
        self.nodes = random.sample(words, k=2)

        self.ocp_azure_summary_line_items = [
            {
                "namespace": random.choice(self.namespaces),
                "pod": random.choice(words),
                "node": node,
                "resource_id": self.fake.ean8(),
            }
            for node in self.nodes
        ]
        with tenant_context(self.tenant):
            for i, period in enumerate(self.period_ranges):
                for report_date in self.report_ranges[i]:
                    for row in self.ocp_azure_summary_line_items:
                        self._randomize_line_item(retained_fields=fixed_fields)
                        if service_name:
                            self.config.service_name = service_name
                        li = self._populate_ocp_azure_cost_line_item_daily_summary(row, report_date)
                        self._populate_ocp_azure_cost_line_item_project_daily_summary(li, row, report_date)
            self._populate_azure_tag_summary()

    def create_ocp_provider(self, cluster_id, cluster_alias, infrastructure_type="Unknown"):
        """Create OCP test provider.

        Returns:
            Provider: the saved provider, linked to an infrastructure map.
        """
        auth = baker.make(ProviderAuthentication, provider_resource_name=cluster_id)
        bill = baker.make(ProviderBillingSource, bucket="")
        provider_uuid = uuid4()
        provider_data = {
            "uuid": provider_uuid,
            "name": cluster_alias,
            "authentication": auth,
            "billing_source": bill,
            "customer": None,
            "created_by": None,
            "type": Provider.PROVIDER_OCP,
            "setup_complete": False,
            "infrastructure": None,
        }
        provider = Provider(**provider_data)
        infrastructure = ProviderInfrastructureMap(
            infrastructure_provider=provider, infrastructure_type=infrastructure_type
        )
        infrastructure.save()
        provider.infrastructure = infrastructure
        provider.save()
        self.cluster_alias = cluster_alias
        self.provider_uuid = provider_uuid
        return provider

    def _randomize_line_item(self, retained_fields=None):
        """Update our FakeAzureConfig to generate a new line item.

        Fields listed in retained_fields keep their current values; every
        other config field is re-randomized.
        """
        DEFAULT_FIELDS = ["subscription_guid", "resource_location", "tags"]
        if not retained_fields:
            retained_fields = DEFAULT_FIELDS

        config_dict = {}
        for field in retained_fields:
            if field in self.config:
                config_dict[field] = getattr(self.config, field)
        self.config = FakeAzureConfig(**config_dict)

    def _populate_ocp_azure_cost_line_item_daily_summary(self, row, report_date):
        """Create and save one OCP-on-Azure daily summary line item.

        Returns:
            OCPAzureCostLineItemDailySummary: the saved line item.
        """
        if report_date:
            usage_dt = report_date
        else:
            usage_dt = self.fake.date_time_between_dates(self.dh.this_month_start, self.dh.today)
        usage_qty = random.random() * random.randrange(0, 100)
        pretax = usage_qty * self.config.meter_rate

        data = {
            # OCP Fields:
            "cluster_id": self.cluster_id,
            "cluster_alias": self.cluster_alias,
            "namespace": [row.get("namespace")],
            "pod": [row.get("pod")],
            "node": row.get("node"),
            "resource_id": row.get("resource_id"),
            "usage_start": usage_dt,
            "usage_end": usage_dt,
            # Azure Fields:
            "cost_entry_bill": baker.make(AzureCostEntryBill),
            "subscription_guid": self.config.subscription_guid,
            "instance_type": self.config.instance_type,
            "service_name": self.config.service_name,
            "resource_location": self.config.resource_location,
            "tags": self.select_tags(),
            "usage_quantity": usage_qty,
            "pretax_cost": pretax,
            "markup_cost": pretax * 0.1,  # 10% markup
            "offer_id": random.choice([None, self.fake.pyint()]),
            "currency": "USD",
            "unit_of_measure": "some units",
            "shared_projects": 1,
            "project_costs": pretax,
        }

        line_item = OCPAzureCostLineItemDailySummary(**data)
        line_item.save()
        return line_item

    def _populate_ocp_azure_cost_line_item_project_daily_summary(self, li, row, report_date):
        """Create and save the project-level daily summary row mirroring *li*."""
        data = {
            # OCP Fields:
            "cluster_id": li.cluster_id,
            "cluster_alias": li.cluster_alias,
            "namespace": [row.get("namespace")],
            "pod": [row.get("pod")],
            "node": row.get("node"),
            "resource_id": row.get("resource_id"),
            "usage_start": li.usage_start,
            "usage_end": li.usage_end,
            # Azure Fields:
            "cost_entry_bill": li.cost_entry_bill,
            "subscription_guid": li.subscription_guid,
            "instance_type": li.instance_type,
            "service_name": li.service_name,
            "resource_location": li.resource_location,
            "usage_quantity": li.usage_quantity,
            "unit_of_measure": "some units",
            "offer_id": li.offer_id,
            "currency": "USD",
            "pretax_cost": li.pretax_cost,
            "project_markup_cost": li.markup_cost,
            "pod_cost": random.random() * li.pretax_cost,
        }

        line_item = OCPAzureCostLineItemProjectDailySummary(**data)
        line_item.save()

    def select_tags(self):
        """Return a random selection of the defined tags.

        NOTE(review): random.choices may repeat a key (duplicates collapse in
        the dict), and randrange(2, n) raises ValueError when the config
        defines exactly two tags -- confirm config always has > 2 tags.
        """
        return {
            key: self.config.tags[key]
            for key in random.choices(
                list(self.config.tags.keys()), k=random.randrange(2, len(self.config.tags.keys()))
            )
        }

    def _populate_azure_tag_summary(self):
        """Populate the Azure tag summary table."""
        # Plain string literal: the previous f-string had no placeholders.
        agg_sql = pkgutil.get_data("masu.database", "sql/reporting_cloudtags_summary.sql")
        agg_sql = agg_sql.decode("utf-8")
        agg_sql_params = {
            "schema": connection.schema_name,
            "tag_table": "reporting_azuretags_summary",
            "lineitem_table": "reporting_ocpazurecostlineitem_daily_summary",
        }
        agg_sql, agg_sql_params = JinjaSql().prepare_query(agg_sql, agg_sql_params)

        with connection.cursor() as cursor:
            # NOTE(review): agg_sql_params is not passed to execute(); this
            # only works if prepare_query inlined all parameters -- confirm.
            cursor.execute(agg_sql)
Beispiel #25
0
class OCPReportViewTest(IamTestCase):
    """Tests the report view."""

    @classmethod
    def setUpClass(cls):
        """Set up the test class."""
        super().setUpClass()
        cls.dh = DateHelper()

    def setUp(self):
        """Set up the customer view tests."""
        super().setUp()
        self.data_generator = OCPReportDataGenerator(self.tenant)
        self.data_generator.add_data_to_tenant()
        serializer = UserSerializer(data=self.user_data, context=self.request_context)
        if serializer.is_valid(raise_exception=True):
            serializer.save()

    def _calculate_expected_range(self, time_scope_value, time_scope_units):
        """Return the expected (start, end) dates for a time scope filter.

        Args:
            time_scope_value (str): one of '-1', '-2', '-10', '-30'.
            time_scope_units (str): 'month' or 'day', matching the value.

        Raises:
            ValueError: if the value/units combination is unrecognized.
        """
        today = self.dh.today

        if time_scope_value == '-1' and time_scope_units == 'month':
            start_range = today.replace(day=1).date()
        elif time_scope_value == '-2' and time_scope_units == 'month':
            start_range = (today - relativedelta(months=1)).replace(day=1).date()
        elif time_scope_value == '-10' and time_scope_units == 'day':
            start_range = (today - relativedelta(days=10)).date()
        elif time_scope_value == '-30' and time_scope_units == 'day':
            start_range = (today - relativedelta(days=30)).date()
        else:
            # Previously an unknown combination fell through and produced an
            # opaque UnboundLocalError; fail explicitly instead.
            raise ValueError(
                f'unsupported time scope: {time_scope_value} {time_scope_units}'
            )

        # The range always ends on the last day of the current month.
        end_range = today.replace(day=calendar.monthrange(today.year, today.month)[1]).date()

        return start_range, end_range

    def test_execute_ocp_tags_queries_keys_only(self):
        """Test that tag key data is for the correct time queries."""
        test_cases = [{'value': '-1', 'unit': 'month', 'resolution': 'monthly'},
                      {'value': '-2', 'unit': 'month', 'resolution': 'monthly'},
                      {'value': '-10', 'unit': 'day', 'resolution': 'daily'},
                      {'value': '-30', 'unit': 'day', 'resolution': 'daily'}]

        for case in test_cases:
            url = reverse('ocp-tags')
            client = APIClient()
            params = {
                'filter[resolution]': case.get('resolution'),
                'filter[time_scope_value]': case.get('value'),
                'filter[time_scope_units]': case.get('unit'),
                'key_only': True
            }
            url = url + '?' + urlencode(params, quote_via=quote_plus)
            response = client.get(url, **self.headers)

            self.assertEqual(response.status_code, 200)
            data = response.json()
            start_range, end_range = self._calculate_expected_range(case.get('value'), case.get('unit'))

            # Each tag key is prefixed with its date; every date must fall
            # inside the requested time scope.
            for label in data.get('data'):
                label_date = datetime.datetime.strptime(label.split('*')[0], '%m-%d-%Y')
                self.assertGreaterEqual(label_date.date(), start_range)
                self.assertLessEqual(label_date.date(), end_range)

            self.assertTrue(data.get('data'))
            self.assertIsInstance(data.get('data'), list)

    def test_execute_ocp_tags_queries(self):
        """Test that tag data is for the correct time queries."""
        test_cases = [{'value': '-1', 'unit': 'month', 'resolution': 'monthly'},
                      {'value': '-2', 'unit': 'month', 'resolution': 'monthly'},
                      {'value': '-10', 'unit': 'day', 'resolution': 'daily'},
                      {'value': '-30', 'unit': 'day', 'resolution': 'daily'}]

        for case in test_cases:
            url = reverse('ocp-tags')
            client = APIClient()
            params = {
                'filter[resolution]': case.get('resolution'),
                'filter[time_scope_value]': case.get('value'),
                'filter[time_scope_units]': case.get('unit'),
                'key_only': False
            }
            url = url + '?' + urlencode(params, quote_via=quote_plus)
            response = client.get(url, **self.headers)
            self.assertEqual(response.status_code, 200)
            data = response.json()
            start_range, end_range = self._calculate_expected_range(case.get('value'), case.get('unit'))

            for tag in data.get('data'):
                label = tag.get('key')
                label_date = datetime.datetime.strptime(label.split('*')[0], '%m-%d-%Y')
                self.assertGreaterEqual(label_date.date(), start_range)
                self.assertLessEqual(label_date.date(), end_range)
                self.assertIsNotNone(tag.get('values'))

            self.assertTrue(data.get('data'))
            self.assertIsInstance(data.get('data'), list)
Beispiel #26
0
 def setUp(self):
     """Set up the customer view tests."""
     super().setUp()
     # Create a generic OCP provider and seed its tenant data.
     _, provider = create_generic_provider('OCP', self.headers)
     self.provider = provider
     generator = OCPReportDataGenerator(self.tenant, self.provider)
     generator.add_data_to_tenant()
Beispiel #27
0
    def setUp(self):
        """Set up the customer view tests."""
        super().setUp()
        self.data_generator = OCPReportDataGenerator(self.tenant)
        self.data_generator.add_data_to_tenant()
        serializer = UserSerializer(data=self.user_data, context=self.request_context)
        if serializer.is_valid(raise_exception=True):
            serializer.save()

        # Both canned reports share the same one-month, monthly filter.
        month_filter = {
            'resolution': 'monthly',
            'time_scope_value': '-1',
            'time_scope_units': 'month'
        }

        # Canned CPU report fixture: (project, usage, request) per project.
        cpu_rows = [
            ('default', 0.119385, 9.506666),
            ('metering', 4.464511, 53.985832),
            ('monitoring', 7.861343, 17.920067),
            ('openshift-web-console', 0.862687, 4.753333),
        ]
        self.report_ocp_cpu = {
            'group_by': {'project': ['*']},
            'filter': dict(month_filter),
            'data': [{
                'date': '2018-10',
                'projects': [
                    {'project': name,
                     'values': [{'date': '2018-10', 'project': name,
                                 'limit': 'null', 'usage': usage,
                                 'request': request}]}
                    for name, usage, request in cpu_rows
                ],
            }],
            'total': {
                'pod_usage_cpu_core_hours': 13.307928,
                'pod_request_cpu_core_hours': 86.165898
            }
        }

        # Canned memory report fixture: (project, usage, request) per project.
        mem_rows = [
            ('default', 0.162249, 1.063302),
            ('metering', 5.899788, 7.007081),
            ('monitoring', 3.178287, 4.153526),
            ('openshift-web-console', 0.068988, 0.207677),
        ]
        self.report_ocp_mem = {
            'group_by': {'project': ['*']},
            'filter': dict(month_filter),
            'data': [{
                'date': '2018-10',
                'projects': [
                    {'project': name,
                     'values': [{'date': '2018-10', 'project': name,
                                 'memory_usage_gigabytes': usage,
                                 'memory_requests_gigabytes': request}]}
                    for name, usage, request in mem_rows
                ],
            }],
            'total': {
                'pod_usage_memory_gigabytes': 9.309312,
                'pod_request_memory_gigabytes': 12.431585
            }
        }
Beispiel #28
0
class OCPReportViewTest(IamTestCase):
    """Tests the report view."""

    @classmethod
    def setUpClass(cls):
        """Set up the test class."""
        super().setUpClass()
        helper = DateHelper()
        cls.dh = helper
        # Reference point ten days before "now" for relative-date tests.
        cls.ten_days_ago = helper.n_days_ago(helper._now, 10)

    def setUp(self):
        """Set up the customer view tests."""
        super().setUp()
        self.data_generator = OCPReportDataGenerator(self.tenant)
        self.data_generator.add_data_to_tenant()
        serializer = UserSerializer(data=self.user_data, context=self.request_context)
        if serializer.is_valid(raise_exception=True):
            serializer.save()

        # Shared one-month, monthly-resolution filter for both fixtures.
        month_filter = {
            'resolution': 'monthly',
            'time_scope_value': '-1',
            'time_scope_units': 'month'
        }

        # CPU fixture rows: (project, usage, request).
        cpu_rows = [
            ('default', 0.119385, 9.506666),
            ('metering', 4.464511, 53.985832),
            ('monitoring', 7.861343, 17.920067),
            ('openshift-web-console', 0.862687, 4.753333),
        ]
        self.report_ocp_cpu = {
            'group_by': {'project': ['*']},
            'filter': dict(month_filter),
            'data': [{
                'date': '2018-10',
                'projects': [
                    {'project': name,
                     'values': [{'date': '2018-10', 'project': name,
                                 'limit': 'null', 'usage': usage,
                                 'request': request}]}
                    for name, usage, request in cpu_rows
                ],
            }],
            'total': {
                'pod_usage_cpu_core_hours': 13.307928,
                'pod_request_cpu_core_hours': 86.165898
            }
        }

        # Memory fixture rows: (project, usage, request).
        mem_rows = [
            ('default', 0.162249, 1.063302),
            ('metering', 5.899788, 7.007081),
            ('monitoring', 3.178287, 4.153526),
            ('openshift-web-console', 0.068988, 0.207677),
        ]
        self.report_ocp_mem = {
            'group_by': {'project': ['*']},
            'filter': dict(month_filter),
            'data': [{
                'date': '2018-10',
                'projects': [
                    {'project': name,
                     'values': [{'date': '2018-10', 'project': name,
                                 'memory_usage_gigabytes': usage,
                                 'memory_requests_gigabytes': request}]}
                    for name, usage, request in mem_rows
                ],
            }],
            'total': {
                'pod_usage_memory_gigabytes': 9.309312,
                'pod_request_memory_gigabytes': 12.431585
            }
        }

    @patch('api.report.ocp.ocp_query_handler.OCPReportQueryHandler')
    def test_generic_report_ocp_cpu_success(self, mock_handler):
        """Test OCP cpu generic report."""
        mock_handler.return_value.execute_query.return_value = self.report_ocp_cpu
        params = {
            'group_by[account]': '*',
            'filter[resolution]': 'monthly',
            'filter[time_scope_value]': '-1',
            'filter[time_scope_units]': 'month'
        }
        user = User.objects.get(username=self.user_data['username'])

        # Build a DRF Request carrying the query params and the test user.
        raw_request = HttpRequest()
        query_dict = QueryDict(mutable=True)
        query_dict.update(params)
        raw_request.GET = query_dict
        request = Request(raw_request)
        request.user = user

        response = _generic_report(request, report='cpu', provider='ocp')
        self.assertIsInstance(response, Response)
        # FIXME
        # self.assertEqual(response.status_code, 200)

    @patch('api.report.ocp.ocp_query_handler.OCPReportQueryHandler')
    def test_generic_report_ocp_mem_success(self, mock_handler):
        """Test OCP memory generic report."""
        mock_handler.return_value.execute_query.return_value = self.report_ocp_mem
        params = {
            'group_by[account]': '*',
            'filter[resolution]': 'monthly',
            'filter[time_scope_value]': '-1',
            'filter[time_scope_units]': 'month'
        }
        user = User.objects.get(username=self.user_data['username'])

        # Build a DRF Request carrying the query params and the test user.
        raw_request = HttpRequest()
        query_dict = QueryDict(mutable=True)
        query_dict.update(params)
        raw_request.GET = query_dict
        request = Request(raw_request)
        request.user = user

        response = _generic_report(request, report='memory', provider='ocp')
        self.assertIsInstance(response, Response)
        # FIXME
        # self.assertEqual(response.status_code, 200)

    def test_execute_query_ocp_cpu(self):
        """Test that OCP CPU endpoint works."""
        url = reverse('reports-openshift-cpu')
        client = APIClient()
        response = client.get(url, **self.headers)

        # Default filter covers this month to date.
        expected_end_date = str(self.dh.today.date())
        expected_start_date = str(self.dh.this_month_start.date())
        self.assertEqual(response.status_code, 200)
        data = response.json()
        # sorted() takes any iterable; no need to build a list first.
        dates = sorted(item.get('date') for item in data.get('data'))

        self.assertEqual(dates[0], expected_start_date)
        self.assertEqual(dates[-1], expected_end_date)

        for item in data.get('data'):
            if item.get('values'):
                values = item.get('values')[0]
                # assertIn gives clearer failure output than assertTrue(x in y).
                self.assertIn('limit', values)
                self.assertIn('usage', values)
                self.assertIn('request', values)

    def test_charge_api_has_units(self):
        """Test that the charge API returns units."""
        url = reverse('reports-openshift-charges')
        client = APIClient()
        response = client.get(url, **self.headers)
        response_json = response.json()

        total = response_json.get('meta', {}).get('total', {})
        data = response_json.get('data', {})
        # assertIn gives clearer failure output than assertTrue(x in y).
        self.assertIn('cost', total)
        self.assertEqual(total.get('cost', {}).get('units'), 'USD')

        for item in data:
            if item.get('values'):
                values = item.get('values')[0]
                self.assertIn('cost', values)
                self.assertEqual(values.get('cost', {}).get('units'), 'USD')

    def test_cpu_api_has_units(self):
        """Test that the CPU API returns units."""
        url = reverse('reports-openshift-cpu')
        client = APIClient()
        response = client.get(url, **self.headers)
        response_json = response.json()

        total = response_json.get('meta', {}).get('total', {})
        data = response_json.get('data', {})
        # assertIn gives clearer failure output than assertTrue(x in y).
        self.assertIn('usage', total)
        self.assertEqual(total.get('usage', {}).get('units'), 'Core-Hours')

        for item in data:
            if item.get('values'):
                values = item.get('values')[0]
                self.assertIn('usage', values)
                self.assertEqual(values.get('usage', {}).get('units'), 'Core-Hours')

    def test_memory_api_has_units(self):
        """Test that the memory API returns units."""
        # NOTE: docstring previously said "charge API" -- this exercises the
        # memory endpoint.
        url = reverse('reports-openshift-memory')
        client = APIClient()
        response = client.get(url, **self.headers)
        response_json = response.json()

        total = response_json.get('meta', {}).get('total', {})
        data = response_json.get('data', {})
        # assertIn gives clearer failure output than assertTrue(x in y).
        self.assertIn('usage', total)
        self.assertEqual(total.get('usage', {}).get('units'), 'GB-Hours')

        for item in data:
            if item.get('values'):
                values = item.get('values')[0]
                self.assertIn('usage', values)
                self.assertEqual(values.get('usage', {}).get('units'), 'GB-Hours')

    def test_execute_query_ocp_cpu_last_thirty_days(self):
        """Test that OCP CPU endpoint works for the last thirty days."""
        url = reverse('reports-openshift-cpu')
        client = APIClient()
        params = {'filter[time_scope_value]': '-30',
                  'filter[time_scope_units]': 'day'}
        url = url + '?' + urlencode(params, quote_via=quote_plus)
        response = client.get(url, **self.headers)

        expected_end_date = self.dh.today
        expected_start_date = self.dh.n_days_ago(expected_end_date, 30)
        expected_end_date = str(expected_end_date.date())
        expected_start_date = str(expected_start_date.date())
        self.assertEqual(response.status_code, 200)
        data = response.json()
        # sorted() takes any iterable; no need to build a list first.
        dates = sorted(item.get('date') for item in data.get('data'))
        self.assertEqual(dates[0], expected_start_date)
        self.assertEqual(dates[-1], expected_end_date)

        for item in data.get('data'):
            if item.get('values'):
                values = item.get('values')[0]
                # assertIn gives clearer failure output than assertTrue(x in y).
                self.assertIn('limit', values)
                self.assertIn('usage', values)
                self.assertIn('request', values)

    def test_execute_query_ocp_cpu_this_month(self):
        """Test that data is returned for the full month."""
        query = urlencode(
            {
                'filter[resolution]': 'monthly',
                'filter[time_scope_value]': '-1',
                'filter[time_scope_units]': 'month',
            },
            quote_via=quote_plus,
        )
        client = APIClient()
        response = client.get('{}?{}'.format(reverse('reports-openshift-cpu'), query),
                              **self.headers)

        self.assertEqual(response.status_code, 200)
        payload = response.json()
        dates = sorted(entry.get('date') for entry in payload.get('data'))
        self.assertEqual(dates[0], self.dh.today.strftime('%Y-%m'))

        first = payload.get('data')[0].get('values')[0]
        for field in ('limit', 'usage', 'request'):
            self.assertIn(field, first)

    def test_execute_query_ocp_cpu_this_month_daily(self):
        """Test that data is returned for the full month."""
        query = urlencode(
            {
                'filter[resolution]': 'daily',
                'filter[time_scope_value]': '-1',
                'filter[time_scope_units]': 'month',
            },
            quote_via=quote_plus,
        )
        client = APIClient()
        response = client.get('{}?{}'.format(reverse('reports-openshift-cpu'), query),
                              **self.headers)

        self.assertEqual(response.status_code, 200)
        payload = response.json()
        dates = sorted(entry.get('date') for entry in payload.get('data'))
        self.assertEqual(dates[0], self.dh.this_month_start.strftime('%Y-%m-%d'))
        self.assertEqual(dates[-1], self.dh.today.strftime('%Y-%m-%d'))

        for entry in payload.get('data'):
            if entry.get('values'):
                first = entry.get('values')[0]
                for field in ('limit', 'usage', 'request'):
                    self.assertIn(field, first)

    def test_execute_query_ocp_cpu_last_month(self):
        """Test that data is returned for the last month."""
        query = urlencode(
            {
                'filter[resolution]': 'monthly',
                'filter[time_scope_value]': '-2',
                'filter[time_scope_units]': 'month',
            },
            quote_via=quote_plus,
        )
        client = APIClient()
        response = client.get('{}?{}'.format(reverse('reports-openshift-cpu'), query),
                              **self.headers)

        self.assertEqual(response.status_code, 200)
        payload = response.json()
        dates = sorted(entry.get('date') for entry in payload.get('data'))
        self.assertEqual(dates[0], self.dh.last_month_start.strftime('%Y-%m'))

        first = payload.get('data')[0].get('values')[0]
        for field in ('limit', 'usage', 'request'):
            self.assertIn(field, first)

    def test_execute_query_ocp_cpu_last_month_daily(self):
        """Test that daily data is returned for the last month."""
        url = reverse('reports-openshift-cpu')
        client = APIClient()
        params = {
            'filter[resolution]': 'daily',
            'filter[time_scope_value]': '-2',
            'filter[time_scope_units]': 'month'
        }
        url = url + '?' + urlencode(params, quote_via=quote_plus)
        response = client.get(url, **self.headers)

        expected_start_date = self.dh.last_month_start.strftime('%Y-%m-%d')
        # NOTE(review): the expected end date is *today* even though the
        # query scopes to last month (-2); presumably the API pads the date
        # range out to the current day -- confirm against the query handler.
        expected_end_date = self.dh.today.strftime('%Y-%m-%d')

        self.assertEqual(response.status_code, 200)
        data = response.json()
        dates = sorted([item.get('date') for item in data.get('data')])
        self.assertEqual(dates[0], expected_start_date)
        self.assertEqual(dates[-1], expected_end_date)

        for item in data.get('data'):
            if item.get('values'):
                values = item.get('values')[0]
                self.assertTrue('limit' in values)
                self.assertTrue('usage' in values)
                self.assertTrue('request' in values)

    def test_execute_query_ocp_memory(self):
        """Test that OCP Mem endpoint works."""
        response = APIClient().get(reverse('reports-openshift-memory'), **self.headers)
        self.assertEqual(response.status_code, 200)

    def test_execute_query_ocp_memory_group_by_limit(self):
        """Test that OCP Mem endpoint works with limits.

        Groups by node with filter[limit]=1 and verifies the response holds
        the top node plus an aggregated '1 Other' bucket whose combined
        usage matches the per-day database totals.
        """
        url = reverse('reports-openshift-memory')
        client = APIClient()
        params = {
            'group_by[node]': '*',
            'filter[limit]': '1',
        }
        url = url + '?' + urlencode(params, quote_via=quote_plus)
        response = client.get(url, **self.headers)
        # Assert success before inspecting the body, consistent with the
        # sibling tests (the original skipped this check).
        self.assertEqual(response.status_code, 200)
        data = response.json()

        with tenant_context(self.tenant):
            totals = OCPUsageLineItemDailySummary.objects\
                .filter(usage_start__gte=self.ten_days_ago)\
                .values(*['usage_start'])\
                .annotate(usage=Sum('pod_usage_memory_gigabyte_hours'))

        # Map date string -> total memory usage for the comparison below.
        totals = {total.get('usage_start').strftime('%Y-%m-%d'): total.get('usage')
                  for total in totals}

        self.assertIn('nodes', data.get('data')[0])

        # Check if limit returns the correct number of results, and
        # that the totals add up properly
        for item in data.get('data'):
            if item.get('nodes'):
                date = item.get('date')
                nodes = item.get('nodes')
                self.assertEqual(len(nodes), 2)
                self.assertEqual(nodes[1].get('node'), '1 Other')
                usage_total = nodes[0].get('values')[0].get('usage', {}).get('value') + \
                    nodes[1].get('values')[0].get('usage', {}).get('value')
                self.assertEqual(round(usage_total, 3),
                                 round(float(totals.get(date)), 3))

    def test_execute_query_ocp_charge(self):
        """Test that the charge endpoint is reachable."""
        response = APIClient().get(reverse('reports-openshift-charges'), **self.headers)
        self.assertEqual(response.status_code, 200)

    def test_execute_query_ocp_charge_with_delta(self):
        """Test that deltas work for charge.

        Verifies both the top-level meta delta (current month total minus
        the aligned prior-month total) and the per-day delta_value on each
        data entry.
        """
        url = reverse('reports-openshift-charges')
        client = APIClient()
        params = {
            'delta': 'cost',
            'filter[resolution]': 'daily',
            'filter[time_scope_value]': '-1',
            'filter[time_scope_units]': 'month'
        }
        url = url + '?' + urlencode(params, quote_via=quote_plus)
        response = client.get(url, **self.headers)
        self.assertEqual(response.status_code, 200)
        data = response.json()
        this_month_start = self.dh.this_month_start
        last_month_start = self.dh.last_month_start

        # Used to shift last month's dates forward one month so each day
        # lines up with the corresponding day of the current month.
        date_delta = relativedelta.relativedelta(months=1)

        def date_to_string(dt):
            # date -> 'YYYY-MM-DD'
            return dt.strftime('%Y-%m-%d')

        def string_to_date(dt):
            # 'YYYY-MM-DD' -> date
            return datetime.datetime.strptime(dt, '%Y-%m-%d').date()

        with tenant_context(self.tenant):
            # Current-month grand total of charge (CPU + memory).
            current_total = OCPUsageLineItemDailySummary.objects\
                .filter(usage_start__gte=this_month_start)\
                .aggregate(
                    total=Sum(
                        F('pod_charge_cpu_core_hours') +  # noqa: W504
                        F('pod_charge_memory_gigabyte_hours')
                    )
                ).get('total')
            current_total = current_total if current_total is not None else 0

            # Per-day charge totals for the current month.
            current_totals = OCPUsageLineItemDailySummary.objects\
                .filter(usage_start__gte=this_month_start)\
                .annotate(**{'date': TruncDayString('usage_start')})\
                .values(*['date'])\
                .annotate(total=Sum(F('pod_charge_cpu_core_hours') + F('pod_charge_memory_gigabyte_hours')))

            # Per-day charge totals for the previous month only.
            prev_totals = OCPUsageLineItemDailySummary.objects\
                .filter(usage_start__gte=last_month_start)\
                .filter(usage_start__lt=this_month_start)\
                .annotate(**{'date': TruncDayString('usage_start')})\
                .values(*['date'])\
                .annotate(total=Sum(F('pod_charge_cpu_core_hours') + F('pod_charge_memory_gigabyte_hours')))

        current_totals = {total.get('date'): total.get('total')
                          for total in current_totals}
        # Shift prior-month dates forward and keep only days that also
        # appear in the current month's data.
        prev_totals = {date_to_string(string_to_date(total.get('date')) + date_delta): total.get('total')
                       for total in prev_totals
                       if date_to_string(string_to_date(total.get('date')) + date_delta) in current_totals}

        prev_total = sum(prev_totals.values())
        prev_total = prev_total if prev_total is not None else 0

        expected_delta = current_total - prev_total
        delta = data.get('meta', {}).get('delta', {}).get('value')
        self.assertEqual(round(delta, 3), round(float(expected_delta), 3))
        for item in data.get('data'):
            date = item.get('date')
            # Per-day delta: this day's total minus the same day last month.
            expected_delta = current_totals.get(date, 0) - prev_totals.get(date, 0)
            values = item.get('values', [])
            delta_value = 0
            if values:
                delta_value = values[0].get('delta_value')
            self.assertEqual(round(delta_value, 3), round(float(expected_delta), 3))

    def test_execute_query_ocp_charge_with_invalid_delta(self):
        """Test that bad deltas don't work for charge."""
        base_url = reverse('reports-openshift-charges')
        client = APIClient()

        # 'usage' is not a valid delta for the charge endpoint.
        url = base_url + '?' + urlencode({'delta': 'usage'}, quote_via=quote_plus)
        response = client.get(url, **self.headers)
        self.assertEqual(response.status_code, 400)

        # 'request' is not valid either.  Build the second URL from the base
        # URL: the original appended to the already-parameterized URL,
        # yielding '...?delta=usage?delta=request' and rejecting the request
        # for the wrong reason.
        url = base_url + '?' + urlencode({'delta': 'request'}, quote_via=quote_plus)
        response = client.get(url, **self.headers)
        self.assertEqual(response.status_code, 400)

    def test_execute_query_ocp_cpu_with_delta_charge(self):
        """Test that charge deltas work for CPU."""
        query = urlencode({'delta': 'cost'}, quote_via=quote_plus)
        response = APIClient().get(
            '{}?{}'.format(reverse('reports-openshift-cpu'), query), **self.headers
        )
        self.assertEqual(response.status_code, 200)

    def test_execute_query_ocp_cpu_with_delta_usage(self):
        """Test that usage deltas work for CPU."""
        query = urlencode({'delta': 'usage'}, quote_via=quote_plus)
        response = APIClient().get(
            '{}?{}'.format(reverse('reports-openshift-cpu'), query), **self.headers
        )
        self.assertEqual(response.status_code, 200)

    def test_execute_query_ocp_cpu_with_delta_request(self):
        """Test that request deltas work for CPU."""
        query = urlencode({'delta': 'request'}, quote_via=quote_plus)
        response = APIClient().get(
            '{}?{}'.format(reverse('reports-openshift-cpu'), query), **self.headers
        )
        self.assertEqual(response.status_code, 200)

    def test_execute_query_ocp_memory_with_delta(self):
        """Test that deltas work for CPU."""
        query = urlencode({'delta': 'request'}, quote_via=quote_plus)
        response = APIClient().get(
            '{}?{}'.format(reverse('reports-openshift-memory'), query), **self.headers
        )
        self.assertEqual(response.status_code, 200)

    def test_execute_query_ocp_cpu_with_delta_usage__capacity(self):
        """Test that usage v capacity deltas work."""
        delta = 'usage__capacity'
        query = urlencode({'delta': delta}, quote_via=quote_plus)
        client = APIClient()
        response = client.get('{}?{}'.format(reverse('reports-openshift-cpu'), query),
                              **self.headers)
        self.assertEqual(response.status_code, 200)

        numerator_key, denominator_key = delta.split('__')
        payload = response.json()
        for entry in payload.get('data', []):
            first = entry.get('values', {})[0]
            denominator = first.get(denominator_key, {}).get('value')
            # delta_percent is usage as a percentage of capacity; zero when
            # there is no capacity value to divide by.
            if denominator:
                expected = first.get(numerator_key, {}).get('value') / denominator * 100
            else:
                expected = 0
            self.assertEqual(round(first.get('delta_percent'), 3), round(expected, 3))

    def test_execute_query_ocp_cpu_with_delta_usage__request(self):
        """Test that usage v request deltas work."""
        delta = 'usage__request'
        query = urlencode({'delta': delta}, quote_via=quote_plus)
        client = APIClient()
        response = client.get('{}?{}'.format(reverse('reports-openshift-cpu'), query),
                              **self.headers)
        self.assertEqual(response.status_code, 200)

        numerator_key, denominator_key = delta.split('__')
        payload = response.json()
        for entry in payload.get('data', []):
            first = entry.get('values', {})[0]
            denominator = first.get(denominator_key, {}).get('value')
            # delta_percent is usage as a percentage of request; zero when
            # there is no request value to divide by.
            if denominator:
                expected = first.get(numerator_key, {}).get('value') / denominator * 100
            else:
                expected = 0
            self.assertEqual(round(first.get('delta_percent'), 3), round(expected, 3))

    def test_execute_query_ocp_cpu_with_delta_request__capacity(self):
        """Test that request v capacity deltas work."""
        delta = 'request__capacity'
        url = reverse('reports-openshift-cpu')
        client = APIClient()
        params = {
            'delta': delta
        }
        url = url + '?' + urlencode(params, quote_via=quote_plus)
        response = client.get(url, **self.headers)
        self.assertEqual(response.status_code, 200)

        delta_one, delta_two = delta.split('__')
        data = response.json()
        for entry in data.get('data', []):
            values = entry.get('values', {})[0]
            # Guard on the capacity *value*, not the enclosing dict: the dict
            # is truthy even when its 'value' is 0, which previously allowed
            # a ZeroDivisionError.  Matches the sibling delta tests.
            delta_percent = (values.get(delta_one, {}).get('value') /  # noqa: W504
                             values.get(delta_two, {}).get('value') * 100) \
                                 if values.get(delta_two, {}).get('value') else 0
            self.assertAlmostEqual(values.get('delta_percent'), delta_percent)

    def test_execute_query_group_by_project(self):
        """Test that grouping by project filters data."""
        with tenant_context(self.tenant):
            # Force Django to do GROUP BY to get nodes
            namespaces = OCPUsageLineItemDailySummary.objects\
                .filter(usage_start__gte=self.ten_days_ago)\
                .values(*['namespace'])\
                .annotate(project_count=Count('namespace'))\
                .all()
            target_project = namespaces[0].get('namespace')

        query = urlencode({'group_by[project]': target_project}, quote_via=quote_plus)
        client = APIClient()
        response = client.get('{}?{}'.format(reverse('reports-openshift-cpu'), query),
                              **self.headers)
        self.assertEqual(response.status_code, 200)

        payload = response.json()
        for entry in payload.get('data', []):
            for project_entry in entry.get('projects', []):
                self.assertEqual(project_entry.get('project'), target_project)

    def test_execute_query_group_by_cluster(self):
        """Test that grouping by cluster filters data."""
        with tenant_context(self.tenant):
            # Force Django to do GROUP BY to get nodes
            cluster_rows = OCPUsageLineItemDailySummary.objects\
                .filter(usage_start__gte=self.ten_days_ago)\
                .values(*['cluster_id'])\
                .annotate(cluster_count=Count('cluster_id'))\
                .all()
            target_cluster = cluster_rows[0].get('cluster_id')

        query = urlencode({'group_by[cluster]': target_cluster}, quote_via=quote_plus)
        client = APIClient()
        response = client.get('{}?{}'.format(reverse('reports-openshift-cpu'), query),
                              **self.headers)
        self.assertEqual(response.status_code, 200)

        payload = response.json()
        for entry in payload.get('data', []):
            for cluster_entry in entry.get('clusters', []):
                self.assertEqual(cluster_entry.get('cluster'), target_cluster)

    def test_execute_query_group_by_pod_fails(self):
        """Test that grouping by pod filters data."""
        query = urlencode({'group_by[pod]': '*'}, quote_via=quote_plus)
        response = APIClient().get(
            '{}?{}'.format(reverse('reports-openshift-cpu'), query), **self.headers
        )
        # 'pod' is not a supported group-by dimension.
        self.assertEqual(response.status_code, 400)

    def test_execute_query_group_by_node(self):
        """Test that grouping by node filters data."""
        with tenant_context(self.tenant):
            # Force Django to do GROUP BY to get nodes.  (The original
            # chained .values(*['node']) twice; the first call was redundant.)
            nodes = OCPUsageLineItemDailySummary.objects\
                .filter(usage_start__gte=self.ten_days_ago)\
                .values(*['node'])\
                .annotate(node_count=Count('node'))\
                .all()
            node_of_interest = nodes[0].get('node')

        url = reverse('reports-openshift-cpu')
        client = APIClient()
        params = {'group_by[node]': node_of_interest}

        url = url + '?' + urlencode(params, quote_via=quote_plus)
        response = client.get(url, **self.headers)
        self.assertEqual(response.status_code, 200)

        data = response.json()
        for entry in data.get('data', []):
            for node in entry.get('nodes', []):
                self.assertEqual(node.get('node'), node_of_interest)

    def test_execute_query_with_tag_filter(self):
        """Test that data is filtered by tag key.

        Picks a real pod-label key/value pair from the tenant's data,
        queries the CPU report filtered on that tag, and checks the reported
        totals against direct database aggregates.
        """
        handler = OCPTagQueryHandler({'filter': {'type': 'pod'}}, '?filter[type]=pod', self.tenant)
        tag_keys = handler.get_tag_keys()
        filter_key = tag_keys[0]

        with tenant_context(self.tenant):
            # Find any row carrying the chosen tag key to obtain a value.
            labels = OCPUsageLineItemDailySummary.objects\
                .filter(usage_start__gte=self.ten_days_ago)\
                .filter(pod_labels__has_key=filter_key)\
                .values(*['pod_labels'])\
                .all()
            label_of_interest = labels[0]
            filter_value = label_of_interest.get('pod_labels', {}).get(filter_key)

            # Expected totals for rows matching the exact key/value pair.
            totals = OCPUsageLineItemDailySummary.objects\
                .filter(usage_start__gte=self.ten_days_ago)\
                .filter(**{f'pod_labels__{filter_key}': filter_value})\
                .aggregate(
                    **{
                        'usage': Sum('pod_usage_cpu_core_hours'),
                        'request': Sum('pod_request_cpu_core_hours'),
                        'limit': Sum('pod_limit_cpu_core_hours'),
                        'cost': Sum('pod_charge_cpu_core_hours')
                    }
                )

        url = reverse('reports-openshift-cpu')
        client = APIClient()
        params = {f'filter[tag:{filter_key}]': filter_value}

        url = url + '?' + urlencode(params, quote_via=quote_plus)
        response = client.get(url, **self.headers)
        self.assertEqual(response.status_code, 200)

        data = response.json()
        data_totals = data.get('meta', {}).get('total', {})
        for key in totals:
            # NOTE(review): only the expected side is rounded; this holds
            # only if the API serializes values at <= 6 decimals -- confirm.
            expected = round(float(totals[key]), 6)
            result = data_totals.get(key, {}).get('value')
            self.assertEqual(result, expected)

    def test_execute_query_with_wildcard_tag_filter(self):
        """Test that data is filtered to include entries with tag key."""
        handler = OCPTagQueryHandler({'filter': {'type': 'pod'}}, '?filter[type]=pod', self.tenant)
        tag_keys = handler.get_tag_keys()
        filter_key = tag_keys[0]

        with tenant_context(self.tenant):
            # Expected totals for all rows that merely *have* the tag key
            # (wildcard match), regardless of its value.
            totals = OCPUsageLineItemDailySummary.objects\
                .filter(usage_start__gte=self.ten_days_ago)\
                .filter(**{'pod_labels__has_key': filter_key})\
                .aggregate(
                    **{
                        'usage': Sum('pod_usage_cpu_core_hours'),
                        'request': Sum('pod_request_cpu_core_hours'),
                        'limit': Sum('pod_limit_cpu_core_hours'),
                        # NOTE(review): keyed 'derived_cost' here while the
                        # exact-match sibling test uses 'cost' -- confirm
                        # which key the API's meta.total actually exposes.
                        'derived_cost': Sum('pod_charge_cpu_core_hours')
                    }
                )

        url = reverse('reports-openshift-cpu')
        client = APIClient()
        params = {f'filter[tag:{filter_key}]': '*'}

        url = url + '?' + urlencode(params, quote_via=quote_plus)
        response = client.get(url, **self.headers)
        self.assertEqual(response.status_code, 200)

        data = response.json()
        data_totals = data.get('meta', {}).get('total', {})
        for key in totals:
            expected = round(float(totals[key]), 6)
            result = data_totals.get(key, {}).get('value')
            self.assertEqual(result, expected)

    def test_execute_query_with_tag_group_by(self):
        """Test that data is grouped by tag key."""
        handler = OCPTagQueryHandler({'filter': {'type': 'pod'}}, '?filter[type]=pod', self.tenant)
        group_by_key = handler.get_tag_keys()[0]

        query = urlencode({f'group_by[tag:{group_by_key}]': '*'}, quote_via=quote_plus)
        client = APIClient()
        response = client.get('{}?{}'.format(reverse('reports-openshift-cpu'), query),
                              **self.headers)
        self.assertEqual(response.status_code, 200)

        report = response.json().get('data', [])
        # Grouped entries are keyed by the pluralized tag key.
        expected_keys = ['date', group_by_key + 's']
        for entry in report:
            self.assertEqual(list(entry.keys()), expected_keys)

    def test_execute_query_with_group_by_tag_and_limit(self):
        """Test that data is grouped by tag key and limited."""
        generator = OCPReportDataGenerator(self.tenant, dated_tags=False)
        generator.add_data_to_tenant()
        group_by_key = 'app_label'

        query = urlencode(
            {
                'filter[resolution]': 'monthly',
                'filter[time_scope_value]': '-2',
                'filter[time_scope_units]': 'month',
                f'group_by[tag:{group_by_key}]': '*',
                'filter[limit]': 2,
            },
            quote_via=quote_plus,
        )
        client = APIClient()
        response = client.get('{}?{}'.format(reverse('reports-openshift-cpu'), query),
                              **self.headers)
        self.assertEqual(response.status_code, 200)

        report = response.json().get('data', [])
        # Results default to descending usage order; the aggregated 'Other'
        # bucket is excluded from the ordering check.
        labels = report[0].get('app_labels', [])
        previous = labels[0].get('values', [{}])[0].get('usage', {}).get('value', 0)
        for label_entry in labels:
            current = label_entry.get('values', [{}])[0].get('usage', {}).get('value', 0)
            if 'Other' not in label_entry.get('app_label'):
                self.assertLessEqual(current, previous)
                previous = current

    def test_execute_query_with_group_by_and_limit(self):
        """Test that data is grouped by and limited."""
        query = urlencode({'group_by[node]': '*', 'filter[limit]': 1},
                          quote_via=quote_plus)
        client = APIClient()
        response = client.get('{}?{}'.format(reverse('reports-openshift-cpu'), query),
                              **self.headers)
        self.assertEqual(response.status_code, 200)

        report = response.json().get('data', [])
        for entry in report:
            # The last ranked group should be the aggregated 'Other' bucket.
            tail = entry.get('nodes', [])[-1:]
            self.assertIn('Other', tail[0].get('node'))

    def test_execute_query_with_group_by_order_by_and_limit(self):
        """Test that data is grouped by and limited on order by."""
        for option in ('cost', 'usage', 'request', 'limit'):
            client = APIClient()
            query = urlencode(
                {
                    'filter[resolution]': 'monthly',
                    'filter[time_scope_value]': '-1',
                    'filter[time_scope_units]': 'month',
                    'group_by[node]': '*',
                    'order_by[{}]'.format(option): 'desc',
                    'filter[limit]': 1,
                },
                quote_via=quote_plus,
            )
            response = client.get('{}?{}'.format(reverse('reports-openshift-cpu'), query),
                                  **self.headers)
            self.assertEqual(response.status_code, 200)

            report = response.json().get('data', [])
            # Descending order: each node's value must not exceed the prior one.
            previous = report[0].get('nodes', [])[0].get('values', [])[0].get(option, {}).get('value')
            for node_entry in report[0].get('nodes', []):
                current = node_entry.get('values', [])[0].get(option, {}).get('value')
                self.assertLessEqual(current, previous)
                previous = current

    def test_execute_query_with_order_by_delta_and_limit(self):
        """Test that data is grouped and limited by order by delta."""
        url = reverse('reports-openshift-cpu')
        client = APIClient()
        params = {
            'filter[resolution]': 'monthly',
            'filter[time_scope_value]': '-1',
            'filter[time_scope_units]': 'month',
            'group_by[node]': '*',
            'order_by[delta]': 'desc',
            'filter[limit]': 1,
            'delta': 'usage__capacity'
        }
        url = url + '?' + urlencode(params, quote_via=quote_plus)
        response = client.get(url, **self.headers)
        self.assertEqual(response.status_code, 200)

        data = response.json().get('data', [])

        def usage_ratio(node):
            """Return usage/capacity from a node entry's first values dict."""
            values = node.get('values', [])[0]
            return (values.get('usage', {}).get('value') /  # noqa: W504
                    values.get('capacity', {}).get('value'))

        previous_usage = usage_ratio(data[0].get('nodes', [])[0])
        for entry in data[0].get('nodes', []):
            # BUG FIX: the original recomputed the ratio from nodes[0] on
            # every iteration, making the comparison vacuous.  Use the
            # current entry and assert descending order to match
            # order_by[delta]=desc (as the sibling order-by test does).
            current_usage = usage_ratio(entry)
            self.assertLessEqual(current_usage, previous_usage)
            previous_usage = current_usage

    def test_execute_query_volume(self):
        """Test that the volume endpoint functions."""
        query = urlencode(
            {
                'filter[resolution]': 'monthly',
                'filter[time_scope_value]': '-1',
                'filter[time_scope_units]': 'month',
            },
            quote_via=quote_plus,
        )
        client = APIClient()
        response = client.get('{}?{}'.format(reverse('reports-openshift-volume'), query),
                              **self.headers)
        self.assertEqual(response.status_code, 200)

        first = response.json().get('data')[0].get('values')[0]
        for field in ('usage', 'request', 'cost'):
            self.assertIn(field, first)
        self.assertEqual(first.get('usage', {}).get('units'), 'GB-Mo')

    def test_execute_query_default_pagination(self):
        """Test that the default pagination works."""
        query = urlencode(
            {
                'filter[resolution]': 'monthly',
                'filter[time_scope_value]': '-1',
                'filter[time_scope_units]': 'month',
            },
            quote_via=quote_plus,
        )
        client = APIClient()
        response = client.get('{}?{}'.format(reverse('reports-openshift-volume'), query),
                              **self.headers)
        self.assertEqual(response.status_code, 200)

        payload = response.json()
        meta = payload.get('meta', {})
        for key in ('total', 'filter', 'count'):
            self.assertIn(key, meta)

        # Without a limit, all counted entries come back in one page.
        self.assertEqual(len(payload.get('data', [])), meta.get('count', 0))

    def test_execute_query_limit_pagination(self):
        """Test that the default pagination works with a limit."""
        limit = 5
        start_date = self.dh.this_month_start.date().strftime('%Y-%m-%d')
        query = urlencode(
            {
                'filter[resolution]': 'daily',
                'filter[time_scope_value]': '-1',
                'filter[time_scope_units]': 'month',
                'limit': limit,
            },
            quote_via=quote_plus,
        )
        client = APIClient()
        response = client.get('{}?{}'.format(reverse('reports-openshift-cpu'), query),
                              **self.headers)
        self.assertEqual(response.status_code, 200)

        payload = response.json()
        rows = payload.get('data', [])
        meta = payload.get('meta', {})
        count = meta.get('count', 0)

        for key in ('total', 'filter', 'count'):
            self.assertIn(key, meta)

        self.assertNotEqual(len(rows), count)
        # Page length is the limit, or the remaining count when smaller.
        expected_len = count if limit > count else limit
        self.assertEqual(len(rows), expected_len)
        self.assertEqual(rows[0].get('date'), start_date)

    def test_execute_query_limit_offset_pagination(self):
        """Test that the default pagination works with an offset."""
        limit, offset = 5, 5
        start_date = (self.dh.this_month_start + datetime.timedelta(days=5))\
            .date()\
            .strftime('%Y-%m-%d')
        query = urlencode(
            {
                'filter[resolution]': 'daily',
                'filter[time_scope_value]': '-1',
                'filter[time_scope_units]': 'month',
                'limit': limit,
                'offset': offset,
            },
            quote_via=quote_plus,
        )
        client = APIClient()
        response = client.get('{}?{}'.format(reverse('reports-openshift-cpu'), query),
                              **self.headers)
        self.assertEqual(response.status_code, 200)

        payload = response.json()
        rows = payload.get('data', [])
        meta = payload.get('meta', {})
        count = meta.get('count', 0)

        for key in ('total', 'filter', 'count'):
            self.assertIn(key, meta)

        self.assertNotEqual(len(rows), count)
        # Page length is the limit, or whatever remains past the offset.
        expected_len = max(count - offset, 0) if limit + offset > count else limit
        self.assertEqual(len(rows), expected_len)
        self.assertEqual(rows[0].get('date'), start_date)

    def test_execute_query_filter_limit_offset_pagination(self):
        """Test that the ranked group pagination works."""
        limit, offset = 1, 0

        query = urlencode(
            {
                'filter[resolution]': 'monthly',
                'filter[time_scope_value]': '-1',
                'filter[time_scope_units]': 'month',
                'group_by[project]': '*',
                'filter[limit]': limit,
                'filter[offset]': offset,
            },
            quote_via=quote_plus,
        )
        client = APIClient()
        response = client.get('{}?{}'.format(reverse('reports-openshift-cpu'), query),
                              **self.headers)
        self.assertEqual(response.status_code, 200)

        payload = response.json()
        meta = payload.get('meta', {})
        count = meta.get('count', 0)

        for key in ('total', 'filter', 'count'):
            self.assertIn(key, meta)

        # Each date entry carries at most `limit` ranked project groups.
        for entry in payload.get('data', []):
            projects = entry.get('projects', [])
            expected_len = max(count - offset, 0) if limit + offset > count else limit
            self.assertEqual(len(projects), expected_len)

    def test_execute_query_filter_limit_high_offset_pagination(self):
        """Test that the default pagination works."""
        limit, offset = 1, 10

        query = urlencode(
            {
                'filter[resolution]': 'monthly',
                'filter[time_scope_value]': '-1',
                'filter[time_scope_units]': 'month',
                'group_by[project]': '*',
                'filter[limit]': limit,
                'filter[offset]': offset,
            },
            quote_via=quote_plus,
        )
        client = APIClient()
        response = client.get('{}?{}'.format(reverse('reports-openshift-cpu'), query),
                              **self.headers)
        self.assertEqual(response.status_code, 200)

        payload = response.json()
        meta = payload.get('meta', {})
        count = meta.get('count', 0)

        for key in ('total', 'filter', 'count'):
            self.assertIn(key, meta)

        # With the offset past the ranked-group count, entries come back empty.
        for entry in payload.get('data', []):
            projects = entry.get('projects', [])
            expected_len = max(count - offset, 0) if limit + offset > count else limit
            self.assertEqual(len(projects), expected_len)
# Beispiel #29 (scraper artifact: example separator, commented out so the
# module remains importable)
class OCPTagsViewTest(IamTestCase):
    """Tests the report view."""
    @classmethod
    def setUpClass(cls):
        """Set up the test class."""
        super().setUpClass()
        cls.dh = DateHelper()
        cls.ten_days_ago = cls.dh.n_days_ago(cls.dh._now, 9)

    def setUp(self):
        """Set up the customer view tests."""
        super().setUp()
        _, self.provider = create_generic_provider(Provider.PROVIDER_OCP,
                                                   self.headers)
        self.data_generator = OCPReportDataGenerator(self.tenant,
                                                     self.provider)
        self.data_generator.add_data_to_tenant()

    def _calculate_expected_range(self, time_scope_value, time_scope_units):
        """Return the expected (start, end) date range for a time scope.

        Args:
            time_scope_value (str): one of '-1', '-2', '-10', '-30'.
            time_scope_units (str): 'month' or 'day'.

        Returns:
            (date, date): inclusive start and end of the expected range.

        Raises:
            ValueError: if the value/units combination is unsupported.
        """
        today = self.dh.today

        if time_scope_value == '-1' and time_scope_units == 'month':
            start_range = today.replace(day=1).date()
        elif time_scope_value == '-2' and time_scope_units == 'month':
            start_range = (today -
                           relativedelta(months=1)).replace(day=1).date()
        elif time_scope_value == '-10' and time_scope_units == 'day':
            start_range = (today - relativedelta(days=10)).date()
        elif time_scope_value == '-30' and time_scope_units == 'day':
            start_range = (today - relativedelta(days=30)).date()
        else:
            # An unsupported combination previously fell through and raised
            # UnboundLocalError on `start_range`; fail explicitly instead.
            raise ValueError(
                'unsupported time scope: '
                f'{time_scope_value} {time_scope_units}'
            )

        # The end of the range is always the last day of the current month.
        end_range = today.replace(
            day=calendar.monthrange(today.year, today.month)[1]).date()

        return start_range, end_range

    def _query_tags_and_assert_dates(self, case, key_only, tag_type=None):
        """Query the OCP tags endpoint and assert label dates are in range.

        Args:
            case (dict): holds 'value', 'unit' and 'resolution' filter values.
            key_only (bool): request tag keys only (labels are plain strings)
                instead of full tag objects with 'key' and 'values'.
            tag_type (str): optional filter[type] value (e.g. 'pod').
        """
        url = reverse('openshift-tags')
        client = APIClient()
        params = {
            'filter[resolution]': case.get('resolution'),
            'filter[time_scope_value]': case.get('value'),
            'filter[time_scope_units]': case.get('unit'),
            'key_only': key_only
        }
        if tag_type is not None:
            params['filter[type]'] = tag_type
        url = url + '?' + urlencode(params, quote_via=quote_plus)
        response = client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        data = response.json()
        start_range, end_range = self._calculate_expected_range(
            case.get('value'), case.get('unit'))

        for entry in data.get('data'):
            # key_only responses are bare label strings; full responses are
            # dicts with 'key' and 'values'.
            label = entry if key_only else entry.get('key')
            # Labels carry a leading 'MM-DD-YYYY*' date marker.
            label_date = datetime.datetime.strptime(
                label.split('*')[0], '%m-%d-%Y')
            self.assertGreaterEqual(label_date.date(), start_range)
            self.assertLessEqual(label_date.date(), end_range)
            if not key_only:
                self.assertIsNotNone(entry.get('values'))

        self.assertTrue(data.get('data'))
        self.assertTrue(isinstance(data.get('data'), list))

    def test_execute_ocp_tags_queries_keys_only(self):
        """Test that tag key data is for the correct time queries."""
        test_cases = [
            {'value': '-1', 'unit': 'month', 'resolution': 'monthly'},
            {'value': '-2', 'unit': 'month', 'resolution': 'monthly'},
            {'value': '-10', 'unit': 'day', 'resolution': 'daily'},
            {'value': '-30', 'unit': 'day', 'resolution': 'daily'},
        ]

        for case in test_cases:
            self._query_tags_and_assert_dates(case, key_only=True)

    def test_execute_ocp_tags_queries(self):
        """Test that tag data is for the correct time queries."""
        test_cases = [
            {'value': '-1', 'unit': 'month', 'resolution': 'monthly'},
            {'value': '-2', 'unit': 'month', 'resolution': 'monthly'},
            {'value': '-10', 'unit': 'day', 'resolution': 'daily'},
            {'value': '-30', 'unit': 'day', 'resolution': 'daily'},
        ]

        for case in test_cases:
            self._query_tags_and_assert_dates(case, key_only=False)

    def test_execute_ocp_tags_type_queries(self):
        """Test that tag data is for the correct type queries."""
        test_cases = [
            {'value': '-1', 'unit': 'month', 'resolution': 'monthly',
             'type': 'pod'},
            {'value': '-2', 'unit': 'month', 'resolution': 'monthly',
             'type': 'pod'},
            {'value': '-10', 'unit': 'day', 'resolution': 'daily',
             'type': 'pod'},
            {'value': '-30', 'unit': 'day', 'resolution': 'daily',
             'type': 'storage'},
        ]

        for case in test_cases:
            self._query_tags_and_assert_dates(
                case, key_only=False, tag_type=case.get('type'))

    def test_execute_query_with_and_filter(self):
        """Test the filter[and:] param in the view."""
        url = reverse('openshift-tags')
        client = APIClient()

        with tenant_context(self.tenant):
            projects = OCPUsageLineItemDailySummary.objects\
                .filter(usage_start__gte=self.ten_days_ago)\
                .values('namespace').distinct()
            projects = [project.get('namespace') for project in projects]
        params = {
            'filter[resolution]': 'daily',
            'filter[time_scope_value]': '-10',
            'filter[time_scope_units]': 'day',
            'filter[and:project]': projects
        }
        url = url + '?' + urlencode(params, quote_via=quote_plus)
        response = client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        # AND-ing every distinct project together matches no single tag row,
        # so the result set is expected to be empty.
        response_data = response.json()
        self.assertEqual(response_data.get('data', []), [])
Beispiel #30
0
class OCPAzureReportDataGenerator(object):
    """Populate the database with OCP on Azure report data."""

    def __init__(self, tenant, provider, current_month_only=False, config=None):
        """Set up the class.

        Args:
            tenant (Tenant): tenant whose schema receives the data.
            provider (Provider): provider the generated data belongs to.
            current_month_only (bool): restrict data to the current month.
            config (FakeAzureConfig): optional pre-built fake Azure config.
        """
        # prevent future whammy:
        assert isinstance(tenant, Tenant), 'not a Tenant type'
        assert isinstance(provider, Provider), 'not a Provider type'
        assert isinstance(current_month_only, bool), 'not a bool type'
        if config:
            assert isinstance(config, FakeAzureConfig), 'not a FakeAzureConfig type'

        self.tenant = tenant
        self.provider = provider
        self.current_month_only = current_month_only
        self.config = config if config else FakeAzureConfig()
        self.fake = Faker()
        self.dh = DateHelper()
        self.provider_uuid = provider.uuid
        # Set by add_ocp_data_to_tenant(); lets remove_data_from_tenant()
        # also clean up the paired OCP data.
        self.ocp_generator = None

        # generate a list of dicts with unique keys.
        self.period_ranges, self.report_ranges = self.report_period_and_range()

    def report_period_and_range(self):
        """Return the report periods and their report-date ranges.

        Returns:
            (list, list): `period` is a list of (month_start, month_end)
            tuples; `ranges` is a parallel list where each element is the
            list of report dates generated for that period.
        """
        period = []
        ranges = []
        if self.current_month_only:
            report_days = 10
            diff_from_first = self.dh.today - self.dh.this_month_start
            if diff_from_first.days < 10:
                # Fewer than 10 days have elapsed this month: walk forward
                # from the first of the month up to today.
                report_days = 1 + diff_from_first.days
                period = [(self.dh.this_month_start, self.dh.this_month_end)]
                ranges = [
                    list(
                        self.dh.this_month_start + relativedelta(days=i)
                        for i in range(report_days)
                    )
                ]
            else:
                # Ten most recent days, walking backward from today.
                period = [(self.dh.this_month_start, self.dh.this_month_end)]
                ranges = [
                    list(
                        self.dh.today - relativedelta(days=i)
                        for i in range(report_days)
                    )
                ]

        else:
            period = [
                (self.dh.last_month_start, self.dh.last_month_end),
                (self.dh.this_month_start, self.dh.this_month_end),
            ]

            one_month_ago = self.dh.today - relativedelta(months=1)
            diff_from_first = self.dh.today - self.dh.this_month_start
            if diff_from_first.days < 10:
                # Early in the month: mirror the same number of days in the
                # previous month, walking forward from each month's start.
                report_days = 1 + diff_from_first.days
                ranges = [
                    list(
                        self.dh.last_month_start + relativedelta(days=i)
                        for i in range(report_days)
                    ),
                    list(
                        self.dh.this_month_start + relativedelta(days=i)
                        for i in range(report_days)
                    ),
                ]
            else:
                # Ten days in each month, walking backward from today and
                # from the same date one month ago.
                ranges = [
                    list(one_month_ago - relativedelta(days=i) for i in range(10)),
                    list(self.dh.today - relativedelta(days=i) for i in range(10)),
                ]
        return (period, ranges)

    def remove_data_from_tenant(self):
        """Remove the added data."""
        if self.ocp_generator:
            self.ocp_generator.remove_data_from_tenant()
        with tenant_context(self.tenant):
            for table in (
                OCPAzureCostLineItemDailySummary,
                OCPAzureCostLineItemProjectDailySummary,
            ):
                table.objects.all().delete()

    def add_ocp_data_to_tenant(self):
        """Populate tenant with OCP data matching the Azure data."""
        # cluster_id/cluster_alias/namespaces/nodes are assigned in
        # add_data_to_tenant(), so it must run first.
        assert self.cluster_id, 'method must be called after add_data_to_tenant'
        self.ocp_generator = OCPReportDataGenerator(self.tenant, self.provider, self.current_month_only)
        ocp_config = {
            'cluster_id': self.cluster_id,
            'cluster_alias': self.cluster_alias,
            'namespaces': self.namespaces,
            'nodes': self.nodes,
        }
        self.ocp_generator.add_data_to_tenant(**ocp_config)

    def add_data_to_tenant(self, fixed_fields=None, service_name=None):
        """Populate tenant with data.

        Args:
            fixed_fields (list): FakeAzureConfig field names to carry over
                unchanged between generated line items.
            service_name (str): optional service name to force on every
                generated line item.
        """
        words = list(set([self.fake.word() for _ in range(10)]))

        self.cluster_id = random.choice(words)
        self.cluster_alias = random.choice(words)
        self.namespaces = random.sample(words, k=2)
        self.nodes = random.sample(words, k=2)

        # One summary line item per node.
        self.ocp_azure_summary_line_items = [
            {
                'namespace': random.choice(self.namespaces),
                'pod': random.choice(words),
                'node': node,
                'resource_id': self.fake.ean8(),
            }
            for node in self.nodes
        ]
        with tenant_context(self.tenant):
            for i, period in enumerate(self.period_ranges):
                for report_date in self.report_ranges[i]:
                    for row in self.ocp_azure_summary_line_items:
                        self._randomize_line_item(retained_fields=fixed_fields)
                        if service_name:
                            self.config.service_name = service_name
                        li = self._populate_ocp_azure_cost_line_item_daily_summary(row, report_date)
                        self._populate_ocp_azure_cost_line_item_project_daily_summary(
                            li, row, report_date
                        )

    def create_ocp_provider(self, cluster_id, cluster_alias):
        """Create OCP test provider.

        Side effects: updates self.cluster_alias and self.provider_uuid to
        match the newly created provider.
        """
        auth = baker.make(
            ProviderAuthentication,
            provider_resource_name=cluster_id,
        )
        bill = baker.make(
            ProviderBillingSource,
            bucket='',
        )
        provider_uuid = uuid4()
        provider_data = {
            'uuid': provider_uuid,
            'name': cluster_alias,
            'authentication': auth,
            'billing_source': bill,
            'customer': None,
            'created_by': None,
            'type': Provider.PROVIDER_OCP,
            'setup_complete': False
        }
        provider = Provider(**provider_data)
        provider.save()
        self.cluster_alias = cluster_alias
        self.provider_uuid = provider_uuid
        return provider

    def _randomize_line_item(self, retained_fields=None):
        """Update our FakeAzureConfig to generate a new line item.

        Args:
            retained_fields (list): config field names carried over to the
                new config; defaults to subscription/location/tags.
        """
        DEFAULT_FIELDS = ['subscription_guid', 'resource_location', 'tags']
        if not retained_fields:
            retained_fields = DEFAULT_FIELDS

        config_dict = {}
        for field in retained_fields:
            # NOTE(review): assumes FakeAzureConfig supports `in` membership
            # tests for its field names — confirm against its definition.
            if field in self.config:
                config_dict[field] = getattr(self.config, field)
        self.config = FakeAzureConfig(**config_dict)

    def _populate_ocp_azure_cost_line_item_daily_summary(self, row, report_date):
        """Create and save one OCPAzureCostLineItemDailySummary row.

        Args:
            row (dict): namespace/pod/node/resource_id for the line item.
            report_date (datetime): usage date; a random date within the
                current month is used when falsy.

        Returns:
            OCPAzureCostLineItemDailySummary: the saved line item.
        """
        if report_date:
            usage_dt = report_date
        else:
            usage_dt = self.fake.date_time_between_dates(
                self.dh.this_month_start, self.dh.today
            )
        usage_qty = random.random() * random.randrange(0, 100)
        pretax = usage_qty * self.config.meter_rate

        data = {
            # OCP Fields:
            'cluster_id': self.cluster_id,
            'cluster_alias': self.cluster_alias,
            'namespace': [row.get('namespace')],
            'pod': [row.get('pod')],
            'node': row.get('node'),
            'resource_id': row.get('resource_id'),
            'usage_start': usage_dt,
            'usage_end': usage_dt,
            # Azure Fields:
            'cost_entry_bill': baker.make(AzureCostEntryBill),
            'subscription_guid': self.config.subscription_guid,
            'instance_type': self.config.instance_type,
            'service_name': self.config.service_name,
            'resource_location': self.config.resource_location,
            'tags': self.select_tags(),
            'usage_quantity': usage_qty,
            'pretax_cost': pretax,
            'markup_cost': pretax * 0.1,  # markup is a flat 10% of pretax
            'offer_id': random.choice([None, self.fake.pyint()]),
            'currency': 'USD',
            'unit_of_measure': 'some units',
            'shared_projects': 1,
            'project_costs': pretax,
        }

        line_item = OCPAzureCostLineItemDailySummary(**data)
        line_item.save()
        return line_item

    def _populate_ocp_azure_cost_line_item_project_daily_summary(self, li, row, report_date):
        """Create and save the project-level summary row mirroring `li`.

        Args:
            li (OCPAzureCostLineItemDailySummary): the summary row whose
                values are copied.
            row (dict): namespace/pod/node/resource_id for the line item.
            report_date (datetime): unused; dates are taken from `li`.
        """
        data = {
            # OCP Fields:
            'cluster_id': li.cluster_id,
            'cluster_alias': li.cluster_alias,
            'namespace': [row.get('namespace')],
            'pod': [row.get('pod')],
            'node': row.get('node'),
            'resource_id': row.get('resource_id'),
            'usage_start': li.usage_start,
            'usage_end': li.usage_end,
            # Azure Fields:
            'cost_entry_bill': li.cost_entry_bill,
            'subscription_guid': li.subscription_guid,
            'instance_type': li.instance_type,
            'service_name': li.service_name,
            'resource_location': li.resource_location,
            'usage_quantity': li.usage_quantity,
            'unit_of_measure': 'some units',
            'offer_id': li.offer_id,
            'currency': 'USD',
            'pretax_cost': li.pretax_cost,
            'project_markup_cost': li.markup_cost,
            'pod_cost': random.random() * li.pretax_cost,
        }

        line_item = OCPAzureCostLineItemProjectDailySummary(**data)
        line_item.save()

    def select_tags(self):
        """Return a random subset of the defined tags."""
        tag_keys = list(self.config.tags.keys())
        # random.sample picks distinct keys; the previous random.choices
        # sampled WITH replacement, so duplicate keys collapsed and the
        # returned dict could be smaller than requested. randint also keeps
        # the call valid when exactly two tags are defined, where
        # randrange(2, 2) raised ValueError.
        num_keys = random.randint(min(2, len(tag_keys)), len(tag_keys))
        return {
            key: self.config.tags[key]
            for key in random.sample(tag_keys, k=num_keys)
        }