class OCPAWSQueryHandlerTestNoData(IamTestCase):
    """Tests for the OCP report query handler with no data."""

    def setUp(self):
        """Set up the customer view tests."""
        super().setUp()
        self.dh = DateHelper()

        self.this_month_filter = {"usage_start__gte": self.dh.this_month_start}
        self.ten_day_filter = {"usage_start__gte": self.dh.n_days_ago(self.dh.today, 9)}
        self.thirty_day_filter = {"usage_start__gte": self.dh.n_days_ago(self.dh.today, 29)}
        self.last_month_filter = {
            "usage_start__gte": self.dh.last_month_start,
            "usage_end__lte": self.dh.last_month_end,
        }

    def test_execute_sum_query_instance_types(self):
        """Test that the sum query runs properly for instance-types."""
        url = "?"
        query_params = self.mocked_query_params(url, OCPAzureInstanceTypeView)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()
        self.assertIsNotNone(query_output.get("data"))
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        keys_units = {"cost": "USD", "markup_cost": "USD", "usage": "Instance Type Placeholder", "count": "instances"}
        for key, unit in keys_units.items():
            self.assertIsNotNone(total.get(key))
            self.assertIsInstance(total.get(key), dict)
            self.assertEqual(total.get(key).get("value"), 0)
            self.assertEqual(total.get(key).get("units"), unit)
Example 2
    def test_predict_response_date(self):
        """Test that predict() returns expected date range."""
        dh = DateHelper()

        expected = []
        for n in range(0, 10):
            expected.append(
                {
                    "usage_start": dh.n_days_ago(dh.today, 10 - n).date(),
                    "total_cost": 5,
                    "infrastructure_cost": 3,
                    "supplementary_cost": 2,
                }
            )
        mock_qset = MockQuerySet(expected)

        # Stub the chained ORM lookup (Table.objects.filter(...).order_by(...).values(...).annotate(...))
        # so that it returns the canned rows above.
        mocked_table = Mock()
        mocked_table.objects.filter.return_value.order_by.return_value.values.return_value.annotate.return_value = (  # noqa: E501
            mock_qset
        )
        mocked_table.len = mock_qset.len

        params = self.mocked_query_params("?", AWSCostForecastView)
        instance = AWSForecast(params)

        instance.cost_summary_table = mocked_table

        results = instance.predict()

        for item in results:
            self.assertIsInstance(item.get("date"), date)
            self.assertLessEqual(item.get("date"), dh.this_month_end.date())
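These forecast tests feed the predictor a MockQuerySet built from a list of row dicts and stub the chained ORM call so the forecaster reads those rows directly. The helper itself is not part of this listing; a minimal stand-in, assuming the forecaster only iterates the rows and reads a len attribute, might look like the sketch below (an assumption, not the project's real helper).

class MockQuerySet:
    """Minimal stand-in for a queryset of row dicts (illustrative only)."""

    def __init__(self, rows):
        self.rows = rows

    def __iter__(self):
        # Iterate the canned rows the way a real queryset would.
        return iter(self.rows)

    def __getitem__(self, index):
        return self.rows[index]

    @property
    def len(self):
        # The tests copy this onto the mocked table: mocked_table.len = mock_qset.len
        return len(self.rows)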
Example 3
    def test_parse_filter_dates_invalid(self):
        """Test parse of invalid data for filter date-based param should not succeed."""
        dh = DateHelper()
        scenarios = [
            {"start_date": dh.today.date()},
            {"end_date": dh.today.date()},
            {"start_date": dh.yesterday.date(), "end_date": dh.tomorrow.date()},
            {"start_date": dh.n_days_ago(materialized_view_month_start(dh), 1).date(), "end_date": dh.today.date()},
            {"start_date": "llamas", "end_date": dh.yesterday.date()},
            {"start_date": dh.yesterday.date(), "end_date": "alpacas"},
            {"start_date": "llamas", "end_date": "alpacas"},
            {
                "start_date": dh.last_month_start.date(),
                "end_date": dh.last_month_end.date(),
                "filter": {"time_scope_units": "day"},
            },
            {
                "start_date": dh.last_month_start.date(),
                "end_date": dh.last_month_end.date(),
                "filter": {"time_scope_value": "-1"},
            },
            {
                "start_date": dh.last_month_start.date(),
                "end_date": dh.last_month_end.date(),
                "filter": {"time_scope_units": "day", "time_scope_value": "-1"},
            },
        ]

        for params in scenarios:
            with self.subTest(params=params):
                serializer = OrgQueryParamSerializer(data=params)
                self.assertFalse(serializer.is_valid())
Example 4
    def test_predict_flat(self):
        """Test that predict() returns expected values for flat costs."""
        dh = DateHelper()

        expected = []
        for n in range(0, 10):
            expected.append(
                {
                    "usage_start": dh.n_days_ago(dh.today, 10 - n).date(),
                    "total_cost": 5,
                    "infrastructure_cost": 3,
                    "supplementary_cost": 2,
                }
            )
        mock_qset = MockQuerySet(expected)

        mocked_table = Mock()
        mocked_table.objects.filter.return_value.order_by.return_value.values.return_value.annotate.return_value = (  # noqa: E501
            mock_qset)

        mocked_table.len = mock_qset.len

        params = self.mocked_query_params("?", OCPAzureCostForecastView)
        instance = OCPAzureForecast(params)

        instance.cost_summary_table = mocked_table

        results = instance.predict()

        for result in results:
            for val in result.get("values", []):
                self.assertIsInstance(val.get("date"), date)

                for item, cost in [
                    (val.get("cost"), 5),
                    (val.get("infrastructure"), 3),
                    (val.get("supplementary"), 2),
                ]:
                    self.assertAlmostEqual(float(item.get("total").get("value")), cost, delta=0.0001)
                    self.assertAlmostEqual(float(item.get("confidence_max").get("value")), cost, delta=0.0001)
                    self.assertAlmostEqual(float(item.get("confidence_min").get("value")), cost, delta=0.0001)
                    self.assertAlmostEqual(float(item.get("rsquared").get("value")), 1, delta=0.0001)
                    self.assertGreaterEqual(float(item.get("pvalues").get("value")), 0)
Example 5
    def test_predict_few_values(self):
        """Test that predict() behaves well with a limited data set."""
        dh = DateHelper()

        num_elements = [AWSForecast.MINIMUM - 1, AWSForecast.MINIMUM, AWSForecast.MINIMUM + 1]

        for number in num_elements:
            with self.subTest(num_elements=number):
                expected = []
                for n in range(0, number):
                    # the test data needs to include some jitter to avoid
                    # division-by-zero in the underlying dot-product maths.
                    expected.append(
                        {
                            "usage_start": dh.n_days_ago(dh.today, 10 - n).date(),
                            "total_cost": 5 + (0.01 * n),
                            "infrastructure_cost": 3 + (0.01 * n),
                            "supplementary_cost": 2 + (0.01 * n),
                        }
                    )
                mock_qset = MockQuerySet(expected)

                mocked_table = Mock()
                mocked_table.objects.filter.return_value.order_by.return_value.values.return_value.annotate.return_value = (  # noqa: E501
                    mock_qset
                )
                mocked_table.len = mock_qset.len

                params = self.mocked_query_params("?", AWSCostForecastView)
                instance = AWSForecast(params)

                instance.cost_summary_table = mocked_table
                if number < AWSForecast.MINIMUM:
                    # forecasting isn't useful with fewer than the minimum number of data points.
                    with self.assertLogs(logger="forecast.forecast", level=logging.WARNING):
                        results = instance.predict()
                        self.assertEqual(results, [])
                else:
                    results = instance.predict()

                    self.assertNotEqual(results, [])

                    for result in results:
                        for val in result.get("values", []):
                            self.assertIsInstance(val.get("date"), date)

                            item = val.get("cost")
                            self.assertGreaterEqual(float(item.get("total").get("value")), 0)
                            self.assertGreaterEqual(float(item.get("confidence_max").get("value")), 0)
                            self.assertGreaterEqual(float(item.get("confidence_min").get("value")), 0)
                            self.assertGreaterEqual(float(item.get("rsquared").get("value")), 0)
                            for pval in item.get("pvalues").get("value"):
                                self.assertGreaterEqual(float(pval), 0)
                    # test that the results always stop at the end of the month.
                    self.assertEqual(results[-1].get("date"), dh.this_month_end.date())
Example 6
class OCPAWSQueryHandlerTestNoData(IamTestCase):
    """Tests for the OCP report query handler with no data."""
    def setUp(self):
        """Set up the customer view tests."""
        super().setUp()
        self.dh = DateHelper()

        self.this_month_filter = {'usage_start__gte': self.dh.this_month_start}
        self.ten_day_filter = {
            'usage_start__gte': self.dh.n_days_ago(self.dh.today, 9)
        }
        self.thirty_day_filter = {
            'usage_start__gte': self.dh.n_days_ago(self.dh.today, 29)
        }
        self.last_month_filter = {
            'usage_start__gte': self.dh.last_month_start,
            'usage_end__lte': self.dh.last_month_end
        }

    def test_execute_sum_query_instance_types(self):
        """Test that the sum query runs properly for instance-types."""
        # '?'
        query_params = FakeQueryParameters({},
                                           report_type='instance_type',
                                           tenant=self.tenant)
        handler = OCPAWSReportQueryHandler(query_params.mock_qp)
        query_output = handler.execute_query()
        self.assertIsNotNone(query_output.get('data'))
        self.assertIsNotNone(query_output.get('total'))
        total = query_output.get('total')
        self.assertIsNotNone(total.get('cost'))
        self.assertIsInstance(total.get('cost'), dict)
        self.assertEqual(total.get('cost').get('value'), 0)
        self.assertEqual(total.get('cost').get('units'), 'USD')
        self.assertIsNotNone(total.get('usage'))
        self.assertIsInstance(total.get('usage'), dict)
        self.assertEqual(total.get('usage').get('value'), 0)
        self.assertEqual(total.get('usage').get('units'), 'Hrs')
        self.assertIsNotNone(total.get('count'))
        self.assertIsInstance(total.get('count'), dict)
        self.assertEqual(total.get('count').get('value'), 0)
        self.assertEqual(total.get('count').get('units'), 'instances')
Example 7
class DataLoader(ABC):
    """Loads nise generated test data for different source types."""
    def __init__(self, schema, customer, num_days=40):
        """Initialize the data loader."""
        self.dh = DateHelper()
        self.schema = schema
        self.customer = customer
        self.dates = self.get_test_data_dates(num_days)
        self.first_start_date = self.dates[0][0]
        self.last_end_date = self.dates[1][1]

    def get_test_data_dates(self, num_days):
        """Return a list of tuples with dates for nise data."""
        end_date = self.dh.today
        if end_date.day == 1:
            end_date += relativedelta(days=1)
        n_days_ago = self.dh.n_days_ago(end_date, num_days)
        start_date = n_days_ago
        if self.dh.this_month_start > n_days_ago:
            start_date = self.dh.this_month_start

        prev_month_start = start_date - relativedelta(months=1)
        prev_month_end = end_date - relativedelta(months=1)
        days_of_data = prev_month_end.day - prev_month_start.day

        if days_of_data < num_days:
            extra_days = num_days - days_of_data
            prev_month_end = prev_month_end + relativedelta(days=extra_days)
            prev_month_end = min(prev_month_end, self.dh.last_month_end)
        return [
            (prev_month_start, prev_month_end, self.dh.last_month_start),
            (start_date, end_date, self.dh.this_month_start),
        ]

    @abstractmethod
    def load_openshift_data(self):
        """Load OpenShift test data"""
        pass

    @abstractmethod
    def load_aws_data(self):
        """Load AWS test data"""
        pass

    @abstractmethod
    def load_azure_data(self):
        """Load Azure test data"""
        pass

    @abstractmethod
    def load_gcp_data(self):
        """Load GCP test data"""
        pass
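To make the window arithmetic in get_test_data_dates() concrete, here is a hedged walk-through with assumed dates (plain datetimes for brevity; the real DateHelper values are timezone-aware, and n_days_ago is assumed to subtract whole days): with today = 2021-03-15 and num_days = 40, the previous-month window is padded out and then clamped to last_month_end, while the current-month window is clamped to this_month_start.

import datetime
from dateutil.relativedelta import relativedelta

# Assumed values that DateHelper would supply on 2021-03-15.
today = datetime.datetime(2021, 3, 15)
this_month_start = datetime.datetime(2021, 3, 1)
last_month_start = datetime.datetime(2021, 2, 1)
last_month_end = datetime.datetime(2021, 2, 28)
num_days = 40

end_date = today                                             # day != 1, so no +1 day shift
n_days_ago = end_date - datetime.timedelta(days=num_days)    # 2021-02-03
start_date = max(this_month_start, n_days_ago)               # clamped to 2021-03-01

prev_month_start = start_date - relativedelta(months=1)      # 2021-02-01
prev_month_end = end_date - relativedelta(months=1)          # 2021-02-15
days_of_data = prev_month_end.day - prev_month_start.day     # 14, less than num_days
prev_month_end += relativedelta(days=num_days - days_of_data)
prev_month_end = min(prev_month_end, last_month_end)         # 2021-03-13 clamped to 2021-02-28

dates = [
    (prev_month_start, prev_month_end, last_month_start),    # (Feb 1, Feb 28, Feb 1)
    (start_date, end_date, this_month_start),                # (Mar 1, Mar 15, Mar 1)
]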
Example 8
class GCPTagsViewTest(IamTestCase):
    """Tests the report view."""
    def setUp(self):
        """Set up the customer view tests."""
        super().setUp()
        self.dh = DateHelper()
        self.ten_days_ago = self.dh.n_days_ago(self.dh.today, 9)

    def test_execute_tags_queries_keys_only(self):
        """Test that tag key data is for the correct time queries."""
        test_cases = [
            {"value": "-1", "unit": "month", "resolution": "monthly"},
            {"value": "-2", "unit": "month", "resolution": "monthly"},
            {"value": "-10", "unit": "day", "resolution": "daily"},
            {"value": "-30", "unit": "day", "resolution": "daily"},
        ]

        for case in test_cases:
            url = reverse("gcp-tags")
            client = APIClient()
            params = {
                "filter[resolution]": case.get("resolution"),
                "filter[time_scope_value]": case.get("value"),
                "filter[time_scope_units]": case.get("unit"),
                "key_only": True,
            }
            url = url + "?" + urlencode(params, quote_via=quote_plus)
            response = client.get(url, **self.headers)

            self.assertEqual(response.status_code, status.HTTP_200_OK)
            data = response.json().get("data")

            self.assertTrue(data)
            self.assertTrue(isinstance(data, list))
            for tag in data:
                self.assertTrue(isinstance(tag, str))
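For reference, the urlencode(..., quote_via=quote_plus) call above percent-encodes the bracketed filter keys; a quick standalone check with the first test case (values illustrative only) looks like this:

from urllib.parse import quote_plus, urlencode

params = {
    "filter[resolution]": "monthly",
    "filter[time_scope_value]": "-1",
    "filter[time_scope_units]": "month",
    "key_only": True,
}
print(urlencode(params, quote_via=quote_plus))
# filter%5Bresolution%5D=monthly&filter%5Btime_scope_value%5D=-1&filter%5Btime_scope_units%5D=month&key_only=True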
Example 9
    def _get_timeframe(self):
        """Obtain timeframe start and end dates.

        Returns:
            (DateTime): start datetime for query filter
            (DateTime): end datetime for query filter
            (List[DateTime]): time interval for query filter

        """
        self.get_resolution()
        time_scope_value = self.get_time_scope_value()
        time_scope_units = self.get_time_scope_units()
        start = None
        end = None
        dh = DateHelper()
        if time_scope_units == 'month':
            if time_scope_value == -1:
                # get current month
                start = dh.this_month_start
                end = dh.today
            else:
                # get previous month
                start = dh.last_month_start
                end = dh.last_month_end
        else:
            if time_scope_value == -10:
                # get last 10 days
                start = dh.n_days_ago(dh.this_hour, 9)
                end = dh.this_hour
            else:
                # get last 30 days
                start = dh.n_days_ago(dh.this_hour, 29)
                end = dh.this_hour

        self.start_datetime = start
        self.end_datetime = end
        self._create_time_interval()
        return (self.start_datetime, self.end_datetime, self.time_interval)
Example 10
    def test_predict_increasing(self):
        """Test that predict() returns expected values for increasing costs."""
        dh = DateHelper()

        expected = []
        for n in range(0, 10):
            # the test data needs to include some jitter to avoid
            # division-by-zero in the underlying dot-product maths.
            expected.append(
                {
                    "usage_start": dh.n_days_ago(dh.today, 10 - n).date(),
                    "total_cost": 5 + random.random(),
                    "infrastructure_cost": 3 + random.random(),
                    "supplementary_cost": 2 + random.random(),
                }
            )
        mock_qset = MockQuerySet(expected)

        mocked_table = Mock()
        mocked_table.objects.filter.return_value.order_by.return_value.values.return_value.annotate.return_value = (  # noqa: E501
            mock_qset)
        mocked_table.len = mock_qset.len

        params = self.mocked_query_params("?", AWSCostForecastView)
        instance = AWSForecast(params)

        instance.cost_summary_table = mocked_table

        results = instance.predict()

        for result in results:
            for val in result.get("values", []):
                self.assertIsInstance(val.get("date"), date)

                item = val.get("cost")
                self.assertGreaterEqual(float(item.get("total").get("value")), 0)
                self.assertGreaterEqual(float(item.get("confidence_max").get("value")), 0)
                self.assertGreaterEqual(float(item.get("confidence_min").get("value")), 0)
                self.assertGreaterEqual(float(item.get("rsquared").get("value")), 0)
                for pval in item.get("pvalues").get("value"):
                    self.assertGreaterEqual(float(pval), 0)
Example 11
    def _get_timeframe(self):
        """Obtain timeframe start and end dates.

        Returns:
            (DateTime): start datetime for query filter
            (DateTime): end datetime for query filter
            (List[DateTime]): time interval for query filter

        """
        dh = DateHelper()
        self.end_datetime = dh.today
        if self.time_scope_units == 'month':
            self.start_datetime = dh.n_months_ago(dh.next_month_start,
                                                  abs(self.time_scope_value))
        else:
            self.start_datetime = dh.n_days_ago(dh.today,
                                                abs(self.time_scope_value))

        self._create_time_interval()
        return (self.start_datetime, self.end_datetime, self.time_interval)
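As a quick illustration of that arithmetic (dates assumed, and assuming n_months_ago and n_days_ago subtract whole months and days respectively), time_scope_units='month' with time_scope_value=-2 yields a window running from two months before the start of next month up to today, while a day-based scope simply counts back from today:

import datetime
from dateutil.relativedelta import relativedelta

today = datetime.datetime(2021, 3, 15)              # assumed "today"
next_month_start = datetime.datetime(2021, 4, 1)    # assumed start of next month

time_scope_value = -2
start = next_month_start - relativedelta(months=abs(time_scope_value))    # 2021-02-01
end = today                                                               # 2021-03-15

time_scope_value = -30
start = today - datetime.timedelta(days=abs(time_scope_value))            # 2021-02-13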
Example 12
class QueryHandler:
    """Handles report queries and responses."""
    def __init__(self, parameters):
        """Establish query handler.

        Args:
            parameters    (QueryParameters): parameter object for query

        """
        LOG.debug(f"Query Params: {parameters}")
        self.dh = DateHelper()
        parameters = self.filter_to_order_by(parameters)
        self.tenant = parameters.tenant
        self.access = parameters.access
        self.parameters = parameters
        self.default_ordering = self._mapper._report_type_map.get(
            "default_ordering")
        self.time_interval = []
        self._max_rank = 0

        self.time_scope_units = self.parameters.get_filter("time_scope_units")
        if self.parameters.get_filter("time_scope_value"):
            self.time_scope_value = int(
                self.parameters.get_filter("time_scope_value"))
        # self.time_order = parameters["date"]

        # self.start_datetime = parameters["start_date"]
        # self.end_datetime = parameters["end_date"]
        for param, attr in [("start_date", "start_datetime"),
                            ("end_date", "end_datetime")]:
            p = self.parameters.get(param)
            if p:
                setattr(
                    self, attr,
                    datetime.datetime.combine(parser.parse(p).date(),
                                              self.dh.midnight,
                                              tzinfo=UTC))
            else:
                setattr(self, attr, None)

        if self.resolution == "monthly":
            self.date_to_string = lambda dt: dt.strftime("%Y-%m")
            self.string_to_date = lambda dt: datetime.datetime.strptime(
                dt, "%Y-%m").date()
            self.date_trunc = TruncMonthString
            self.gen_time_interval = DateHelper().list_months
        else:
            self.date_to_string = lambda dt: dt.strftime("%Y-%m-%d")
            self.string_to_date = lambda dt: datetime.datetime.strptime(
                dt, "%Y-%m-%d").date()
            self.date_trunc = TruncDayString
            self.gen_time_interval = DateHelper().list_days

        if not (self.start_datetime or self.end_datetime):
            self._get_timeframe()

        self._create_time_interval()

    # FIXME: move this to a standalone utility function.
    @staticmethod
    def has_wildcard(in_list):
        """Check if list has wildcard.

        Args:
            in_list (List[String]): List of strings to check for wildcard
        Return:
            (Boolean): if wildcard is present in list

        """
        if isinstance(in_list, bool):
            return False
        if not in_list:
            return False
        return any(WILDCARD == item for item in in_list)

    @property
    def order(self):
        """Extract order_by parameter and apply ordering to the appropriate field.

        Returns:
            (String): Ordering value. Default is '-total'

        Example:
            `order_by[total]=asc` returns `total`
            `order_by[total]=desc` returns `-total`

        """
        order_map = {"asc": "", "desc": "-"}
        order = []
        order_by = self.parameters.get("order_by", self.default_ordering)

        for order_field, order_direction in order_by.items():
            if order_direction not in order_map and order_field == "date":
                # We've overloaded date to hold a specific date, not asc/desc
                order.append(order_direction)
            else:
                order.append(f"{order_map[order_direction]}{order_field}")

        return order

    @property
    def order_field(self):
        """Order-by field name.

        The default is 'total'
        """
        order_by = self.parameters.get("order_by", self.default_ordering)
        return list(order_by.keys()).pop()

    @property
    def order_direction(self):
        """Order-by orientation value.

        Returns:
            (str) 'asc' or 'desc'; default is 'desc'

        """
        order_by = self.parameters.get("order_by", self.default_ordering)
        return list(order_by.values()).pop()

    @property
    def max_rank(self):
        """Return the max rank of a ranked list."""
        return self._max_rank

    @max_rank.setter
    def max_rank(self, max_rank):
        """Max rank setter."""
        self._max_rank = max_rank

    @property
    def resolution(self):
        """Extract resolution or provide default.

        Returns:
            (String): The value of how data will be sliced.

        """
        return self.parameters.get_filter("resolution", default="daily")

    def check_query_params(self, key, in_key):
        """Test if query parameters has a given key and key within it.

        Args:
        key     (String): key to check in query parameters
        in_key  (String): key to check if key is found in query parameters

        Returns:
            (Boolean): True if they keys given appear in given query parameters.

        """
        return self.parameters and key in self.parameters and in_key in self.parameters.get(
            key)  # noqa: W504

    def get_time_scope_units(self):
        """Extract time scope units or provide default.

        Returns:
            (String): The value of how data will be sliced.

        """
        if self.time_scope_units:
            return self.time_scope_units

        time_scope_units = self.parameters.get_filter("time_scope_units",
                                                      default="day")
        self.time_scope_units = time_scope_units
        return self.time_scope_units

    def get_time_scope_value(self):
        """Extract time scope value or provide default.

        Returns:
            (Integer): time relative value providing query scope

        """
        if self.time_scope_value:
            return self.time_scope_value

        time_scope_value = self.parameters.get_filter("time_scope_value",
                                                      default=-10)
        self.time_scope_value = int(time_scope_value)
        return self.time_scope_value

    def _get_timeframe(self):
        """Obtain timeframe start and end dates.

        Returns:
            (DateTime): start datetime for query filter
            (DateTime): end datetime for query filter
            (List[DateTime]): time interval for query filter

        """
        time_scope_value = self.get_time_scope_value()
        time_scope_units = self.get_time_scope_units()
        start = None
        end = None
        if time_scope_units == "month":
            if time_scope_value == -1:
                # get current month
                start = self.dh.this_month_start
                end = self.dh.today
            elif time_scope_value == -3:
                start = self.dh.relative_month_start(-2)
                end = self.dh.month_end(start)
            else:
                # get previous month
                start = self.dh.last_month_start
                end = self.dh.last_month_end
        else:
            if time_scope_value == -10:
                # get last 10 days
                start = self.dh.n_days_ago(self.dh.this_hour, 9)
                end = self.dh.this_hour
            elif time_scope_value == -90:
                start = self.dh.n_days_ago(self.dh.this_hour, 89)
                end = self.dh.this_hour
            else:
                # get last 30 days
                start = self.dh.n_days_ago(self.dh.this_hour, 29)
                end = self.dh.this_hour

        self.start_datetime = start
        self.end_datetime = end
        return (self.start_datetime, self.end_datetime, self.time_interval)

    def _create_time_interval(self):
        """Create list of date times in interval.

        Returns:
            (List[DateTime]): List of all interval slices by resolution

        """
        self.time_interval = sorted(
            self.gen_time_interval(self.start_datetime, self.end_datetime))
        return self.time_interval

    def _get_date_delta(self):
        """Return a time delta."""
        if self.time_scope_value in [-1, -2, -3]:
            date_delta = relativedelta.relativedelta(
                months=abs(self.time_scope_value))
        elif self.time_scope_value in (-90, -30, -10):
            date_delta = datetime.timedelta(days=abs(self.time_scope_value))
        else:
            date_delta = datetime.timedelta(days=10)
        return date_delta

    def _get_time_based_filters(self, delta=False):
        if delta:
            date_delta = self._get_date_delta()
            start = self.start_datetime - date_delta
            end = self.end_datetime - date_delta
        else:
            start = self.start_datetime
            end = self.end_datetime

        start_filter = QueryFilter(field="usage_start",
                                   operation="gte",
                                   parameter=start.date())
        end_filter = QueryFilter(field="usage_start",
                                 operation="lte",
                                 parameter=end.date())
        return start_filter, end_filter

    def _get_filter(self, delta=False):
        """Create dictionary for filter parameters.

        Args:
            delta (Boolean): Construct timeframe for delta
        Returns:
            (Dict): query filter dictionary

        """
        filters = QueryFilterCollection()

        # add time constraint filters
        start_filter, end_filter = self._get_time_based_filters(delta)
        filters.add(query_filter=start_filter)
        filters.add(query_filter=end_filter)

        return filters

    def _get_gcp_filter(self, delta=False):
        """Create dictionary for filter parameters for GCP.

        For the gcp filters when the time scope is -1 or -2 we remove
        the usage_start & usage_end filters and only use the invoice month.

        Args:
            delta (Boolean): Construct timeframe for delta
        Returns:
            (Dict): query filter dictionary
        """
        filters = QueryFilterCollection()
        if delta:
            date_delta = self._get_date_delta()
            start = self.start_datetime - date_delta
            end = self.end_datetime - date_delta
        else:
            start = self.start_datetime
            end = self.end_datetime
        start_filter = QueryFilter(field="usage_start",
                                   operation="gte",
                                   parameter=start.date())
        end_filter = QueryFilter(field="usage_start",
                                 operation="lte",
                                 parameter=end.date())

        invoice_months = self.dh.gcp_find_invoice_months_in_date_range(
            start.date(), end.date())
        invoice_filter = QueryFilter(field="invoice_month",
                                     operation="in",
                                     parameter=invoice_months)
        filters.add(invoice_filter)
        if self.parameters.get_filter(
                "time_scope_value") and self.time_scope_value in [-1, -2]:
            # we don't add the time filters to time scopes -1 or -2 unless they are using delta.
            if delta:
                filters.add(query_filter=start_filter)
                filters.add(query_filter=end_filter)
        else:
            filters.add(query_filter=start_filter)
            filters.add(query_filter=end_filter)
        return filters

    def filter_to_order_by(self, parameters):  # noqa: C901
        """Remove group_by[NAME]=* and replace it with group_by[NAME]=X.

        The parameters object contains a list of filters and a list of group_bys.

        For example, if the parameters object contained the following:
        group_by[X] = Y
        group_by[Z] = *     # removes this line
        filter[Z] = L
        filter[X] = Y

        The returned parameters object would contain lists that look like this:

        group_by[X] = Y
        group_by[Z] = L     # adds this line
        filter[Z] = L
        filter[X] = Y

        Thereby removing the star when there is a filter provided.

        Args:
            parameters (QueryParameters): The parameters object

        Returns:
            parameters (QueryParameters): The parameters object

        """
        # find if there is a filter[key]=value that matches this group_by[key]=value
        for key, value in parameters.parameters.get("group_by", {}).items():
            if self.has_wildcard(value):
                filter_value = parameters.parameters.get("filter", {}).get(key)
                if filter_value:
                    parameters.parameters["group_by"][key] = filter_value
        return parameters

    def set_access_filters(self, access, filt, filters):
        """
        Sets the access filters to ensure RBAC restrictions given the users access,
        the current filter and the filter collection
        Args:
            access (list) the list containing the users relevant access
            filt (list or dict) contains the filters that need
            filters (QueryFilterCollection) the filter collection to add the new filters to
        returns:
            None
        """
        for _filt in filt if isinstance(filt, list) else [filt]:
            check_field_type = None
            try:
                if hasattr(self, "query_table"):
                    # Reports APIs
                    check_field_type = self.query_table._meta.get_field(
                        _filt.get("field", "")).get_internal_type()
                elif hasattr(self, "data_sources"):
                    # Tags APIs
                    check_field_type = (
                        self.data_sources[0].get("db_table")._meta.get_field(
                            _filt.get("field", "")).get_internal_type())
            except FieldDoesNotExist:
                pass

            _filt["operation"] = "contains" if check_field_type == "ArrayField" else "in"
            q_filter = QueryFilter(parameter=access, **_filt)
            filters.add(q_filter)
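The filter_to_order_by() docstring above describes a wildcard substitution on the parameters object; the same logic can be illustrated on plain dictionaries. This standalone sketch mirrors the method outside of QueryParameters (the dict shape and values are illustrative only):

WILDCARD = "*"

def replace_wildcard_group_by(parameters):
    """Swap group_by[key] = '*' for the matching filter[key], as filter_to_order_by() does."""
    for key, value in parameters.get("group_by", {}).items():
        values = value if isinstance(value, list) else [value]
        if WILDCARD in values:
            filter_value = parameters.get("filter", {}).get(key)
            if filter_value:
                parameters["group_by"][key] = filter_value
    return parameters

params = {"group_by": {"service": ["*"]}, "filter": {"service": ["AmazonEC2"]}}
print(replace_wildcard_group_by(params))
# {'group_by': {'service': ['AmazonEC2']}, 'filter': {'service': ['AmazonEC2']}}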
Example 13
class DateHelperTest(TestCase):
    """Test the DateHelper."""
    def setUp(self):
        """Test setup."""
        self.date_helper = DateHelper()
        self.date_helper._now = datetime.datetime(1970, 1, 10, 12, 59, 59)

    def test_this_hour(self):
        """Test this_hour property."""
        expected = datetime.datetime(1970, 1, 10, 12, 0, 0, 0)
        self.assertEqual(self.date_helper.this_hour, expected)

    def test_next_hour(self):
        """Test next_hour property."""
        expected = datetime.datetime(1970, 1, 10, 13, 0, 0, 0)
        self.assertEqual(self.date_helper.next_hour, expected)

    def test_prev_hour(self):
        """Test previous_hour property."""
        expected = datetime.datetime(1970, 1, 10, 11, 0, 0, 0)
        self.assertEqual(self.date_helper.previous_hour, expected)

    def test_today(self):
        """Test today property."""
        expected = datetime.datetime(1970, 1, 10, 0, 0, 0, 0)
        self.assertEqual(self.date_helper.today, expected)

    def test_yesterday(self):
        """Test yesterday property."""
        date_helper = DateHelper()
        date_helper._now = datetime.datetime(1970, 1, 1, 12, 59, 59)
        expected = datetime.datetime(1969, 12, 31, 0, 0, 0, 0)
        self.assertEqual(date_helper.yesterday, expected)

    def test_tomorrow(self):
        """Test tomorrow property."""
        expected = datetime.datetime(1970, 1, 11, 0, 0, 0, 0)
        self.assertEqual(self.date_helper.tomorrow, expected)

    def test_this_month_start(self):
        """Test this_month_start property."""
        expected = datetime.datetime(1970, 1, 1, 0, 0, 0, 0)
        self.assertEqual(self.date_helper.this_month_start, expected)

    def test_this_month_end(self):
        """Test this_month_end property."""
        expected = datetime.datetime(1970, 1, 31, 0, 0, 0, 0)
        self.assertEqual(self.date_helper.this_month_end, expected)

    def test_next_month_start(self):
        """Test next_month_start property."""
        expected = datetime.datetime(1970, 2, 1, 0, 0, 0, 0)
        self.assertEqual(self.date_helper.next_month_start, expected)

    def test_next_month_end(self):
        """Test next_month_end property."""
        expected = datetime.datetime(1970, 2, 28, 0, 0, 0, 0)
        self.assertEqual(self.date_helper.next_month_end, expected)

    def test_last_month_start(self):
        """Test last_month_start property."""
        expected = datetime.datetime(1969, 12, 1, 0, 0, 0, 0)
        self.assertEqual(self.date_helper.last_month_start, expected)

    def test_last_month_end(self):
        """Test last_month_end property."""
        expected = datetime.datetime(1969, 12, 31, 0, 0, 0, 0)
        self.assertEqual(self.date_helper.last_month_end, expected)

    def test_next_month(self):
        """Test the next_month method."""
        current_month = datetime.datetime.now().replace(microsecond=0,
                                                        second=0,
                                                        minute=0,
                                                        hour=0,
                                                        day=1)
        last_month = current_month - relativedelta(months=1)
        self.assertEqual(current_month, DateHelper().next_month(last_month))

    def test_previous_month(self):
        """Test the previous_month method."""
        current_month = datetime.datetime.now().replace(microsecond=0,
                                                        second=0,
                                                        minute=0,
                                                        hour=0,
                                                        day=1)
        last_month = current_month - relativedelta(months=1)
        self.assertEqual(last_month,
                         DateHelper().previous_month(current_month))

    def test_list_days(self):
        """Test the list_days method."""
        first = datetime.datetime.now().replace(microsecond=0,
                                                second=0,
                                                minute=0,
                                                hour=0,
                                                day=1)
        second = first.replace(day=2)
        third = first.replace(day=3)
        expected = [first, second, third]
        self.assertEqual(self.date_helper.list_days(first, third), expected)

    def test_list_months(self):
        """Test the list_months method."""
        first = datetime.datetime(1970, 1, 1)
        second = datetime.datetime(1970, 2, 1)
        third = datetime.datetime(1970, 3, 1)
        expected = [first, second, third]
        self.assertEqual(self.date_helper.list_months(first, third), expected)

    def test_n_days_ago(self):
        """Test the n_days_ago method."""
        delta_day = datetime.timedelta(days=1)
        today = timezone.now().replace(microsecond=0,
                                       second=0,
                                       minute=0,
                                       hour=0)
        two_days_ago = (today - delta_day) - delta_day
        self.assertEqual(self.date_helper.n_days_ago(today, 2), two_days_ago)

    def test_month_start(self):
        """Test month start method."""
        today = self.date_helper.today
        expected = datetime.datetime(1970, 1, 1, 0, 0, 0, 0)
        self.assertEqual(self.date_helper.month_start(today), expected)

    def test_month_end(self):
        """Test month end method."""
        today = self.date_helper.today
        expected = datetime.datetime(1970, 1, 31, 0, 0, 0, 0)
        self.assertEqual(self.date_helper.month_end(today), expected)

        today_date = today.date()
        expected = datetime.date(1970, 1, 31)
        self.assertEqual(self.date_helper.month_end(today_date), expected)

    def test_midnight(self):
        """Test midnight property."""
        expected = datetime.time(0, 0, 0, 0)
        self.assertEqual(self.date_helper.midnight, expected)
Example 14
class NiseDataLoader:
    """Loads nise generated test data for different source types."""
    def __init__(self, schema, num_days=10):
        """Initialize the data loader."""
        self.dh = DateHelper()
        self.schema = schema
        self.nise_data_path = Config.TMP_DIR
        if not os.path.exists(self.nise_data_path):
            os.makedirs(self.nise_data_path)
        self.dates = self.get_test_data_dates(num_days)

    def get_test_data_dates(self, num_days):
        """Return a list of tuples with dates for nise data."""
        end_date = self.dh.today
        if end_date.day == 1:
            end_date += relativedelta(days=1)
        n_days_ago = self.dh.n_days_ago(end_date, num_days)
        start_date = n_days_ago
        if self.dh.this_month_start > n_days_ago:
            start_date = self.dh.this_month_start

        prev_month_start = start_date - relativedelta(months=1)
        prev_month_end = end_date - relativedelta(months=1)
        days_of_data = prev_month_end.day - prev_month_start.day

        if days_of_data < num_days:
            extra_days = num_days - days_of_data
            prev_month_end = prev_month_end + relativedelta(days=extra_days)

        return [
            (prev_month_start, prev_month_end, self.dh.last_month_start),
            (start_date, end_date, self.dh.this_month_start),
        ]

    def prepare_template(self, provider_type, static_data_file):
        """Prepare the Jinja template for static data."""
        static_data = pkgutil.get_data("api.report.test", static_data_file)
        template = Template(static_data.decode("utf8"))
        static_data_path = f"/tmp/{provider_type}_static_data.yml"
        return template, static_data_path

    def build_report_path(self, provider_type, bill_date, base_path):
        """Build a path to report files."""
        this_month_str = bill_date.strftime("%Y%m%d")
        next_month = bill_date + relativedelta(months=1)
        if provider_type in (Provider.PROVIDER_AZURE,
                             Provider.PROVIDER_AZURE_LOCAL):
            next_month = next_month - relativedelta(days=1)
        next_month_str = next_month.strftime("%Y%m%d")
        return f"{base_path}/{this_month_str}-{next_month_str}"

    def process_report(self, report, compression, provider_type, provider,
                       manifest):
        """Run the report processor on a report."""
        status = baker.make("CostUsageReportStatus",
                            manifest=manifest,
                            report_name=report)
        status.last_started_datetime = self.dh.now
        ReportProcessor(self.schema, report, compression, provider_type,
                        provider.uuid, manifest.id).process()
        status.last_completed_datetime = self.dh.now
        status.save()

    def load_openshift_data(self, customer, static_data_file, cluster_id):
        """Load OpenShift data into the database."""
        provider_type = Provider.PROVIDER_OCP
        credentials = {"cluster_id": cluster_id}
        with override_settings(AUTO_DATA_INGEST=False):
            ocp_billing_source, _ = ProviderBillingSource.objects.get_or_create(
                data_source={})
            provider = baker.make(
                "Provider",
                type=provider_type,
                authentication__credentials=credentials,
                billing_source=ocp_billing_source,
                customer=customer,
            )
        template, static_data_path = self.prepare_template(
            provider_type, static_data_file)
        options = {
            "static_report_file": static_data_path,
            "insights_upload": self.nise_data_path,
            "ocp_cluster_id": cluster_id,
        }
        base_path = f"{self.nise_data_path}/{cluster_id}"

        for start_date, end_date, bill_date in self.dates:
            manifest = baker.make(
                "CostUsageReportManifest",
                _fill_optional=True,
                provider=provider,
                billing_period_start_datetime=bill_date,
                num_total_files=3,
            )
            with open(static_data_path, "w") as f:
                f.write(
                    template.render(start_date=start_date, end_date=end_date))

            run(provider_type.lower(), options)

            report_path = self.build_report_path(provider_type, bill_date,
                                                 base_path)
            for report in os.scandir(report_path):
                shutil.move(report.path, f"{base_path}/{report.name}")
            for report in [f.path for f in os.scandir(base_path)]:
                if os.path.isdir(report):
                    continue
                elif "manifest" in report.lower():
                    continue
                self.process_report(report, "PLAIN", provider_type, provider,
                                    manifest)
            with patch("masu.processor.tasks.chain"):
                update_summary_tables(
                    self.schema,
                    provider_type,
                    provider.uuid,
                    start_date,
                    end_date,
                    manifest_id=manifest.id,
                    synchronous=True,
                )
        update_cost_model_costs.s(self.schema,
                                  provider.uuid,
                                  self.dh.last_month_start,
                                  self.dh.today,
                                  synchronous=True).apply()
        refresh_materialized_views.s(self.schema,
                                     provider_type,
                                     provider_uuid=provider.uuid,
                                     synchronous=True).apply()
        shutil.rmtree(report_path, ignore_errors=True)

    def load_aws_data(self,
                      customer,
                      static_data_file,
                      account_id=None,
                      role_arn=None,
                      day_list=None):
        """Load AWS data into the database."""
        provider_type = Provider.PROVIDER_AWS_LOCAL
        if account_id is None:
            account_id = "9999999999999"
        if role_arn is None:
            role_arn = "arn:aws:iam::999999999999:role/CostManagement"
        nise_provider_type = provider_type.replace("-local", "")
        report_name = "Test"
        credentials = {"role_arn": role_arn}
        data_source = {"bucket": "test-bucket"}
        with patch.object(settings, "AUTO_DATA_INGEST", False):
            provider = baker.make(
                "Provider",
                type=provider_type,
                authentication__credentials=credentials,
                billing_source__data_source=data_source,
                customer=customer,
            )
        # chicken/egg problem. I need the provider_uuid to upload the AWS org unit tree
        # but the tree needs to be created first in order to populate the org unit
        # foreign key on the daily summary table.
        if day_list:
            org_tree_obj = InsertAwsOrgTree(schema=self.schema,
                                            provider_uuid=provider.uuid,
                                            start_date=self.dates[0][0])
            org_tree_obj.insert_tree(day_list=day_list)
        template, static_data_path = self.prepare_template(
            provider_type, static_data_file)
        options = {
            "static_report_file": static_data_path,
            "aws_report_name": report_name,
            "aws_bucket_name": self.nise_data_path,
        }
        base_path = f"{self.nise_data_path}/{report_name}"

        with schema_context(self.schema):
            baker.make("AWSAccountAlias",
                       account_id=account_id,
                       account_alias="Test Account")

        for start_date, end_date, bill_date in self.dates:
            manifest = baker.make(
                "CostUsageReportManifest",
                _fill_optional=True,
                provider=provider,
                billing_period_start_datetime=bill_date,
            )
            with open(static_data_path, "w") as f:
                f.write(
                    template.render(start_date=start_date,
                                    end_date=end_date,
                                    account_id=account_id))

            run(nise_provider_type.lower(), options)

            report_path = self.build_report_path(provider_type, bill_date,
                                                 base_path)
            for report in os.scandir(report_path):
                if os.path.isdir(report):
                    for report in [
                            f.path
                            for f in os.scandir(f"{report_path}/{report.name}")
                    ]:
                        if os.path.isdir(report):
                            continue
                        elif "manifest" in report.lower():
                            continue
                        self.process_report(report, "GZIP", provider_type,
                                            provider, manifest)
            with patch("masu.processor.tasks.chain"), patch.object(
                    settings, "AUTO_DATA_INGEST", False):
                update_summary_tables(
                    self.schema,
                    provider_type,
                    provider.uuid,
                    start_date,
                    end_date,
                    manifest_id=manifest.id,
                    synchronous=True,
                )
        update_cost_model_costs.s(self.schema,
                                  provider.uuid,
                                  self.dh.last_month_start,
                                  self.dh.today,
                                  synchronous=True).apply()
        refresh_materialized_views.s(self.schema,
                                     provider_type,
                                     provider_uuid=provider.uuid,
                                     synchronous=True).apply()
        shutil.rmtree(base_path, ignore_errors=True)

    def load_azure_data(self,
                        customer,
                        static_data_file,
                        credentials=None,
                        data_source=None):
        """Load Azure data into the database."""
        provider_type = Provider.PROVIDER_AZURE_LOCAL
        nise_provider_type = provider_type.replace("-local", "")
        report_name = "Test"

        if credentials is None:
            credentials = {
                "subscription_id": "11111111-1111-1111-1111-11111111",
                "tenant_id": "22222222-2222-2222-2222-22222222",
                "client_id": "33333333-3333-3333-3333-33333333",
                "client_secret": "MyPassW0rd!",
            }
        if data_source is None:
            data_source = {
                "resource_group": "resourcegroup1",
                "storage_account": "storageaccount1"
            }

        with patch.object(settings, "AUTO_DATA_INGEST", False):
            provider = baker.make(
                "Provider",
                type=provider_type,
                authentication__credentials=credentials,
                billing_source__data_source=data_source,
                customer=customer,
            )
        template, static_data_path = self.prepare_template(
            provider_type, static_data_file)
        options = {
            "static_report_file": static_data_path,
            "azure_report_name": report_name,
            "azure_container_name": self.nise_data_path,
        }
        base_path = f"{self.nise_data_path}/{report_name}"

        for start_date, end_date, bill_date in self.dates:
            manifest = baker.make(
                "CostUsageReportManifest",
                _fill_optional=True,
                provider=provider,
                billing_period_start_datetime=bill_date,
            )
            with open(static_data_path, "w") as f:
                f.write(
                    template.render(start_date=start_date, end_date=end_date))

            run(nise_provider_type.lower(), options)

            report_path = self.build_report_path(provider_type, bill_date,
                                                 base_path)
            for report in os.scandir(report_path):
                if os.path.isdir(report):
                    continue
                elif "manifest" in report.name.lower():
                    continue
                self.process_report(report, "PLAIN", provider_type, provider,
                                    manifest)
            with patch("masu.processor.tasks.chain"), patch.object(
                    settings, "AUTO_DATA_INGEST", False):
                update_summary_tables(
                    self.schema,
                    provider_type,
                    provider.uuid,
                    start_date,
                    end_date,
                    manifest_id=manifest.id,
                    synchronous=True,
                )
        update_cost_model_costs.s(self.schema,
                                  provider.uuid,
                                  self.dh.last_month_start,
                                  self.dh.today,
                                  synchronous=True).apply()
        refresh_materialized_views.s(self.schema,
                                     provider_type,
                                     provider_uuid=provider.uuid,
                                     synchronous=True).apply()
        shutil.rmtree(base_path, ignore_errors=True)

    def load_gcp_data(self, customer, static_data_file):
        """Load GCP data into the database."""
        provider_type = Provider.PROVIDER_GCP_LOCAL
        nise_provider_type = provider_type.replace("-local", "")
        credentials = {"project_id": "test_project_id"}
        data_source = {"table_id": "test_table_id", "dataset": "test_dataset"}
        with patch.object(settings, "AUTO_DATA_INGEST", False):
            provider = baker.make(
                "Provider",
                type=provider_type,
                authentication__credentials=credentials,
                billing_source__data_source=data_source,
                customer=customer,
            )
        etag = uuid4()
        template, static_data_path = self.prepare_template(
            provider_type, static_data_file)
        options = {
            "static_report_file": static_data_path,
            "gcp_bucket_name": self.nise_data_path,
            "gcp_etag": etag
        }
        base_path = f"{self.nise_data_path}"
        for start_date, end_date, bill_date in self.dates:
            manifest = baker.make(
                "CostUsageReportManifest",
                _fill_optional=True,
                provider=provider,
                billing_period_start_datetime=bill_date,
            )
            with open(static_data_path, "w") as f:
                f.write(
                    template.render(start_date=start_date, end_date=end_date))

            run(nise_provider_type.lower(), options)

            report_path = f"{base_path}/{etag}"
            for report in os.scandir(report_path):
                if os.path.isdir(report):
                    continue
                self.process_report(report, "PLAIN", provider_type, provider,
                                    manifest)
            with patch("masu.processor.tasks.chain"), patch.object(
                    settings, "AUTO_DATA_INGEST", False):
                update_summary_tables(
                    self.schema,
                    provider_type,
                    provider.uuid,
                    start_date,
                    end_date,
                    manifest_id=manifest.id,
                    synchronous=True,
                )
        update_cost_model_costs.s(self.schema,
                                  provider.uuid,
                                  self.dh.last_month_start,
                                  self.dh.today,
                                  synchronous=True).apply()
        refresh_materialized_views.s(self.schema,
                                     provider_type,
                                     provider_uuid=provider.uuid,
                                     synchronous=True).apply()
        shutil.rmtree(base_path, ignore_errors=True)
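The build_report_path() helper above is easy to sanity-check in isolation; this hedged standalone version swaps the Provider constants for plain strings but keeps the same date arithmetic:

import datetime
from dateutil.relativedelta import relativedelta

def build_report_path(provider_type, bill_date, base_path):
    """Mirror of NiseDataLoader.build_report_path with plain-string provider types (assumed stand-ins)."""
    this_month_str = bill_date.strftime("%Y%m%d")
    next_month = bill_date + relativedelta(months=1)
    if provider_type in ("Azure", "Azure-local"):  # stand-ins for Provider.PROVIDER_AZURE(_LOCAL)
        next_month = next_month - relativedelta(days=1)
    next_month_str = next_month.strftime("%Y%m%d")
    return f"{base_path}/{this_month_str}-{next_month_str}"

print(build_report_path("AWS-local", datetime.datetime(2021, 3, 1), "/tmp/Test"))
# /tmp/Test/20210301-20210401
print(build_report_path("Azure-local", datetime.datetime(2021, 3, 1), "/tmp/Test"))
# /tmp/Test/20210301-20210331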
Example 15
    def test_predict_few_values(self):
        """Test that predict() behaves well with a limited data set."""
        dh = DateHelper()

        num_elements = [1, 2, 3, 4, 5]

        for number in num_elements:
            with self.subTest(num_elements=number):
                expected = []
                for n in range(0, number):
                    expected.append(
                        {
                            "usage_start": dh.n_days_ago(dh.today, 10 - n).date(),
                            "total_cost": 5,
                            "infrastructure_cost": 3,
                            "supplementary_cost": 2,
                        }
                    )
                mock_qset = MockQuerySet(expected)

                mocked_table = Mock()
                mocked_table.objects.filter.return_value.order_by.return_value.values.return_value.annotate.return_value = (  # noqa: E501
                    mock_qset)
                mocked_table.len = mock_qset.len

                params = self.mocked_query_params("?", AWSCostForecastView)
                instance = AWSForecast(params)

                instance.cost_summary_table = mocked_table
                if number == 1:
                    # forecasting isn't possible with only 1 data point.
                    with self.assertLogs(logger="forecast.forecast",
                                         level=logging.WARNING):
                        results = instance.predict()
                        self.assertEqual(results, [])
                else:
                    with self.assertLogs(logger="forecast.forecast",
                                         level=logging.WARNING):
                        results = instance.predict()
                        for result in results:
                            for val in result.get("values", []):
                                self.assertIsInstance(val.get("date"), date)

                                item = val.get("cost")
                                self.assertGreaterEqual(
                                    float(item.get("total").get("value")), 0)
                                self.assertGreaterEqual(
                                    float(
                                        item.get("confidence_max").get(
                                            "value")), 0)
                                self.assertGreaterEqual(
                                    float(
                                        item.get("confidence_min").get(
                                            "value")), 0)
                                self.assertGreaterEqual(
                                    float(item.get("rsquared").get("value")),
                                    0)
                                self.assertGreaterEqual(
                                    float(item.get("pvalues").get("value")), 0)
                        # test that the results always stop at the end of the month.
                        self.assertEqual(results[-1].get("date"),
                                         dh.this_month_end.date())
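
# MockQuerySet is not defined in this excerpt. A minimal, assumed sketch of
# the helper, based only on how the tests above use it (built from a list of
# dicts and exposing a `len` attribute), could look like the following; the
# dunder methods are assumptions added for completeness.
class MockQuerySet:
    """List-backed stand-in for a Django QuerySet of value dicts."""

    def __init__(self, rows):
        self.rows = rows
        # Plain attribute rather than __len__, matching `mock_qset.len` above.
        self.len = len(rows)

    def __iter__(self):
        # Iterate the stored rows the way code would iterate a QuerySet.
        return iter(self.rows)

    def __getitem__(self, index):
        return self.rows[index]
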
Example 16
class OCPAWSQueryHandlerTest(IamTestCase):
    """Tests for the OCP report query handler."""
    def setUp(self):
        """Set up the customer view tests."""
        super().setUp()
        self.dh = DateHelper()

        self.this_month_filter = {"usage_start__gte": self.dh.this_month_start}
        self.ten_day_filter = {
            "usage_start__gte": self.dh.n_days_ago(self.dh.today, 9)
        }
        self.thirty_day_filter = {
            "usage_start__gte": self.dh.n_days_ago(self.dh.today, 29)
        }
        self.last_month_filter = {
            "usage_start__gte": self.dh.last_month_start,
            "usage_end__lte": self.dh.last_month_end,
        }

        with tenant_context(self.tenant):
            self.services = OCPAWSCostLineItemDailySummary.objects.values(
                "product_code").distinct()
            self.services = [
                entry.get("product_code") for entry in self.services
            ]

    def get_totals_by_time_scope(self, aggregates, filters=None):
        """Return the total aggregates for a time period."""
        if filters is None:
            filters = self.ten_day_filter
        with tenant_context(self.tenant):
            return OCPAWSCostLineItemDailySummary.objects.filter(
                **filters).aggregate(**aggregates)

    def test_execute_sum_query_storage(self):
        """Test that the sum query runs properly."""
        url = "?"
        query_params = self.mocked_query_params(url, OCPAWSStorageView)
        handler = OCPAWSReportQueryHandler(query_params)
        filt = {"product_family__contains": "Storage"}
        filt.update(self.ten_day_filter)
        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates, filt)
        query_output = handler.execute_query()
        self.assertIsNotNone(query_output.get("data"))
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        self.assertEqual(
            total.get("cost", {}).get("total", {}).get("value", 0),
            current_totals.get("cost_total", 1))

    def test_execute_query_current_month_daily(self):
        """Test execute_query for current month on daily breakdown."""
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=daily"
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        query_output = handler.execute_query()
        self.assertIsNotNone(query_output.get("data"))
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        self.assertIsNotNone(total.get("cost"))

        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates,
                                                       self.this_month_filter)
        self.assertEqual(
            total.get("cost", {}).get("total", {}).get("value", 0),
            current_totals.get("cost_total", 1))

    def test_execute_query_current_month_monthly(self):
        """Test execute_query for current month on monthly breakdown."""
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=daily"
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        query_output = handler.execute_query()
        self.assertIsNotNone(query_output.get("data"))
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        self.assertIsNotNone(total.get("cost"))

        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates,
                                                       self.this_month_filter)
        self.assertEqual(
            total.get("cost", {}).get("total", {}).get("value", 0),
            current_totals.get("cost_total", 1))

    def test_execute_query_current_month_by_service(self):
        """Test execute_query for current month on monthly breakdown by service."""
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[service]=*"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        self.assertIsNotNone(total.get("cost"))

        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates,
                                                       self.this_month_filter)
        self.assertEqual(
            total.get("cost", {}).get("total", {}).get("value", 0),
            current_totals.get("cost_total", 1))

        cmonth_str = DateHelper().this_month_start.strftime("%Y-%m")
        for data_item in data:
            month_val = data_item.get("date")
            month_data = data_item.get("services")
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)
            for month_item in month_data:
                service = month_item.get("service")
                self.assertIn(service, self.services)
                self.assertIsInstance(month_item.get("values"), list)

    def test_execute_query_by_filtered_service(self):
        """Test execute_query monthly breakdown by filtered service."""
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[service]=AmazonEC2"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        self.assertIsNotNone(total.get("cost"))

        aggregates = handler._mapper.report_type_map.get("aggregates")
        filt = copy.deepcopy(self.this_month_filter)
        filt["product_code"] = "AmazonEC2"
        current_totals = self.get_totals_by_time_scope(aggregates, filt)
        self.assertEqual(
            total.get("cost", {}).get("total", {}).get("value", 0),
            current_totals.get("cost_total", 1))

        cmonth_str = DateHelper().this_month_start.strftime("%Y-%m")
        for data_item in data:
            month_val = data_item.get("date")
            month_data = data_item.get("services")
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)
            for month_item in month_data:
                compute = month_item.get("service")
                self.assertEqual(compute, "AmazonEC2")
                self.assertIsInstance(month_item.get("values"), list)

    def test_query_by_partial_filtered_service(self):
        """Test execute_query monthly breakdown by filtered service."""
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[service]=eC2"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        self.assertIsNotNone(total.get("cost"))
        filt = copy.deepcopy(self.this_month_filter)
        filt["product_code__icontains"] = "ec2"
        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates, filt)
        self.assertEqual(
            total.get("cost", {}).get("total", {}).get("value", 0),
            current_totals.get("cost_total", 1))

        cmonth_str = DateHelper().this_month_start.strftime("%Y-%m")
        for data_item in data:
            month_val = data_item.get("date")
            month_data = data_item.get("services")
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)
            for month_item in month_data:
                compute = month_item.get("service")
                self.assertEqual(compute, "AmazonEC2")
                self.assertIsInstance(month_item.get("values"), list)

    def test_execute_query_current_month_by_account(self):
        """Test execute_query for current month on monthly breakdown by account."""
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[account]=*"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        self.assertIsNotNone(total.get("cost"))

        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates,
                                                       self.this_month_filter)
        self.assertEqual(
            total.get("cost", {}).get("total", {}).get("value", 0),
            current_totals.get("cost_total", 1))

        cmonth_str = DateHelper().this_month_start.strftime("%Y-%m")
        for data_item in data:
            month_val = data_item.get("date", "not-a-date")
            month_data = data_item.get("accounts", "not-a-list")
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)
            for month_item in month_data:
                self.assertIsInstance(month_item.get("values"), list)

    def test_execute_query_by_account_by_service(self):
        """Test execute_query for current month breakdown by account by service."""
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[account]=*&group_by[service]=*"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        self.assertIsNotNone(total.get("cost"))

        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates,
                                                       self.this_month_filter)
        self.assertEqual(
            total.get("cost", {}).get("total", {}).get("value", 0),
            current_totals.get("cost_total", 1))

        cmonth_str = DateHelper().this_month_start.strftime("%Y-%m")
        for data_item in data:
            month_val = data_item.get("date", "not-a-date")
            month_data = data_item.get("accounts", "not-a-list")
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)
            for month_item in month_data:
                self.assertIsInstance(month_item.get("services"), list)

    def test_check_view_filter_and_group_by_criteria(self):
        """Test that all filter and group by checks return the correct result."""
        good_group_by_options = [
            "account", "service", "region", "cluster", "product_family"
        ]
        bad_group_by_options = ["project", "node"]

        for option in good_group_by_options:
            filter_keys = {option}
            group_by_keys = set()
            self.assertTrue(
                check_view_filter_and_group_by_criteria(
                    filter_keys, group_by_keys))

            filter_keys = set()
            group_by_keys = {option}
            self.assertTrue(
                check_view_filter_and_group_by_criteria(
                    filter_keys, group_by_keys))

        # Different group by and filter
        filter_keys = {"account"}
        group_by_keys = {"cluster"}
        self.assertTrue(
            check_view_filter_and_group_by_criteria(filter_keys,
                                                    group_by_keys))

        # Multiple group bys
        filter_keys = set()
        group_by_keys = {"cluster", "account"}
        self.assertTrue(
            check_view_filter_and_group_by_criteria(filter_keys,
                                                    group_by_keys))

        # Multiple filters
        filter_keys = {"cluster", "account"}
        group_by_keys = set()
        self.assertTrue(
            check_view_filter_and_group_by_criteria(filter_keys,
                                                    group_by_keys))

        # Project and node unsupported
        for option in bad_group_by_options:
            filter_keys = {option}
            group_by_keys = set()
            self.assertFalse(
                check_view_filter_and_group_by_criteria(
                    filter_keys, group_by_keys))

            filter_keys = set()
            group_by_keys = {option}
            self.assertFalse(
                check_view_filter_and_group_by_criteria(
                    filter_keys, group_by_keys))

    def test_query_table(self):
        """Test that the correct view is assigned by query table property."""
        url = "?"
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        self.assertEqual(handler.query_table, OCPAWSCostSummary)

        url = "?group_by[account]=*"
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        self.assertEqual(handler.query_table, OCPAWSCostSummaryByAccount)

        url = "?group_by[region]=*"
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        self.assertEqual(handler.query_table, OCPAWSCostSummaryByRegion)

        url = "?group_by[region]=*&group_by[account]=*"
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        self.assertEqual(handler.query_table, OCPAWSCostSummaryByRegion)

        url = "?group_by[service]=*"
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        self.assertEqual(handler.query_table, OCPAWSCostSummaryByService)

        url = "?group_by[service]=*&group_by[account]=*"
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        self.assertEqual(handler.query_table, OCPAWSCostSummaryByService)

        url = "?"
        query_params = self.mocked_query_params(url, OCPAWSInstanceTypeView)
        handler = OCPAWSReportQueryHandler(query_params)
        self.assertEqual(handler.query_table, OCPAWSComputeSummary)

        url = "?group_by[account]=*"
        query_params = self.mocked_query_params(url, OCPAWSInstanceTypeView)
        handler = OCPAWSReportQueryHandler(query_params)
        self.assertEqual(handler.query_table, OCPAWSComputeSummary)

        url = "?"
        query_params = self.mocked_query_params(url, OCPAWSStorageView)
        handler = OCPAWSReportQueryHandler(query_params)
        self.assertEqual(handler.query_table, OCPAWSStorageSummary)

        url = "?group_by[account]=*"
        query_params = self.mocked_query_params(url, OCPAWSStorageView)
        handler = OCPAWSReportQueryHandler(query_params)
        self.assertEqual(handler.query_table, OCPAWSStorageSummary)

        url = "?filter[service]=AmazonVPC,AmazonCloudFront,AmazonRoute53,AmazonAPIGateway"
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        self.assertEqual(handler.query_table, OCPAWSNetworkSummary)

        url = "?filter[service]=AmazonVPC,AmazonCloudFront,AmazonRoute53,AmazonAPIGateway&group_by[account]=*"
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        self.assertEqual(handler.query_table, OCPAWSNetworkSummary)

        url = (
            "?filter[service]=AmazonRDS,AmazonDynamoDB,AmazonElastiCache,AmazonNeptune,AmazonRedshift,AmazonDocumentDB"
        )
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        self.assertEqual(handler.query_table, OCPAWSDatabaseSummary)

        url = (
            "?filter[service]=AmazonRDS,AmazonDynamoDB,AmazonElastiCache,AmazonNeptune,AmazonRedshift,AmazonDocumentDB"
            "&group_by[account]=*")
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        self.assertEqual(handler.query_table, OCPAWSDatabaseSummary)

    def test_source_uuid_mapping(self):  # noqa: C901
        """Test source_uuid is mapped to the correct source."""
        endpoints = [OCPAWSCostView, OCPAWSInstanceTypeView, OCPAWSStorageView]
        with tenant_context(self.tenant):
            expected_source_uuids = list(
                AWSCostEntryBill.objects.distinct().values_list("provider_id",
                                                                flat=True))
        source_uuid_list = []
        for endpoint in endpoints:
            urls = ["?"]
            if endpoint == OCPAWSCostView:
                urls.extend([
                    "?group_by[account]=*", "?group_by[service]=*",
                    "?group_by[region]=*"
                ])
            for url in urls:
                query_params = self.mocked_query_params(url, endpoint)
                handler = OCPAWSReportQueryHandler(query_params)
                query_output = handler.execute_query()
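                # Walk the nested response payload and collect the
                # source_uuid values from the first entry of each group.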
                for dictionary in query_output.get("data"):
                    for _, value in dictionary.items():
                        if isinstance(value, list):
                            for item in value:
                                if isinstance(item, dict):
                                    if "values" in item.keys():
                                        value = item["values"][0]
                                        source_uuid_list.extend(
                                            value.get("source_uuid"))
        self.assertNotEqual(source_uuid_list, [])
        for source_uuid in source_uuid_list:
            self.assertIn(source_uuid, expected_source_uuids)
Example 17
class AzureTagsViewTest(IamTestCase):
    """Tests the report view."""
    def setUp(self):
        """Set up the customer view tests."""
        super().setUp()
        self.dh = DateHelper()
        self.ten_days_ago = self.dh.n_days_ago(self.dh.today, 9)
        _, self.provider = create_generic_provider(Provider.PROVIDER_AZURE,
                                                   self.headers)
        self.data_generator = AzureReportDataGenerator(self.tenant,
                                                       self.provider)
        self.data_generator.add_data_to_tenant()

    def test_execute_tags_queries_keys_only(self):
        """Test that tag key data is for the correct time queries."""
        test_cases = [{
            'value': '-1',
            'unit': 'month',
            'resolution': 'monthly'
        }, {
            'value': '-2',
            'unit': 'month',
            'resolution': 'monthly'
        }, {
            'value': '-10',
            'unit': 'day',
            'resolution': 'daily'
        }, {
            'value': '-30',
            'unit': 'day',
            'resolution': 'daily'
        }]

        for case in test_cases:
            url = reverse('azure-tags')
            client = APIClient()
            params = {
                'filter[resolution]': case.get('resolution'),
                'filter[time_scope_value]': case.get('value'),
                'filter[time_scope_units]': case.get('unit'),
                'key_only': True
            }
            url = url + '?' + urlencode(params, quote_via=quote_plus)
            response = client.get(url, **self.headers)

            self.assertEqual(response.status_code, status.HTTP_200_OK)
            data = response.json().get('data')

            self.assertTrue(data)
            self.assertTrue(isinstance(data, list))
            for tag in data:
                self.assertTrue(isinstance(tag, str))

    def test_execute_tags_queries(self):
        """Test that tag data is for the correct time queries."""
        test_cases = [{
            'value': '-1',
            'unit': 'month',
            'resolution': 'monthly'
        }, {
            'value': '-2',
            'unit': 'month',
            'resolution': 'monthly'
        }, {
            'value': '-10',
            'unit': 'day',
            'resolution': 'daily'
        }, {
            'value': '-30',
            'unit': 'day',
            'resolution': 'daily'
        }]

        for case in test_cases:
            url = reverse('azure-tags')
            client = APIClient()
            params = {
                'filter[resolution]': case.get('resolution'),
                'filter[time_scope_value]': case.get('value'),
                'filter[time_scope_units]': case.get('unit'),
                'key_only': False
            }
            url = url + '?' + urlencode(params, quote_via=quote_plus)
            response = client.get(url, **self.headers)
            self.assertEqual(response.status_code, status.HTTP_200_OK)
            data = response.json().get('data')

            self.assertTrue(data)
            self.assertTrue(isinstance(data, list))
            for tag in data:
                self.assertTrue(isinstance(tag, dict))
                self.assertIn('key', tag)
                self.assertIn('values', tag)
                self.assertIsNotNone(tag.get('key'))
                self.assertIn(tag.get('values').__class__, [list, str])
                self.assertTrue(tag.get('values'))

    def test_execute_tags_type_queries(self):
        """Test that tag data is for the correct type queries."""
        test_cases = [{
            'value': '-1',
            'unit': 'month',
            'resolution': 'monthly',
            'subscription_guid': self.data_generator.config.subscription_guid
        }, {
            'value': '-2',
            'unit': 'month',
            'resolution': 'monthly',
            'subscription_guid': self.data_generator.config.subscription_guid
        }, {
            'value': '-10',
            'unit': 'day',
            'resolution': 'daily',
            'subscription_guid': self.data_generator.config.subscription_guid
        }, {
            'value': '-30',
            'unit': 'day',
            'resolution': 'daily',
            'subscription_guid': self.data_generator.config.subscription_guid
        }]

        for case in test_cases:
            url = reverse('azure-tags')
            client = APIClient()
            params = {
                'filter[resolution]': case.get('resolution'),
                'filter[time_scope_value]': case.get('value'),
                'filter[time_scope_units]': case.get('unit'),
                'key_only': False,
                'filter[subscription_guid]': case.get('subscription_guid')
            }
            url = url + '?' + urlencode(params, quote_via=quote_plus)
            response = client.get(url, **self.headers)
            self.assertEqual(response.status_code, status.HTTP_200_OK)
            data = response.json().get('data')

            self.assertTrue(data)
            self.assertTrue(isinstance(data, list))
            for tag in data:
                self.assertTrue(isinstance(tag, dict))
                self.assertIn('key', tag)
                self.assertIn('values', tag)
                self.assertIsNotNone(tag.get('key'))
                self.assertIn(tag.get('values').__class__, [list, str])
                self.assertTrue(tag.get('values'))

    def test_execute_query_with_and_filter(self):
        """Test the filter[and:] param in the view."""
        AzureReportDataGenerator(self.tenant,
                                 self.provider).add_data_to_tenant()
        url = reverse('azure-tags')
        client = APIClient()

        with tenant_context(self.tenant):
            subs = AzureCostEntryLineItemDailySummary.objects\
                .filter(usage_start__gte=self.ten_days_ago)\
                .values('subscription_guid').distinct()
            subscription_guids = [sub.get('subscription_guid') for sub in subs]
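        # Requiring every subscription_guid at once via filter[and:] cannot
        # match a single row, so the endpoint is expected to return no data.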
        params = {
            'filter[resolution]': 'daily',
            'filter[time_scope_value]': '-10',
            'filter[time_scope_units]': 'day',
            'filter[and:subscription_guid]': subscription_guids
        }
        url = url + '?' + urlencode(params, quote_via=quote_plus)
        response = client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        data = response.json().get('data')
        self.assertEqual(data, [])
Example 18
class OCPReportQueryHandlerTest(IamTestCase):
    """Tests for the OCP report query handler."""
    def setUp(self):
        """Set up the customer view tests."""
        super().setUp()
        self.dh = DateHelper()

        self.this_month_filter = {'usage_start__gte': self.dh.this_month_start}
        self.ten_day_filter = {
            'usage_start__gte': self.dh.n_days_ago(self.dh.today, 9)
        }
        self.thirty_day_filter = {
            'usage_start__gte': self.dh.n_days_ago(self.dh.today, 29)
        }
        self.last_month_filter = {
            'usage_start__gte': self.dh.last_month_start,
            'usage_end__lte': self.dh.last_month_end
        }
        OCPReportDataGenerator(self.tenant).add_data_to_tenant()

    def get_totals_by_time_scope(self, aggregates, filters=None):
        """Return the total aggregates for a time period."""
        if filters is None:
            filters = self.ten_day_filter
        with tenant_context(self.tenant):
            return OCPUsageLineItemDailySummary.objects\
                .filter(**filters)\
                .aggregate(**aggregates)

    def get_totals_costs_by_time_scope(self, aggregates, filters=None):
        """Return the total costs aggregates for a time period."""
        if filters is None:
            filters = self.this_month_filter
        with tenant_context(self.tenant):
            return OCPUsageLineItemDailySummary.objects\
                .filter(**filters)\
                .aggregate(**aggregates)

    def test_execute_sum_query(self):
        """Test that the sum query runs properly."""
        # '?'
        query_params = FakeQueryParameters({},
                                           report_type='cpu',
                                           tenant=self.tenant)
        handler = OCPReportQueryHandler(query_params.mock_qp)
        aggregates = handler._mapper.report_type_map.get('aggregates')
        current_totals = self.get_totals_by_time_scope(aggregates)
        query_output = handler.execute_query()
        self.assertIsNotNone(query_output.get('data'))
        self.assertIsNotNone(query_output.get('total'))
        total = query_output.get('total')

        self.assertEqual(
            total.get('usage', {}).get('value'), current_totals.get('usage'))
        self.assertEqual(
            total.get('request', {}).get('value'),
            current_totals.get('request'))
        self.assertEqual(
            total.get('cost', {}).get('value'), current_totals.get('cost'))
        self.assertEqual(
            total.get('limit', {}).get('value'), current_totals.get('limit'))

    def test_execute_sum_query_costs(self):
        """Test that the sum query runs properly for the costs endpoint."""
        # '?'
        query_params = FakeQueryParameters({}, tenant=self.tenant)
        handler = OCPReportQueryHandler(query_params.mock_qp)
        aggregates = handler._mapper.report_type_map.get('aggregates')
        current_totals = self.get_totals_costs_by_time_scope(
            aggregates, self.ten_day_filter)
        query_output = handler.execute_query()
        self.assertIsNotNone(query_output.get('data'))
        self.assertIsNotNone(query_output.get('total'))
        total = query_output.get('total')
        self.assertEqual(
            total.get('cost', {}).get('value'), current_totals.get('cost'))

    def test_get_cluster_capacity_monthly_resolution(self):
        """Test that cluster capacity returns a full month's capacity."""
        # '?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly'
        params = {
            'filter': {
                'resolution': 'monthly',
                'time_scope_value': -1,
                'time_scope_units': 'month'
            }
        }
        query_params = FakeQueryParameters(params,
                                           report_type='cpu',
                                           tenant=self.tenant)
        handler = OCPReportQueryHandler(query_params.mock_qp)
        query_data = [{'row': 1}]
        query_data, total_capacity = handler.get_cluster_capacity(query_data)
        self.assertTrue('capacity' in total_capacity)
        self.assertTrue(isinstance(total_capacity['capacity'], Decimal))
        self.assertTrue('capacity' in query_data[0])
        self.assertEqual(query_data[0].get('capacity'),
                         total_capacity.get('capacity'))

    def test_get_cluster_capacity_monthly_resolution_group_by_cluster(self):
        """Test that cluster capacity returns capacity by cluster."""
        # Add data for a second cluster
        OCPReportDataGenerator(self.tenant).add_data_to_tenant()

        # '?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[cluster]=*'
        params = {
            'filter': {
                'resolution': 'monthly',
                'time_scope_value': -1,
                'time_scope_units': 'month'
            },
            'group_by': {
                'cluster': ['*']
            }
        }
        query_params = FakeQueryParameters(params,
                                           report_type='cpu',
                                           tenant=self.tenant)
        handler = OCPReportQueryHandler(query_params.mock_qp)
        query_data = handler.execute_query()
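        # Recompute the expected capacity per cluster directly from the
        # summary table so the handler's output can be checked against it.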

        capacity_by_cluster = defaultdict(Decimal)
        total_capacity = Decimal(0)
        query_filter = handler.query_filter
        query_group_by = ['usage_start', 'cluster_id']
        annotations = {'capacity': Max('cluster_capacity_cpu_core_hours')}
        cap_key = list(annotations.keys())[0]

        q_table = handler._mapper.provider_map.get('tables').get('query')
        query = q_table.objects.filter(query_filter)

        with tenant_context(self.tenant):
            cap_data = query.values(*query_group_by).annotate(**annotations)
            for entry in cap_data:
                cluster_id = entry.get('cluster_id', '')
                capacity_by_cluster[cluster_id] += entry.get(cap_key, 0)
                total_capacity += entry.get(cap_key, 0)

        for entry in query_data.get('data', []):
            for cluster in entry.get('clusters', []):
                cluster_name = cluster.get('cluster', '')
                capacity = cluster.get('values')[0].get('capacity',
                                                        {}).get('value')
                self.assertEqual(capacity, capacity_by_cluster[cluster_name])

        self.assertEqual(
            query_data.get('total', {}).get('capacity', {}).get('value'),
            total_capacity)

    def test_get_cluster_capacity_daily_resolution(self):
        """Test that total capacity is returned daily resolution."""
        # '?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=daily'
        params = {
            'filter': {
                'resolution': 'daily',
                'time_scope_value': -1,
                'time_scope_units': 'month'
            }
        }
        query_params = FakeQueryParameters(params,
                                           report_type='cpu',
                                           tenant=self.tenant)
        handler = OCPReportQueryHandler(query_params.mock_qp)
        query_data = handler.execute_query()
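        # Total the expected daily capacity directly from the summary table
        # for comparison with the handler output.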

        daily_capacity = defaultdict(Decimal)
        total_capacity = Decimal(0)
        query_filter = handler.query_filter
        query_group_by = ['usage_start']
        annotations = {'capacity': Max('total_capacity_cpu_core_hours')}
        cap_key = list(annotations.keys())[0]

        q_table = handler._mapper.provider_map.get('tables').get('query')
        query = q_table.objects.filter(query_filter)

        with tenant_context(self.tenant):
            cap_data = query.values(*query_group_by).annotate(**annotations)
            for entry in cap_data:
                date = handler.date_to_string(entry.get('usage_start'))
                daily_capacity[date] += entry.get(cap_key, 0)
            # This is a hack because the total capacity in the test data
            # is artificial but the total should still be a sum of
            # cluster capacities
            annotations = {'capacity': Max('cluster_capacity_cpu_core_hours')}
            cap_data = query.values(*query_group_by).annotate(**annotations)
            for entry in cap_data:
                total_capacity += entry.get(cap_key, 0)

        self.assertEqual(
            query_data.get('total', {}).get('capacity', {}).get('value'),
            total_capacity)
        for entry in query_data.get('data', []):
            date = entry.get('date')
            values = entry.get('values')
            if values:
                capacity = values[0].get('capacity', {}).get('value')
                self.assertEqual(capacity, daily_capacity[date])

    def test_get_cluster_capacity_daily_resolution_group_by_clusters(self):
        """Test that cluster capacity returns daily capacity by cluster."""
        # '?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=daily&group_by[cluster]=*'
        params = {
            'filter': {
                'resolution': 'daily',
                'time_scope_value': -1,
                'time_scope_units': 'month'
            },
            'group_by': {
                'cluster': ['*']
            }
        }
        query_params = FakeQueryParameters(params,
                                           report_type='cpu',
                                           tenant=self.tenant)
        handler = OCPReportQueryHandler(query_params.mock_qp)
        query_data = handler.execute_query()
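        # Build the expected per-day, per-cluster capacity directly from the
        # summary table.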

        daily_capacity_by_cluster = defaultdict(dict)
        total_capacity = Decimal(0)
        query_filter = handler.query_filter
        query_group_by = ['usage_start', 'cluster_id']
        annotations = {'capacity': Max('cluster_capacity_cpu_core_hours')}
        cap_key = list(annotations.keys())[0]

        q_table = handler._mapper.query_table
        query = q_table.objects.filter(query_filter)

        with tenant_context(self.tenant):
            cap_data = query.values(*query_group_by).annotate(**annotations)
            for entry in cap_data:
                date = handler.date_to_string(entry.get('usage_start'))
                cluster_id = entry.get('cluster_id', '')
                if cluster_id in daily_capacity_by_cluster[date]:
                    daily_capacity_by_cluster[date][cluster_id] += entry.get(
                        cap_key, 0)
                else:
                    daily_capacity_by_cluster[date][cluster_id] = entry.get(
                        cap_key, 0)
                total_capacity += entry.get(cap_key, 0)

        for entry in query_data.get('data', []):
            date = entry.get('date')
            for cluster in entry.get('clusters', []):
                cluster_name = cluster.get('cluster', '')
                capacity = cluster.get('values')[0].get('capacity',
                                                        {}).get('value')
                self.assertEqual(capacity,
                                 daily_capacity_by_cluster[date][cluster_name])

        self.assertEqual(
            query_data.get('total', {}).get('capacity', {}).get('value'),
            total_capacity)

    @patch('api.report.ocp.query_handler.ReportQueryHandler.add_deltas')
    @patch(
        'api.report.ocp.query_handler.OCPReportQueryHandler.add_current_month_deltas'
    )
    def test_add_deltas_current_month(self, mock_current_deltas, mock_deltas):
        """Test that the current month method is called for deltas."""
        # '?'
        query_params = FakeQueryParameters({},
                                           report_type='cpu',
                                           tenant=self.tenant)
        handler = OCPReportQueryHandler(query_params.mock_qp)
        handler._delta = 'usage__request'
        handler.add_deltas([], [])
        mock_current_deltas.assert_called()
        mock_deltas.assert_not_called()

    @patch('api.report.ocp.query_handler.ReportQueryHandler.add_deltas')
    @patch(
        'api.report.ocp.query_handler.OCPReportQueryHandler.add_current_month_deltas'
    )
    def test_add_deltas_super_delta(self, mock_current_deltas, mock_deltas):
        """Test that the super delta method is called for deltas."""
        # '?'
        query_params = FakeQueryParameters({},
                                           report_type='cpu',
                                           tenant=self.tenant)
        handler = OCPReportQueryHandler(query_params.mock_qp)
        handler._delta = 'usage'

        handler.add_deltas([], [])

        mock_current_deltas.assert_not_called()
        mock_deltas.assert_called()

    def test_add_current_month_deltas(self):
        """Test that current month deltas are calculated."""
        # '?'
        query_params = FakeQueryParameters({},
                                           report_type='cpu',
                                           tenant=self.tenant)
        handler = OCPReportQueryHandler(query_params.mock_qp)
        handler._delta = 'usage__request'

        q_table = handler._mapper.provider_map.get('tables').get('query')
        with tenant_context(self.tenant):
            query = q_table.objects.filter(handler.query_filter)
            query_data = query.annotate(**handler.annotations)
            group_by_value = handler._get_group_by()
            query_group_by = ['date'] + group_by_value
            query_order_by = ('-date', )
            query_order_by += (handler.order, )

            annotations = handler.report_annotations
            query_data = query_data.values(*query_group_by).annotate(
                **annotations)

            aggregates = handler._mapper.report_type_map.get('aggregates')
            metric_sum = query.aggregate(**aggregates)
            query_sum = {key: metric_sum.get(key) for key in aggregates}

            result = handler.add_current_month_deltas(query_data, query_sum)
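            # Re-derive the expected delta percent (usage / request * 100)
            # per row and in total for comparison.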

            delta_field_one, delta_field_two = handler._delta.split('__')
            field_one_total = Decimal(0)
            field_two_total = Decimal(0)
            for entry in result:
                field_one_total += entry.get(delta_field_one, 0)
                field_two_total += entry.get(delta_field_two, 0)
                delta_percent = entry.get('delta_percent')
                expected = (entry.get(delta_field_one, 0) / entry.get(delta_field_two, 0) * 100) \
                    if entry.get(delta_field_two) else 0
                self.assertEqual(delta_percent, expected)

            expected_total = field_one_total / field_two_total * 100 if field_two_total != 0 else 0

            self.assertEqual(handler.query_delta.get('percent'),
                             expected_total)

    def test_add_current_month_deltas_no_previous_data_wo_query_data(self):
        """Test that current month deltas are calculated with no previous month data."""
        OCPReportDataGenerator(self.tenant).remove_data_from_tenant()
        OCPReportDataGenerator(self.tenant,
                               current_month_only=True).add_data_to_tenant()

        # '?filter[time_scope_value]=-2&filter[resolution]=monthly&filter[time_scope_units]=month&filter[limit]=1&delta=usage__request'
        params = {
            'filter': {
                'resolution': 'monthly',
                'time_scope_value': -2,
                'time_scope_units': 'month',
                'limit': 1
            },
            'delta': 'usage__request'
        }
        query_params = FakeQueryParameters(params,
                                           report_type='cpu',
                                           tenant=self.tenant)
        handler = OCPReportQueryHandler(query_params.mock_qp)

        q_table = handler._mapper.provider_map.get('tables').get('query')
        with tenant_context(self.tenant):
            query = q_table.objects.filter(handler.query_filter)
            query_data = query.annotate(**handler.annotations)
            group_by_value = handler._get_group_by()
            query_group_by = ['date'] + group_by_value
            query_order_by = ('-date', )
            query_order_by += (handler.order, )

            annotations = handler.report_annotations
            query_data = query_data.values(*query_group_by).annotate(
                **annotations)

            aggregates = handler._mapper.report_type_map.get('aggregates')
            metric_sum = query.aggregate(**aggregates)
            query_sum = {
                key: metric_sum.get(key) if metric_sum.get(key) else Decimal(0)
                for key in aggregates
            }

            result = handler.add_current_month_deltas(query_data, query_sum)

            self.assertEqual(result, query_data)
            self.assertEqual(handler.query_delta['value'], Decimal(0))
            self.assertIsNone(handler.query_delta['percent'])

    def test_add_current_month_deltas_no_previous_data_w_query_data(self):
        """Test that current month deltas are calculated with no previous data for field two."""
        OCPReportDataGenerator(self.tenant).remove_data_from_tenant()
        OCPReportDataGenerator(self.tenant,
                               current_month_only=True).add_data_to_tenant()

        # '?filter[time_scope_value]=-1&filter[resolution]=monthly&filter[limit]=1'
        params = {
            'filter': {
                'resolution': 'monthly',
                'time_scope_value': -1,
                'limit': 1
            }
        }
        query_params = FakeQueryParameters(params,
                                           report_type='cpu',
                                           tenant=self.tenant)
        handler = OCPReportQueryHandler(query_params.mock_qp)
        handler._delta = 'usage__foo'

        q_table = handler._mapper.provider_map.get('tables').get('query')
        with tenant_context(self.tenant):
            query = q_table.objects.filter(handler.query_filter)
            query_data = query.annotate(**handler.annotations)
            group_by_value = handler._get_group_by()
            query_group_by = ['date'] + group_by_value
            query_order_by = ('-date', )
            query_order_by += (handler.order, )

            annotations = handler.report_annotations
            query_data = query_data.values(*query_group_by).annotate(
                **annotations)

            aggregates = handler._mapper.report_type_map.get('aggregates')
            metric_sum = query.aggregate(**aggregates)
            query_sum = {
                key: metric_sum.get(key) if metric_sum.get(key) else Decimal(0)
                for key in aggregates
            }

            result = handler.add_current_month_deltas(query_data, query_sum)

            self.assertEqual(result, query_data)
            self.assertIsNotNone(handler.query_delta['value'])
            self.assertIsNone(handler.query_delta['percent'])

    def test_strip_label_column_name(self):
        """Test that the tag column name is stripped from results."""
        # '?'
        query_params = FakeQueryParameters({},
                                           report_type='cpu',
                                           tenant=self.tenant)
        handler = OCPReportQueryHandler(query_params.mock_qp)
        tag_column = handler._mapper.provider_map.get('tag_column')
        data = [{
            f'{tag_column}__tag_key1': 'value'
        }, {
            f'{tag_column}__tag_key2': 'value'
        }]
        group_by = [
            'date', f'{tag_column}__tag_key1', f'{tag_column}__tag_key2'
        ]

        expected_data = [{'tag_key1': 'value'}, {'tag_key2': 'value'}]
        expected_group_by = ['date', 'tag_key1', 'tag_key2']

        result_data, result_group_by = handler.strip_label_column_name(
            data, group_by)

        self.assertEqual(result_data, expected_data)
        self.assertEqual(result_group_by, expected_group_by)

    def test_get_tag_filter_keys(self):
        """Test that filter params with tag keys are returned."""
        # '?'
        query_params = FakeQueryParameters({}, tenant=self.tenant)
        handler = OCPTagQueryHandler(query_params.mock_qp)
        tag_keys = handler.get_tag_keys(filters=False)

        # '?filter[time_scope_value]=-1&filter[resolution]=monthly&filter[time_scope_units]=month&filter[tag:some_tag]=*'
        params = {
            'filter': {
                'resolution': 'monthly',
                'time_scope_value': -1,
                'time_scope_units': 'month',
                tag_keys[0]: ['*']
            }
        }
        query_params = FakeQueryParameters(params,
                                           report_type='cpu',
                                           tag_keys=tag_keys,
                                           tenant=self.tenant)
        handler = OCPReportQueryHandler(query_params.mock_qp)
        results = handler.get_tag_filter_keys()
        self.assertEqual(results, [tag_keys[0]])

    def test_get_tag_group_by_keys(self):
        """Test that group_by params with tag keys are returned."""
        # '?'
        query_params = FakeQueryParameters({}, tenant=self.tenant)
        handler = OCPTagQueryHandler(query_params.mock_qp)
        tag_keys = handler.get_tag_keys(filters=False)

        params = {'group_by': {tag_keys[0]: ['*']}}
        query_params = FakeQueryParameters(params,
                                           report_type='cpu',
                                           tag_keys=tag_keys,
                                           tenant=self.tenant)
        handler = OCPReportQueryHandler(query_params.mock_qp)
        results = handler.get_tag_group_by_keys()
        self.assertEqual(results, [tag_keys[0]])

    def test_set_tag_filters(self):
        """Test that tag filters are created properly."""
        filters = QueryFilterCollection()

        # '?'
        query_params = FakeQueryParameters({}, tenant=self.tenant)
        handler = OCPTagQueryHandler(query_params.mock_qp)
        tag_keys = handler.get_tag_keys(filters=False)

        filter_key = tag_keys[0]
        filter_value = 'filter'
        group_by_key = tag_keys[1]
        group_by_value = 'group_By'

        # '?filter[tag:some_key]=some_value&group_by[tag:some_key]=some_value'
        params = {
            'filter': {
                filter_key: [filter_value]
            },
            'group_by': {
                group_by_key: [group_by_value]
            }
        }
        query_params = FakeQueryParameters(params,
                                           report_type='cpu',
                                           tag_keys=tag_keys,
                                           tenant=self.tenant)
        handler = OCPReportQueryHandler(query_params.mock_qp)
        filters = handler._set_tag_filters(filters)
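        # Both the tag filter and the tag group_by should be applied as
        # icontains filters on the pod_labels column.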

        expected = f"""<class 'api.query_filter.QueryFilterCollection'>: (AND: ('pod_labels__{filter_key}__icontains', '{filter_value}')), (AND: ('pod_labels__{group_by_key}__icontains', '{group_by_value}')), """  # noqa: E501

        self.assertEqual(repr(filters), expected)

    def test_get_exclusions(self):
        """Test that exclusions are properly set."""
        # '?'
        query_params = FakeQueryParameters({}, tenant=self.tenant)
        handler = OCPTagQueryHandler(query_params.mock_qp)
        tag_keys = handler.get_tag_keys(filters=False)

        group_by_key = tag_keys[0]
        group_by_value = 'group_By'
        # '?group_by[tag:some_key]=some_value'
        params = {'group_by': {group_by_key: [group_by_value]}}
        query_params = FakeQueryParameters(params,
                                           report_type='cpu',
                                           tag_keys=tag_keys,
                                           tenant=self.tenant)
        handler = OCPReportQueryHandler(query_params.mock_qp)
        exclusions = handler._get_exclusions()
        expected = f"<Q: (AND: ('pod_labels__{group_by_key}__isnull', True))>"
        self.assertEqual(repr(exclusions), expected)

    def test_get_tag_group_by(self):
        """Test that tag based group bys work."""
        # '?'
        query_params = FakeQueryParameters({}, tenant=self.tenant)
        handler = OCPTagQueryHandler(query_params.mock_qp)
        tag_keys = handler.get_tag_keys(filters=False)

        group_by_key = tag_keys[0]
        group_by_value = 'group_by'
        # '?group_by[tag:some_key]=some_value'
        params = {'group_by': {group_by_key: [group_by_value]}}
        query_params = FakeQueryParameters(params,
                                           report_type='cpu',
                                           tag_keys=tag_keys,
                                           tenant=self.tenant)
        handler = OCPReportQueryHandler(query_params.mock_qp)
        group_by = handler._get_tag_group_by()
        group = group_by[0]
        expected = 'pod_labels__' + group_by_key
        self.assertEqual(len(group_by), 1)
        self.assertEqual(group[0], expected)

    def test_get_tag_order_by(self):
        """Verify that a propery order by is returned."""
        tag = 'pod_labels__key'
        expected_param = (tag.split('__')[1], )

        # '?'
        query_params = FakeQueryParameters({}, tenant=self.tenant)
        handler = OCPReportQueryHandler(query_params.mock_qp)
        result = handler.get_tag_order_by(tag)
        expression = result.expression

        self.assertIsInstance(result, OrderBy)
        self.assertEqual(expression.sql, 'pod_labels -> %s')
        self.assertEqual(expression.params, expected_param)

    def test_filter_by_infrastructure_ocp_on_aws(self):
        """Test that filter by infrastructure for ocp on aws."""
        data_generator = OCPAWSReportDataGenerator(self.tenant,
                                                   current_month_only=True)
        data_generator.add_data_to_tenant()

        # '?filter[resolution]=monthly&filter[time_scope_value]=-1&filter[time_scope_units]=month&filter[infrastructures]=aws'
        params = {
            'filter': {
                'resolution': 'monthly',
                'time_scope_value': -1,
                'time_scope_units': 'month',
                'infrastructures': ['aws']
            }
        }
        query_params = FakeQueryParameters(params, tenant=self.tenant)
        handler = OCPReportQueryHandler(query_params.mock_qp)
        query_data = handler.execute_query()

        for entry in query_data.get('data', []):
            for value in entry.get('values', []):
                self.assertIsNotNone(value.get('usage').get('value'))
                self.assertIsNotNone(value.get('request').get('value'))
        data_generator.remove_data_from_tenant()

    def test_filter_by_infrastructure_ocp(self):
        """Test that filter by infrastructure for ocp not on aws."""
        data_generator = OCPReportDataGenerator(self.tenant,
                                                current_month_only=True)
        data_generator.add_data_to_tenant()

        # '?filter[resolution]=monthly&filter[time_scope_value]=-1&filter[time_scope_units]=month&filter[infrastructures]=aws'
        params = {
            'filter': {
                'resolution': 'monthly',
                'time_scope_value': -1,
                'time_scope_units': 'month',
                'infrastructures': ['AWS']
            }
        }
        query_params = FakeQueryParameters(params, tenant=self.tenant)
        handler = OCPReportQueryHandler(query_params.mock_qp)
        query_data = handler.execute_query()

        for entry in query_data.get('data', []):
            for value in entry.get('values', []):
                self.assertEqual(value.get('usage').get('value'), 0)
                self.assertEqual(value.get('request').get('value'), 0)
        data_generator.remove_data_from_tenant()

    def test_order_by_null_values(self):
        """Test that order_by returns properly sorted data with null data."""
        # '?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly'
        params = {
            'filter': {
                'resolution': 'monthly',
                'time_scope_value': -1,
                'time_scope_units': 'month'
            }
        }
        query_params = FakeQueryParameters(params)
        handler = OCPReportQueryHandler(query_params.mock_qp)

        unordered_data = [{
            'node': None,
            'cluster': 'cluster-1'
        }, {
            'node': 'alpha',
            'cluster': 'cluster-2'
        }, {
            'node': 'bravo',
            'cluster': 'cluster-3'
        }, {
            'node': 'oscar',
            'cluster': 'cluster-4'
        }]

        order_fields = ['node']
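        # Entries with a null group-by value are expected to be relabeled
        # 'no-node' and sorted alphabetically with the rest.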
        expected = [{
            'node': 'alpha',
            'cluster': 'cluster-2'
        }, {
            'node': 'bravo',
            'cluster': 'cluster-3'
        }, {
            'node': 'no-node',
            'cluster': 'cluster-1'
        }, {
            'node': 'oscar',
            'cluster': 'cluster-4'
        }]
        ordered_data = handler.order_by(unordered_data, order_fields)
        self.assertEqual(ordered_data, expected)

    def test_ocp_cpu_query_group_by_cluster(self):
        """Test that group by cluster includes cluster and cluster_alias."""
        for _ in range(1, 5):
            OCPReportDataGenerator(self.tenant).add_data_to_tenant()

        # '?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[cluster]=*'
        params = {
            'filter': {
                'resolution': 'monthly',
                'time_scope_value': -1,
                'time_scope_units': 'month',
                'limit': 3
            },
            'group_by': {
                'cluster': ['*']
            }
        }
        query_params = FakeQueryParameters(params,
                                           report_type='cpu',
                                           tenant=self.tenant)
        handler = OCPReportQueryHandler(query_params.mock_qp)

        query_data = handler.execute_query()
        for data in query_data.get('data'):
            self.assertIn('clusters', data)
            for cluster_data in data.get('clusters'):
                self.assertIn('cluster', cluster_data)
                self.assertIn('values', cluster_data)
                for cluster_value in cluster_data.get('values'):
                    self.assertIn('cluster', cluster_value)
                    self.assertIn('cluster_alias', cluster_value)
                    self.assertIsNotNone(cluster_value.get('cluster'))
                    self.assertIsNotNone(cluster_value.get('cluster_alias'))
Example 19
class OCPAWSQueryHandlerTest(IamTestCase):
    """Tests for the OCP report query handler."""
    def setUp(self):
        """Set up the customer view tests."""
        super().setUp()
        self.dh = DateHelper()
        _, self.provider = create_generic_provider('OCP', self.headers)

        self.this_month_filter = {'usage_start__gte': self.dh.this_month_start}
        self.ten_day_filter = {
            'usage_start__gte': self.dh.n_days_ago(self.dh.today, 9)
        }
        self.thirty_day_filter = {
            'usage_start__gte': self.dh.n_days_ago(self.dh.today, 29)
        }
        self.last_month_filter = {
            'usage_start__gte': self.dh.last_month_start,
            'usage_end__lte': self.dh.last_month_end,
        }
        OCPAWSReportDataGenerator(self.tenant,
                                  self.provider).add_data_to_tenant()

    def get_totals_by_time_scope(self, aggregates, filters=None):
        """Return the total aggregates for a time period."""
        if filters is None:
            filters = self.ten_day_filter
        with tenant_context(self.tenant):
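            # Expected totals are aggregated straight from the OCP-on-AWS
            # daily summary table in the tenant schema for comparison with
            # the handler's query output.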
            return OCPAWSCostLineItemDailySummary.objects.filter(
                **filters).aggregate(**aggregates)

    def test_execute_sum_query_storage(self):
        """Test that the sum query runs properly."""
        url = '?'
        query_params = self.mocked_query_params(url, OCPAWSStorageView)
        handler = OCPAWSReportQueryHandler(query_params)
        filt = {'product_family__contains': 'Storage'}
        filt.update(self.ten_day_filter)
        aggregates = handler._mapper.report_type_map.get('aggregates')
        current_totals = self.get_totals_by_time_scope(aggregates, filt)
        query_output = handler.execute_query()
        self.assertIsNotNone(query_output.get('data'))
        self.assertIsNotNone(query_output.get('total'))
        total = query_output.get('total')
        self.assertEqual(total.get('total'), current_totals.get('total'))

    def test_execute_query_current_month_daily(self):
        """Test execute_query for current month on daily breakdown."""
        url = '?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=daily'
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        query_output = handler.execute_query()
        self.assertIsNotNone(query_output.get('data'))
        self.assertIsNotNone(query_output.get('total'))
        total = query_output.get('total')
        self.assertIsNotNone(total.get('cost'))

        aggregates = handler._mapper.report_type_map.get('aggregates')
        current_totals = self.get_totals_by_time_scope(aggregates,
                                                       self.this_month_filter)
        self.assertEqual(
            total.get('cost', {}).get('value'), current_totals.get('cost'))

    def test_execute_query_current_month_monthly(self):
        """Test execute_query for current month on monthly breakdown."""
        url = '?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly'
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        query_output = handler.execute_query()
        self.assertIsNotNone(query_output.get('data'))
        self.assertIsNotNone(query_output.get('total'))
        total = query_output.get('total')
        self.assertIsNotNone(total.get('cost'))

        aggregates = handler._mapper.report_type_map.get('aggregates')
        current_totals = self.get_totals_by_time_scope(aggregates,
                                                       self.this_month_filter)
        self.assertEqual(
            total.get('cost', {}).get('value'), current_totals.get('cost'))

    def test_execute_query_current_month_by_service(self):
        """Test execute_query for current month on monthly breakdown by service."""
        url = '?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[service]=*'  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get('data')
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get('total'))
        total = query_output.get('total')
        self.assertIsNotNone(total.get('cost'))

        aggregates = handler._mapper.report_type_map.get('aggregates')
        current_totals = self.get_totals_by_time_scope(aggregates,
                                                       self.this_month_filter)
        self.assertEqual(
            total.get('cost', {}).get('value'), current_totals.get('cost'))

        cmonth_str = DateHelper().this_month_start.strftime('%Y-%m')
        for data_item in data:
            month_val = data_item.get('date')
            month_data = data_item.get('services')
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)
            for month_item in month_data:
                compute = month_item.get('service')
                self.assertEqual(compute, 'AmazonEC2')
                self.assertIsInstance(month_item.get('values'), list)

    def test_execute_query_by_filtered_service(self):
        """Test execute_query monthly breakdown by filtered service."""
        url = '?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[service]=AmazonEC2'  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get('data')
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get('total'))
        total = query_output.get('total')
        self.assertIsNotNone(total.get('cost'))

        aggregates = handler._mapper.report_type_map.get('aggregates')
        current_totals = self.get_totals_by_time_scope(aggregates,
                                                       self.this_month_filter)
        self.assertEqual(
            total.get('cost', {}).get('value'), current_totals.get('cost'))

        cmonth_str = DateHelper().this_month_start.strftime('%Y-%m')
        for data_item in data:
            month_val = data_item.get('date')
            month_data = data_item.get('services')
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)
            for month_item in month_data:
                compute = month_item.get('service')
                self.assertEqual(compute, 'AmazonEC2')
                self.assertIsInstance(month_item.get('values'), list)

    def test_query_by_partial_filtered_service(self):
        """Test execute_query monthly breakdown by filtered service."""
        url = '?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[service]=eC2'  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get('data')
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get('total'))
        total = query_output.get('total')
        self.assertIsNotNone(total.get('cost'))

        aggregates = handler._mapper.report_type_map.get('aggregates')
        current_totals = self.get_totals_by_time_scope(aggregates,
                                                       self.this_month_filter)
        self.assertEqual(
            total.get('cost', {}).get('value'), current_totals.get('cost'))

        cmonth_str = DateHelper().this_month_start.strftime('%Y-%m')
        for data_item in data:
            month_val = data_item.get('date')
            month_data = data_item.get('services')
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)
            for month_item in month_data:
                compute = month_item.get('service')
                self.assertEqual(compute, 'AmazonEC2')
                self.assertIsInstance(month_item.get('values'), list)

    def test_execute_query_current_month_by_account(self):
        """Test execute_query for current month on monthly breakdown by account."""
        url = '?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[account]=*'  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get('data')
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get('total'))
        total = query_output.get('total')
        self.assertIsNotNone(total.get('cost'))

        aggregates = handler._mapper.report_type_map.get('aggregates')
        current_totals = self.get_totals_by_time_scope(aggregates,
                                                       self.this_month_filter)
        self.assertEqual(
            total.get('cost', {}).get('value'), current_totals.get('cost'))

        cmonth_str = DateHelper().this_month_start.strftime('%Y-%m')
        for data_item in data:
            month_val = data_item.get('date')
            month_data = data_item.get('accounts')
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)
            for month_item in month_data:
                self.assertIsInstance(month_item.get('values'), list)

    def test_execute_query_by_account_by_service(self):
        """Test execute_query for current month breakdown by account by service."""
        url = '?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[account]=*&group_by[service]=*'  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAWSCostView)
        handler = OCPAWSReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get('data')
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get('total'))
        total = query_output.get('total')
        self.assertIsNotNone(total.get('cost'))

        aggregates = handler._mapper.report_type_map.get('aggregates')
        current_totals = self.get_totals_by_time_scope(aggregates,
                                                       self.this_month_filter)
        self.assertEqual(
            total.get('cost', {}).get('value'), current_totals.get('cost'))

        cmonth_str = DateHelper().this_month_start.strftime('%Y-%m')
        for data_item in data:
            month_val = data_item.get('date')
            month_data = data_item.get('accounts')
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)
            for month_item in month_data:
                self.assertIsInstance(month_item.get('services'), list)
Example 20
class OCPReportQueryHandlerTest(IamTestCase):
    """Tests for the OCP report query handler."""
    def setUp(self):
        """Set up the customer view tests."""
        super().setUp()
        self.dh = DateHelper()

        self.this_month_filter = {"usage_start__gte": self.dh.this_month_start}
        self.ten_day_filter = {
            "usage_start__gte": self.dh.n_days_ago(self.dh.today, 9)
        }
        self.thirty_day_filter = {
            "usage_start__gte": self.dh.n_days_ago(self.dh.today, 29)
        }
        self.last_month_filter = {
            "usage_start__gte": self.dh.last_month_start,
            "usage_end__lte": self.dh.last_month_end,
        }

    def get_totals_by_time_scope(self, aggregates, filters=None):
        """Return the total aggregates for a time period."""
        if filters is None:
            filters = self.ten_day_filter
        with tenant_context(self.tenant):
            return OCPUsageLineItemDailySummary.objects.filter(
                **filters).aggregate(**aggregates)

    def get_totals_costs_by_time_scope(self, aggregates, filters=None):
        """Return the total costs aggregates for a time period."""
        if filters is None:
            filters = self.this_month_filter
        with tenant_context(self.tenant):
            return OCPUsageLineItemDailySummary.objects.filter(
                **filters).aggregate(**aggregates)

    def test_execute_sum_query(self):
        """Test that the sum query runs properly."""
        url = "?"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates)
        query_output = handler.execute_query()
        self.assertIsNotNone(query_output.get("data"))
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")

        self.assertEqual(
            total.get("usage", {}).get("value"), current_totals.get("usage"))
        self.assertEqual(
            total.get("request", {}).get("value"),
            current_totals.get("request"))
        self.assertEqual(
            total.get("cost", {}).get("value"), current_totals.get("cost"))
        self.assertEqual(
            total.get("limit", {}).get("value"), current_totals.get("limit"))

    def test_execute_sum_query_costs(self):
        """Test that the sum query runs properly for the costs endpoint."""
        url = "?"
        query_params = self.mocked_query_params(url, OCPCostView)
        handler = OCPReportQueryHandler(query_params)
        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_costs_by_time_scope(
            aggregates, self.ten_day_filter)
        expected_cost_total = current_totals.get("cost_total")
        self.assertIsNotNone(expected_cost_total)
        query_output = handler.execute_query()
        self.assertIsNotNone(query_output.get("data"))
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        result_cost_total = total.get("cost", {}).get("total", {}).get("value")
        self.assertIsNotNone(result_cost_total)
        self.assertEqual(result_cost_total, expected_cost_total)

    def test_get_cluster_capacity_monthly_resolution(self):
        """Test that cluster capacity returns a full month's capacity."""
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        query_data = [{"row": 1}]
        query_data, total_capacity = handler.get_cluster_capacity(query_data)
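        # get_cluster_capacity is expected to add a "capacity" entry to each
        # row and return a matching month-level capacity total.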
        self.assertTrue("capacity" in total_capacity)
        self.assertTrue(isinstance(total_capacity["capacity"], Decimal))
        self.assertTrue("capacity" in query_data[0])
        self.assertIsNotNone(query_data[0].get("capacity"))
        self.assertIsNotNone(total_capacity.get("capacity"))
        self.assertEqual(query_data[0].get("capacity"),
                         total_capacity.get("capacity"))

    def test_get_cluster_capacity_monthly_resolution_group_by_cluster(self):
        """Test that cluster capacity returns capacity by cluster."""
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[cluster]=*"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        query_data = handler.execute_query()

        capacity_by_cluster = defaultdict(Decimal)
        total_capacity = Decimal(0)
        query_filter = handler.query_filter
        query_group_by = ["usage_start", "cluster_id"]
        annotations = {"capacity": Max("cluster_capacity_cpu_core_hours")}
        cap_key = list(annotations.keys())[0]
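        # Recompute the expected capacity independently of the handler:
        # Max of the cluster capacity column per (usage_start, cluster_id),
        # accumulated per cluster and overall.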

        q_table = handler._mapper.provider_map.get("tables").get("query")
        query = q_table.objects.filter(query_filter)

        with tenant_context(self.tenant):
            cap_data = query.values(*query_group_by).annotate(**annotations)
            for entry in cap_data:
                cluster_id = entry.get("cluster_id", "")
                capacity_by_cluster[cluster_id] += entry.get(cap_key, 0)
                total_capacity += entry.get(cap_key, 0)

        for entry in query_data.get("data", []):
            for cluster in entry.get("clusters", []):
                cluster_name = cluster.get("cluster", "")
                capacity = cluster.get("values")[0].get("capacity",
                                                        {}).get("value")
                self.assertEqual(capacity, capacity_by_cluster[cluster_name])

        self.assertEqual(
            query_data.get("total", {}).get("capacity", {}).get("value"),
            total_capacity)

    def test_get_cluster_capacity_daily_resolution(self):
        """Test that total capacity is returned daily resolution."""
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=daily"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        query_data = handler.execute_query()

        daily_capacity = defaultdict(Decimal)
        total_capacity = Decimal(0)
        query_filter = handler.query_filter
        query_group_by = ["usage_start", "cluster_id"]
        annotations = {"capacity": Max("cluster_capacity_cpu_core_hours")}
        cap_key = list(annotations.keys())[0]
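        # Same independent recomputation, but bucketed per day via the
        # handler's date string so it can be matched against each entry's
        # "date" key.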

        q_table = handler._mapper.provider_map.get("tables").get("query")
        query = q_table.objects.filter(query_filter)

        with tenant_context(self.tenant):
            cap_data = query.values(*query_group_by).annotate(**annotations)
            for entry in cap_data:
                date = handler.date_to_string(entry.get("usage_start"))
                daily_capacity[date] += entry.get(cap_key, 0)
            cap_data = query.values(*query_group_by).annotate(**annotations)
            for entry in cap_data:
                total_capacity += entry.get(cap_key, 0)

        self.assertEqual(
            query_data.get("total", {}).get("capacity", {}).get("value"),
            total_capacity)
        for entry in query_data.get("data", []):
            date = entry.get("date")
            values = entry.get("values")
            if values:
                capacity = values[0].get("capacity", {}).get("value")
                self.assertEqual(capacity, daily_capacity[date])

    def test_get_cluster_capacity_daily_resolution_group_by_clusters(self):
        """Test that cluster capacity returns daily capacity by cluster."""
        url = (
            "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=daily&group_by[cluster]=*"
        )
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        query_data = handler.execute_query()

        daily_capacity_by_cluster = defaultdict(dict)
        total_capacity = Decimal(0)
        query_filter = handler.query_filter
        query_group_by = ["usage_start", "cluster_id"]
        annotations = {"capacity": Max("cluster_capacity_cpu_core_hours")}
        cap_key = list(annotations.keys())[0]

        q_table = handler._mapper.query_table
        query = q_table.objects.filter(query_filter)

        with tenant_context(self.tenant):
            cap_data = query.values(*query_group_by).annotate(**annotations)
            for entry in cap_data:
                date = handler.date_to_string(entry.get("usage_start"))
                cluster_id = entry.get("cluster_id", "")
                if cluster_id in daily_capacity_by_cluster[date]:
                    daily_capacity_by_cluster[date][cluster_id] += entry.get(
                        cap_key, 0)
                else:
                    daily_capacity_by_cluster[date][cluster_id] = entry.get(
                        cap_key, 0)
                total_capacity += entry.get(cap_key, 0)

        for entry in query_data.get("data", []):
            date = entry.get("date")
            for cluster in entry.get("clusters", []):
                cluster_name = cluster.get("cluster", "")
                capacity = cluster.get("values")[0].get("capacity",
                                                        {}).get("value")
                self.assertEqual(capacity,
                                 daily_capacity_by_cluster[date][cluster_name])

        self.assertEqual(
            query_data.get("total", {}).get("capacity", {}).get("value"),
            total_capacity)

    @patch("api.report.ocp.query_handler.ReportQueryHandler.add_deltas")
    @patch(
        "api.report.ocp.query_handler.OCPReportQueryHandler.add_current_month_deltas"
    )
    def test_add_deltas_current_month(self, mock_current_deltas, mock_deltas):
        """Test that the current month method is called for deltas."""
        url = "?"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        handler._delta = "usage__request"
        handler.add_deltas([], [])
        mock_current_deltas.assert_called()
        mock_deltas.assert_not_called()

    @patch("api.report.ocp.query_handler.ReportQueryHandler.add_deltas")
    @patch(
        "api.report.ocp.query_handler.OCPReportQueryHandler.add_current_month_deltas"
    )
    def test_add_deltas_super_delta(self, mock_current_deltas, mock_deltas):
        """Test that the super delta method is called for deltas."""
        url = "?"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        handler._delta = "usage"

        handler.add_deltas([], [])

        mock_current_deltas.assert_not_called()
        mock_deltas.assert_called()

    def test_add_current_month_deltas(self):
        """Test that current month deltas are calculated."""
        url = "?"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        handler._delta = "usage__request"

        q_table = handler._mapper.provider_map.get("tables").get("query")
        with tenant_context(self.tenant):
            query = q_table.objects.filter(handler.query_filter)
            query_data = query.annotate(**handler.annotations)
            group_by_value = handler._get_group_by()
            query_group_by = ["date"] + group_by_value
            query_order_by = ("-date", )
            query_order_by += (handler.order, )

            annotations = handler.report_annotations
            query_data = query_data.values(*query_group_by).annotate(
                **annotations)

            aggregates = handler._mapper.report_type_map.get("aggregates")
            metric_sum = query.aggregate(**aggregates)
            query_sum = {key: metric_sum.get(key) for key in aggregates}

            result = handler.add_current_month_deltas(query_data, query_sum)

            delta_field_one, delta_field_two = handler._delta.split("__")
            field_one_total = Decimal(0)
            field_two_total = Decimal(0)
            for entry in result:
                field_one_total += entry.get(delta_field_one, 0)
                field_two_total += entry.get(delta_field_two, 0)
                delta_percent = entry.get("delta_percent")
                expected = ((entry.get(delta_field_one, 0) /
                             entry.get(delta_field_two, 0) *
                             100) if entry.get(delta_field_two) else 0)
                self.assertEqual(delta_percent, expected)

            expected_total = field_one_total / field_two_total * 100 if field_two_total != 0 else 0

            self.assertEqual(handler.query_delta.get("percent"),
                             expected_total)

    def test_add_current_month_deltas_no_previous_data_w_query_data(self):
        """Test that current month deltas are calculated with no previous data for field two."""
        url = "?filter[time_scope_value]=-1&filter[resolution]=monthly&filter[limit]=1"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        handler._delta = "usage__foo"

        q_table = handler._mapper.provider_map.get("tables").get("query")
        with tenant_context(self.tenant):
            query = q_table.objects.filter(handler.query_filter)
            query_data = query.annotate(**handler.annotations)
            group_by_value = handler._get_group_by()
            query_group_by = ["date"] + group_by_value
            query_order_by = ("-date", )
            query_order_by += (handler.order, )

            annotations = handler.report_annotations
            query_data = query_data.values(*query_group_by).annotate(
                **annotations)

            aggregates = handler._mapper.report_type_map.get("aggregates")
            metric_sum = query.aggregate(**aggregates)
            query_sum = {
                key: metric_sum.get(key) if metric_sum.get(key) else Decimal(0)
                for key in aggregates
            }

            result = handler.add_current_month_deltas(query_data, query_sum)

            self.assertEqual(result, query_data)
            self.assertIsNotNone(handler.query_delta["value"])
            self.assertIsNone(handler.query_delta["percent"])

    def test_get_tag_filter_keys(self):
        """Test that filter params with tag keys are returned."""
        url = "?"
        query_params = self.mocked_query_params(url, OCPTagView)
        handler = OCPTagQueryHandler(query_params)
        tag_keys = handler.get_tag_keys(filters=False)

        url = f"?filter[tag:{tag_keys[0]}]=*"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        results = handler.get_tag_filter_keys()
        self.assertEqual(results, ["tag:" + tag_keys[0]])

    def test_get_tag_group_by_keys(self):
        """Test that group_by params with tag keys are returned."""
        url = "?"
        query_params = self.mocked_query_params(url, OCPTagView)
        handler = OCPTagQueryHandler(query_params)
        tag_keys = handler.get_tag_keys(filters=False)
        group_by_key = tag_keys[0]

        url = f"?group_by[tag:{group_by_key}]=*"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        results = handler.get_tag_group_by_keys()
        self.assertEqual(results, ["tag:" + group_by_key])

    def test_set_tag_filters(self):
        """Test that tag filters are created properly."""
        filters = QueryFilterCollection()

        url = "?"
        query_params = self.mocked_query_params(url, OCPTagView)
        handler = OCPTagQueryHandler(query_params)
        tag_keys = handler.get_tag_keys(filters=False)

        filter_key = tag_keys[0]

        filter_value = "filter"
        group_by_key = tag_keys[1]

        group_by_value = "group_By"

        url = f"?filter[tag:{filter_key}]={filter_value}&group_by[tag:{group_by_key}]={group_by_value}"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        filters = handler._set_tag_filters(filters)
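        # Both filter[tag:...] and group_by[tag:...] params are expected to
        # become case-insensitive pod_labels__<key>__icontains filters, as the
        # repr below reflects.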

        expected = f"""<class 'api.query_filter.QueryFilterCollection'>: (AND: ('pod_labels__{filter_key}__icontains', '{filter_value}')), (AND: ('pod_labels__{group_by_key}__icontains', '{group_by_value}')), """  # noqa: E501

        self.assertEqual(repr(filters), expected)

    def test_get_tag_group_by(self):
        """Test that tag based group bys work."""
        url = "?"
        query_params = self.mocked_query_params(url, OCPTagView)
        handler = OCPTagQueryHandler(query_params)
        tag_keys = handler.get_tag_keys(filters=False)

        group_by_key = tag_keys[0]
        group_by_value = "group_by"
        url = f"?group_by[tag:{group_by_key}]={group_by_value}"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        group_by = handler._get_tag_group_by()
        group = group_by[0]
        expected = "pod_labels__" + group_by_key
        self.assertEqual(len(group_by), 1)
        self.assertEqual(group[0], expected)

    def test_get_tag_order_by(self):
        """Verify that a propery order by is returned."""
        tag = "pod_labels__key"
        expected_param = (tag.split("__")[1], )

        url = "?"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        result = handler.get_tag_order_by(tag)
        expression = result.expression

        self.assertIsInstance(result, OrderBy)
        self.assertEqual(expression.sql, "pod_labels -> %s")
        self.assertEqual(expression.params, expected_param)

    def test_filter_by_infrastructure_ocp_on_aws(self):
        """Test that filter by infrastructure for ocp on aws."""
        url = "?filter[resolution]=monthly&filter[time_scope_value]=-1&filter[time_scope_units]=month&filter[infrastructures]=aws"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        query_data = handler.execute_query()

        self.assertTrue(
            query_data.get("data"))  # check that returned list is not empty
        for entry in query_data.get("data"):
            self.assertTrue(entry.get("values"))
            for value in entry.get("values"):
                self.assertIsNotNone(value.get("usage").get("value"))
                self.assertIsNotNone(value.get("request").get("value"))

    def test_filter_by_infrastructure_ocp_on_azure(self):
        """Test that filter by infrastructure for ocp on azure."""
        url = "?filter[resolution]=monthly&filter[time_scope_value]=-1&filter[time_scope_units]=month&filter[infrastructures]=azure"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        query_data = handler.execute_query()

        self.assertTrue(
            query_data.get("data"))  # check that returned list is not empty
        for entry in query_data.get("data"):
            self.assertTrue(entry.get("values"))
            for value in entry.get("values"):
                self.assertIsNotNone(value.get("usage").get("value"))
                self.assertIsNotNone(value.get("request").get("value"))

    def test_filter_by_infrastructure_ocp(self):
        """Test that filter by infrastructure for ocp not on aws."""

        url = "?filter[resolution]=monthly&filter[time_scope_value]=-1&filter[time_scope_units]=month&filter[cluster]=OCP-On-Azure&filter[infrastructures]=aws"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        query_data = handler.execute_query()

        self.assertTrue(
            query_data.get("data"))  # check that returned list is not empty
        for entry in query_data.get("data"):
            for value in entry.get("values"):
                self.assertEqual(value.get("usage").get("value"), 0)
                self.assertEqual(value.get("request").get("value"), 0)

    def test_order_by_null_values(self):
        """Test that order_by returns properly sorted data with null data."""
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)

        unordered_data = [
            {
                "node": None,
                "cluster": "cluster-1"
            },
            {
                "node": "alpha",
                "cluster": "cluster-2"
            },
            {
                "node": "bravo",
                "cluster": "cluster-3"
            },
            {
                "node": "oscar",
                "cluster": "cluster-4"
            },
        ]

        order_fields = ["node"]
        expected = [
            {
                "node": "alpha",
                "cluster": "cluster-2"
            },
            {
                "node": "bravo",
                "cluster": "cluster-3"
            },
            {
                "node": "no-node",
                "cluster": "cluster-1"
            },
            {
                "node": "oscar",
                "cluster": "cluster-4"
            },
        ]
        ordered_data = handler.order_by(unordered_data, order_fields)
        self.assertEqual(ordered_data, expected)

    def test_ocp_cpu_query_group_by_cluster(self):
        """Test that group by cluster includes cluster and cluster_alias."""
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&filter[limit]=3&group_by[cluster]=*"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)

        query_data = handler.execute_query()
        for data in query_data.get("data"):
            self.assertIn("clusters", data)
            for cluster_data in data.get("clusters"):
                self.assertIn("cluster", cluster_data)
                self.assertIn("values", cluster_data)
                for cluster_value in cluster_data.get("values"):
                    # cluster_value is a dictionary
                    self.assertIn("cluster", cluster_value.keys())
                    self.assertIn("clusters", cluster_value.keys())
                    self.assertIsNotNone(cluster_value["cluster"])
                    self.assertIsNotNone(cluster_value["clusters"])

    def test_subtotals_add_up_to_total(self):
        """Test the apply_group_by handles different grouping scenerios."""
        group_by_list = [
            ("project", "cluster", "node"),
            ("project", "node", "cluster"),
            ("cluster", "project", "node"),
            ("cluster", "node", "project"),
            ("node", "cluster", "project"),
            ("node", "project", "cluster"),
        ]
        base_url = (
            "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&filter[limit]=3"
        )  # noqa: E501
        tolerance = 1
        for group_by in group_by_list:
            sub_url = "&group_by[%s]=*&group_by[%s]=*&group_by[%s]=*" % group_by
            url = base_url + sub_url
            query_params = self.mocked_query_params(url, OCPCpuView)
            handler = OCPReportQueryHandler(query_params)
            query_data = handler.execute_query()
            the_sum = handler.query_sum
            data = query_data["data"][0]
            result_cost, result_infra, result_sup = _calculate_subtotals(
                data, [], [], [])
            test_dict = {
                "cost": {
                    "expected":
                    the_sum.get("cost", {}).get("total", {}).get("value"),
                    "result":
                    sum(result_cost),
                },
                "infra": {
                    "expected":
                    the_sum.get("infrastructure", {}).get("total",
                                                          {}).get("value"),
                    "result":
                    sum(result_infra),
                },
                "sup": {
                    "expected":
                    the_sum.get("supplementary", {}).get("total",
                                                         {}).get("value"),
                    "result":
                    sum(result_sup),
                },
            }
            for _, data in test_dict.items():
                expected = data["expected"]
                result = data["result"]
                self.assertIsNotNone(expected)
                self.assertIsNotNone(result)
                self.assertLessEqual(abs(expected - result), tolerance)

    def test_source_uuid_mapping(self):  # noqa: C901
        """Test source_uuid is mapped to the correct source."""
        endpoints = [OCPCostView, OCPCpuView, OCPVolumeView, OCPMemoryView]
        with tenant_context(self.tenant):
            expected_source_uuids = list(
                OCPUsageReportPeriod.objects.all().values_list("provider_id",
                                                               flat=True))
        source_uuid_list = []
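        # Walk every endpoint and grouping combination, collecting the
        # source_uuid reported in each single-entry "values" list.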
        for endpoint in endpoints:
            urls = ["?", "?group_by[project]=*"]
            if endpoint == OCPCostView:
                urls.append("?group_by[node]=*")
            for url in urls:
                query_params = self.mocked_query_params(url, endpoint)
                handler = OCPReportQueryHandler(query_params)
                query_output = handler.execute_query()
                for dictionary in query_output.get("data"):
                    for _, value in dictionary.items():
                        if isinstance(value, list):
                            for item in value:
                                if isinstance(item, dict):
                                    if "values" in item.keys():
                                        self.assertEqual(
                                            len(item["values"]), 1)
                                        value = item["values"][0]
                                        source_uuid_list.extend(
                                            value.get("source_uuid"))
        self.assertNotEqual(source_uuid_list, [])
        for source_uuid in source_uuid_list:
            self.assertIn(source_uuid, expected_source_uuids)


class OCPAzureQueryHandlerTest(IamTestCase):
    """Tests for the OCP on Azure report query handler."""

    def setUp(self):
        """Set up the customer view tests."""
        super().setUp()
        self.dh = DateHelper()
        _, self.provider = create_generic_provider(Provider.PROVIDER_OCP, self.headers)

        self.this_month_filter = {"usage_start__gte": self.dh.this_month_start}
        self.ten_day_filter = {"usage_start__gte": self.dh.n_days_ago(self.dh.today, 9)}
        self.thirty_day_filter = {"usage_start__gte": self.dh.n_days_ago(self.dh.today, 29)}
        self.last_month_filter = {
            "usage_start__gte": self.dh.last_month_start,
            "usage_end__lte": self.dh.last_month_end,
        }
        self.generator = OCPAzureReportDataGenerator(self.tenant, self.provider)

    def get_totals_by_time_scope(self, aggregates, filters=None):
        """Return the total aggregates for a time period."""
        if filters is None:
            filters = self.ten_day_filter
        with tenant_context(self.tenant):
            return OCPAzureCostLineItemDailySummary.objects.filter(**filters).aggregate(**aggregates)

    def test_execute_sum_query_storage(self):
        """Test that the sum query runs properly."""
        self.generator.add_data_to_tenant(service_name="Storage")
        url = "?"
        query_params = self.mocked_query_params(url, OCPAzureStorageView)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()
        self.assertIsNotNone(query_output.get("data"))
        self.assertIsNotNone(query_output.get("total"))

        aggregates = handler._mapper.report_type_map.get("aggregates")
        filt = {"service_name__contains": "Storage"}
        filt.update(self.ten_day_filter)
        current_totals = self.get_totals_by_time_scope(aggregates, filt)
        total = query_output.get("total")
        self.assertEqual(total.get("cost", {}).get("value", 0), current_totals.get("cost", 1))

    def test_execute_sum_query_instance_types(self):
        """Test that the sum query runs properly."""
        self.generator.add_data_to_tenant()
        url = "?"
        query_params = self.mocked_query_params(url, OCPAzureInstanceTypeView)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()
        self.assertIsNotNone(query_output.get("data"))
        self.assertIsNotNone(query_output.get("total"))

        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates, self.ten_day_filter)
        total = query_output.get("total")
        self.assertEqual(total.get("cost", {}).get("value", 0), current_totals.get("cost", 1))

    def test_execute_query_current_month_daily(self):
        """Test execute_query for current month on daily breakdown."""
        self.generator.add_data_to_tenant()
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=daily"
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()
        self.assertIsNotNone(query_output.get("data"))
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        self.assertIsNotNone(total.get("cost"))

        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates, self.this_month_filter)
        self.assertEqual(total.get("cost", {}).get("value", 0), current_totals.get("cost", 1))

    def test_execute_query_current_month_by_account(self):
        """Test execute_query for current month on monthly breakdown by account."""
        self.generator.add_data_to_tenant()
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[subscription_guid]=*"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        self.assertIsNotNone(total.get("cost"))

        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates, self.this_month_filter)
        self.assertEqual(total.get("cost", {}).get("value", 0), current_totals.get("cost", 1))

        cmonth_str = DateHelper().this_month_start.strftime("%Y-%m")
        for data_item in data:
            month_val = data_item.get("date", "not-a-date")
            month_data = data_item.get("subscription_guids")
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)
            for month_item in month_data:
                self.assertIsInstance(month_item.get("values"), list)

    def test_execute_query_current_month_by_service(self):
        """Test execute_query for current month on monthly breakdown by service."""
        self.generator.add_data_to_tenant()

        valid_services = list(AZURE_SERVICES.keys())
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[service_name]=*"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates, self.this_month_filter)
        self.assertIsNotNone(total.get("cost"))
        self.assertEqual(total.get("cost", {}).get("value", 0), current_totals.get("cost", 1))

        cmonth_str = self.dh.this_month_start.strftime("%Y-%m")
        for data_item in data:
            month_val = data_item.get("date", "not-a-date")
            month_data = data_item.get("service_names")
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)
            for month_item in month_data:
                name = month_item.get("service_name")
                self.assertIn(name, valid_services)
                self.assertIsInstance(month_item.get("values"), list)

    def test_execute_query_by_filtered_service(self):
        """Test execute_query monthly breakdown by filtered service."""
        self.generator.add_data_to_tenant(
            fixed_fields=["subscription_guid", "resource_location", "tags", "service_name"]
        )

        valid_services = list(AZURE_SERVICES.keys())
        service = self.generator.config.service_name
        url = f"?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[service_name]={service}"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        aggregates = handler._mapper.report_type_map.get("aggregates")
        filters = {**self.this_month_filter, "service_name__icontains": service}
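        # Also apply the report type's default filters from the provider map
        # so the expected DB aggregate covers the same rows the handler queries.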
        for filt in handler._mapper.report_type_map.get("filter"):
            if filt:
                qf = QueryFilter(**filt)
                filters.update({qf.composed_query_string(): qf.parameter})
        current_totals = self.get_totals_by_time_scope(aggregates, filters)
        self.assertIsNotNone(total.get("cost"))
        self.assertEqual(total.get("cost", {}).get("value", 0), current_totals.get("cost", 1))

        cmonth_str = self.dh.this_month_start.strftime("%Y-%m")
        for data_item in data:
            month_val = data_item.get("date", "not-a-date")
            month_data = data_item.get("service_names")
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)
            for month_item in month_data:
                name = month_item.get("service_name")
                self.assertIn(name, valid_services)
                self.assertIsInstance(month_item.get("values"), list)

    def test_execute_query_curr_month_by_subscription_guid_w_limit(self):
        """Test execute_query for current month on monthly breakdown by subscription_guid with limit."""
        self.generator.add_data_to_tenant()
        OCPAzureReportDataGenerator(self.tenant, self.provider).add_data_to_tenant()

        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&filter[limit]=2&group_by[subscription_guid]=*"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates, self.this_month_filter)
        self.assertIsNotNone(total.get("cost"))
        self.assertEqual(total.get("cost", {}).get("value", 0), current_totals.get("cost", 1))

        cmonth_str = DateHelper().this_month_start.strftime("%Y-%m")
        for data_item in data:
            month_val = data_item.get("date", "not-a-date")
            month_data = data_item.get("subscription_guids")
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)
            self.assertEqual(2, len(month_data))
            for month_item in month_data:
                self.assertIsInstance(month_item.get("subscription_guid"), str)
                self.assertIsInstance(month_item.get("values"), list)

    def test_execute_query_curr_month_by_subscription_guid_w_order(self):
        """Test execute_query for current month on monthly breakdown by subscription_guid with asc order."""
        self.generator.add_data_to_tenant()
        OCPAzureReportDataGenerator(self.tenant, self.provider).add_data_to_tenant()

        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&order_by[cost]=asc&group_by[subscription_guid]=*"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates, self.this_month_filter)
        self.assertIsNotNone(total.get("cost"))
        self.assertEqual(total.get("cost", {}).get("value", 0), current_totals.get("cost", 1))

        cmonth_str = self.dh.this_month_start.strftime("%Y-%m")
        for data_item in data:
            month_val = data_item.get("date", "not-a-date")
            month_data = data_item.get("subscription_guids")
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)
            self.assertEqual(len(month_data), 2)
            current_total = 0
            for month_item in month_data:
                self.assertIsInstance(month_item.get("subscription_guid"), str)
                self.assertIsInstance(month_item.get("values"), list)
                self.assertIsNotNone(month_item.get("values")[0].get("cost", {}).get("value"))
                data_point_total = month_item.get("values")[0].get("cost", {}).get("value")
                self.assertLess(current_total, data_point_total)
                current_total = data_point_total

    def test_execute_query_curr_month_by_subscription_guid_w_order_by_subscription_guid(self):
        """Test execute_query for current month on monthly breakdown by subscription_guid with asc order."""
        self.generator.add_data_to_tenant()
        OCPAzureReportDataGenerator(self.tenant, self.provider).add_data_to_tenant()

        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&order_by[subscription_guid]=asc&group_by[subscription_guid]=*"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates, self.this_month_filter)
        self.assertIsNotNone(total.get("cost"))
        self.assertEqual(total.get("cost", {}).get("value", 0), current_totals.get("cost", 1))

        cmonth_str = self.dh.this_month_start.strftime("%Y-%m")
        for data_item in data:
            month_val = data_item.get("date", "not-a-date")
            month_data = data_item.get("subscription_guids")
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)
            self.assertEqual(len(month_data), 2)
            current = "0"
            for month_item in month_data:
                self.assertIsInstance(month_item.get("subscription_guid"), str)
                self.assertIsInstance(month_item.get("values"), list)
                self.assertIsNotNone(month_item.get("values")[0].get("subscription_guid"))
                data_point = month_item.get("values")[0].get("subscription_guid")
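                # Entries rolled up into an "N Other" bucket (presumably from
                # result ranking) have no real subscription_guid and are
                # excluded from the ordering check.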
                if data_point == "1 Other":
                    continue
                self.assertLess(current, data_point)
                current = data_point

    def test_execute_query_curr_month_by_cluster(self):
        """Test execute_query for current month on monthly breakdown by group_by cluster."""
        self.generator.add_data_to_tenant()
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[cluster]=*"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates, self.this_month_filter)
        self.assertIsNotNone(total.get("cost"))
        self.assertEqual(total.get("cost", {}).get("value", 0), current_totals.get("cost", 1))

        cmonth_str = DateHelper().this_month_start.strftime("%Y-%m")
        for data_item in data:
            month_val = data_item.get("date", "not-a-date")
            month_data = data_item.get("clusters")
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)
            for month_item in month_data:
                self.assertIsInstance(month_item.get("cluster"), str)
                self.assertIsInstance(month_item.get("values"), list)
                self.assertIsNotNone(month_item.get("values")[0].get("cost"))

    def test_execute_query_by_filtered_cluster(self):
        """Test execute_query monthly breakdown by filtered cluster."""
        self.generator.add_data_to_tenant()

        cluster = self.generator.cluster_id
        url = f"?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[cluster]={cluster}"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        aggregates = handler._mapper.report_type_map.get("aggregates")
        filters = {**self.this_month_filter, "cluster_id__icontains": cluster}
        for filt in handler._mapper.report_type_map.get("filter"):
            if filt:
                qf = QueryFilter(**filt)
                filters.update({qf.composed_query_string(): qf.parameter})
        current_totals = self.get_totals_by_time_scope(aggregates, filters)
        self.assertIsNotNone(total.get("cost"))
        self.assertEqual(total.get("cost", {}).get("value", 0), current_totals.get("cost", 1))

        cmonth_str = self.dh.this_month_start.strftime("%Y-%m")
        for data_item in data:
            month_val = data_item.get("date", "not-a-date")
            month_data = data_item.get("clusters")
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)
            for month_item in month_data:
                self.assertIsInstance(month_item.get("cluster"), str)
                self.assertIsInstance(month_item.get("values"), list)
                self.assertIsNotNone(month_item.get("values")[0].get("cost"))

    def test_execute_query_curr_month_by_filtered_resource_location(self):
        """Test execute_query for current month on monthly breakdown by filtered resource_location."""
        self.generator.add_data_to_tenant()
        location = self.generator.config.resource_location
        url = f"?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[resource_location]={location}"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates, self.this_month_filter)
        self.assertIsNotNone(total.get("cost"))
        self.assertEqual(total.get("cost", {}).get("value", 0), current_totals.get("cost", 1))

        cmonth_str = DateHelper().this_month_start.strftime("%Y-%m")
        for data_item in data:
            month_val = data_item.get("date", "not-a-date")
            month_data = data_item.get("resource_locations")
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)
            for month_item in month_data:
                self.assertIsInstance(month_item.get("resource_location"), str)
                self.assertIsInstance(month_item.get("values"), list)
                self.assertIsNotNone(month_item.get("values")[0].get("cost"))

    def test_execute_query_current_month_filter_subscription_guid(self):
        """Test execute_query for current month on monthly filtered by subscription_guid."""
        self.generator.add_data_to_tenant()
        guid = self.generator.config.subscription_guid
        url = f"?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&filter[subscription_guid]={guid}"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates, self.this_month_filter)
        self.assertIsNotNone(total.get("cost"))
        self.assertEqual(total.get("cost", {}).get("value", 0), current_totals.get("cost", 1))

        cmonth_str = self.dh.this_month_start.strftime("%Y-%m")
        for data_item in data:
            month_val = data_item.get("date", "not-a-date")
            month_data = data_item.get("values")
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)

    def test_execute_query_current_month_filter_service(self):
        """Test execute_query for current month on monthly filtered by service."""
        self.generator = OCPAzureReportDataGenerator(self.tenant, self.provider, current_month_only=True)
        self.generator.add_data_to_tenant(
            fixed_fields=["subscription_guid", "resource_location", "tags", "service_name"]
        )

        service = self.generator.config.service_name
        url = f"?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&filter[service_name]={service}"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()

        data = query_output.get("data")
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get("total"))

        total = query_output.get("total")
        aggregates = handler._mapper.report_type_map.get("aggregates")
        filters = {**self.this_month_filter, "service_name__icontains": service}
        for filt in handler._mapper.report_type_map.get("filter"):
            if filt:
                qf = QueryFilter(**filt)
                filters.update({qf.composed_query_string(): qf.parameter})
        current_totals = self.get_totals_by_time_scope(aggregates, filters)
        self.assertIsNotNone(total.get("cost"))
        self.assertEqual(total.get("cost", {}).get("value", 0), current_totals.get("cost", 1))

        cmonth_str = self.dh.this_month_start.strftime("%Y-%m")
        for data_item in data:
            month_val = data_item.get("date", "not-a-date")
            month_data = data_item.get("values")
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)

    def test_execute_query_current_month_filter_resource_location(self):
        """Test execute_query for current month on monthly filtered by resource_location."""
        self.generator.add_data_to_tenant()
        location = self.generator.config.resource_location
        url = f"?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&filter[resource_location]={location}"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates, self.this_month_filter)
        self.assertIsNotNone(total.get("cost"))
        self.assertEqual(total.get("cost", {}).get("value", 0), current_totals.get("cost", 1))

        cmonth_str = DateHelper().this_month_start.strftime("%Y-%m")
        for data_item in data:
            month_val = data_item.get("date", "not-a-date")
            month_data = data_item.get("values")
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)

    @patch("api.query_params.QueryParameters.accept_type", new_callable=PropertyMock)
    def test_execute_query_current_month_filter_resource_location_csv(self, mock_accept):
        """Test execute_query on monthly filtered by resource_location for csv."""
        self.generator.add_data_to_tenant()
        mock_accept.return_value = "text/csv"
        url = f"?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&filter[resource_location]={self.generator.config.resource_location}"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates, self.this_month_filter)
        self.assertIsNotNone(total.get("cost"))
        self.assertEqual(total.get("cost", {}).get("value", 0), current_totals.get("cost", 1))

        cmonth_str = DateHelper().this_month_start.strftime("%Y-%m")
        self.assertEqual(len(data), 1)
        for data_item in data:
            month_val = data_item.get("date", "not-a-date")
            self.assertEqual(month_val, cmonth_str)

    @patch("api.query_params.QueryParameters.accept_type", new_callable=PropertyMock)
    def test_execute_query_curr_month_by_subscription_guid_w_limit_csv(self, mock_accept):
        """Test execute_query for current month on monthly by subscription_guid with limt as csv."""
        mock_accept.return_value = "text/csv"
        self.generator.add_data_to_tenant()
        OCPAzureReportDataGenerator(self.tenant, self.provider).add_data_to_tenant()

        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&filter[limit]=2&group_by[subscription_guid]=*"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")

        self.assertIsNotNone(data)
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates, self.this_month_filter)
        self.assertIsNotNone(total.get("cost"))
        self.assertEqual(total.get("cost", {}).get("value", 0), current_totals.get("cost", 1))

        cmonth_str = self.dh.this_month_start.strftime("%Y-%m")
        self.assertEqual(len(data), 2)
        for data_item in data:
            month = data_item.get("date", "not-a-date")
            self.assertEqual(month, cmonth_str)

    def test_execute_query_w_delta(self):
        """Test grouped by deltas."""
        self.generator.add_data_to_tenant()
        OCPAzureReportDataGenerator(self.tenant, self.provider).add_data_to_tenant()

        path = reverse("reports-openshift-azure-costs")
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[subscription_guid]=*&delta=cost"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAzureCostView, path)
        handler = OCPAzureReportQueryHandler(query_params)
        # test the calculations
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)

        subs = data[0].get("subscription_guids", [{}])
        for sub in subs:
            current_total = Decimal(0)
            prev_total = Decimal(0)

            # fetch the expected sums from the DB.
            with tenant_context(self.tenant):
                curr = OCPAzureCostLineItemDailySummary.objects.filter(
                    usage_start__date__gte=self.dh.this_month_start,
                    usage_start__date__lte=self.dh.today,
                    subscription_guid=sub.get("subscription_guid"),
                ).aggregate(value=Sum(F("pretax_cost") + F("markup_cost")))
                current_total = Decimal(curr.get("value"))

                prev = OCPAzureCostLineItemDailySummary.objects.filter(
                    usage_start__date__gte=self.dh.last_month_start,
                    usage_start__date__lte=self.dh.today - relativedelta(months=1),
                    subscription_guid=sub.get("subscription_guid"),
                ).aggregate(value=Sum(F("pretax_cost") + F("markup_cost")))
                prev_total = Decimal(prev.get("value", Decimal(0)))

            expected_delta_value = Decimal(current_total - prev_total)
            expected_delta_percent = Decimal((current_total - prev_total) / prev_total * 100)

            values = sub.get("values", [{}])[0]
            self.assertIn("delta_value", values)
            self.assertIn("delta_percent", values)
            self.assertEqual(values.get("delta_value", "str"), expected_delta_value)
            self.assertEqual(values.get("delta_percent", "str"), expected_delta_percent)

        current_total = Decimal(0)
        prev_total = Decimal(0)

        # fetch the expected sums from the DB.
        with tenant_context(self.tenant):
            curr = OCPAzureCostLineItemDailySummary.objects.filter(
                usage_start__gte=self.dh.this_month_start, usage_start__lte=self.dh.today
            ).aggregate(value=Sum(F("pretax_cost") + F("markup_cost")))
            current_total = Decimal(curr.get("value"))

            prev = OCPAzureCostLineItemDailySummary.objects.filter(
                usage_start__gte=self.dh.last_month_start, usage_start__lte=self.dh.today - relativedelta(months=1)
            ).aggregate(value=Sum(F("pretax_cost") + F("markup_cost")))
            prev_total = Decimal(prev.get("value"))

        expected_delta_value = Decimal(current_total - prev_total)
        expected_delta_percent = Decimal((current_total - prev_total) / prev_total * 100)

        delta = query_output.get("delta")
        self.assertIsNotNone(delta.get("value"))
        self.assertIsNotNone(delta.get("percent"))
        self.assertEqual(delta.get("value", "str"), expected_delta_value)
        self.assertEqual(delta.get("percent", "str"), expected_delta_percent)

    def test_execute_query_w_delta_no_previous_data(self):
        """Test deltas with no previous data."""
        self.generator = OCPAzureReportDataGenerator(self.tenant, self.provider, current_month_only=True)
        self.generator.add_data_to_tenant()

        url = "?filter[time_scope_value]=-1&delta=cost"
        path = reverse("reports-openshift-azure-costs")
        query_params = self.mocked_query_params(url, OCPAzureCostView, path)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)
        total_cost = query_output.get("total", {}).get("cost", {}).get("value", 1)
        delta = query_output.get("delta")
        self.assertIsNotNone(delta.get("value"))
        self.assertIsNone(delta.get("percent", 0))
        self.assertEqual(delta.get("value", 0), total_cost)

    def test_execute_query_orderby_delta(self):
        """Test execute_query with ordering by delta ascending."""
        self.generator.add_data_to_tenant()
        OCPAzureReportDataGenerator(self.tenant, self.provider).add_data_to_tenant()

        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&order_by[delta]=asc&group_by[subscription_guid]=*&delta=cost"  # noqa: E501
        path = reverse("reports-openshift-azure-costs")
        query_params = self.mocked_query_params(url, OCPAzureCostView, path)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)
        cmonth_str = self.dh.this_month_start.strftime("%Y-%m")
        for data_item in data:
            month_val = data_item.get("date", "not-a-date")
            month_data = data_item.get("subscription_guids")
            self.assertEqual(month_val, cmonth_str)
            self.assertIsInstance(month_data, list)
            for month_item in month_data:
                self.assertIsInstance(month_item.get("subscription_guid"), str)
                self.assertIsInstance(month_item.get("values"), list)
                self.assertIsInstance(month_item.get("values")[0].get("delta_value"), Decimal)

    def test_calculate_total(self):
        """Test that calculated totals return correctly."""
        self.generator.add_data_to_tenant()
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly"
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)
        expected_units = "USD"
        with tenant_context(self.tenant):
            result = handler.calculate_total(**{"cost_units": expected_units})

        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates, self.this_month_filter)
        self.assertEqual(result.get("cost", {}).get("value", 0), current_totals.get("cost", 1))
        self.assertEqual(result.get("cost", {}).get("units", "not-USD"), expected_units)

    def test_percent_delta(self):
        """Test _percent_delta() utility method."""
        url = "?"
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)
        self.assertEqual(handler._percent_delta(10, 5), 100)

    def test_rank_list_by_subscription_guid(self):
        """Test rank list limit with subscription_guid alias."""
        # No need to fill db
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&filter[limit]=2&group_by[subscription_guid]=*"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)
        data_list = [
            {"subscription_guid": "1", "total": 5, "rank": 1},
            {"subscription_guid": "2", "total": 4, "rank": 2},
            {"subscription_guid": "3", "total": 3, "rank": 3},
            {"subscription_guid": "4", "total": 2, "rank": 4},
        ]
        expected = [
            {"subscription_guid": "1", "total": 5, "rank": 1},
            {"subscription_guid": "2", "total": 4, "rank": 2},
            {
                "subscription_guid": "2 Others",
                "cost": 0,
                "markup_cost": 0,
                "derived_cost": 0,
                "infrastructure_cost": 0,
                "total": 5,
                "rank": 3,
            },
        ]
        ranked_list = handler._ranked_list(data_list)
        self.assertEqual(ranked_list, expected)

    def test_rank_list_by_service_name(self):
        """Test rank list limit with service_name grouping."""
        # No need to fill db
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&filter[limit]=2&group_by[service_name]=*"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)
        data_list = [
            {"service_name": "1", "total": 5, "rank": 1},
            {"service_name": "2", "total": 4, "rank": 2},
            {"service_name": "3", "total": 3, "rank": 3},
            {"service_name": "4", "total": 2, "rank": 4},
        ]
        expected = [
            {"service_name": "1", "total": 5, "rank": 1},
            {"service_name": "2", "total": 4, "rank": 2},
            {
                "cost": 0,
                "derived_cost": 0,
                "infrastructure_cost": 0,
                "markup_cost": 0,
                "service_name": "2 Others",
                "total": 5,
                "rank": 3,
            },
        ]
        ranked_list = handler._ranked_list(data_list)
        self.assertEqual(ranked_list, expected)

    def test_rank_list_with_offset(self):
        """Test rank list limit and offset with subscription_guid alias."""
        # No need to fill db
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&filter[limit]=1&filter[offset]=1&group_by[subscription_guid]=*"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)
        data_list = [
            {"subscription_guid": "1", "total": 5, "rank": 1},
            {"subscription_guid": "2", "total": 4, "rank": 2},
            {"subscription_guid": "3", "total": 3, "rank": 3},
            {"subscription_guid": "4", "total": 2, "rank": 4},
        ]
        expected = [{"subscription_guid": "2", "total": 4, "rank": 2}]
        ranked_list = handler._ranked_list(data_list)
        self.assertEqual(ranked_list, expected)

    def test_query_costs_with_totals(self):
        """Test execute_query() - costs with totals.

        Query for costs, validating that cost totals are present.

        """
        self.generator.add_data_to_tenant()
        OCPAzureReportDataGenerator(self.tenant, self.provider).add_data_to_tenant()

        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[subscription_guid]=*"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)

        for data_item in data:
            subscription_guids = data_item.get("subscription_guids")
            for subscription_guid in subscription_guids:
                self.assertIsNotNone(subscription_guid.get("values"))
                self.assertGreater(len(subscription_guid.get("values")), 0)
                for value in subscription_guid.get("values"):
                    self.assertIsInstance(value.get("cost", {}).get("value"), Decimal)
                    self.assertGreater(value.get("cost", {}).get("value"), Decimal(0))

    def test_query_instance_types_with_totals(self):
        """Test execute_query() - instance types with totals.

        Query for instance_types, validating that cost totals are present.

        """
        self.generator.add_data_to_tenant()
        OCPAzureReportDataGenerator(self.tenant, self.provider).add_data_to_tenant()

        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[instance_type]=*"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAzureInstanceTypeView)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)

        for data_item in data:
            instance_types = data_item.get("instance_types")
            for it in instance_types:
                self.assertIsNotNone(it.get("values"))
                self.assertGreater(len(it.get("values")), 0)
                for value in it.get("values"):
                    self.assertIsInstance(value.get("cost", {}).get("value"), Decimal)
                    self.assertGreaterEqual(
                        value.get("cost", {}).get("value").quantize(Decimal(".0001"), ROUND_HALF_UP), Decimal(0)
                    )
                    # FIXME: usage doesn't have units yet. waiting on MSFT
                    # self.assertIsInstance(value.get('usage', {}).get('value'), Decimal)
                    # self.assertGreater(value.get('usage', {}).get('value'), Decimal(0))
                    self.assertIsInstance(value.get("usage", {}), dict)
                    self.assertGreaterEqual(
                        value.get("usage", {}).get("value", {}).quantize(Decimal(".0001"), ROUND_HALF_UP), Decimal(0)
                    )

    def test_query_storage_with_totals(self):
        """Test execute_query() - storage with totals.

        Query for storage, validating that cost totals are present.

        """
        self.generator.add_data_to_tenant(service_name="Storage")
        OCPAzureReportDataGenerator(self.tenant, self.provider).add_data_to_tenant(service_name="Storage")

        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[service_name]=*"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPAzureStorageView)
        handler = OCPAzureReportQueryHandler(query_params)
        query_output = handler.execute_query()
        data = query_output.get("data")
        self.assertIsNotNone(data)

        for data_item in data:
            services = data_item.get("service_names")
            self.assertIsNotNone(services)
            for srv in services:
                self.assertIsNotNone(srv.get("values"))
                self.assertGreater(len(srv.get("values")), 0)
                for value in srv.get("values"):
                    self.assertIsInstance(value.get("cost", {}).get("value"), Decimal)
                    self.assertGreater(value.get("cost", {}).get("value"), Decimal(0))
                    # FIXME: usage doesn't have units yet. waiting on MSFT
                    # self.assertIsInstance(value.get('usage', {}).get('value'), Decimal)
                    # self.assertGreater(value.get('usage', {}).get('value'), Decimal(0))
                    self.assertIsInstance(value.get("usage", {}), dict)
                    self.assertGreater(value.get("usage", {}).get("value", {}), Decimal(0))

    def test_order_by(self):
        """Test that order_by returns properly sorted data."""
        # Do not need to fill db
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly"
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)

        unordered_data = [
            {"date": self.dh.today, "delta_percent": 8, "total": 6.2, "rank": 2},
            {"date": self.dh.yesterday, "delta_percent": 4, "total": 2.2, "rank": 1},
            {"date": self.dh.today, "delta_percent": 7, "total": 8.2, "rank": 1},
            {"date": self.dh.yesterday, "delta_percent": 4, "total": 2.2, "rank": 2},
        ]

        order_fields = ["date", "rank"]
        expected = [
            {"date": self.dh.yesterday, "delta_percent": 4, "total": 2.2, "rank": 1},
            {"date": self.dh.yesterday, "delta_percent": 4, "total": 2.2, "rank": 2},
            {"date": self.dh.today, "delta_percent": 7, "total": 8.2, "rank": 1},
            {"date": self.dh.today, "delta_percent": 8, "total": 6.2, "rank": 2},
        ]

        ordered_data = handler.order_by(unordered_data, order_fields)
        self.assertEqual(ordered_data, expected)

        order_fields = ["date", "-delta"]
        expected = [
            {"date": self.dh.yesterday, "delta_percent": 4, "total": 2.2, "rank": 1},
            {"date": self.dh.yesterday, "delta_percent": 4, "total": 2.2, "rank": 2},
            {"date": self.dh.today, "delta_percent": 8, "total": 6.2, "rank": 2},
            {"date": self.dh.today, "delta_percent": 7, "total": 8.2, "rank": 1},
        ]

        ordered_data = handler.order_by(unordered_data, order_fields)
        self.assertEqual(ordered_data, expected)

    def test_order_by_null_values(self):
        """Test that order_by returns properly sorted data with null data."""
        # Do not need to fill db
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly"
        query_params = self.mocked_query_params(url, OCPAzureCostView)
        handler = OCPAzureReportQueryHandler(query_params)

        unordered_data = [
            {"node": None, "cluster": "cluster-1"},
            {"node": "alpha", "cluster": "cluster-2"},
            {"node": "bravo", "cluster": "cluster-3"},
            {"node": "oscar", "cluster": "cluster-4"},
        ]

        order_fields = ["node"]
        expected = [
            {"node": "alpha", "cluster": "cluster-2"},
            {"node": "bravo", "cluster": "cluster-3"},
            {"node": "no-node", "cluster": "cluster-1"},
            {"node": "oscar", "cluster": "cluster-4"},
        ]
        ordered_data = handler.order_by(unordered_data, order_fields)
        self.assertEqual(ordered_data, expected)
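

# A minimal, standalone sketch (not part of the test class above) of the ordering behaviour the two
# order_by tests exercise: a leading "-" reverses a field, and missing values are substituted
# (e.g. "no-node") before sorting. The function name is an illustrative assumption; the real
# handler also maps aliases such as "delta" onto their underlying columns, which is omitted here.
def _sketch_order_by(rows, order_fields):
    """Sort a list of dicts by several fields; a leading '-' sorts that field descending."""
    rows = [dict(row) for row in rows]  # avoid mutating the caller's data
    for field in order_fields:
        key = field.lstrip("-")
        for row in rows:
            if row.get(key) is None:
                row[key] = f"no-{key}"  # mirrors the "no-node" substitution asserted above
    for field in reversed(order_fields):  # apply the least-significant key first; sorts are stable
        key = field.lstrip("-")
        rows.sort(key=lambda row, key=key: row[key], reverse=field.startswith("-"))
    return rows
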
Example n. 22
class Forecast:
    """Base forecasting class."""

    # the minimum number of data points needed to use the current month's data.
    # if we have fewer than this many data points, fall back to using the previous month's data.
    #
    # this number is chosen in part because statsmodels.stats.stattools.omni_normtest() needs at least eight data
    # points to test for normal distribution.
    MINIMUM = 8

    # the precision of the floats returned in the forecast response.
    PRECISION = 8

    REPORT_TYPE = "costs"

    def __init__(self, query_params):  # noqa: C901
        """Class Constructor.

        Instance Attributes:
            - cost_summary_table (Model)
            - aggregates (dict)
            - filters (QueryFilterCollection)
            - query_range (tuple)
        """
        self.dh = DateHelper()
        self.params = query_params

        # select appropriate model based on access
        access = query_params.get("access", {})
        access_key = "default"
        self.cost_summary_table = self.provider_map.views.get("costs").get(access_key)
        if access:
            access_key = tuple(access.keys())
            filter_fields = self.provider_map.provider_map.get("filters")
            materialized_view = self.provider_map.views.get("costs").get(access_key)
            if materialized_view:
                # We found a matching materialized view, use that
                self.cost_summary_table = materialized_view
            else:
                # We have access constraints, but no view to accommodate them, so default to the daily summary table
                self.cost_summary_table = self.provider_map.query_table

        self.forecast_days_required = (self.dh.this_month_end - self.dh.yesterday).days

        # forecasts use a rolling window
        self.query_range = (self.dh.n_days_ago(self.dh.yesterday, 30), self.dh.yesterday)

        self.filters = QueryFilterCollection()
        self.filters.add(field="usage_start", operation="gte", parameter=self.query_range[0])
        self.filters.add(field="usage_end", operation="lte", parameter=self.query_range[1])

        # filter queries based on access
        if access_key != "default":
            for q_param, filt in filter_fields.items():
                access = query_params.get_access(q_param, list())
                if access:
                    self.set_access_filters(access, filt, self.filters)
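
    # Illustration of the lookup above (assumed shapes): with access == {"aws.account": {"read": ["123"]}}
    # the key becomes access_key == ("aws.account",), and provider_map.views["costs"][("aws.account",)]
    # is expected to name a materialized view restricted to that dimension; when no matching view
    # exists, the daily summary table (provider_map.query_table) is used instead.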

    @property
    def provider_map(self):
        """Return the provider map instance."""
        return self.provider_map_class(self.provider, self.REPORT_TYPE)

    @property
    def total_cost_term(self):
        """Return the provider map value for total cost."""
        return self.provider_map.report_type_map.get("aggregates", {}).get("cost_total")

    @property
    def supplementary_cost_term(self):
        """Return the provider map value for total supplemenatry cost."""
        return self.provider_map.report_type_map.get("aggregates", {}).get("sup_total")

    @property
    def infrastructure_cost_term(self):
        """Return the provider map value for total inftrastructure cost."""
        return self.provider_map.report_type_map.get("aggregates", {}).get("infra_total")

    def predict(self):
        """Define ORM query to run forecast and return prediction."""
        cost_predictions = {}
        with tenant_context(self.params.tenant):
            data = (
                self.cost_summary_table.objects.filter(self.filters.compose())
                .order_by("usage_start")
                .values("usage_start")
                .annotate(
                    total_cost=self.total_cost_term,
                    supplementary_cost=self.supplementary_cost_term,
                    infrastructure_cost=self.infrastructure_cost_term,
                )
            )

            for fieldname in ["total_cost", "infrastructure_cost", "supplementary_cost"]:
                uniq_data = self._uniquify_qset(data.values("usage_start", fieldname), field=fieldname)
                cost_predictions[fieldname] = self._predict(uniq_data)

            cost_predictions = self._key_results_by_date(cost_predictions)
            return self.format_result(cost_predictions)

    def _predict(self, data):
        """Handle pre and post prediction work.

        This function arranges the incoming data to conform with statsmodels requirements.
        After the forecast is produced, it formats the output to conform to the API response
        requirements.

        Args:
            data (list) a list of (datetime, float) tuples

        Returns:
            (LinearForecastResult) linear forecast results object
        """
        LOG.debug("Forecast input data: %s", data)

        if len(data) < self.MINIMUM:
            LOG.warning(
                "Number of data elements (%s) is fewer than the minimum (%s). Unable to generate forecast.",
                len(data),
                self.MINIMUM,
            )
            return []

        dates, costs = zip(*data)

        X = self._enumerate_dates(dates)
        Y = [float(c) for c in costs]

        # calculate x-values for the prediction range
        pred_x = [i for i in range(X[-1] + 1, X[-1] + 1 + self.forecast_days_required)]

        # run the forecast
        results = self._run_forecast(X, Y, to_predict=pred_x)

        result_dict = {}
        for i, value in enumerate(results.prediction):
            # extrapolate confidence intervals to align with prediction.
            # this reduces the confidence interval below 95th percentile, but is a better UX.
            if i < len(results.confidence_lower):
                lower = results.confidence_lower[i]
            else:
                lower = results.confidence_lower[-1] + results.slope * (i - len(results.confidence_lower))

            if i < len(results.confidence_upper):
                upper = results.confidence_upper[i]
            else:
                upper = results.confidence_upper[-1] + results.slope * (i - len(results.confidence_upper))

            # ensure that there are no negative numbers.
            result_dict[self.dh.today.date() + timedelta(days=i)] = {
                "total_cost": max((value, 0)),
                "confidence_min": max((lower, 0)),
                "confidence_max": max((upper, 0)),
            }

        return (result_dict, results.rsquared, results.pvalues)
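
    # Worked illustration of the confidence-interval extrapolation above (assumed numbers): with
    # confidence_lower == [10, 11, 12] (len 3) and slope == 1, indexes 0-2 use the values directly,
    # index 3 yields 12 + 1 * (3 - 3) == 12, and index 4 yields 12 + 1 * (4 - 3) == 13; the last
    # known bound is extended along the fitted slope for the remaining forecast days.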

    def _enumerate_dates(self, date_list):
        """Given a list of dates, return a list of integers.

        This method works in conjunction with _remove_outliers(). It preserves any gaps in the
        data created by _remove_outliers() so that the integers used for the X-axis remain aligned
        with the original dates.

        Example:
            If _remove_outliers() returns {"2000-01-01": 1.0, "2000-01-03": 1.5}
            then _enumerate_dates() returns [0, 2]
        """
        days = self.dh.list_days(
            datetime.combine(date_list[0], self.dh.midnight), datetime.combine(date_list[-1], self.dh.midnight)
        )
        out = [i for i, day in enumerate(days) if day.date() in date_list]
        return out

    def _remove_outliers(self, data):
        """Remove outliers from our dateset before predicting.

        We use a box plot method without plotting the box.
        """
        values = list(data.values())
        if values:
            third_quartile, first_quartile = np.percentile(values, [Decimal(75), Decimal(25)])
            interquartile_range = third_quartile - first_quartile

            upper_boundary = third_quartile + (Decimal(1.5) * interquartile_range)
            lower_boundary = first_quartile - (Decimal(1.5) * interquartile_range)

            return {key: value for key, value in data.items() if (value >= lower_boundary and value <= upper_boundary)}
        return data
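
    # Worked illustration of the interquartile-range fence above (assumed numbers): for values
    # [1, 2, 3, 4, 100] the quartiles are first_quartile == 2 and third_quartile == 4, so the IQR
    # is 2 and the boundaries are 2 - 3 == -1 and 4 + 3 == 7; the 100 lies outside the fence and
    # is dropped as an outlier while the remaining values are kept.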

    def _key_results_by_date(self, results, check_term="total_cost"):
        """Take results formatted by cost type, and return results keyed by date."""
        results_by_date = defaultdict(dict)
        date_based_dict = results[check_term][0] if results[check_term] else []
        for date in date_based_dict:
            for cost_term in results:
                if results[cost_term][0].get(date):
                    results_by_date[date][cost_term] = (
                        results[cost_term][0][date],
                        {"rsquared": results[cost_term][1]},
                        {"pvalues": results[cost_term][2]},
                    )
        return results_by_date
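
    # Shape illustration (assumed values): the input is keyed by cost term,
    #   {"total_cost": ({date_1: {...}, date_2: {...}}, rsquared, pvalues), "infrastructure_cost": (...), ...}
    # and the output is keyed by date,
    #   {date_1: {"total_cost": ({...}, {"rsquared": ...}, {"pvalues": ...}), ...}, ...}
    # which is the layout format_result() below iterates over.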

    def format_result(self, results):
        """Format results for API consumption."""
        f_format = f"%.{self.PRECISION}f"  # avoid converting floats to e-notation
        units = "USD"

        response = []
        for key in results:
            if key > self.dh.this_month_end.date():
                continue
            dikt = {
                "date": key,
                "values": [
                    {
                        "date": key,
                        "infrastructure": {
                            "total": {
                                "value": round(results[key]["infrastructure_cost"][0]["total_cost"], 3),
                                "units": units,
                            },
                            "confidence_max": {
                                "value": round(results[key]["infrastructure_cost"][0]["confidence_max"], 3),
                                "units": units,
                            },
                            "confidence_min": {
                                "value": round(max(results[key]["infrastructure_cost"][0]["confidence_min"], 0), 3),
                                "units": units,
                            },
                            "rsquared": {
                                "value": f_format % results[key]["infrastructure_cost"][1]["rsquared"],
                                "units": None,
                            },
                            "pvalues": {"value": results[key]["infrastructure_cost"][2]["pvalues"], "units": None},
                        },
                        "supplementary": {
                            "total": {
                                "value": round(results[key]["supplementary_cost"][0]["total_cost"], 3),
                                "units": units,
                            },
                            "confidence_max": {
                                "value": round(results[key]["supplementary_cost"][0]["confidence_max"], 3),
                                "units": units,
                            },
                            "confidence_min": {
                                "value": round(max(results[key]["supplementary_cost"][0]["confidence_min"], 0), 3),
                                "units": units,
                            },
                            "rsquared": {
                                "value": f_format % results[key]["supplementary_cost"][1]["rsquared"],
                                "units": None,
                            },
                            "pvalues": {"value": results[key]["supplementary_cost"][2]["pvalues"], "units": None},
                        },
                        "cost": {
                            "total": {"value": round(results[key]["total_cost"][0]["total_cost"], 3), "units": units},
                            "confidence_max": {
                                "value": round(results[key]["total_cost"][0]["confidence_max"], 3),
                                "units": units,
                            },
                            "confidence_min": {
                                "value": round(max(results[key]["total_cost"][0]["confidence_min"], 0), 3),
                                "units": units,
                            },
                            "rsquared": {"value": f_format % results[key]["total_cost"][1]["rsquared"], "units": None},
                            "pvalues": {"value": results[key]["total_cost"][2]["pvalues"], "units": None},
                        },
                    }
                ],
            }
            response.append(dikt)
        return response

    def _run_forecast(self, x, y, to_predict=None):
        """Apply the forecast model.

        Args:
            x (list) a list of exogenous variables
            y (list) a list of endogenous variables
            to_predict (list) a list of exogenous variables used in the forecast results

        Note:
            x and y MUST have the same number of elements

        Returns:
            (tuple)
                (numpy.ndarray) prediction values
                (numpy.ndarray) confidence interval lower bound
                (numpy.ndarray) confidence interval upper bound
                (float) R-squared value
                (list) P-values
        """
        x = sm.add_constant(x)
        to_predict = sm.add_constant(to_predict)
        model = sm.OLS(y, x)
        results = model.fit()
        return LinearForecastResult(results, exog=to_predict)
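
    # A minimal, standalone sketch (assumed data, outside this class) of the statsmodels pattern
    # used above:
    #
    #   import statsmodels.api as sm
    #   x = sm.add_constant([0, 1, 2, 3])    # day indexes plus an intercept column
    #   fit = sm.OLS([5.0, 7.2, 8.9, 11.1], x).fit()
    #   future = sm.add_constant([4, 5, 6])  # day indexes to forecast
    #   projected = fit.predict(future)      # fit.rsquared and fit.pvalues are also available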

    def _uniquify_qset(self, qset, field="total_cost"):
        """Take a QuerySet list, sum costs within the same day, and arrange it into a list of tuples.

        Args:
            qset (QuerySet)
            field (str) - field name in the QuerySet to be summed

        Returns:
            [(date, cost), ...]
        """
        # FIXME: this QuerySet->dict->list conversion probably isn't ideal.
        # FIXME: there's probably a way to aggregate multiple sources by date using just the ORM.
        result = defaultdict(Decimal)
        for item in qset:
            result[item.get("usage_start")] += Decimal(item.get(field, 0.0))
        result = self._remove_outliers(result)
        out = [(k, v) for k, v in result.items()]
        return out
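
    # Illustration (assumed rows): two QuerySet entries for the same day,
    #   [{"usage_start": d1, "total_cost": 3}, {"usage_start": d1, "total_cost": 2},
    #    {"usage_start": d2, "total_cost": 4}]
    # are summed per day and returned as [(d1, Decimal(5)), (d2, Decimal(4))], after any outliers
    # have been removed by _remove_outliers().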

    def set_access_filters(self, access, filt, filters):
        """Set access filters to ensure RBAC restrictions adhere to user's access and filters.

        Args:
            access (list) the list containing the users relevant access
            filt (list or dict) contains the filters to be updated
            filters (QueryFilterCollection) the filter collection to add the new filters to
        returns:
            None
        """
        if isinstance(filt, list):
            for _filt in filt:
                _filt["operation"] = "in"
                q_filter = QueryFilter(parameter=access, **_filt)
                filters.add(q_filter)
        else:
            filt["operation"] = "in"
            q_filter = QueryFilter(parameter=access, **filt)
            filters.add(q_filter)
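

# A minimal, self-contained sketch of the forecasting idea implemented by the class above: fit an
# ordinary-least-squares line to observed daily costs and project it forward. The function name,
# sample data, and horizon are illustrative assumptions, not part of the Forecast class.
def _sketch_linear_cost_forecast(daily_costs, days_ahead=3):
    """Fit y = a + b * day_index to daily costs and return `days_ahead` projected values."""
    import statsmodels.api as sm

    x = sm.add_constant(list(range(len(daily_costs))))  # observed day indexes plus an intercept
    fit = sm.OLS([float(cost) for cost in daily_costs], x).fit()
    future = sm.add_constant(list(range(len(daily_costs), len(daily_costs) + days_ahead)))
    return [max(value, 0.0) for value in fit.predict(future)]  # clamp negatives, as predict() does
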
Example n. 23
class AWSReportViewTest(IamTestCase):
    """Tests the report view."""
    def setUp(self):
        """Set up the customer view tests."""
        super().setUp()
        self.client = APIClient()
        self.dh = DateHelper()
        self.ten_days_ago = self.dh.n_days_ago(self.dh.today, 9)

        self.report = {
            "group_by": {
                "account": ["*"]
            },
            "filter": {
                "resolution": "monthly",
                "time_scope_value": -1,
                "time_scope_units": "month",
                "resource_scope": [],
            },
            "data": [{
                "date":
                "2018-07",
                "accounts": [
                    {
                        "account":
                        "4418636104713",
                        "values": [{
                            "date": "2018-07",
                            "units": "GB-Mo",
                            "account": "4418636104713",
                            "total": 1826.74238146924,
                        }],
                    },
                    {
                        "account":
                        "8577742690384",
                        "values": [{
                            "date": "2018-07",
                            "units": "GB-Mo",
                            "account": "8577742690384",
                            "total": 1137.74036198065,
                        }],
                    },
                    {
                        "account":
                        "3474227945050",
                        "values": [{
                            "date": "2018-07",
                            "units": "GB-Mo",
                            "account": "3474227945050",
                            "total": 1045.80659412797,
                        }],
                    },
                    {
                        "account":
                        "7249815104968",
                        "values": [{
                            "date": "2018-07",
                            "units": "GB-Mo",
                            "account": "7249815104968",
                            "total": 807.326470618818,
                        }],
                    },
                    {
                        "account":
                        "9420673783214",
                        "values": [{
                            "date": "2018-07",
                            "units": "GB-Mo",
                            "account": "9420673783214",
                            "total": 658.306642830709,
                        }],
                    },
                ],
            }],
            "total": {
                "value": 5475.922451027388,
                "units": "GB-Mo"
            },
        }

    def test_execute_query_w_delta_total(self):
        """Test that delta=total returns deltas."""
        qs = "delta=cost"
        url = reverse("reports-aws-costs") + "?" + qs
        response = self.client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_execute_query_w_delta_bad_choice(self):
        """Test invalid delta value."""
        bad_delta = "Invalid"
        expected = f'"{bad_delta}" is not a valid choice.'
        qs = f"group_by[account]=*&filter[limit]=2&delta={bad_delta}"
        url = reverse("reports-aws-costs") + "?" + qs

        response = self.client.get(url, **self.headers)
        result = str(response.data.get("delta")[0])
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(result, expected)

    def test_execute_query_w_valid_cost_type(self):
        """Test that delta=total returns deltas."""
        qs = "cost_type=unblended_cost"
        url = reverse("reports-aws-costs") + "?" + qs
        response = self.client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_execute_query_w_invalid_cost_type(self):
        """Test invalid delta value."""
        invalid_cost_type = "Invalid"
        expected = f'"{invalid_cost_type}" is not a valid choice.'
        qs = f"group_by[account]=*&filter[limit]=2&cost_type={invalid_cost_type}"
        url = reverse("reports-aws-costs") + "?" + qs

        response = self.client.get(url, **self.headers)
        result = str(response.data.get("cost_type")[0])
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(result, expected)

    def test_convert_units_success(self):
        """Test unit conversion succeeds."""
        converter = UnitConverter()
        to_unit = "byte"
        expected_unit = f"{to_unit}-Mo"
        report_total = self.report.get("total", {}).get("value")

        result = _convert_units(converter, self.report, to_unit)
        result_unit = result.get("total", {}).get("units")
        result_total = result.get("total", {}).get("value")

        self.assertEqual(expected_unit, result_unit)
        self.assertEqual(report_total * 1e9, result_total)

    def test_convert_units_list(self):
        """Test that the list check is hit."""
        converter = UnitConverter()
        to_unit = "byte"
        expected_unit = f"{to_unit}-Mo"
        report_total = self.report.get("total", {}).get("value")

        report = [self.report]
        result = _convert_units(converter, report, to_unit)
        result_unit = result[0].get("total", {}).get("units")
        result_total = result[0].get("total", {}).get("value")

        self.assertEqual(expected_unit, result_unit)
        self.assertEqual(report_total * 1e9, result_total)

    def test_convert_units_total_not_dict(self):
        """Test that the total not dict block is hit."""
        converter = UnitConverter()
        to_unit = "byte"
        expected_unit = f"{to_unit}-Mo"

        report = self.report["data"][0]["accounts"][0]["values"][0]
        report_total = report.get("total")
        result = _convert_units(converter, report, to_unit)
        result_unit = result.get("units")
        result_total = result.get("total")

        self.assertEqual(expected_unit, result_unit)
        self.assertEqual(report_total * 1e9, result_total)
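
    # Note on the 1e9 factor asserted in the three conversion tests above: the report totals start
    # in "GB-Mo", so converting to the requested "byte" base multiplies each total by 10**9 and
    # renames the unit to "byte-Mo"; _convert_units() is expected to apply this wherever a total appears.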

    @RbacPermissions({
        "aws.account": {
            "read": ["*"]
        },
        "aws.organizational_unit": {
            "read":
            ["R_001", "OU_001", "OU_002", "OU_003", "OU_004", "OU_005"]
        },
    })
    def test_execute_query_csv_w_multi_group_by_rbac_explicit_access(self):
        """Test that a csv will be returned with an account group-by AND an org_unit group-by."""
        qs = "?group_by[org_unit_id]=OU_001&group_by[account]=9999999999990"
        url = reverse("reports-aws-costs") + qs
        client = APIClient(HTTP_ACCEPT="text/csv")
        response = client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.accepted_media_type, "text/csv")
        self.assertIsInstance(response.accepted_renderer, CSVRenderer)
        self.assertTrue(0 < len(response.data))

    @RbacPermissions({
        "aws.account": {
            "read": ["*"]
        },
        "aws.organizational_unit": {
            "read":
            ["R_001", "OU_001", "OU_002", "OU_003", "OU_004", "OU_005"]
        },
    })
    def test_execute_query_w_group_by_rbac_explicit_access(self):
        """Test that explicit access results in all accounts/orgs listed."""
        ou_to_account_subou_map = {
            "R_001": {
                "accounts": ["9999999999990"],
                "org_units": ["OU_001"]
            },
            "OU_001": {
                "accounts": ["9999999999991", "9999999999992"],
                "org_units": []
            },
            "OU_002": {
                "accounts": [],
                "org_units": ["OU_003"]
            },
            "OU_003": {
                "accounts": ["9999999999993"],
                "org_units": []
            },
            "OU_004": {
                "accounts": [],
                "org_units": []
            },
            "OU_005": {
                "accounts": [],
                "org_units": []
            },
        }
        for org_unit in list(ou_to_account_subou_map):
            qs = f"?group_by[org_unit_id]={org_unit}"
            url = reverse("reports-aws-costs") + qs
            response = self.client.get(url, **self.headers)
            self.assertEqual(response.status_code, status.HTTP_200_OK)
            accounts_and_subous = _calculate_accounts_and_subous(response.data.get("data"))
            # These accounts are tied to this org unit inside of the
            # aws_org_tree.yml that populates the data for tests
            for account in ou_to_account_subou_map.get(org_unit).get("accounts"):
                self.assertIn(account, accounts_and_subous)
            for ou in ou_to_account_subou_map.get(org_unit).get("org_units"):
                self.assertIn(ou, accounts_and_subous)

    @RbacPermissions({
        "aws.account": {
            "read": ["*"]
        },
        "aws.organizational_unit": {
            "read": ["R_001"]
        }
    })
    def test_rbac_org_unit_root_node_provides_access_to_tree(self):
        """Test that total account access/restricted org results in all accounts/ accessible orgs."""
        ou_to_account_subou_map = {
            "R_001": {
                "accounts": ["9999999999990"],
                "org_units": ["OU_001", "OU_002"]
            },
            "OU_001": {
                "accounts": ["9999999999991", "9999999999992"],
                "org_units": ["OU_005"]
            },
            "OU_002": {
                "accounts": [],
                "org_units": ["OU_003"]
            },
            "OU_003": {
                "accounts": ["9999999999993"],
                "org_units": []
            },
            "OU_005": {
                "accounts": [],
                "org_units": []
            },
        }
        for org_unit in list(ou_to_account_subou_map):
            qs = f"?group_by[org_unit_id]={org_unit}"
            url = reverse("reports-aws-costs") + qs
            response = self.client.get(url, **self.headers)
            self.assertEqual(response.status_code, status.HTTP_200_OK)
            accounts_and_subous = _calculate_accounts_and_subous(response.data.get("data"))
            # These accounts are tied to this org unit inside of the
            # aws_org_tree.yml that populates the data for tests
            for account in ou_to_account_subou_map.get(org_unit).get("accounts"):
                self.assertIn(account, accounts_and_subous)
            for ou in ou_to_account_subou_map.get(org_unit).get("org_units"):
                self.assertIn(ou, accounts_and_subous)

    @RbacPermissions({
        "aws.account": {
            "read": ["*"]
        },
        "aws.organizational_unit": {
            "read": ["OU_001"]
        }
    })
    def test_rbac_org_unit_limited_access(self):
        """Test that total account access/restricted org results in all accounts/ accessible orgs."""
        ou_to_account_subou_map = {
            "OU_001": {
                "accounts": ["9999999999991", "9999999999992"],
                "org_units": ["OU_005"]
            },
            "OU_005": {
                "accounts": [],
                "org_units": []
            },
        }
        for org_unit in list(ou_to_account_subou_map):
            qs = f"?group_by[org_unit_id]={org_unit}"
            url = reverse("reports-aws-costs") + qs
            response = self.client.get(url, **self.headers)
            self.assertEqual(response.status_code, status.HTTP_200_OK)
            accounts_and_subous = _calculate_accounts_and_subous(response.data.get("data"))
            for account in ou_to_account_subou_map.get(org_unit).get("accounts"):
                self.assertIn(account, accounts_and_subous)
            for ou in ou_to_account_subou_map.get(org_unit).get("org_units"):
                self.assertIn(ou, accounts_and_subous)

        access_denied_list = ["R_001", "OU_002", "OU_003"]
        for ou_id in access_denied_list:
            qs = f"?group_by[org_unit_id]={ou_id}"
            url = reverse("reports-aws-costs") + qs
            response = self.client.get(url, **self.headers)
            self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    @RbacPermissions({
        "aws.account": {
            "read": ["*"]
        },
        "aws.organizational_unit": {
            "read": ["R_001"]
        }
    })
    def test_rbac_org_unit_root_node_multiple_group_by(self):
        """Test that total account access/restricted org results in all accounts/ accessible orgs."""
        expected_combined_accounts = ["9999999999991", "9999999999992"]
        expected_combined_ous = ["OU_003", "OU_005"]
        qs = "?group_by[or:org_unit_id]=OU_001&group_by[or:org_unit_id]=OU_002"
        url = reverse("reports-aws-costs") + qs
        response = self.client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        accounts_and_subous = _calculate_accounts_and_subous(response.data.get("data"))
        # These accounts are tied to this org unit inside of the
        # aws_org_tree.yml that populates the data for tests
        for account in expected_combined_accounts:
            self.assertIn(account, accounts_and_subous)
        for ou in expected_combined_ous:
            self.assertIn(ou, accounts_and_subous)

    @RbacPermissions({
        "aws.account": {
            "read": ["*"]
        },
        "aws.organizational_unit": {
            "read": ["OU_001", "OU_003"]
        }
    })
    def test_rbac_org_unit_limited_access_multiple_group_by(self):
        """Test that total account access/restricted org results in all accounts/ accessible orgs."""
        expected_combined_accounts = ["9999999999991", "9999999999992", "9999999999993"]
        expected_combined_ous = ["OU_005"]
        qs = "?group_by[or:org_unit_id]=OU_001&group_by[or:org_unit_id]=OU_003"
        url = reverse("reports-aws-costs") + qs
        response = self.client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        accounts_and_subous = _calculate_accounts_and_subous(
            response.data.get("data"))
        # These accounts are tied to this org unit inside of the
        # aws_org_tree.yml that populates the data for tests
        for account in expected_combined_accounts:
            self.assertIn(account, accounts_and_subous)
        for ou in expected_combined_ous:
            self.assertIn(ou, accounts_and_subous)

    @RbacPermissions({
        "aws.account": {
            "read": ["*"]
        },
        "aws.organizational_unit": {
            "read": ["OU_001", "OU_003"]
        }
    })
    def test_rbac_org_unit_access_denied_with_multiple_group_by(self):
        """Test that total account access/restricted org results in all accounts/ accessible orgs."""
        qs = "?group_by[or:org_unit_id]=OU_001&group_by[or:org_unit_id]=OU_002"
        url = reverse("reports-aws-costs") + qs
        response = self.client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    @RbacPermissions({
        "aws.account": {
            "read": ["*"]
        },
        "aws.organizational_unit": {
            "read": ["*"]
        }
    })
    def test_execute_query_w_group_by_rbac_no_restrictions(self):
        """Test that total access results in all accounts and orgs."""
        ou_to_account_subou_map = {
            "R_001": {
                "accounts": ["9999999999990"],
                "org_units": ["OU_001"]
            },
            "OU_001": {
                "accounts": ["9999999999991", "9999999999992"],
                "org_units": []
            },
            "OU_002": {
                "accounts": [],
                "org_units": ["OU_003"]
            },
            "OU_003": {
                "accounts": ["9999999999993"],
                "org_units": []
            },
            "OU_004": {
                "accounts": [],
                "org_units": []
            },
            "OU_005": {
                "accounts": [],
                "org_units": []
            },
        }
        for org_unit in list(ou_to_account_subou_map):
            qs = f"?group_by[org_unit_id]={org_unit}"
            url = reverse("reports-aws-costs") + qs
            response = self.client.get(url, **self.headers)
            self.assertEqual(response.status_code, status.HTTP_200_OK)
            accounts_and_subous = _calculate_accounts_and_subous(
                response.data.get("data"))
            # These accounts are tied to this org unit inside of the
            # aws_org_tree.yml that populates the data for tests
            for account in ou_to_account_subou_map.get(org_unit).get(
                    "accounts"):
                self.assertIn(account, accounts_and_subous)
            for ou in ou_to_account_subou_map.get(org_unit).get("org_units"):
                self.assertIn(ou, accounts_and_subous)

            # test filter
            qs = f"?filter[org_unit_id]={org_unit}"
            url = reverse("reports-aws-costs") + qs
            response = self.client.get(url, **self.headers)
            self.assertEqual(response.status_code, status.HTTP_200_OK)

    @RbacPermissions({
        "aws.account": {
            "read": ["9999999999990"]
        },
        "aws.organizational_unit": {
            "read": ["*"]
        }
    })
    def test_execute_query_w_group_by_rbac_account_restrictions(self):
        """Test that restricted access results in the accessible orgs/accounts."""
        ou_to_account_subou_map = {
            "R_001": {
                "accounts": ["9999999999990"],
                "org_units": []
            }
        }
        # since we only have access to the account directly under root - no org units will show up
        # because they only show up when they have costs associated with the accounts under them
        for org_unit in list(ou_to_account_subou_map):
            qs = f"?group_by[org_unit_id]={org_unit}"
            url = reverse("reports-aws-costs") + qs
            response = self.client.get(url, **self.headers)
            self.assertEqual(response.status_code, status.HTTP_200_OK)
            accounts_and_subous = _calculate_accounts_and_subous(
                response.data.get("data"))
            # These accounts are tied to this org unit inside of the
            # aws_org_tree.yml that populates the data for tests
            for account in ou_to_account_subou_map.get(org_unit).get(
                    "accounts"):
                self.assertIn(account, accounts_and_subous)
            for ou in ou_to_account_subou_map.get(org_unit).get("org_units"):
                self.assertIn(ou, accounts_and_subous)

    @RbacPermissions({
        "aws.account": {
            "read": ["9999999999991"]
        },
        "aws.organizational_unit": {
            "read": ["*"]
        }
    })
    def test_execute_query_w_group_by_rbac_restriction(self):
        """Test limited access results in only the account that the user can see."""
        qs = "group_by[org_unit_id]=OU_001"
        url = reverse("reports-aws-costs") + "?" + qs
        response = self.client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        accounts_and_subous = _calculate_accounts_and_subous(
            response.data.get("data"))
        self.assertEqual(accounts_and_subous, ["9999999999991"])

    @RbacPermissions({
        "aws.account": {
            "read": ["fakeaccount"]
        },
        "aws.organizational_unit": {
            "read": ["fake_org"]
        }
    })
    def test_execute_query_w_group_by_rbac_no_accounts_or_orgs(self):
        """Test that no access to relevant results in a 403."""
        for org in ["R_001", "OU_001", "OU_002", "OU_003", "OU_004", "OU_005"]:
            qs = f"?group_by[org_unit_id]={org}"
            url = reverse("reports-aws-costs") + qs
            response = self.client.get(url, **self.headers)
            self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
            # test filters
            qs = f"?filter[org_unit_id]={org}"
            url = reverse("reports-aws-costs") + qs
            response = self.client.get(url, **self.headers)
            self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_group_by_org_unit_non_costs_reports(self):
        """Test that grouping by org unit on non costs reports raises a validation error."""
        qs = "?group_by[org_unit_id]=*"
        url = reverse("reports-aws-storage") + qs
        response = self.client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_group_by_org_unit_wildcard_costs_reports(self):
        """Test that grouping by org unit with a wildcard raises a validation error."""
        qs = "?group_by[org_unit_id]=*"
        url = reverse("reports-aws-costs") + qs
        response = self.client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_ou_group_by_default_pagination(self):
        """Test that the default pagination works."""
        qs = "?group_by[org_unit_id]=R_001&filter[resolution]=monthly&filter[time_scope_value]=-1&filter[time_scope_units]=month"  # noqa: E501
        url = reverse("reports-aws-costs") + qs
        response = self.client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        response_data = response.json()
        data = response_data.get("data", [])
        meta = response_data.get("meta", {})
        count = meta.get("count", 0)

        self.assertIn("total", meta)
        self.assertIn("filter", meta)
        self.assertIn("count", meta)

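        # With no limit/offset filters the ranked org units are not paginated,
        # so every date entry is expected to carry all `count` org_entities.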
        for entry in data:
            org_entities = entry.get("org_entities", [])
            self.assertEqual(len(org_entities), count)

    def test_ou_group_by_filter_limit_offset_pagination(self):
        """Test that the ranked group pagination works."""
        limit = 1
        offset = 0

        qs = f"?group_by[org_unit_id]=R_001&filter[resolution]=monthly&filter[time_scope_value]=-1&filter[time_scope_units]=month&filter[limit]={limit}&filter[offset]={offset}"  # noqa: E501
        url = reverse("reports-aws-costs") + qs
        response = self.client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        response_data = response.json()
        data = response_data.get("data", [])
        meta = response_data.get("meta", {})
        count = meta.get("count", 0)

        self.assertIn("total", meta)
        self.assertIn("filter", meta)
        self.assertIn("count", meta)

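        # Expected entities per date entry follow the ranked pagination math:
        # min(limit, max(count - offset, 0)), e.g. limit=1, offset=0 yields one
        # entity per entry whenever count >= 1.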
        for entry in data:
            org_entities = entry.get("org_entities", [])
            if limit + offset > count:
                self.assertEqual(len(org_entities), max((count - offset), 0))
            else:
                self.assertEqual(len(org_entities), limit)

    def test_ou_group_by_filter_limit_high_offset_pagination(self):
        """Test that high offset pagination works."""
        limit = 1
        offset = 10

        qs = f"?group_by[org_unit_id]=R_001&filter[resolution]=monthly&filter[time_scope_value]=-1&filter[time_scope_units]=month&filter[limit]={limit}&filter[offset]={offset}"  # noqa: E501
        url = reverse("reports-aws-costs") + qs
        response = self.client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        response_data = response.json()
        data = response_data.get("data", [])
        meta = response_data.get("meta", {})
        count = meta.get("count", 0)

        self.assertIn("total", meta)
        self.assertIn("filter", meta)
        self.assertIn("count", meta)

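        # With the offset pushed past the number of ranked org units, each entry
        # should shrink to max(count - offset, 0) entities (0 once offset >= count).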
        for entry in data:
            org_entities = entry.get("org_entities", [])
            if limit + offset > count:
                self.assertEqual(len(org_entities), max((count - offset), 0))
            else:
                self.assertEqual(len(org_entities), limit)

    def test_group_by_org_unit_order_by_cost_asc(self):
        """Test that ordering by cost=asc works as expected"""
        qs = "?group_by[org_unit_id]=R_001&order_by[cost]=asc"
        url = reverse("reports-aws-costs") + qs
        response = self.client.get(url, **self.headers)
        data = response.data.get("data", [])
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # Now we need to loop through the results and make sure that
        # the org units are in asc order according to cost
        for entry in data:
            org_entities = entry.get("org_entities", [])
            sorted_org_entities = copy.deepcopy(org_entities)
            sorted_org_entities.sort(
                key=lambda e: e["values"][0]["cost"]["total"]["value"],
                reverse=False)
            self.assertEqual(org_entities, sorted_org_entities)

    def test_group_by_org_unit_order_by_cost_desc(self):
        """Test that ordering by cost=descworks as expected"""
        qs = "?group_by[org_unit_id]=R_001&order_by[cost]=desc"
        url = reverse("reports-aws-costs") + qs
        response = self.client.get(url, **self.headers)
        data = response.data.get("data")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # Now we need to loop through the results and make sure that
        # the org units are in desc order according to cost
        for entry in data:
            org_entities = entry.get("org_entities", [])
            sorted_org_entities = copy.deepcopy(org_entities)
            sorted_org_entities.sort(
                key=lambda e: e["values"][0]["cost"]["total"]["value"],
                reverse=True)
            self.assertEqual(org_entities, sorted_org_entities)

    def test_multiple_and_group_by_org_unit_bad_request(self):
        """Test that grouping by org unit on non costs reports raises a validation error."""
        qs = "?group_by[org_unit_id]=R_001&group_by[org_unit_id]=OU_001"
        url = reverse("reports-aws-costs") + qs
        response = self.client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_multiple_mixed_group_by_org_unit_bad_request(self):
        """Test that grouping by org unit on non costs reports raises a validation error."""
        qs = "?group_by[org_unit_id]=R_001&group_by[or:org_unit_id]=OU_001"
        url = reverse("reports-aws-costs") + qs
        response = self.client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_group_by_org_unit_or_wildcard_bad_request(self):
        """Test that grouping by org unit on non costs reports raises a validation error."""
        qs = "?group_by[or:org_unit_id]=*"
        url = reverse("reports-aws-costs") + qs
        response = self.client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_group_by_org_unit_id_and_wildcard_region(self):
        """Test multiple group by with org unit id and region."""
        # The ui team uses these to populate graphs
        qs = "?group_by[or:org_unit_id]=R_001&group_by[region]=*"
        url = reverse("reports-aws-costs") + qs
        response = self.client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_group_by_org_unit_id_and_wildcard_account(self):
        """Test multiple group by with org unit id and account."""
        qs = "?group_by[or:org_unit_id]=R_001&group_by[account]=*"
        # The ui team uses these to populate graphs
        url = reverse("reports-aws-costs") + qs
        response = self.client.get(url, **self.headers)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_order_by_delta(self):
        """Test that the order_by delta with pagination does not error."""
        qs_list = [
            "?filter[limit]=5&filter[offset]=0&order_by[delta]=asc&delta=usage",
            "?order_by[delta]=asc&delta=usage",
        ]
        for qs in qs_list:
            url = reverse("reports-aws-instance-type") + qs
            response = self.client.get(url, **self.headers)
            self.assertEqual(response.status_code, status.HTTP_200_OK)

            response_data = response.json()
            data = response_data.get("data", [])
            meta = response_data.get("meta", {})

            self.assertIn("total", meta)
            self.assertIn("filter", meta)
            self.assertIn("count", meta)

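            # Walk each day's instance types and check that delta_percent values
            # are non-decreasing; the flag guards against a vacuously empty loop.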
            compared_deltas = False
            for day in data:
                previous_delta = None
                for instance_type in day.get("instance_types", []):
                    values = instance_type.get("values", [])
                    if values:
                        current_delta = values[0].get("delta_percent")
                        if previous_delta and current_delta:
                            self.assertLessEqual(previous_delta, current_delta)
                            compared_deltas = True
                            previous_delta = current_delta
                        else:
                            previous_delta = current_delta
            self.assertTrue(compared_deltas)

    def test_others_count(self):
        """Test that the others count works with a small limit."""
        qs_list = ["?filter[limit]=1"]
        for qs in qs_list:
            url = reverse("reports-aws-instance-type") + qs
            response = self.client.get(url, **self.headers)
            self.assertEqual(response.status_code, status.HTTP_200_OK)

            response_data = response.json()
            meta = response_data.get("meta", {})
            self.assertNotEqual(meta.get("others"), 0)

    def test_order_by_delta_no_delta(self):
        """Test that the order_by delta with no delta passed in triggers 400."""
        qs_list = [
            "?filter[limit]=5&filter[offset]=0&order_by[delta]=asc",
            "?order_by[delta]=asc"
        ]
        for qs in qs_list:
            url = reverse("reports-aws-instance-type") + qs
            response = self.client.get(url, **self.headers)
            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_start_end_parameters_monthly_resolution(self):
        """Test that a validation error is raised for monthly resolution with start/end parameters."""
        qs_list = [
            f"?start_date={self.dh.last_month_end.date()}&end_date={self.dh.today.date()}&filter[resolution]=monthly"
        ]
        for qs in qs_list:
            url = reverse("reports-aws-instance-type") + qs
            response = self.client.get(url, **self.headers)
            self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_start_end_parameters_no_resolution(self):
        """Test that a no_validation error is raised for no resolution with start/end parameters."""
        qs_list = [
            f"?start_date={self.dh.last_month_end.date()}&end_date={self.dh.today.date()}"
        ]
        for qs in qs_list:
            url = reverse("reports-aws-costs") + qs
            response = self.client.get(url, **self.headers)
            self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_start_end_parameters_with_delta(self):
        """Test that a validation error is raised for delta with start/end parameters."""
        qs_list = [
            f"?start_date={self.dh.last_month_end.date()}&end_date={self.dh.today.date()}&delta=usage",
            f"?start_date={self.dh.last_month_end.date()}&delta=usage",
            f"?start_date={self.dh.last_month_end.date()}&delta=usage",
        ]
        for qs in qs_list:
            url = reverse("reports-aws-instance-type") + qs
            response = self.client.get(url, **self.headers)
            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
Example no. 24
class OCPReportQueryHandlerTest(IamTestCase):
    """Tests for the OCP report query handler."""
    def setUp(self):
        """Set up the customer view tests."""
        super().setUp()
        self.dh = DateHelper()

        _, self.provider = create_generic_provider(Provider.PROVIDER_OCP,
                                                   self.headers)

        self.this_month_filter = {"usage_start__gte": self.dh.this_month_start}
        self.ten_day_filter = {
            "usage_start__gte": self.dh.n_days_ago(self.dh.today, 9)
        }
        self.thirty_day_filter = {
            "usage_start__gte": self.dh.n_days_ago(self.dh.today, 29)
        }
        self.last_month_filter = {
            "usage_start__gte": self.dh.last_month_start,
            "usage_end__lte": self.dh.last_month_end,
        }
        OCPReportDataGenerator(self.tenant, self.provider).add_data_to_tenant()

    def get_totals_by_time_scope(self, aggregates, filters=None):
        """Return the total aggregates for a time period."""
        if filters is None:
            filters = self.ten_day_filter
        with tenant_context(self.tenant):
            return OCPUsageLineItemDailySummary.objects.filter(
                **filters).aggregate(**aggregates)

    def get_totals_costs_by_time_scope(self, aggregates, filters=None):
        """Return the total costs aggregates for a time period."""
        if filters is None:
            filters = self.this_month_filter
        with tenant_context(self.tenant):
            return OCPUsageLineItemDailySummary.objects.filter(
                **filters).aggregate(**aggregates)

    def test_execute_sum_query(self):
        """Test that the sum query runs properly."""
        url = "?"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_by_time_scope(aggregates)
        query_output = handler.execute_query()
        self.assertIsNotNone(query_output.get("data"))
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")

        self.assertEqual(
            total.get("usage", {}).get("value"), current_totals.get("usage"))
        self.assertEqual(
            total.get("request", {}).get("value"),
            current_totals.get("request"))
        self.assertEqual(
            total.get("cost", {}).get("value"), current_totals.get("cost"))
        self.assertEqual(
            total.get("limit", {}).get("value"), current_totals.get("limit"))

    def test_execute_sum_query_costs(self):
        """Test that the sum query runs properly for the costs endpoint."""
        url = "?"
        query_params = self.mocked_query_params(url, OCPCostView)
        handler = OCPReportQueryHandler(query_params)
        aggregates = handler._mapper.report_type_map.get("aggregates")
        current_totals = self.get_totals_costs_by_time_scope(
            aggregates, self.ten_day_filter)
        query_output = handler.execute_query()
        self.assertIsNotNone(query_output.get("data"))
        self.assertIsNotNone(query_output.get("total"))
        total = query_output.get("total")
        self.assertEqual(
            total.get("cost", {}).get("value"), current_totals.get("cost"))

    def test_get_cluster_capacity_monthly_resolution(self):
        """Test that cluster capacity returns a full month's capacity."""
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
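        # A single stub row is enough here: get_cluster_capacity() is expected to
        # inject the month's total capacity into every row it is handed.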
        query_data = [{"row": 1}]
        query_data, total_capacity = handler.get_cluster_capacity(query_data)
        self.assertTrue("capacity" in total_capacity)
        self.assertTrue(isinstance(total_capacity["capacity"], Decimal))
        self.assertTrue("capacity" in query_data[0])
        self.assertEqual(query_data[0].get("capacity"),
                         total_capacity.get("capacity"))

    def test_get_cluster_capacity_monthly_resolution_group_by_cluster(self):
        """Test that cluster capacity returns capacity by cluster."""
        # Add data for a second cluster
        OCPReportDataGenerator(self.tenant, self.provider).add_data_to_tenant()

        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&group_by[cluster]=*"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        query_data = handler.execute_query()

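        # Recompute per-cluster capacity directly from the daily summary table
        # (Max of cluster_capacity_cpu_core_hours per usage_start/cluster_id) and
        # compare it against the capacity values reported by the handler.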
        capacity_by_cluster = defaultdict(Decimal)
        total_capacity = Decimal(0)
        query_filter = handler.query_filter
        query_group_by = ["usage_start", "cluster_id"]
        annotations = {"capacity": Max("cluster_capacity_cpu_core_hours")}
        cap_key = list(annotations.keys())[0]

        q_table = handler._mapper.provider_map.get("tables").get("query")
        query = q_table.objects.filter(query_filter)

        with tenant_context(self.tenant):
            cap_data = query.values(*query_group_by).annotate(**annotations)
            for entry in cap_data:
                cluster_id = entry.get("cluster_id", "")
                capacity_by_cluster[cluster_id] += entry.get(cap_key, 0)
                total_capacity += entry.get(cap_key, 0)

        for entry in query_data.get("data", []):
            for cluster in entry.get("clusters", []):
                cluster_name = cluster.get("cluster", "")
                capacity = cluster.get("values")[0].get("capacity",
                                                        {}).get("value")
                self.assertEqual(capacity, capacity_by_cluster[cluster_name])

        self.assertEqual(
            query_data.get("total", {}).get("capacity", {}).get("value"),
            total_capacity)

    def test_get_cluster_capacity_daily_resolution(self):
        """Test that total capacity is returned daily resolution."""
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=daily"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        query_data = handler.execute_query()

        daily_capacity = defaultdict(Decimal)
        total_capacity = Decimal(0)
        query_filter = handler.query_filter
        query_group_by = ["usage_start"]
        annotations = {"capacity": Max("total_capacity_cpu_core_hours")}
        cap_key = list(annotations.keys())[0]

        q_table = handler._mapper.provider_map.get("tables").get("query")
        query = q_table.objects.filter(query_filter)

        with tenant_context(self.tenant):
            cap_data = query.values(*query_group_by).annotate(**annotations)
            for entry in cap_data:
                date = handler.date_to_string(entry.get("usage_start"))
                daily_capacity[date] += entry.get(cap_key, 0)
            # This is a hack because the total capacity in the test data
            # is artificial but the total should still be a sum of
            # cluster capacities
            annotations = {"capacity": Max("cluster_capacity_cpu_core_hours")}
            cap_data = query.values(*query_group_by).annotate(**annotations)
            for entry in cap_data:
                total_capacity += entry.get(cap_key, 0)

        self.assertEqual(
            query_data.get("total", {}).get("capacity", {}).get("value"),
            total_capacity)
        for entry in query_data.get("data", []):
            date = entry.get("date")
            values = entry.get("values")
            if values:
                capacity = values[0].get("capacity", {}).get("value")
                self.assertEqual(capacity, daily_capacity[date])

    def test_get_cluster_capacity_daily_resolution_group_by_clusters(self):
        """Test that cluster capacity returns daily capacity by cluster."""
        url = (
            "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=daily&group_by[cluster]=*"
        )
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        query_data = handler.execute_query()

        daily_capacity_by_cluster = defaultdict(dict)
        total_capacity = Decimal(0)
        query_filter = handler.query_filter
        query_group_by = ["usage_start", "cluster_id"]
        annotations = {"capacity": Max("cluster_capacity_cpu_core_hours")}
        cap_key = list(annotations.keys())[0]

        q_table = handler._mapper.query_table
        query = q_table.objects.filter(query_filter)

        with tenant_context(self.tenant):
            cap_data = query.values(*query_group_by).annotate(**annotations)
            for entry in cap_data:
                date = handler.date_to_string(entry.get("usage_start"))
                cluster_id = entry.get("cluster_id", "")
                if cluster_id in daily_capacity_by_cluster[date]:
                    daily_capacity_by_cluster[date][cluster_id] += entry.get(
                        cap_key, 0)
                else:
                    daily_capacity_by_cluster[date][cluster_id] = entry.get(
                        cap_key, 0)
                total_capacity += entry.get(cap_key, 0)

        for entry in query_data.get("data", []):
            date = entry.get("date")
            for cluster in entry.get("clusters", []):
                cluster_name = cluster.get("cluster", "")
                capacity = cluster.get("values")[0].get("capacity",
                                                        {}).get("value")
                self.assertEqual(capacity,
                                 daily_capacity_by_cluster[date][cluster_name])

        self.assertEqual(
            query_data.get("total", {}).get("capacity", {}).get("value"),
            total_capacity)

    @patch("api.report.ocp.query_handler.ReportQueryHandler.add_deltas")
    @patch(
        "api.report.ocp.query_handler.OCPReportQueryHandler.add_current_month_deltas"
    )
    def test_add_deltas_current_month(self, mock_current_deltas, mock_deltas):
        """Test that the current month method is called for deltas."""
        url = "?"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        handler._delta = "usage__request"
        handler.add_deltas([], [])
        mock_current_deltas.assert_called()
        mock_deltas.assert_not_called()

    @patch("api.report.ocp.query_handler.ReportQueryHandler.add_deltas")
    @patch(
        "api.report.ocp.query_handler.OCPReportQueryHandler.add_current_month_deltas"
    )
    def test_add_deltas_super_delta(self, mock_current_deltas, mock_deltas):
        """Test that the super delta method is called for deltas."""
        url = "?"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        handler._delta = "usage"

        handler.add_deltas([], [])

        mock_current_deltas.assert_not_called()
        mock_deltas.assert_called()

    def test_add_current_month_deltas(self):
        """Test that current month deltas are calculated."""
        url = "?"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        handler._delta = "usage__request"

        q_table = handler._mapper.provider_map.get("tables").get("query")
        with tenant_context(self.tenant):
            query = q_table.objects.filter(handler.query_filter)
            query_data = query.annotate(**handler.annotations)
            group_by_value = handler._get_group_by()
            query_group_by = ["date"] + group_by_value
            query_order_by = ("-date", )
            query_order_by += (handler.order, )

            annotations = handler.report_annotations
            query_data = query_data.values(*query_group_by).annotate(
                **annotations)

            aggregates = handler._mapper.report_type_map.get("aggregates")
            metric_sum = query.aggregate(**aggregates)
            query_sum = {key: metric_sum.get(key) for key in aggregates}

            result = handler.add_current_month_deltas(query_data, query_sum)

            delta_field_one, delta_field_two = handler._delta.split("__")
            field_one_total = Decimal(0)
            field_two_total = Decimal(0)
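            # Each row's delta_percent should be field_one / field_two * 100
            # (here usage / request * 100), falling back to 0 when the
            # denominator is missing.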
            for entry in result:
                field_one_total += entry.get(delta_field_one, 0)
                field_two_total += entry.get(delta_field_two, 0)
                delta_percent = entry.get("delta_percent")
                expected = ((entry.get(delta_field_one, 0) /
                             entry.get(delta_field_two, 0) *
                             100) if entry.get(delta_field_two) else 0)
                self.assertEqual(delta_percent, expected)

            expected_total = field_one_total / field_two_total * 100 if field_two_total != 0 else 0

            self.assertEqual(handler.query_delta.get("percent"),
                             expected_total)

    def test_add_current_month_deltas_no_previous_data_wo_query_data(self):
        """Test that current month deltas are calculated with no previous month data."""
        OCPReportDataGenerator(self.tenant,
                               self.provider).remove_data_from_tenant()
        OCPReportDataGenerator(self.tenant,
                               self.provider,
                               current_month_only=True).add_data_to_tenant()

        url = "?filter[time_scope_value]=-2&filter[resolution]=monthly&filter[time_scope_units]=month&filter[limit]=1&delta=usage__request"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)

        q_table = handler._mapper.provider_map.get("tables").get("query")
        with tenant_context(self.tenant):
            query = q_table.objects.filter(handler.query_filter)
            query_data = query.annotate(**handler.annotations)
            group_by_value = handler._get_group_by()
            query_group_by = ["date"] + group_by_value
            query_order_by = ("-date", )
            query_order_by += (handler.order, )

            annotations = handler.report_annotations
            query_data = query_data.values(*query_group_by).annotate(
                **annotations)

            aggregates = handler._mapper.report_type_map.get("aggregates")
            metric_sum = query.aggregate(**aggregates)
            query_sum = {
                key: metric_sum.get(key) if metric_sum.get(key) else Decimal(0)
                for key in aggregates
            }

            result = handler.add_current_month_deltas(query_data, query_sum)

            self.assertEqual(result, query_data)
            self.assertEqual(handler.query_delta["value"], Decimal(0))
            self.assertIsNone(handler.query_delta["percent"])

    def test_add_current_month_deltas_no_previous_data_w_query_data(self):
        """Test that current month deltas are calculated with no previous data for field two."""
        OCPReportDataGenerator(self.tenant,
                               self.provider).remove_data_from_tenant()
        OCPReportDataGenerator(self.tenant,
                               self.provider,
                               current_month_only=True).add_data_to_tenant()

        url = "?filter[time_scope_value]=-1&filter[resolution]=monthly&filter[limit]=1"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        handler._delta = "usage__foo"

        q_table = handler._mapper.provider_map.get("tables").get("query")
        with tenant_context(self.tenant):
            query = q_table.objects.filter(handler.query_filter)
            query_data = query.annotate(**handler.annotations)
            group_by_value = handler._get_group_by()
            query_group_by = ["date"] + group_by_value
            query_order_by = ("-date", )
            query_order_by += (handler.order, )

            annotations = handler.report_annotations
            query_data = query_data.values(*query_group_by).annotate(
                **annotations)

            aggregates = handler._mapper.report_type_map.get("aggregates")
            metric_sum = query.aggregate(**aggregates)
            query_sum = {
                key: metric_sum.get(key) if metric_sum.get(key) else Decimal(0)
                for key in aggregates
            }

            result = handler.add_current_month_deltas(query_data, query_sum)

            self.assertEqual(result, query_data)
            self.assertIsNotNone(handler.query_delta["value"])
            self.assertIsNone(handler.query_delta["percent"])

    def test_get_tag_filter_keys(self):
        """Test that filter params with tag keys are returned."""
        url = "?"
        query_params = self.mocked_query_params(url, OCPTagView)
        handler = OCPTagQueryHandler(query_params)
        tag_keys = handler.get_tag_keys(filters=False)

        url = f"?filter[tag:{tag_keys[0]}]=*"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        results = handler.get_tag_filter_keys()
        self.assertEqual(results, ["tag:" + tag_keys[0]])

    def test_get_tag_group_by_keys(self):
        """Test that group_by params with tag keys are returned."""
        url = "?"
        query_params = self.mocked_query_params(url, OCPTagView)
        handler = OCPTagQueryHandler(query_params)
        tag_keys = handler.get_tag_keys(filters=False)
        group_by_key = tag_keys[0]

        url = f"?group_by[tag:{group_by_key}]=*"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        results = handler.get_tag_group_by_keys()
        self.assertEqual(results, ["tag:" + group_by_key])

    def test_set_tag_filters(self):
        """Test that tag filters are created properly."""
        filters = QueryFilterCollection()

        url = "?"
        query_params = self.mocked_query_params(url, OCPTagView)
        handler = OCPTagQueryHandler(query_params)
        tag_keys = handler.get_tag_keys(filters=False)

        filter_key = tag_keys[0]

        filter_value = "filter"
        group_by_key = tag_keys[1]

        group_by_value = "group_By"

        url = f"?filter[tag:{filter_key}]={filter_value}&group_by[tag:{group_by_key}]={group_by_value}"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        filters = handler._set_tag_filters(filters)

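        # Both the filter and the group_by tag keys should land in the collection
        # as icontains filters against the pod_labels JSON field.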
        expected = f"""<class 'api.query_filter.QueryFilterCollection'>: (AND: ('pod_labels__{filter_key}__icontains', '{filter_value}')), (AND: ('pod_labels__{group_by_key}__icontains', '{group_by_value}')), """  # noqa: E501

        self.assertEqual(repr(filters), expected)

    def test_get_tag_group_by(self):
        """Test that tag based group bys work."""
        url = "?"
        query_params = self.mocked_query_params(url, OCPTagView)
        handler = OCPTagQueryHandler(query_params)
        tag_keys = handler.get_tag_keys(filters=False)

        group_by_key = tag_keys[0]
        group_by_value = "group_by"
        url = f"?group_by[tag:{group_by_key}]={group_by_value}"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        group_by = handler._get_tag_group_by()
        group = group_by[0]
        expected = "pod_labels__" + group_by_key
        self.assertEqual(len(group_by), 1)
        self.assertEqual(group[0], expected)

    def test_get_tag_order_by(self):
        """Verify that a propery order by is returned."""
        tag = "pod_labels__key"
        expected_param = (tag.split("__")[1], )
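        # Ordering by a tag should translate to a JSONB key lookup
        # ("pod_labels -> %s") with the tag key supplied as the SQL parameter.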

        url = "?"
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        result = handler.get_tag_order_by(tag)
        expression = result.expression

        self.assertIsInstance(result, OrderBy)
        self.assertEqual(expression.sql, "pod_labels -> %s")
        self.assertEqual(expression.params, expected_param)

    def test_filter_by_infrastructure_ocp_on_aws(self):
        """Test that filter by infrastructure for ocp on aws."""
        data_generator = OCPAWSReportDataGenerator(self.tenant,
                                                   self.provider,
                                                   current_month_only=True)
        data_generator.add_data_to_tenant()

        url = "?filter[resolution]=monthly&filter[time_scope_value]=-1&filter[time_scope_units]=month&filter[infrastructures]=aws"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        query_data = handler.execute_query()

        self.assertTrue(
            query_data.get("data"))  # check that returned list is not empty
        for entry in query_data.get("data"):
            self.assertTrue(entry.get("values"))
            for value in entry.get("values"):
                self.assertIsNotNone(value.get("usage").get("value"))
                self.assertIsNotNone(value.get("request").get("value"))
        data_generator.remove_data_from_tenant()

    def test_filter_by_infrastructure_ocp_on_azure(self):
        """Test that filter by infrastructure for ocp on azure."""
        data_generator = OCPAzureReportDataGenerator(self.tenant,
                                                     self.provider,
                                                     current_month_only=True)
        data_generator.add_data_to_tenant()
        data_generator.add_ocp_data_to_tenant()
        url = "?filter[resolution]=monthly&filter[time_scope_value]=-1&filter[time_scope_units]=month&filter[infrastructures]=azure"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        query_data = handler.execute_query()

        self.assertTrue(
            query_data.get("data"))  # check that returned list is not empty
        for entry in query_data.get("data"):
            self.assertTrue(entry.get("values"))
            for value in entry.get("values"):
                self.assertIsNotNone(value.get("usage").get("value"))
                self.assertIsNotNone(value.get("request").get("value"))
        data_generator.remove_data_from_tenant()

    def test_filter_by_infrastructure_ocp(self):
        """Test that filter by infrastructure for ocp not on aws."""
        data_generator = OCPReportDataGenerator(self.tenant,
                                                self.provider,
                                                current_month_only=True)
        data_generator.add_data_to_tenant()

        url = "?filter[resolution]=monthly&filter[time_scope_value]=-1&filter[time_scope_units]=month&filter[infrastructures]=aws"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)
        query_data = handler.execute_query()

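        # Only OCP-on-AWS data carries AWS infrastructure costs, so filtering plain
        # OCP data by infrastructures=aws should leave usage and request at zero.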
        self.assertTrue(
            query_data.get("data"))  # check that returned list is not empty
        for entry in query_data.get("data"):
            for value in entry.get("values"):
                self.assertEqual(value.get("usage").get("value"), 0)
                self.assertEqual(value.get("request").get("value"), 0)
        data_generator.remove_data_from_tenant()

    def test_order_by_null_values(self):
        """Test that order_by returns properly sorted data with null data."""
        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)

        unordered_data = [
            {
                "node": None,
                "cluster": "cluster-1"
            },
            {
                "node": "alpha",
                "cluster": "cluster-2"
            },
            {
                "node": "bravo",
                "cluster": "cluster-3"
            },
            {
                "node": "oscar",
                "cluster": "cluster-4"
            },
        ]

        order_fields = ["node"]
        expected = [
            {
                "node": "alpha",
                "cluster": "cluster-2"
            },
            {
                "node": "bravo",
                "cluster": "cluster-3"
            },
            {
                "node": "no-node",
                "cluster": "cluster-1"
            },
            {
                "node": "oscar",
                "cluster": "cluster-4"
            },
        ]
        ordered_data = handler.order_by(unordered_data, order_fields)
        self.assertEqual(ordered_data, expected)

    def test_ocp_cpu_query_group_by_cluster(self):
        """Test that group by cluster includes cluster and cluster_alias."""
        for _ in range(1, 5):
            OCPReportDataGenerator(self.tenant,
                                   self.provider).add_data_to_tenant()

        url = "?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&filter[limit]=3&group_by[cluster]=*"  # noqa: E501
        query_params = self.mocked_query_params(url, OCPCpuView)
        handler = OCPReportQueryHandler(query_params)

        query_data = handler.execute_query()
        for data in query_data.get("data"):
            self.assertIn("clusters", data)
            for cluster_data in data.get("clusters"):
                self.assertIn("cluster", cluster_data)
                self.assertIn("values", cluster_data)
                for cluster_value in cluster_data.get("values"):
                    # cluster_value is a dictionary
                    self.assertIn("cluster", cluster_value.keys())
                    self.assertIn("clusters", cluster_value.keys())
                    self.assertIsNotNone(cluster_value["cluster"])
                    self.assertIsNotNone(cluster_value["clusters"])