Example #1
    def configure_and_execute_overlap_test(self, schedule_get_all_mock,
                                           start_reprocess_time,
                                           end_reprocess_time):

        self.configure_schedules_mock(schedule_get_all_mock,
                                      start_reprocess_time, end_reprocess_time)

        scheduling_range = DateTimeRange(
            tzutils.utc_to_local(self.start_reprocess_time),
            tzutils.utc_to_local(self.end_reprocess_time))
        scheduled_range = DateTimeRange(
            tzutils.local_to_utc(start_reprocess_time),
            tzutils.local_to_utc(end_reprocess_time))
        expected_message = \
            "400 Bad Request: Cannot schedule a reprocessing for scope " \
            "[toStringMock] for reprocessing time [%s], because it already " \
            "has a schedule for a similar time range [%s]." \
            % (scheduling_range, scheduled_range)

        expected_message = re.escape(expected_message)

        self.assertRaisesRegex(
            http_exceptions.BadRequest, expected_message,
            self.endpoint.validate_reprocessing_schedules_overlaps,
            self.generate_all_scopes_object(), self.end_reprocess_time,
            self.start_reprocess_time)

        schedule_get_all_mock.assert_called_with(
            identifier=[self.scope_ids[0]])
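
A hypothetical caller of the helper above (a hedged sketch, not taken from the source): it assumes schedule_get_all_mock is injected elsewhere by a mock.patch decorator on the schedule storage's get_all, and it reuses the already-scheduled range so the overlap validation is expected to raise BadRequest.

    # Hypothetical test (assumed name; the mock.patch decorator is omitted):
    # exercises the overlap check with the exact range already scheduled.
    def test_overlap_with_same_range(self, schedule_get_all_mock):
        self.configure_and_execute_overlap_test(schedule_get_all_mock,
                                                self.start_reprocess_time,
                                                self.end_reprocess_time)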
Example #2
    def total(self,
              groupby=None,
              begin=None,
              end=None,
              metric_types=None,
              filters=None,
              offset=0,
              limit=1000,
              paginate=True):

        begin, end = self._check_begin_end(begin, end)

        total = self._conn.get_total(metric_types, begin, end, groupby,
                                     filters)

        output = []
        for (series_name, series_groupby), points in total.items():
            for point in points:
                # NOTE(peschk_l): InfluxDB returns all timestamps for a given
                # period and interval, even those with no data. This filters
                # out periods with no data.
                if point['qty'] is not None and point['price'] is not None:
                    output.append(
                        self._get_total_elem(tzutils.utc_to_local(begin),
                                             tzutils.utc_to_local(end),
                                             groupby, series_groupby, point))

        groupby = _sanitized_groupby(groupby)
        if groupby:
            output.sort(key=lambda x: [x[group] for group in groupby])
        return {
            'total': len(output),
            'results': output[offset:offset + limit] if paginate else output,
        }
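
A hedged usage sketch for the method above; the names storage, begin and end are assumptions, not code from the examples.

    # Hypothetical usage: storage is assumed to be an initialized v2 storage
    # backend, and begin/end timezone-aware datetimes.
    report = storage.total(
        begin=begin, end=end, groupby=['project_id'],
        offset=0, limit=100, paginate=True)
    print(report['total'])            # number of aggregated entries
    for entry in report['results']:   # only the requested page (paginate=True)
        print(entry['begin'], entry['end'], entry['rate'])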
Example #3
 def test_get_total_groupby_tenant_and_restype(self):
     begin = tzutils.utc_to_local(samples.FIRST_PERIOD_BEGIN)
     end = tzutils.utc_to_local(samples.SECOND_PERIOD_END)
     self.insert_data()
     total = self.storage.total(
         begin=begin,
         end=end,
         groupby=['project_id', 'type'])['results']
     self.assertEqual(4, len(total))
     self.assertEqual(0.1337, total[0]["rate"])
     self.assertEqual(self._other_tenant_id, total[0]["tenant_id"])
     self.assertEqual('image.size', total[0]["res_type"])
     self.assertEqual(begin, total[0]["begin"])
     self.assertEqual(end, total[0]["end"])
     self.assertEqual(0.1337, total[1]["rate"])
     self.assertEqual(self._tenant_id, total[1]["tenant_id"])
     self.assertEqual('image.size', total[1]["res_type"])
     self.assertEqual(begin, total[1]["begin"])
     self.assertEqual(end, total[1]["end"])
     self.assertEqual(0.84, total[2]["rate"])
     self.assertEqual(self._other_tenant_id, total[2]["tenant_id"])
     self.assertEqual('instance', total[2]["res_type"])
     self.assertEqual(begin, total[2]["begin"])
     self.assertEqual(end, total[2]["end"])
     self.assertEqual(0.84, total[3]["rate"])
     self.assertEqual(self._tenant_id, total[3]["tenant_id"])
     self.assertEqual('instance', total[3]["res_type"])
     self.assertEqual(begin, total[3]["begin"])
     self.assertEqual(end, total[3]["end"])
Example #4
    def get_all(self,
                identifier=None,
                fetcher=None,
                collector=None,
                scope_key=None,
                active=1,
                limit=100,
                offset=0):
        """Returns the state of all scopes.

        This function returns the state of all scopes with support for optional
        filters.

        :param identifier: optional scope identifiers to filter on
        :type identifier: list
        :param fetcher: optional scope fetchers to filter on
        :type fetcher: list
        :param collector: optional collectors to filter on
        :type collector: list
        :param fetcher: optional fetchers to filter on
        :type fetcher: list
        :param scope_key: optional scope_keys to filter on
        :type scope_key: list
        :param active: optional active to filter scopes by status
                       (active/deactivated)
        :type active: int
        :param limit: optional to restrict the projection
        :type limit: int
        :param offset: optional to shift the projection
        :type offset: int
        """
        session = db.get_session()
        session.begin()

        q = utils.model_query(self.model, session)
        if identifier:
            q = q.filter(
                self.model.identifier.in_(to_list_if_needed(identifier)))
        if fetcher:
            q = q.filter(self.model.fetcher.in_(to_list_if_needed(fetcher)))
        if collector:
            q = q.filter(self.model.collector.in_(
                to_list_if_needed(collector)))
        if scope_key:
            q = q.filter(self.model.scope_key.in_(
                to_list_if_needed(scope_key)))
        if active is not None and active != []:
            q = q.filter(self.model.active.in_(to_list_if_needed(active)))
        q = apply_offset_and_limit(limit, offset, q)

        r = q.all()
        session.close()

        for item in r:
            item.last_processed_timestamp = tzutils.utc_to_local(
                item.last_processed_timestamp)
            item.scope_activation_toggle_date = tzutils.utc_to_local(
                item.scope_activation_toggle_date)
        return r
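
A brief hypothetical call of get_all() above; the state_manager name is an assumption.

    # Hypothetical usage: list up to 10 active scopes rated by the gnocchi
    # collector; timestamps come back converted to local time.
    scopes = state_manager.get_all(collector='gnocchi', active=1, limit=10)
    for scope in scopes:
        print(scope.identifier, scope.last_processed_timestamp)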
Example #5
 def _localize_total(iterable):
     for elem in iterable:
         begin = elem['begin']
         end = elem['end']
         if begin:
             elem['begin'] = tzutils.utc_to_local(begin)
         if end:
             elem['end'] = tzutils.utc_to_local(end)
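
For illustration, a hypothetical input for the helper above; the entries are mutated in place and both bounds end up converted to local time.

 import datetime

 # Hypothetical input: report entries carrying naive UTC bounds.
 entries = [{'begin': datetime.datetime(2019, 7, 16, 8, 0),
             'end': datetime.datetime(2019, 7, 16, 9, 0)}]
 _localize_total(entries)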
Example #6
 def test_get_total_filtering_on_one_period(self):
     begin = tzutils.utc_to_local(samples.FIRST_PERIOD_BEGIN)
     end = tzutils.utc_to_local(samples.FIRST_PERIOD_END)
     self.insert_data()
     total = self.storage.total(begin=begin, end=end)['results']
     self.assertEqual(1, len(total))
     self.assertEqual(1.1074, total[0]["rate"])
     self.assertEqual(begin, total[0]["begin"])
     self.assertEqual(end, total[0]["end"])
Example #7
 def test_get_total_without_filter_but_timestamp(self):
     begin = tzutils.utc_to_local(samples.FIRST_PERIOD_BEGIN)
     end = tzutils.utc_to_local(samples.SECOND_PERIOD_END)
     self.insert_data()
     total = self.storage.total(begin=begin, end=end)['results']
     # FIXME(sheeprine): floating point error (transition to decimal)
     self.assertEqual(1, len(total))
     self.assertEqual(1.9473999999999998, total[0]["rate"])
     self.assertEqual(begin, total[0]["begin"])
     self.assertEqual(end, total[0]["end"])
Example #8
 def test_get_empty_total(self):
     begin = tzutils.utc_to_local(samples.FIRST_PERIOD_BEGIN -
                                  datetime.timedelta(seconds=3600))
     end = tzutils.utc_to_local(samples.FIRST_PERIOD_BEGIN)
     self.insert_data()
     total = self.storage.total(begin=begin, end=end)['results']
     self.assertEqual(1, len(total))
     self.assertEqual(total[0]["rate"], 0)
     self.assertEqual(begin, total[0]["begin"])
     self.assertEqual(end, total[0]["end"])
Example #9
 def test_get_total_filtering_on_service(self):
     begin = tzutils.utc_to_local(samples.FIRST_PERIOD_BEGIN)
     end = tzutils.utc_to_local(samples.FIRST_PERIOD_END)
     self.insert_data()
     total = self.storage.total(begin=begin,
                                end=end,
                                metric_types='instance')['results']
     self.assertEqual(1, len(total))
     self.assertEqual(0.84, total[0]["rate"])
     self.assertEqual('instance', total[0]["res_type"])
     self.assertEqual(begin, total[0]["begin"])
     self.assertEqual(end, total[0]["end"])
Example #10
 def test_get_total_filtering_on_one_period_and_one_tenant(self):
     begin = tzutils.utc_to_local(samples.FIRST_PERIOD_BEGIN)
     end = tzutils.utc_to_local(samples.FIRST_PERIOD_END)
     self.insert_data()
     filters = {'project_id': self._tenant_id}
     total = self.storage.total(begin=begin, end=end,
                                filters=filters)['results']
     self.assertEqual(1, len(total))
     self.assertEqual(0.5537, total[0]["rate"])
     self.assertEqual(self._tenant_id, total[0]["tenant_id"])
     self.assertEqual(begin, total[0]["begin"])
     self.assertEqual(end, total[0]["end"])
Example #11
 def test_get_total_groupby_restype(self):
     begin = tzutils.utc_to_local(samples.FIRST_PERIOD_BEGIN)
     end = tzutils.utc_to_local(samples.SECOND_PERIOD_END)
     self.insert_data()
     total = self.storage.total(begin=begin, end=end,
                                groupby=['type'])['results']
     self.assertEqual(2, len(total))
     self.assertEqual(0.2674, total[0]["rate"])
     self.assertEqual('image.size', total[0]["res_type"])
     self.assertEqual(begin, total[0]["begin"])
     self.assertEqual(end, total[0]["end"])
     self.assertEqual(1.68, total[1]["rate"])
     self.assertEqual('instance', total[1]["res_type"])
     self.assertEqual(begin, total[1]["begin"])
     self.assertEqual(end, total[1]["end"])
Example #12
 def test_get_total_groupby_tenant(self):
     begin = tzutils.utc_to_local(samples.FIRST_PERIOD_BEGIN)
     end = tzutils.utc_to_local(samples.SECOND_PERIOD_END)
     self.insert_data()
     total = self.storage.total(begin=begin,
                                end=end,
                                groupby=['project_id'])['results']
     self.assertEqual(2, len(total))
     self.assertEqual(0.9737, total[0]["rate"])
     self.assertEqual(self._other_tenant_id, total[0]["tenant_id"])
     self.assertEqual(begin, total[0]["begin"])
     self.assertEqual(end, total[0]["end"])
     self.assertEqual(0.9737, total[1]["rate"])
     self.assertEqual(self._tenant_id, total[1]["tenant_id"])
     self.assertEqual(begin, total[1]["begin"])
     self.assertEqual(end, total[1]["end"])
Example #13
    def total(self,
              groupby=None,
              begin=None,
              end=None,
              metric_types=None,
              filters=None,
              offset=0,
              limit=1000,
              paginate=True,
              custom_fields="SUM(qty) AS qty, SUM(price) AS rate"):

        begin, end = self._check_begin_end(begin, end)

        total = self._conn.get_total(metric_types, begin, end, custom_fields,
                                     groupby, filters)

        output = []
        for (series_name, series_groupby), points in total.items():
            for point in points:
                # NOTE(peschk_l): InfluxDB returns all timestamps for a given
                # period and interval, even those with no data. This filters
                # out periods with no data.

                # NOTE(rafaelweingartner): the summary GET API allows users to
                # customize the report. Therefore, we only ignore a data point
                # if all of its entries have None values. Otherwise, it is
                # presented to the user.
                if [k for k in point.keys() if point[k]]:
                    output.append(
                        self._get_total_elem(tzutils.utc_to_local(begin),
                                             tzutils.utc_to_local(end),
                                             groupby, series_groupby, point))

        groupby = _sanitized_groupby(groupby)
        if groupby:
            output.sort(key=lambda x: [x[group] for group in groupby])

        return {
            'total': len(output),
            'results': output[offset:offset + limit] if paginate else output,
        }
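
A hedged illustration of the custom_fields parameter added in this variant; the MAX aggregation below is an assumption (one plausible InfluxQL alternative), not something taken from the source.

    # Hypothetical call: override the default SUM-based aggregation.
    report = storage.total(
        begin=begin, end=end, groupby=['project_id'],
        custom_fields="MAX(qty) AS qty, MAX(price) AS rate")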
Example #14
 def init_data(self):
     project_ids = [self._project_id, self._other_project_id]
     start_base = tzutils.utc_to_local(datetime.datetime(2018, 1, 1))
     for i in range(3):
         start_delta = datetime.timedelta(seconds=3600 * i)
         end_delta = start_delta + datetime.timedelta(seconds=3600)
         start = tzutils.add_delta(start_base, start_delta)
         end = tzutils.add_delta(start_base, end_delta)
         data = test_utils.generate_v2_storage_data(project_ids=project_ids,
                                                    start=start,
                                                    end=end)
         self.data.append(data)
         self.storage.push([data])
Example #15
    def get_all(self,
                identifier=None,
                fetcher=None,
                collector=None,
                scope_key=None,
                limit=100, offset=0):
        """Returns the state of all scopes.

        This function returns the state of all scopes with support for optional
        filters.

        :param identifier: optional scope identifiers to filter on
        :type identifier: list
        :param fetcher: optional scope fetchers to filter on
        :type fetcher: list
        :param collector: optional collectors to filter on
        :type collector: list
        :param fetcher: optional fetchers to filter on
        :type fetcher: list
        :param scope_key: optional scope_keys to filter on
        :type scope_key: list
        """
        session = db.get_session()
        session.begin()

        q = utils.model_query(self.model, session)
        if identifier:
            q = q.filter(self.model.identifier.in_(identifier))
        if fetcher:
            q = q.filter(self.model.fetcher.in_(fetcher))
        if collector:
            q = q.filter(self.model.collector.in_(collector))
        if scope_key:
            q = q.filter(self.model.scope_key.in_(scope_key))
        q = q.offset(offset).limit(limit)

        r = q.all()
        session.close()

        for item in r:
            item.state = tzutils.utc_to_local(item.state)

        return r
Example #16
    def get_state(self, identifier,
                  fetcher=None, collector=None, scope_key=None):
        """Get the state of a scope.

        :param identifier: Identifier of the scope
        :type identifier: str
        :param fetcher: Fetcher associated with the scope
        :type fetcher: str
        :param collector: Collector associated with the scope
        :type collector: str
        :param scope_key: scope_key associated with the scope
        :type scope_key: str
        :rtype: datetime.datetime
        """
        session = db.get_session()
        session.begin()
        r = self._get_db_item(
            session, identifier, fetcher, collector, scope_key)
        session.close()
        return tzutils.utc_to_local(r.state) if r else None
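
A minimal hypothetical call of get_state(); the state_manager name is an assumption, and the identifier is reused from Example #19 purely for illustration.

    # Hypothetical usage: returns a local-time datetime, or None when the
    # scope is unknown.
    last_state = state_manager.get_state(
        'f266f30b11f246b589fd266f85eeec39', collector='gnocchi')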
Example #17
 def _normalize_time(t):
     if isinstance(t, datetime.datetime):
         return tzutils.utc_to_local(t)
     return tzutils.dt_from_iso(t)
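
For illustration, both branches of the helper above are expected to yield a timezone-aware local datetime; the inputs below are assumptions.

 # Hypothetical inputs: a naive datetime and an ISO 8601 timestamp string.
 _normalize_time(datetime.datetime(2019, 7, 16, 8, 55, 1))
 _normalize_time('2019-07-16T08:55:01Z')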
Example #18
 def test_utc_to_local(self):
     self.assertEqual(tzutils.utc_to_local(self.naive_now), self.local_now)
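
A hedged sketch of the fixture values this assertion relies on; the setup below is an assumption, not the project's actual setUp code.

 import datetime

 from dateutil import tz

 # Assumed fixture: naive_now is a naive UTC timestamp, and local_now is the
 # same instant expressed in the local timezone.
 naive_now = datetime.datetime.utcnow()
 local_now = naive_now.replace(tzinfo=tz.tzutc()).astimezone(tz.tzlocal())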
Example #19
    def test_reset_state(self):
        coord_start_patch = mock.patch.object(
            coordination.CoordinationDriverWithExecutor, 'start')
        lock_acquire_patch = mock.patch.object(file.FileLock,
                                               'acquire',
                                               return_value=True)

        storage_delete_patch = mock.patch.object(influx.InfluxStorage,
                                                 'delete')

        state_set_patch = mock.patch.object(storage_state.StateManager,
                                            'set_last_processed_timestamp')

        with coord_start_patch, lock_acquire_patch, \
                storage_delete_patch as sd, state_set_patch as ss:

            endpoint = orchestrator.ScopeEndpoint()
            endpoint.reset_state({}, {
                'scopes': [
                    {
                        'scope_id': 'f266f30b11f246b589fd266f85eeec39',
                        'scope_key': 'project_id',
                        'collector': 'prometheus',
                        'fetcher': 'prometheus',
                    },
                    {
                        'scope_id': '4dfb25b0947c4f5481daf7b948c14187',
                        'scope_key': 'project_id',
                        'collector': 'gnocchi',
                        'fetcher': 'gnocchi',
                    },
                ],
                'last_processed_timestamp': '20190716T085501Z',
            })

            sd.assert_has_calls([
                mock.call(begin=tzutils.utc_to_local(
                    datetime.datetime(2019, 7, 16, 8, 55, 1)),
                          end=None,
                          filters={
                              'project_id': 'f266f30b11f246b589fd266f85eeec39'
                          }),
                mock.call(
                    begin=tzutils.utc_to_local(
                        datetime.datetime(2019, 7, 16, 8, 55, 1)),
                    end=None,
                    filters={'project_id': '4dfb25b0947c4f5481daf7b948c14187'},
                )
            ], any_order=True)

            ss.assert_has_calls([
                mock.call('f266f30b11f246b589fd266f85eeec39',
                          tzutils.utc_to_local(
                              datetime.datetime(2019, 7, 16, 8, 55, 1)),
                          scope_key='project_id',
                          collector='prometheus',
                          fetcher='prometheus'),
                mock.call('4dfb25b0947c4f5481daf7b948c14187',
                          tzutils.utc_to_local(
                              datetime.datetime(2019, 7, 16, 8, 55, 1)),
                          scope_key='project_id',
                          collector='gnocchi',
                          fetcher='gnocchi')
            ], any_order=True)