def init_data(self):
    """Seed storage with three consecutive hourly data frames.

    Each frame covers one hour starting at 2018-01-01 00:00 (local time)
    and spans both test projects.
    """
    scope_ids = [self._project_id, self._other_project_id]
    first_start = tzutils.utc_to_local(datetime.datetime(2018, 1, 1))
    one_hour = datetime.timedelta(seconds=3600)
    for hour in range(3):
        frame_start = tzutils.add_delta(first_start, one_hour * hour)
        frame_end = tzutils.add_delta(first_start, one_hour * (hour + 1))
        frame = test_utils.generate_v2_storage_data(
            project_ids=scope_ids, start=frame_start, end=frame_end)
        self.data.append(frame)
        self.storage.push([frame])
def test_do_execute_scope_processing_with_usage_data(
        self, update_scope_processing_state_db_mock, persist_rating_data_mock,
        execute_measurements_rating_mock, do_collection_mock):
    """When usage data is collected, it must be rated and persisted."""
    self.worker._conf = {"metrics": {"metric1": "s", "metric2": "d"}}
    collected_usage = {"some_usage_data": 2}
    do_collection_mock.return_value = collected_usage
    rated_frame = mock.Mock()
    execute_measurements_rating_mock.return_value = rated_frame

    now = tzutils.localized_now()
    self.worker.do_execute_scope_processing(now)

    # Collection is driven by the configured metric names.
    do_collection_mock.assert_has_calls(
        [mock.call(["metric1", "metric2"], now)])

    expected_end = tzutils.add_delta(
        now, datetime.timedelta(seconds=self.worker._period))
    execute_measurements_rating_mock.assert_has_calls(
        [mock.call(expected_end, now, collected_usage)])
    persist_rating_data_mock.assert_has_calls(
        [mock.call(expected_end, rated_frame, now)])
    self.assertTrue(update_scope_processing_state_db_mock.called)
def _test_add_substract_delta(self, obj, tzone):
    """Check add_delta/substract_delta against naive UTC arithmetic."""
    one_hour = datetime.timedelta(seconds=3600)
    naive_utc = obj.astimezone(tz.tzutc()).replace(tzinfo=None)

    expected_plus = (naive_utc + one_hour).replace(
        tzinfo=tz.tzutc()).astimezone(tzone)
    self.assertEqual(
        tzutils.add_delta(obj, one_hour).astimezone(tzone), expected_plus)

    expected_minus = (naive_utc - one_hour).replace(
        tzinfo=tz.tzutc()).astimezone(tzone)
    self.assertEqual(
        tzutils.substract_delta(obj, one_hour).astimezone(tzone),
        expected_minus)
def _fetch_resources(self, metric_name, start, end,
                     project_id=None, q_filter=None):
    """Get resources during the timeframe.

    :param metric_name: Name of the metric to fetch resources for.
    :type metric_name: str
    :param start: Start of the timeframe.
    :param end: End of the timeframe if needed.
    :param project_id: Filter on a specific tenant/project.
    :type project_id: str
    :param q_filter: Append a custom filter.
    :type q_filter: list
    :return: Mapping of resource key -> resource dict.
    """
    # Get gnocchi specific conf
    extra_args = self.conf[metric_name]['extra_args']
    resource_type = extra_args['resource_type']
    # Hoisted: used for the sort, the pagination marker and the result keys.
    resource_key = extra_args['resource_key']
    scope_key = CONF.collect.scope_key

    # Build query
    # FIXME(peschk_l): In order not to miss any resource whose metrics may
    # contain measures after its destruction, we scan resources over three
    # collect periods.
    delta = timedelta(seconds=CONF.collect.period)
    start = tzutils.substract_delta(start, delta)
    end = tzutils.add_delta(end, delta)
    query_parameters = self._generate_time_filter(start, end)

    if project_id:
        kwargs = {scope_key: project_id}
        query_parameters.append(self.gen_filter(**kwargs))
    if q_filter:
        query_parameters.append(q_filter)

    sorts = [resource_key + ':asc']
    resources = []
    marker = None
    while True:
        resources_chunk = self._conn.resource.search(
            resource_type=resource_type,
            query=self.extend_filter(*query_parameters),
            sorts=sorts,
            marker=marker)
        # An empty page means pagination is exhausted.
        if not resources_chunk:
            break
        resources += resources_chunk
        marker = resources_chunk[-1][resource_key]

    return {res[resource_key]: res for res in resources}
def _collect(self, metric, start_timestamp):
    """Retrieve one period of data for a metric.

    Raises collector.NoDataCollected when the collector returns nothing.
    """
    end_timestamp = tzutils.add_delta(
        start_timestamp, timedelta(seconds=self._period))
    name, data = self._collector.retrieve(
        metric, start_timestamp, end_timestamp, self._tenant_id)
    if not data:
        raise collector.NoDataCollected
    return name, data
def _get_total_elem(self, begin, end, groupby, series_groupby, point):
    """Build a single total entry from a result point."""
    if groupby and 'time' in groupby:
        # Time-grouped points carry their own window: derive begin/end
        # from the point instead of the requested timeframe.
        begin = tzutils.dt_from_iso(point['time'])
        period = point.get(PERIOD_FIELD_NAME) or self._default_period
        end = tzutils.add_delta(begin, datetime.timedelta(seconds=period))

    entry = {
        'begin': begin,
        'end': end,
        'qty': point['qty'],
        'rate': point['price'],
    }
    groups = _sanitized_groupby(groupby) if groupby else []
    for group in groups:
        entry[group] = series_groupby.get(group, '')
    return entry
def check_time_state(timestamp=None, period=0, wait_periods=0):
    """Checks the state of a timestamp compared to the current time.

    Returns the next timestamp based on the current timestamp and the period
    if the next timestamp is inferior to the current time and the waiting
    period or None if not.

    :param timestamp: Current timestamp
    :type timestamp: datetime.datetime
    :param period: Period, in seconds
    :type period: int
    :param wait_periods: periods to wait before the current timestamp.
    :type wait_periods: int
    :rtype: datetime.datetime
    """
    if not timestamp:
        # No state yet: start processing at the beginning of the month.
        return tzutils.get_month_start()

    step = datetime.timedelta(seconds=period)
    candidate = tzutils.add_delta(timestamp, step)
    # Only hand out the next timestamp once it is old enough, i.e. at
    # least wait_periods * period in the past.
    if tzutils.add_delta(candidate, wait_periods * step) \
            < tzutils.localized_now():
        return candidate
    return None
def test_collect_with_data(self):
    """_collect must forward the collector's data untouched."""
    metric = "metric1"
    start = tzutils.localized_now()
    expected_data = {"some_usage_data": 3}
    self.collector_mock.retrieve.return_value = (metric, expected_data)

    result = self.worker._collect(metric, start)

    stop = tzutils.add_delta(
        start, datetime.timedelta(seconds=self.worker._period))
    self.collector_mock.retrieve.assert_has_calls(
        [mock.call(metric, start, stop, self.worker._tenant_id)])
    self.assertEqual((metric, expected_data), result)
def _build_dataframes(self, points):
    """Group points into DataFrames keyed by their (start, end) window.

    :param points: Iterable of result points, each with a 'time' field,
                   a 'type' field and optionally a period field.
    :return: List of DataFrames sorted by (start, end).
    """
    dataframes = {}
    for point in points:
        point_type = point['type']
        start = tzutils.dt_from_iso(point['time'])
        period = point.get(PERIOD_FIELD_NAME) or self._default_period
        end = tzutils.add_delta(start, datetime.timedelta(seconds=period))
        timekey = (start, end)
        # One DataFrame per time window, created lazily on first point.
        if timekey not in dataframes:
            dataframes[timekey] = dataframe.DataFrame(start=start, end=end)
        dataframes[timekey].add_point(
            self._point_to_dataframe_entry(point), point_type)

    return sorted(dataframes.values(),
                  key=lambda frame: (frame.start, frame.end))
def test_collect_no_data(self):
    """_collect must raise NoDataCollected when no data is returned."""
    metric = "metric1"
    start = tzutils.localized_now()
    self.collector_mock.retrieve.return_value = (metric, None)

    expected_message = re.escape(
        "Collector 'toString' returned no data for "
        "resource 'metric1'")
    self.assertRaisesRegex(collector.NoDataCollected, expected_message,
                           self.worker._collect, metric, start)

    stop = tzutils.add_delta(
        start, datetime.timedelta(seconds=self.worker._period))
    self.collector_mock.retrieve.assert_has_calls(
        [mock.call(metric, start, stop, self.worker._tenant_id)])
def run(self):
    """Process the scope until its state catches up with the current time."""
    while True:
        timestamp = self._check_state()
        if not timestamp:
            # Scope is up to date; nothing left to process.
            break

        metrics = list(self._conf['metrics'].keys())

        # Collection
        usage_data = self._do_collection(metrics, timestamp)

        period_end = tzutils.add_delta(
            timestamp, timedelta(seconds=self._period))
        frame = dataframe.DataFrame(
            start=timestamp, end=period_end, usage=usage_data)

        # Rating: each processor transforms the frame in turn.
        for processor in self._processors:
            frame = processor.obj.process(frame)

        # Writing
        self._storage.push([frame], self._tenant_id)
        self._state.set_state(self._tenant_id, timestamp)
def do_execute_scope_processing(self, timestamp):
    """Collect, rate and persist one period of data for this scope."""
    # Collect metrics in a deterministic (sorted) order.
    metrics = sorted(self._conf['metrics'].keys())

    usage_data = self._do_collection(metrics, timestamp)
    LOG.debug("Usage data [%s] found for storage scope [%s] in "
              "timestamp [%s].", usage_data, self._tenant_id, timestamp)

    start_time = timestamp
    end_time = tzutils.add_delta(timestamp,
                                 timedelta(seconds=self._period))

    if usage_data:
        frame = self.execute_measurements_rating(end_time, start_time,
                                                 usage_data)
        self.persist_rating_data(end_time, frame, start_time)
    else:
        # No usage records found for this period.
        LOG.warning("No usage data for storage scope [%s] on "
                    "timestamp [%s]. You might want to consider "
                    "de-activating it.", self._tenant_id, timestamp)

    # Always advance the processing state, even on empty periods.
    self.update_scope_processing_state_db(timestamp)
def quote(self, ctxt, res_data):
    """Rate the given usage snapshot and return the price as a string."""
    LOG.debug('Received quote request [%s] from RPC.', res_data)

    worker = APIWorker()

    begin = tzutils.localized_now()
    finish = tzutils.add_delta(begin, timedelta(seconds=CONF.collect.period))

    # Need to prepare data to support the V2 processing format
    usage = {}
    for metric_name, quote_entries in res_data['usage'].items():
        points = []
        for entry in quote_entries:
            volume = entry['vol']
            description = entry.get('desc', {})
            points.append(dataframe.DataPoint(
                volume['unit'],
                volume['qty'],
                0,
                description.get('groupby', []),
                description.get('metadata', []),
            ))
        usage[metric_name] = points

    frame = dataframe.DataFrame(start=begin, end=finish, usage=usage)

    quote_result = worker.quote(frame)
    LOG.debug("Quote result [%s] for input data [%s].",
              quote_result, res_data)
    return str(quote_result)