def test_invalid_functions_in_collection(self):
    """An unsupported child method in a collection data tool should
    produce exactly one warning and no results."""
    topic = STATUS_TOPIC
    data = generate_test_data()
    params = {"method": "bear", "name": "Test invalid"}
    topic['data_tools'] = [
        {'field': 'number', 'method': 'table_item', 'parameters': params},
    ]

    results, warnings = run_data_tools(topic, data)

    self.assertEqual(len(warnings), 1)
    self.assertEqual(warnings[0], method_not_supported('bear'))
    self.assertEqual(len(results), 0)
def _collection(type_, data, field, parameters=None):
    """Run the child value function named in *parameters* and wrap the
    result in a statistics dict of the given type.

    Returns None when *parameters* is not a mapping; raises ValueError
    when the requested method is unknown.
    """
    try:
        method = parameters.get("method", "latest")
        child_parameters = parameters.get("parameters")
    except AttributeError:
        # parameters was None (or otherwise not dict-like)
        return None

    functions = {**CHART_FUNCS, **STATUS_FUNCS, **VALUE_FUNCS}
    if method not in functions:
        raise ValueError(method_not_supported(method))

    value_d = functions[method](data, field, child_parameters)
    return statistics_dict(type_, parameters=parameters, **value_d)
def test_aggregate_unknown_data_tool(self):
    """Aggregating with an unknown value function should return no data
    and a single not-supported warning."""
    data = generate_test_data(15)

    aggregated, warnings = aggregate(data, 10, 'horse')

    self.assertEqual(aggregated, [])
    self.assertEqual(warnings, [method_not_supported('horse')])
def _check_data_tool(data_tool, topic_d):
    """Validate a data-tool entry against the topic definition.

    Raises ValueError when the method is unsupported or the target field
    is not declared by the topic. Method is checked before field.
    """
    method = data_tool["method"]
    if method not in data_functions:
        raise ValueError(method_not_supported(method))

    field = data_tool["field"]
    if field not in topic_d["fields"]:
        raise ValueError(field_is_undefined(field))
def _get_value(method, data, field, parameters=None):
    """Evaluate *method* on (data, field) and return the payload value.

    Raises ValueError for an unsupported method; returns None when the
    result carries no payload value.
    """
    if method not in functions:
        raise ValueError(method_not_supported(method))
    result = functions[method](data, field, parameters)
    payload = result.get("payload", {})
    return payload.get("value")
def aggregate(data, aggregate_to, aggregate_with=None, aggregate_always=False):
    '''Aggregate data to less data points

    Args:
        data: Data before aggregation
        aggregate_to: Number of data points to aggregate data to.
        aggregate_with: Value function to use to when combining data-points.
            Defaults to average.
        aggregate_always: If true, data is aggregated even if datas length is
            shorter than aggregate_to value. Disabled by default.

    Returns:
        Tuple of (list of aggregated data-points, list of warnings)
    '''
    if not aggregate_with:
        aggregate_with = 'average'

    warnings = []
    aggregated = []

    # Guard clauses: nothing to do, or nothing we can do.
    if not data:
        warnings.append(no_data())
        return ([], warnings,)
    if len(data) <= aggregate_to and not aggregate_always:
        return (data, warnings,)
    if aggregate_with not in VALUE_FUNCS:
        warnings.append(method_not_supported(aggregate_with))
        return ([], warnings,)

    start = _dt_timestamp(data[0])
    end = _dt_timestamp(data[-1])
    window = (end - start) / aggregate_to
    keys = _get_keys(data[0])
    # Hoist the loop-invariant value-function lookup out of the loop.
    value_func = data_functions[aggregate_with]

    remaining = data
    for i in range(aggregate_to):
        try:
            # Index of the first point past this window's end boundary.
            last = next(j for j, a in enumerate(remaining)
                        if _dt_timestamp(a) > start + (i + 1) * window)
            current = remaining[:last]
            if not current:
                # Empty window: no point to emit for this slot.
                continue
            remaining = remaining[last:]
        except StopIteration:
            # No point lies past this boundary. The tail belongs to the
            # final window; intermediate iterations just skip ahead.
            if i == (aggregate_to - 1):
                current = remaining
            else:
                continue

        aggregated_point = dict(
            timestamp=timestamp_as_str(
                _as_naive_utc(
                    start + i * window)))
        for key in keys:
            # Best-effort per key: a missing key or a failing value
            # function yields None rather than aborting the aggregation.
            try:
                aggregated_point[key] = value_func(
                    current, key, None).get('payload').get('value')
            except Exception:
                # Was `except BaseException`, which also swallowed
                # KeyboardInterrupt/SystemExit; Exception keeps the
                # best-effort behavior without trapping exits.
                aggregated_point[key] = None
        aggregated.append(aggregated_point)

    return (aggregated, warnings,)