def time_series_aggregated(
        self, aggregation, aggregation_period, dt_start=None, dt_end=None):
    """
    Return aggregated time series.

    Result is a dictionary of timeseries. The keys are
    (location, parameter, option), where option is one of
    (GraphItemMixin.)TIME_SERIES_ALL, TIME_SERIES_POSITIVE or
    TIME_SERIES_NEGATIVE.

    aggregation: see PredefinedGraph, 'avg' or 'sum'.
    aggregation_period: see PredefinedGraph, 'day', 'month',
    'quarter' or 'year'.
    """
    source = self.fews_norm_source
    result = {}
    for single_series in self.series():
        obj = {
            GraphItemMixin.TIME_SERIES_ALL: timeseries.TimeSeries(),
            GraphItemMixin.TIME_SERIES_POSITIVE: timeseries.TimeSeries(),
            GraphItemMixin.TIME_SERIES_NEGATIVE: timeseries.TimeSeries()}
        # Fetch events aggregated in the database between dt_start and
        # dt_end. aggregation == PredefinedGraph.AGGREGATION_AVG /
        # AGGREGATION_SUM; aggregation_period == PredefinedGraph.
        # PERIOD_YEAR / PERIOD_MONTH / PERIOD_QUARTER / PERIOD_DAY.
        events = Event.agg_from_raw(
            single_series, dt_start=dt_start, dt_end=dt_end,
            schema_prefix=source.database_schema_name,
            agg_function=aggregation,
            agg_period=aggregation_period).using(source.database_name)
        # Distribute the events over the TimeSeries objects: every
        # event goes into ALL, and additionally into POSITIVE
        # (value >= 0) or NEGATIVE (value < 0).
        for event in events:
            data = (event.value, event.flag, event.comment)
            obj[GraphItemMixin.TIME_SERIES_ALL][event.timestamp] = data
            if event.value >= 0:
                signed_option = GraphItemMixin.TIME_SERIES_POSITIVE
            else:
                signed_option = GraphItemMixin.TIME_SERIES_NEGATIVE
            obj[signed_option][event.timestamp] = data
        # Annotate each timeseries with its metadata and collect it.
        # .items() avoids a dict lookup per attribute assignment.
        for option, single_ts in obj.items():
            single_ts.location_id = single_series.location
            single_ts.parameter_id = single_series.parameter
            single_ts.time_step = single_series.timestep
            single_ts.units = single_series.unit
            result[
                single_series.location, single_series.parameter,
                option] = single_ts
    return result
def cached_time_series(identifier, start, end):
    """
    Return the time series for ``identifier`` between ``start`` and
    ``end``, caching the result.

    ``identifier`` is a dict that must contain
    'fews_norm_source_slug' and may contain 'geo_ident', 'par_ident',
    'mod_ident', 'stp_ident' and 'qua_ident' to narrow down the
    series selection.
    """
    def time_series_key(identifier, start, end):
        # Use a stable digest instead of the builtin hash(): string
        # hashing is randomized per process (PYTHONHASHSEED), so
        # hash() would give every worker process its own cache
        # namespace and the shared cache would effectively never hit.
        import hashlib
        raw = ('ts::%s::%s:%s' % (
            str(identifier), start, end)).replace(' ', '_')
        return hashlib.md5(raw.encode('utf-8')).hexdigest()

    cache_key = time_series_key(identifier, start, end)
    ts = cache.get(cache_key)
    if ts is None:
        # Cache miss: actually fetch the time series.
        source = FewsNormSource.objects.get(
            slug=identifier['fews_norm_source_slug'])
        # Map optional identifier keys onto Series filter params;
        # only keys present in the identifier are used.
        key_map = (
            ('geo_ident', 'location'),
            ('par_ident', 'parameter'),
            ('mod_ident', 'moduleinstance'),
            ('stp_ident', 'timestep'))
        params = {
            param: identifier[key]
            for key, param in key_map if key in identifier}
        # qualifierset is only filtered on when it is non-empty.
        if identifier.get('qua_ident'):
            params['qualifierset'] = identifier['qua_ident']
        series = Series.from_raw(
            schema_prefix=source.database_schema_name,
            params=params).using(source.database_name)
        ts = Event.time_series(source, series, start, end)
        cache.set(cache_key, ts)
    return ts
def get(self, request):
    """
    Return the latest events before a deadline for requested series.

    ``request.GET['timestamp']`` holds the deadline in
    ``%Y-%m-%dT%H:%M:%S`` format; ``request.GET['keys']`` is a
    comma-separated list of colon-separated location:parameter pairs.

    Returns a list of (location_id, parameter_id, timestamp, value,
    flag, comment) tuples, or None when no series match the pairs.
    """
    deadline = datetime.strptime(
        request.GET['timestamp'], "%Y-%m-%dT%H:%M:%S")
    pairs = [tuple(key.split(':'))
             for key in request.GET['keys'].split(',')]
    matching_series = Series.from_lppairs(pairs)
    if not matching_series:
        return None
    latest = Event.filter_latest_before_deadline(
        matching_series, deadline)
    return [(event.series.location.id, event.series.parameter.id,
             event.timestamp, event.value, event.flag, event.comment)
            for event in latest]
def time_series(
        self, series=None, dt_start=None, dt_end=None,
        with_comments=False):
    """
    Return dictionary of timeseries. Low level fewsnorm stuff.

    Keys are (location, parameter); values are timeseries objects.

    1) determine which series (defaults to all of self.series())
    2) retrieve the events for each series

    Note: with_comments is a no-op kept only for backwards
    compatibility; comments are always returned.
    """
    if not series:
        series = self.series()
    return Event.time_series(
        self.fews_norm_source, series, dt_start, dt_end)