def cached_time_series(identifier, start, end):
    """Return the time series for *identifier* between *start* and *end*.

    Results are stored in (and served from) the shared cache. On a cache
    miss the series is fetched from the FEWS norm source named by
    ``identifier['fews_norm_source_slug']``.

    Parameters:
        identifier: dict with at least ``fews_norm_source_slug``; optional
            keys ``geo_ident``/``par_ident``/``mod_ident``/``stp_ident``/
            ``qua_ident`` narrow the query.
        start, end: period boundaries, passed through to
            ``Event.time_series`` — TODO confirm expected type (datetime?).

    Returns:
        Whatever ``Event.time_series`` returns (opaque here).
    """
    import hashlib

    def time_series_key(identifier, start, end):
        # BUGFIX: the original used str(hash(...)) of the key string.
        # Python 3 randomizes string hashes per process (PYTHONHASHSEED),
        # so two workers would compute different keys for the same series
        # and a shared cache could never produce a hit across processes.
        # An md5 hexdigest of the same string is stable everywhere.
        raw = ('ts::%s::%s:%s' % (
            str(identifier), start, end)).replace(' ', '_')
        return hashlib.md5(raw.encode('utf-8')).hexdigest()

    cache_key = time_series_key(identifier, start, end)
    ts = cache.get(cache_key)
    if ts is None:
        # Cache miss: actually fetch the time series.
        source_name = identifier['fews_norm_source_slug']
        source = FewsNormSource.objects.get(slug=source_name)
        params = {}
        if 'geo_ident' in identifier:
            params['location'] = identifier['geo_ident']
        if 'par_ident' in identifier:
            params['parameter'] = identifier['par_ident']
        if 'mod_ident' in identifier:
            params['moduleinstance'] = identifier['mod_ident']
        if 'stp_ident' in identifier:
            params['timestep'] = identifier['stp_ident']
        # qua_ident is only applied when truthy (empty qualifier sets are
        # deliberately skipped, unlike the other filters).
        if 'qua_ident' in identifier and identifier['qua_ident']:
            params['qualifierset'] = identifier['qua_ident']
        series = Series.from_raw(
            schema_prefix=source.database_schema_name,
            params=params).using(source.database_name)
        ts = Event.time_series(source, series, start, end)
        cache.set(cache_key, ts)
    return ts
def series(self):
    """Return the Series corresponding with this object.

    Builds the query parameters from ``series_params()`` and resolves the
    raw series against this object's FEWS norm source database.
    """
    query_params = self.series_params()
    norm_source = self.fews_norm_source
    raw_series = Series.from_raw(
        schema_prefix=norm_source.database_schema_name,
        params=query_params)
    return raw_series.using(norm_source.database_name)
def get(self, request):
    """Handle a GET request for the latest events before a deadline.

    Expects two query parameters:
        timestamp: ISO-like stamp ``%Y-%m-%dT%H:%M:%S`` — the deadline.
        keys: comma-separated ``location:parameter`` pairs.

    Returns a list of
    ``(location_id, parameter_id, timestamp, value, flag, comment)``
    tuples, or ``None`` when no series match the given pairs.
    """
    deadline = datetime.strptime(
        request.GET['timestamp'], "%Y-%m-%dT%H:%M:%S")
    pairs = [tuple(key.split(':'))
             for key in request.GET['keys'].split(',')]
    matching_series = Series.from_lppairs(pairs)
    if not matching_series:
        # No series matched the requested location/parameter pairs.
        return None
    events = Event.filter_latest_before_deadline(matching_series, deadline)
    return [
        (event.series.location.id,
         event.series.parameter.id,
         event.timestamp,
         event.value,
         event.flag,
         event.comment)
        for event in events
    ]