def get_locations(self, filter_id, parameter_id, cache_timeout=CACHE_TIMEOUT):
    """Return named locations for a filter/parameter pair from the jdbc source.

    Each element of the returned list is a dict of the form:
    {'location': <location name>, 'longitude': <longitude>,
     'latitude': <latitude>, 'locationid': <location id>}

    Results are cached per (filter_id, parameter_id); cache_timeout
    overrides the cache duration, in seconds.
    """
    cache_key = '%s::%s::%s' % (LOCATION_CACHE_KEY, filter_id, parameter_id)
    cached = cache.get(cache_key)
    if cached is not None:
        return cached
    # NOTE(review): the query is built by string interpolation; this is
    # only safe as long as filter_id/parameter_id come from trusted
    # sources (the jdbc bridge accepts a plain query string).
    sql = ("select longitude, latitude, location, locationid "
           "from filters where id='%s' and parameterid='%s'"
           % (filter_id, parameter_id))
    named_locations = named_list(
        self.query(sql),
        ['longitude', 'latitude', 'location', 'locationid'])
    cache.set(cache_key, named_locations, cache_timeout)
    return named_locations
def get_timeseries(self, filter_id, location_id, parameter_id,
                   start_date, end_date):
    """Return timeseries rows for one filter/location/parameter combination.

    Issues a query like:

        SELECT TIME,VALUE,FLAG,DETECTION,COMMENT from ExTimeSeries
        WHERE filterId = 'MFPS' AND parameterId = 'H.meting'
        AND locationId = 'BW_NZ_04'
        AND time BETWEEN '2007-01-01 13:00:00' AND '2008-01-10 13:00:00'

    Apparently only used by the API.
    """
    sql_template = ("select time, value, flag, detection, comment from "
                    "extimeseries where filterid='%s' and locationid='%s' "
                    "and parameterid='%s' and time between '%s' and '%s'")
    sql = sql_template % (filter_id, location_id, parameter_id,
                          start_date.strftime(JDBC_DATE_FORMAT),
                          end_date.strftime(JDBC_DATE_FORMAT))
    rows = named_list(
        self.query(sql),
        ['time', 'value', 'flag', 'detection', 'comment'])
    for row in rows:
        # The server reports dateTime.iso8601 in a mixed format (basic
        # date + extended time) with time zone indication (Z = UTC),
        # e.g. 20110828T00:00:00Z.  Insert the missing date dashes so
        # iso8601 can parse it.
        raw = row['time'].value
        row['time'] = iso8601.parse_date(
            '-'.join([raw[0:4], raw[4:6], raw[6:]]))
    return rows
def test_named_list(self):
    """named_list should zip each row with the given names into a dict."""
    names = ['name1', 'name2', 'name3', 'name4']
    rows = [['a', 'b', 'c', 'd'],
            ['f', 'g', 'h', 'i']]
    expected = [
        {'name1': 'a', 'name2': 'b', 'name3': 'c', 'name4': 'd'},
        {'name1': 'f', 'name2': 'g', 'name3': 'h', 'name4': 'i'},
    ]
    self.assertEqual(named_list(rows, names), expected)
def _locations(self):
    """Return named locations for the selected filterkey/parameterkey.

    Looks up cached timeseries matching this source's filter and
    parameter, then returns the distinct locations they reference as a
    named list of longitude/latitude/location/locationid dicts.
    """
    timeseries_qs = TimeseriesCache.objects.filter(
        t_filter__filterid=self.filterkey,
        t_parameter__parameterid=self.parameterkey,
        webrs_source=self.jdbc_source,
    ).distinct('t_location__id')
    location_qs = LocationCache.objects.filter(
        id__in=timeseries_qs.values_list('t_location__id'))
    rows = [location.location_as_list for location in location_qs]
    return named_list(
        rows, ['longitude', 'latitude', 'location', 'locationid'])
def get_filter_tree(self, url_name='lizard_fewsjdbc.jdbc_source',
                    ignore_cache=False, cache_timeout=CACHE_TIMEOUT):
    """
    Gets filter tree from Jdbc source. Also adds url per filter
    which links to url_name.

    [{'name': <name>, 'url': <url>, children: [...]}]

    url, children is optional. If url_name is set to None, no url
    property will be set in the filter tree (useful if the standard
    fewsjdbc urls don't exist, for instance when only the REST API
    is used).

    Uses cache unless ignore_cache == True.
    cache_timeout gives an alternative timeout duration for the
    cache, in seconds.
    """
    # Cache key combines url_name (URLs are baked into the cached tree)
    # and the source slug.
    filter_source_cache_key = '%s::%s::%s' % (
        url_name, FILTER_CACHE_KEY, self.slug)
    # NOTE(review): the cache lookup below is commented out, so
    # filter_tree is always None and the tree is rebuilt on every call;
    # cache_timeout and the ignore_cache check are effectively dead.
    # Presumably disabled temporarily for debugging — confirm intent.
    #filter_tree = cache.get(filter_source_cache_key)
    filter_tree = None
    if filter_tree is None or ignore_cache:
        # Building up the fews filter tree.
        if self.usecustomfilter:
            # Custom filters come from the model, not from the jdbc
            # source; the tree root is implicit (None).
            named_filters = self._customfilter
            root_parent = None
        else:
            try:
                filters = self.query(
                    "select distinct id, name, parentid from filters;")
            except FewsJdbcNotAvailableError, e:
                # Bridge server down: return a one-node error tree
                # instead of raising.
                return [{'name': 'Jdbc2Ei server not available.',
                         'error': e}]
            except FewsJdbcQueryError, e:
                logger.error("JdbcSource returned an error: %s" % e)
                return [{'name': 'Jdbc data source not available.',
                         'error code': e}]
            # Deduplicate rows, then turn them into dicts keyed by
            # id/name/parentid.
            unique_filters = unique_list(filters)
            named_filters = named_list(unique_filters,
                                       ['id', 'name', 'parentid'])
            # Tree root: configured override, otherwise the jdbc "no
            # parent" sentinel.
            if self.filter_tree_root:
                root_parent = self.filter_tree_root
            else:
                root_parent = JDBC_NONE
    # NOTE(review): this chunk appears truncated — no cache.set and no
    # return of the built tree are visible here; the remainder of the
    # function presumably lives outside this view.
def get_named_parameters(self, filter_id, ignore_cache=False,
                         find_lowest=True,
                         url_name='lizard_fewsjdbc.jdbc_source',
                         cache_timeout=CACHE_TIMEOUT):
    """
    Get named parameters given filter_id: [{'name': <filter>,
    'parameterid': <parameterid1>, 'parameter': <parameter1>}, ...]

    The parameters are parameters from the lowest filter below given
    filter_id.

    If find_lowest is True, then this function first searches for
    all the leaf filter nodes below this one, and then returns the
    parameters of those. If find_lowest is set to False (for
    instance because filter_id is already known to be a leaf), only
    parameters directly connected to this filter are returned.

    Uses cache unless ignore_cache == True.
    cache_timeout gives an alternative timeout duration for the
    cache, in seconds.
    """
    parameter_cache_key = ('%s::%s::%s' % (FILTER_CACHE_KEY, self.slug,
                                           str(filter_id)))
    named_parameters = cache.get(parameter_cache_key)
    if ignore_cache or named_parameters is None:
        # Only determine the relevant filters on a cache miss:
        # get_filter_tree() is expensive, and the original code called
        # it unconditionally, defeating the cache.
        if find_lowest:
            filter_names = lowest_filters(
                filter_id, self.get_filter_tree(url_name=url_name))
        else:
            filter_names = (filter_id,)
        filter_query = " or ".join(
            ["id='%s'" % filter_name for filter_name in filter_names])
        parameter_result = self.query(
            ("select name, parameterid, parameter, id "
             "from filters where %s" % filter_query))
        # Deduplicate, then name the columns.
        unique_parameters = unique_list(parameter_result)
        named_parameters = named_list(
            unique_parameters,
            ['filter_name', 'parameterid', 'parameter', 'filter_id'])
        cache.set(parameter_cache_key, named_parameters, cache_timeout)
    return named_parameters
def get_timeseries(self, filter_id, location_id, parameter_id,
                   start_date, end_date):
    """
    SELECT TIME,VALUE,FLAG,DETECTION,COMMENT from ExTimeSeries
    WHERE filterId = 'MFPS' AND parameterId = 'H.meting'
    AND locationId = 'BW_NZ_04'
    AND time BETWEEN '2007-01-01 13:00:00' AND '2008-01-10 13:00:00'

    Apparently only used by the API.
    """
    # Query built by string interpolation (the jdbc bridge takes a plain
    # query string); dates are formatted with the source's date format.
    q = ("select time, value, flag, detection, comment from "
         "extimeseries where filterid='%s' and locationid='%s' "
         "and parameterid='%s' and time between '%s' and '%s'" %
         (filter_id, location_id, parameter_id,
          start_date.strftime(JDBC_DATE_FORMAT),
          end_date.strftime(JDBC_DATE_FORMAT)))
    query_result = self.query(q)
    result = named_list(
        query_result,
        ['time', 'value', 'flag', 'detection', 'comment'])
    # Hoisted out of the loop: the optional timezone override for this
    # source.
    timezone = self.timezone
    for row in result:
        # Expecting dateTime.iso8601 in a mixed format (basic date +
        # extended time) with time zone indication (Z = UTC),
        # for example: 20110828T00:00:00Z.
        # row['time'].value suggests an xmlrpc DateTime-like wrapper —
        # confirm against the transport layer.
        date_time = row['time'].value
        # Insert the date dashes (20110828... -> 2011-08-28...) so the
        # iso8601 parser accepts the string.
        date_time_adjusted = '%s-%s-%s' % (
            date_time[0:4], date_time[4:6], date_time[6:])
        row['time'] = iso8601.parse_date(date_time_adjusted)
        if timezone:
            # Bit of a hack. This is used when the timezone FEWS reported
            # (usually UTC) is incorrect, and allows overriding it.
            # Note: this RELABELS the wall-clock time with the new
            # tzinfo (no conversion), and drops any microseconds —
            # presumably acceptable for whole-second FEWS data; confirm.
            t = row['time']
            row['time'] = datetime.datetime(
                year=t.year, month=t.month, day=t.day,
                hour=t.hour, minute=t.minute, second=t.second,
                tzinfo=timezone)
    return result