def test_less_than_default_forecast(volttron_instance, cleanup_cache, weather,
                                    query_agent, locations, service):
    query_data = []
    cache_data = []
    identity = weather[1]
    if service == 'get_minutely_forecast':
        query_data = query_agent.vip.rpc.call(
            identity, service, locations, minutes=2).get(timeout=30)
    elif service == 'get_hourly_forecast':
        query_data = query_agent.vip.rpc.call(
            identity, service, locations, hours=2).get(timeout=30)
    elif service == 'get_daily_forecast':
        query_data = query_agent.vip.rpc.call(
            identity, service, locations, days=2).get(timeout=30)
    else:
        pytest.fail('invalid request type')
    if query_data[0].get("weather_error"):
        error = query_data[0].get("weather_error")
        if error.endswith("Remote API returned Code 403"):
            pytest.skip("API key has exceeded daily call limit")
    assert len(query_data) == len(locations)
    for record in query_data:
        assert len(record['weather_results']) == 2
    if service == 'get_minutely_forecast':
        cache_data = query_agent.vip.rpc.call(
            identity, service, locations, minutes=2).get(timeout=30)
    elif service == 'get_hourly_forecast':
        cache_data = query_agent.vip.rpc.call(
            identity, service, locations, hours=2).get(timeout=30)
    elif service == 'get_daily_forecast':
        cache_data = query_agent.vip.rpc.call(
            identity, service, locations, days=2).get(timeout=30)
    assert len(cache_data) == len(query_data)
    for x in range(0, len(cache_data)):
        query_location_data = query_data[x]
        print(query_location_data)
        cache_location_data = cache_data[x]
        print(cache_location_data)
        assert cache_location_data.get("generation_time") == \
            query_location_data.get("generation_time")
        assert cache_location_data.get("lat") == query_location_data.get("lat")
        assert cache_location_data.get("long") == \
            query_location_data.get("long")
        if cache_location_data.get("weather_results"):
            query_weather_results = query_location_data.get("weather_results")
            cache_weather_results = cache_location_data.get("weather_results")
            for y in range(0, len(query_weather_results)):
                result = query_weather_results[y]
                cache_result = cache_weather_results[y]
                query_time, oldtz = utils.process_timestamp(result[0])
                query_time = utils.format_timestamp(query_time)
                assert query_time == cache_result[0]
                for key in cache_result[1]:
                    assert cache_result[1][key] == result[1][key]

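# A minimal sketch of the `locations` fixture these forecast tests expect.
# The coordinate values are hypothetical; each result is keyed on the
# "lat"/"long" pair that the assertions above read back.
EXAMPLE_LOCATIONS = [
    {"lat": 39.7555, "long": -105.2211},
    {"lat": 46.2804, "long": -119.2752},
]
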
def _capture_log_data(self, peer, sender, bus, topic, headers, message):
    """Capture log data and submit it to be published by a historian."""
    try:
        data = self._clean_compat(sender, topic, headers, message)
    except Exception:
        return
    for point, item in data.items():
        if 'Readings' not in item or 'Units' not in item:
            _log.error("logging request for {topic} missing Readings "
                       "or Units".format(topic=topic))
            continue
        units = item['Units']
        dtype = item.get('data_type', 'float')
        tz = item.get('tz', None)
        # Initialize so the meta['tz'] check below cannot hit an unbound name.
        my_tz = None
        if dtype == 'double':
            dtype = 'float'
        meta = {'units': units, 'type': dtype}
        readings = item['Readings']
        if not isinstance(readings, list):
            readings = [(get_aware_utc_now(), readings)]
        elif isinstance(readings[0], str):
            my_ts, my_tz = process_timestamp(readings[0], topic)
            readings = [(my_ts, readings[1])]
        if tz:
            meta['tz'] = tz
        elif my_tz:
            meta['tz'] = my_tz
        # Cache the value of the normalized (timestamp, value) reading;
        # indexing readings[1] directly would fail for single readings.
        self._add_to_cache(topic, point, readings[0][1])

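# A hedged example of one log payload as parsed by _capture_log_data above.
# The point name, units, and values are hypothetical. Per the branches above,
# 'Readings' may be a bare value, a [timestamp_string, value] pair, or a list
# of (timestamp, value) tuples; 'data_type' and 'tz' are optional.
EXAMPLE_LOG_MESSAGE = {
    'OutsideAirTemperature': {
        'Readings': ['2024-01-01T00:00:00+00:00', 21.5],
        'Units': 'degreesCentigrade',
        'data_type': 'float',
        'tz': 'UTC',
    }
}
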
def _capture_data(self, peer, sender, bus, topic, headers, message, device):
    timestamp_string = headers.get(headers_mod.DATE, None)
    timestamp = get_aware_utc_now()
    if timestamp_string is not None:
        timestamp, my_tz = process_timestamp(timestamp_string, topic)
    try:
        # 2.0 agents compatibility layer makes sender == pubsub.compat so
        # we can do the proper thing when it is here
        message = self._clean_compat(sender, topic, headers, message)
    except Exception as e:
        _log.exception(e)
        return
    try:
        if isinstance(message, dict):
            values = message
        else:
            values = message[0]
    except Exception as e:
        _log.exception(e)
        return
    if topic.startswith('analysis'):
        source = 'analysis'
    else:
        source = 'scrape'
    _log.debug("Queuing {topic} from {source} for publish".format(
        topic=topic, source=source))
    for key, value in values.items():
        self._add_to_cache(device, key, value)

def capture_data(self, peer, sender, bus, topic, headers, message, device):
    if topic.startswith('analysis'):
        pass  # peer, sender, bus, topic, headers, message
    timestamp_string = headers.get(headers_mod.DATE)
    timestamp, my_tz = process_timestamp(timestamp_string)
    try:
        # 2.0 agents compatibility layer makes sender == pubsub.compat so
        # we can do the proper thing when it is here
        if sender == 'pubsub.compat':
            values = jsonapi.loads(message[0])
        else:
            values = message[0]
    except ValueError as e:
        _log.error("message for {topic} bad message string: "
                   "{message_string}".format(topic=topic,
                                             message_string=message[0]))
        return
    except IndexError as e:
        _log.error("message for {topic} missing message string".format(
            topic=topic))
        return
    except Exception as e:
        _log.error(e)
        return
    meta = {}
    try:
        # 2.0 agents compatibility layer makes sender == pubsub.compat so
        # we can do the proper thing when it is here
        if sender == 'pubsub.compat':
            meta = jsonapi.loads(message[1])
        else:
            meta = message[1]
    except ValueError as e:
        _log.warning("meta data for {topic} bad message string: "
                     "{message_string}".format(topic=topic,
                                               message_string=message[0]))
    except IndexError as e:
        _log.warning("meta data for {topic} missing message string".format(
            topic=topic))
    if topic.startswith('analysis'):
        source = 'analysis'
    else:
        source = 'scrape'
    _log.debug("Queuing {topic} from {source} for publish".format(
        topic=topic, source=source))
    for key, value in values.items():
        point_topic = device + '/' + key
        self._event_queue.put({'source': source,
                               'topic': point_topic,
                               'readings': [(timestamp, value)],
                               'meta': meta.get(key, {})})

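# A hedged sketch of the device publish that capture_data above unpacks:
# message[0] carries the point values and message[1] the per-point metadata.
# Point names and values are hypothetical, and 'Date' is assumed to be the
# value of the headers_mod.DATE key used above.
EXAMPLE_HEADERS = {'Date': '2024-01-01T00:00:00+00:00'}
EXAMPLE_MESSAGE = [
    {'OutsideAirTemperature': 21.5, 'DamperPosition': 0.75},
    {'OutsideAirTemperature': {'units': 'F', 'type': 'float'},
     'DamperPosition': {'units': '%', 'type': 'float'}},
]
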
def capture_log_data(self, peer, sender, bus, topic, headers, message):
    '''Capture log data and submit it to be published by a historian.'''
    # parts = topic.split('/')
    # location = '/'.join(reversed(parts[2:]))
    try:
        # 2.0 agents compatibility layer makes sender == pubsub.compat so
        # we can do the proper thing when it is here
        if sender == 'pubsub.compat':
            data = jsonapi.loads(message)
        else:
            data = message
    except ValueError as e:
        _log.error("message for {topic} bad message string: "
                   "{message_string}".format(topic=topic,
                                             message_string=message[0]))
        return
    except IndexError as e:
        _log.error("message for {topic} missing message string".format(
            topic=topic))
        return
    source = 'log'
    _log.debug("Queuing {topic} from {source} for publish".format(
        topic=topic, source=source))
    for point, item in data.items():
        # ts_path = location + '/' + point
        if 'Readings' not in item or 'Units' not in item:
            # Report against the topic; ts_path above is commented out.
            _log.error("logging request for {topic} missing Readings "
                       "or Units".format(topic=topic))
            continue
        units = item['Units']
        dtype = item.get('data_type', 'float')
        tz = item.get('tz', None)
        my_tz = None
        if dtype == 'double':
            dtype = 'float'
        meta = {'units': units, 'type': dtype}
        readings = item['Readings']
        if not isinstance(readings, list):
            readings = [(datetime.utcnow(), readings)]
        elif isinstance(readings[0], str):
            my_ts, my_tz = process_timestamp(readings[0])
            readings = [(my_ts, readings[1])]
        if tz:
            meta['tz'] = tz
        elif my_tz:
            meta['tz'] = my_tz
        self._event_queue.put({'source': source,
                               'topic': topic + '/' + point,
                               'readings': readings,
                               'meta': meta})

def _capture_log_data(self, peer, sender, bus, topic, headers, message):
    """Capture log data and submit it to be published by a historian."""
    # Anon the topic if necessary.
    topic = self._get_topic(topic)
    try:
        # 2.0 agents compatibility layer makes sender == pubsub.compat so
        # we can do the proper thing when it is here
        if sender == 'pubsub.compat':
            data = compat.unpack_legacy_message(headers, message)
        else:
            data = message
    except ValueError as e:
        _log.error("message for {topic} bad message string: "
                   "{message_string}".format(topic=topic,
                                             message_string=message[0]))
        return
    except IndexError as e:
        _log.error("message for {topic} missing message string".format(
            topic=topic))
        return
    source = 'log'
    _log.debug("Queuing {topic} from {source} for publish".format(
        topic=topic, source=source))
    _log.debug(data)
    for point, item in data.items():
        # ts_path = location + '/' + point
        if 'Readings' not in item or 'Units' not in item:
            _log.error("logging request for {topic} missing Readings "
                       "or Units".format(topic=topic))
            continue
        units = item['Units']
        dtype = item.get('data_type', 'float')
        tz = item.get('tz', None)
        my_tz = None
        if dtype == 'double':
            dtype = 'float'
        meta = {'units': units, 'type': dtype}
        readings = item['Readings']
        if not isinstance(readings, list):
            readings = [(get_aware_utc_now(), readings)]
        elif isinstance(readings[0], str):
            my_ts, my_tz = process_timestamp(readings[0], topic)
            readings = [(my_ts, readings[1])]
        if tz:
            meta['tz'] = tz
        elif my_tz:
            meta['tz'] = my_tz
        self._event_queue.put({'source': source,
                               'topic': topic + '/' + point,
                               'readings': readings,
                               'meta': meta})

def capture_data(self, peer, sender, bus, topic, headers, message, device):
    timestamp_string = headers.get(headers_mod.DATE)
    timestamp, my_tz = process_timestamp(timestamp_string)
    try:
        # 2.0 agents compatibility layer makes sender == pubsub.compat so
        # we can do the proper thing when it is here
        if sender == 'pubsub.compat':
            message = jsonapi.loads(message[0])
        if isinstance(message, dict):
            values = message
        else:
            values = message[0]
    except ValueError as e:
        _log.error("message for {topic} bad message string: "
                   "{message_string}".format(topic=topic,
                                             message_string=message[0]))
        return
    except IndexError as e:
        _log.error("message for {topic} missing message string".format(
            topic=topic))
        return
    except Exception as e:
        _log.exception(e)
        return
    meta = {}
    try:
        # 2.0 agents compatibility layer makes sender == pubsub.compat so
        # we can do the proper thing when it is here
        if sender == 'pubsub.compat':
            if isinstance(message[1], str):
                meta = jsonapi.loads(message[1])
        if not isinstance(message, dict):
            meta = message[1]
    except ValueError as e:
        _log.warning("meta data for {topic} bad message string: "
                     "{message_string}".format(topic=topic,
                                               message_string=message[0]))
    except IndexError as e:
        _log.warning("meta data for {topic} missing message string".format(
            topic=topic))
    if topic.startswith('analysis'):
        source = 'analysis'
    else:
        source = 'scrape'
    _log.debug("Queuing {topic} from {source} for publish".format(
        topic=topic, source=source))
    for key, value in values.items():
        point_topic = device + '/' + key
        self._event_queue.put({'source': source,
                               'topic': point_topic,
                               'readings': [(timestamp, value)],
                               'meta': meta.get(key, {})})

def _capture_log_data(self, peer, sender, bus, topic, headers, message):
    """Capture log data and submit it to be published by a historian."""
    # Anon the topic if necessary.
    topic = self._get_topic(topic)
    try:
        # 2.0 agents compatibility layer makes sender == pubsub.compat so
        # we can do the proper thing when it is here
        if sender == 'pubsub.compat':
            data = compat.unpack_legacy_message(headers, message)
        else:
            data = message
    except ValueError as e:
        _log.error("message for {topic} bad message string: "
                   "{message_string}".format(topic=topic,
                                             message_string=message[0]))
        return
    except IndexError as e:
        _log.error("message for {topic} missing message string".format(
            topic=topic))
        return
    source = 'log'
    _log.debug("Queuing {topic} from {source} for publish".format(
        topic=topic, source=source))
    for point, item in data.items():
        # ts_path = location + '/' + point
        if 'Readings' not in item or 'Units' not in item:
            _log.error("logging request for {topic} missing Readings "
                       "or Units".format(topic=topic))
            continue
        units = item['Units']
        dtype = item.get('data_type', 'float')
        tz = item.get('tz', None)
        my_tz = None
        if dtype == 'double':
            dtype = 'float'
        meta = {'units': units, 'type': dtype}
        readings = item['Readings']
        if not isinstance(readings, list):
            readings = [(get_aware_utc_now(), readings)]
        elif isinstance(readings[0], str):
            my_ts, my_tz = process_timestamp(readings[0], topic)
            readings = [(my_ts, readings[1])]
        if tz:
            meta['tz'] = tz
        elif my_tz:
            meta['tz'] = my_tz
        self._event_queue.put({'source': source,
                               'topic': topic + '/' + point,
                               'readings': readings,
                               'meta': meta})

def test_more_than_default_forecast(volttron_instance, cleanup_cache, weather,
                                    query_agent, locations, service):
    identity = weather[1]
    big_request = 0
    query_data = []
    cache_data = []
    if service == 'get_minutely_forecast':
        big_request = 61
        query_data = query_agent.vip.rpc.call(
            identity, service, locations,
            minutes=big_request).get(timeout=30)
        if big_request > 60:
            big_request = 60  # Dark Sky provides 60 minutes max.
    elif service == 'get_hourly_forecast':
        big_request = 50
        query_data = query_agent.vip.rpc.call(
            identity, service, locations, hours=big_request).get(timeout=30)
    elif service == 'get_daily_forecast':
        big_request = 9
        query_data = query_agent.vip.rpc.call(
            identity, service, locations, days=big_request).get(timeout=30)
    else:
        pytest.fail('invalid request type')
    if query_data[0].get("weather_error"):
        error = query_data[0].get("weather_error")
        if error.endswith("Remote API returned Code 403"):
            pytest.skip("API key has exceeded daily call limit")
    assert len(query_data) == len(locations)
    for record in query_data:
        assert len(record['weather_results']) == big_request
    if service == 'get_minutely_forecast':
        cache_data = query_agent.vip.rpc.call(
            identity, service, locations,
            minutes=big_request).get(timeout=30)
    elif service == 'get_hourly_forecast':
        cache_data = query_agent.vip.rpc.call(
            identity, service, locations, hours=big_request).get(timeout=30)
    elif service == 'get_daily_forecast':
        cache_data = query_agent.vip.rpc.call(
            identity, service, locations, days=big_request).get(timeout=30)
    assert len(cache_data) == len(query_data)
    print("Query data: \n {}".format(query_data))
    print("Cache data: \n {}".format(cache_data))
    # TODO: verify that we get the right forecast times
    for x in range(0, len(cache_data)):
        query_location_data = query_data[x]
        cache_location_data = cache_data[x]
        assert cache_location_data.get("generation_time") == \
            query_location_data.get("generation_time")
        assert cache_location_data.get("lat") == query_location_data.get("lat")
        assert cache_location_data.get("long") == \
            query_location_data.get("long")
        if cache_location_data.get("weather_results"):
            query_weather_results = query_location_data.get("weather_results")
            cache_weather_results = cache_location_data.get("weather_results")
            for y in range(0, len(query_weather_results)):
                result = query_weather_results[y]
                cache_result = cache_weather_results[y]
                query_time, oldtz = utils.process_timestamp(result[0])
                query_time = utils.format_timestamp(query_time)
                assert query_time == cache_result[0]
                for key in cache_result[1]:
                    assert cache_result[1][key] == result[1][key]

def _capture_data(self, peer, sender, bus, topic, headers, message, device):
    # Anon the topic if necessary.
    topic = self._get_topic(topic)
    timestamp_string = headers.get(headers_mod.DATE, None)
    timestamp = get_aware_utc_now()
    if timestamp_string is not None:
        timestamp, my_tz = process_timestamp(timestamp_string, topic)
    _log.debug("### In capture_data timestamp str {} ".format(timestamp))
    try:
        _log.debug("### In capture_data Actual message {} ".format(message))
        # 2.0 agents compatibility layer makes sender == pubsub.compat so
        # we can do the proper thing when it is here
        if sender == 'pubsub.compat':
            # message = jsonapi.loads(message[0])
            message = compat.unpack_legacy_message(headers, message)
            _log.debug("### message after compat {}".format(message))
        if isinstance(message, dict):
            values = message
        else:
            values = message[0]
    except ValueError as e:
        _log.error("message for {topic} bad message string: "
                   "{message_string}".format(topic=topic,
                                             message_string=message[0]))
        return
    except IndexError as e:
        _log.error("message for {topic} missing message string".format(
            topic=topic))
        return
    except Exception as e:
        _log.exception(e)
        return
    meta = {}
    if not isinstance(message, dict):
        meta = message[1]
    if topic.startswith('analysis'):
        source = 'analysis'
    else:
        source = 'scrape'
    _log.debug("Queuing {topic} from {source} for publish".format(
        topic=topic, source=source))
    for key, value in values.items():
        point_topic = device + '/' + key
        self._event_queue.put({'source': source,
                               'topic': point_topic,
                               'readings': [(timestamp, value)],
                               'meta': meta.get(key, {})})

def _capture_data(self, peer, sender, bus, topic, headers, message, device):
    # Anon the topic if necessary.
    topic = self._get_topic(topic)
    timestamp_string = headers.get(headers_mod.DATE, None)
    timestamp = get_aware_utc_now()
    if timestamp_string is not None:
        timestamp, my_tz = process_timestamp(timestamp_string, topic)
    try:
        _log.debug("### In capture_data Actual message {} ".format(message))
        # 2.0 agents compatibility layer makes sender == pubsub.compat so
        # we can do the proper thing when it is here
        if sender == 'pubsub.compat':
            # message = jsonapi.loads(message[0])
            message = compat.unpack_legacy_message(headers, message)
        if isinstance(message, dict):
            values = message
        else:
            values = message[0]
    except ValueError as e:
        _log.error("message for {topic} bad message string: "
                   "{message_string}".format(topic=topic,
                                             message_string=message[0]))
        return
    except IndexError as e:
        _log.error("message for {topic} missing message string".format(
            topic=topic))
        return
    except Exception as e:
        _log.exception(e)
        return
    meta = {}
    if not isinstance(message, dict):
        meta = message[1]
    if topic.startswith('analysis'):
        source = 'analysis'
    else:
        source = 'scrape'
    _log.debug("Queuing {topic} from {source} for publish".format(
        topic=topic, source=source))
    for key, value in values.items():
        point_topic = device + '/' + key
        self._event_queue.put({'source': source,
                               'topic': point_topic,
                               'readings': [(timestamp, value)],
                               'meta': meta.get(key, {})})

def _capture_record_data(self, peer, sender, bus, topic, headers, message):
    _log.debug('Capture record data {}'.format(message))
    # Anon the topic if necessary.
    topic = self._get_topic(topic)
    timestamp_string = headers.get(headers_mod.DATE, None)
    timestamp = get_aware_utc_now()
    if timestamp_string is not None:
        timestamp, my_tz = process_timestamp(timestamp_string, topic)
    if sender == 'pubsub.compat':
        message = compat.unpack_legacy_message(headers, message)
    self._event_queue.put({'source': 'record',
                           'topic': topic,
                           'readings': [(timestamp, message)],
                           'meta': {}})

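# A hedged illustration of the event _capture_record_data queues above: the
# whole message body is stored verbatim as a single reading. The topic and
# payload are hypothetical, and the timestamp is shown as a string here,
# whereas the code queues an aware datetime taken from the DATE header.
EXAMPLE_RECORD_EVENT = {
    'source': 'record',
    'topic': 'record/energy_report',
    'readings': [('2024-01-01T00:00:00+00:00', {'total_kwh': 1234.5})],
    'meta': {},
}
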
def __init__(
    self,
    dependent_variables,
    independent_variables,
    use_timestamp,
    epoch,
    epoch_span,
    model_settings=None,
    **kwargs,
):
    super(Forecast, self).__init__(**kwargs)
    if model_settings is None:
        model_settings = {}
    self.dependent_variables = dependent_variables
    self.independent_variables = independent_variables
    self.use_timestamp = use_timestamp
    self.epoch, _ = process_timestamp(epoch)
    self.epoch_span = epoch_span
    self.model = self.load_serialized_model(**model_settings)

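# A minimal sketch of constructing the Forecast model above. The variable
# names, epoch string, and model_settings keys are assumptions: epoch is
# parsed with process_timestamp, so any timestamp string it accepts works,
# and model_settings is forwarded verbatim to load_serialized_model.
forecast = Forecast(
    dependent_variables=['building_load'],
    independent_variables=['outdoor_air_temperature', 'hour_of_day'],
    use_timestamp=True,
    epoch='2024-01-01T00:00:00+00:00',
    epoch_span=3600,  # hypothetical: span of one epoch step, in seconds
    model_settings={'model_path': 'model.pkl'},  # hypothetical settings
)
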
def test_success_forecast(cleanup_cache, weather, query_agent, locations):
    """
    Tests the basic functionality of a weather agent under optimal conditions.
    :param weather: instance of weather service to be tested
    :param query_agent: agent to leverage to use RPC calls
    """
    # Unpack the agent identity as the sibling tests do.
    identity = weather[1]
    print(datetime.utcnow())
    query_data = query_agent.vip.rpc.call(identity, 'get_hourly_forecast',
                                          locations, hours=2).get(timeout=30)
    # print(query_data)
    assert len(query_data) == len(locations)
    for x in range(0, len(query_data)):
        location_data = query_data[x]
        assert (location_data.get("lat") and location_data.get("long")) or \
               (location_data.get("wfo") and location_data.get("x") and
                location_data.get("y"))
        results = location_data.get("weather_results")
        error = location_data.get("weather_error")
        if error and not results:
            if error.startswith("Remote API returned no data") \
                    or error.startswith("Remote API redirected request, but "
                                        "redirect failed") \
                    or error.startswith("Remote API returned invalid "
                                        "response") \
                    or error.startswith("API request failed with "
                                        "unexpected response"):
                assert True
            else:
                assert False
        if results:
            assert location_data.get("generation_time")
            for record in results:
                forecast_time = utils.parse_timestamp_string(record[0])
                assert isinstance(forecast_time, datetime)
    cache_data = query_agent.vip.rpc.call(identity, 'get_hourly_forecast',
                                          locations, hours=2).get(timeout=30)
    assert len(cache_data) == len(query_data)
    for x in range(0, len(cache_data)):
        query_location_data = query_data[x]
        cache_location_data = cache_data[x]
        assert cache_location_data.get("generation_time") == \
            query_location_data.get("generation_time")
        if cache_location_data.get("lat") and cache_location_data.get("long"):
            assert cache_location_data.get("lat") == \
                query_location_data.get("lat")
            assert cache_location_data.get("long") == \
                query_location_data.get("long")
        elif cache_location_data.get("wfo") and cache_location_data.get("x") \
                and cache_location_data.get("y"):
            assert cache_location_data.get("wfo") == \
                query_location_data.get("wfo")
            assert cache_location_data.get("x") == query_location_data.get("x")
            assert cache_location_data.get("y") == query_location_data.get("y")
        else:
            assert False
        if cache_location_data.get("weather_results"):
            query_weather_results = query_location_data.get("weather_results")
            cache_weather_results = cache_location_data.get("weather_results")
            for y in range(0, len(query_weather_results)):
                result = query_weather_results[y]
                cache_result = cache_weather_results[y]
                query_time, oldtz = utils.process_timestamp(result[0])
                query_time = utils.format_timestamp(query_time)
                assert query_time == cache_result[0]
                for key in cache_result[1]:
                    assert cache_result[1][key] == result[1][key]
        else:
            results = cache_location_data.get("weather_error")
            if results.startswith("Remote API returned no data") \
                    or results.startswith("Remote API redirected request, "
                                          "but redirect failed") \
                    or results.startswith("Remote API returned invalid "
                                          "response") \
                    or results.startswith("API request failed with "
                                          "unexpected response"):
                assert True
            else:
                assert False

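# Hedged examples of the two location shapes the test above accepts in each
# result record: a lat/long pair, or a weather.gov-style wfo/x/y grid point.
# All values are hypothetical.
EXAMPLE_LATLONG_KEYS = {"lat": 39.7555, "long": -105.2211}
EXAMPLE_GRID_KEYS = {"wfo": "BOU", "x": 54, "y": 62}
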
def test_success_forecast(volttron_instance, cleanup_cache, weather,
                          query_agent, locations, service):
    weather_uuid = weather[0]
    identity = weather[1]
    version = query_agent.vip.rpc.call(identity, 'get_version').get(timeout=3)
    cwd = volttron_instance.volttron_home
    database_file = "/".join([cwd, "agents", weather_uuid,
                              "darkskyagent-" + version,
                              "darkskyagent-" + version + ".agent-data",
                              "weather.sqlite"])
    sqlite_connection = sqlite3.connect(database_file)
    cursor = sqlite_connection.cursor()
    api_calls_query = 'SELECT COUNT(*) FROM API_CALLS'
    cursor.execute(api_calls_query)
    current_api_calls = cursor.fetchone()[0]
    query_data = []
    if service == "get_minutely_forecast":
        query_data = query_agent.vip.rpc.call(identity, service,
                                              locations).get(timeout=30)
    if service == "get_hourly_forecast":
        query_data = query_agent.vip.rpc.call(identity, service,
                                              locations).get(timeout=30)
    if service == "get_daily_forecast":
        query_data = query_agent.vip.rpc.call(identity, service,
                                              locations).get(timeout=30)
    if query_data[0].get("weather_error"):
        error = query_data[0].get("weather_error")
        if error.endswith("Remote API returned Code 403"):
            pytest.skip("API key has exceeded daily call limit")
    cursor.execute(api_calls_query)
    new_api_calls = cursor.fetchone()[0]
    # For daily forecast, when the request time is on the same day but an
    # earlier hour than the first forecast, the agent discards the forecast
    # entry for the current day and makes a second call for the 8th day.
    if service == "get_daily_forecast":
        number = current_api_calls + len(locations)
        assert new_api_calls == number or new_api_calls == number + 1
    else:
        assert new_api_calls == current_api_calls + len(locations)
    current_api_calls = new_api_calls
    services = {"get_minutely_forecast": 60,
                "get_hourly_forecast": 48,
                "get_current_weather": 1,
                "get_daily_forecast": 7}
    for service_name, records_amount in services.items():
        query = 'SELECT COUNT(*) FROM {service}'.format(service=service_name)
        print(query)
        cursor.execute(query)
        num_records = cursor.fetchone()[0]
        if service_name == service:
            assert num_records == records_amount * len(locations)
        else:
            if identity == 'platform.darksky_perf':
                assert num_records == 0
            else:
                assert num_records == records_amount * len(locations)
    assert len(query_data) == len(locations)
    for x in range(0, len(query_data)):
        location_data = query_data[x]
        assert location_data.get("lat") and location_data.get("long")
        results = location_data.get("weather_results")
        error = location_data.get("weather_error")
        if error and not results:
            if error.startswith("Remote API returned no data") \
                    or error.startswith("Remote API redirected request, "
                                        "but redirect failed") \
                    or error.startswith("Remote API returned invalid "
                                        "response") \
                    or error.startswith("API request failed with "
                                        "unexpected response"):
                assert True
            else:
                assert False
        if results:
            assert location_data.get("generation_time")
            for record in results:
                forecast_time = utils.parse_timestamp_string(record[0])
                assert isinstance(forecast_time, datetime)
                if not service == "get_minutely_forecast":
                    assert 'summary' in record[1]
                else:
                    assert 'summary' not in record[1]
                assert record[1]["attribution"] == "Powered by Dark Sky"
    cache_data = []
    # default quantity
    if service == 'get_minutely_forecast':
        cache_data = query_agent.vip.rpc.call(identity, service,
                                              locations).get(timeout=30)
    if service == 'get_hourly_forecast':
        cache_data = query_agent.vip.rpc.call(identity, service,
                                              locations).get(timeout=30)
    if service == 'get_daily_forecast':
        cache_data = query_agent.vip.rpc.call(identity, service,
                                              locations).get(timeout=30)
    cursor.execute(api_calls_query)
    new_api_calls = cursor.fetchone()[0]
    assert new_api_calls == current_api_calls
    assert len(cache_data) == len(query_data)
    for x in range(0, len(cache_data)):
        query_location_data = query_data[x]
        print(query_location_data)
        cache_location_data = cache_data[x]
        print(cache_location_data)
        assert cache_location_data.get("generation_time") == \
            query_location_data.get("generation_time")
        assert cache_location_data.get("lat") == query_location_data.get("lat")
        assert cache_location_data.get("long") == \
            query_location_data.get("long")
        if cache_location_data.get("weather_results"):
            query_weather_results = query_location_data.get("weather_results")
            cache_weather_results = cache_location_data.get("weather_results")
            for y in range(0, len(query_weather_results)):
                result = query_weather_results[y]
                cache_result = cache_weather_results[y]
                query_time, oldtz = utils.process_timestamp(result[0])
                query_time = utils.format_timestamp(query_time)
                assert query_time == cache_result[0]
                for key in cache_result[1]:
                    assert cache_result[1][key] == result[1][key]
        else:
            results = cache_location_data.get("weather_error")
            if results.startswith("Remote API returned no data") \
                    or results.startswith("Remote API redirected request, "
                                          "but redirect failed") \
                    or results.startswith("Remote API returned invalid "
                                          "response") \
                    or results.startswith("API request failed with "
                                          "unexpected response"):
                assert True
            else:
                assert False
    for service_name, records_amount in services.items():
        if not service_name == service:
            query = 'SELECT COUNT(*) FROM {service}'.format(
                service=service_name)
            cursor.execute(query)
            num_records = cursor.fetchone()[0]
            if identity == 'platform.darksky_perf':
                assert num_records == 0
            else:
                assert num_records == records_amount * len(locations)