def latest_obs_and_forecast(site_id):
    """Return {'site': ..., 'observation': ...} pairing the latest observation
    with the forecasts issued for the same timestep.

    Results are cached for one hour in the 'site_latest' memcache namespace.
    Returns None for an unknown site, or when no observation/forecast pair
    can be matched.
    """
    # Fix: pass the namespace by keyword so the get mirrors the set below
    # (previously it was passed positionally as the second argument).
    result = memcache.get(site_id, namespace="site_latest")
    if result:
        return result
    site = Site.get_by_key_name(site_id)
    if site is None:
        return None
    obs = ObservationTimestep.find_latest_by_site(site, limit=6)
    result = None
    if obs:  # truthiness instead of len(obs) > 0
        forecasts = ForecastTimestep.find_by_site_closest_by_date(
            site, first(obs).observation_datetime, limit=50)
        closest_forecast = first(forecasts)
        if closest_forecast:
            # The observation whose timestamp equals the closest forecast's.
            matching_obs = first(filter(
                lambda o: o.observation_datetime == closest_forecast.forecast_datetime,
                obs))
            # Every forecast issued for that same timestamp (lazy iterator).
            matching_forecasts = ifilter(
                lambda f: f.forecast_datetime == closest_forecast.forecast_datetime,
                forecasts)
            if matching_obs:
                # finally have both... a single obs report and multiple forecasts
                obs_dict = to_dict_excl_sites(matching_obs)
                obs_dict['best_forecast'] = map(
                    to_dict_excl_sites, make_five_day_list(matching_forecasts))
                result = {'site': site.to_dict(), 'observation': obs_dict}
    memcache.set(site_id, result, 60 * 60, namespace='site_latest')
    return result
def latest_obs_and_forecast(site_id):
    """Latest observation for a site plus the forecasts made for the same
    timestep; memoised for an hour under the 'site_latest' namespace."""
    cached = memcache.get(site_id, "site_latest")
    if cached:
        return cached
    site = Site.get_by_key_name(site_id)
    if site is None:
        return None
    recent_obs = ObservationTimestep.find_latest_by_site(site, limit=6)
    result = None
    if recent_obs:
        candidates = ForecastTimestep.find_by_site_closest_by_date(
            site, first(recent_obs).observation_datetime, limit=50)
        closest = first(candidates)
        if closest:
            target_dt = closest.forecast_datetime
            # Observation recorded at exactly the closest forecast's timestamp.
            matched_ob = first(
                [o for o in recent_obs if o.observation_datetime == target_dt])
            # Lazily select every forecast issued for that timestamp.
            matched_forecasts = ifilter(
                lambda f: f.forecast_datetime == target_dt, candidates)
            if matched_ob:
                # One observation report joined with its multiple forecasts.
                ob_dict = to_dict_excl_sites(matched_ob)
                ob_dict['best_forecast'] = map(
                    to_dict_excl_sites, make_five_day_list(matched_forecasts))
                result = {'site': site.to_dict(), 'observation': ob_dict}
    memcache.set(site_id, result, 60 * 60, namespace='site_latest')
    return result
def forecast_update(site_key):
    """Fetch the Met Office best-forecast feed for a site and persist it as
    per-day ForecastDay entities; 404 for an unknown site, otherwise 204."""
    site = Site.get_by_key_name(site_key)
    if site is None:
        return Response(status=404)
    forecast_url = "http://www.metoffice.gov.uk/public/data/PWSCache/BestForecast/Forecast/%s?format=application/json" % site_key
    result = urlfetch.fetch(forecast_url)
    if result.status_code == 200:
        forecast = parse_forecast(result.content)
        issued_date = parse_date(forecast["@dataDate"])
        for date, day in days(forecast):
            key_name = make_key_name(site, date)
            forecast_day = ForecastDay.get_by_key_name(key_name)
            if forecast_day is None:
                forecast_day = ForecastDay(key_name=key_name,
                                           forecast_date=date,
                                           site=site)
            forecast_day.site = site
            for timestep, data in day_timesteps(day):
                w = Forecast()
                w.issued = issued_date
                # Copy over only the feed fields the model actually declares.
                for k, v in data.items():
                    prop_name = snake_case(k)
                    if hasattr(w, prop_name):
                        setattr(w, prop_name, None if v == "missing" else v)
                forecast_day.forecasts.add(timestep, w)
            forecast_day.save()
        site.save()
    return Response(status=204)
def forecast_update2(site_key):
    """Fetch the best-forecast feed for a site and upsert each timestep as an
    individual ForecastTimestep entity; 404 for an unknown site, else 204."""
    site = Site.get_by_key_name(site_key)
    if site is None:
        return Response(status=404)
    forecast_url = "http://www.metoffice.gov.uk/public/data/PWSCache/BestForecast/Forecast/%s?format=application/json" % site_key
    result = urlfetch.fetch(forecast_url)
    if result.status_code == 200:
        forecast = parse_forecast(result.content)
        issued_date = parse_date(forecast["@dataDate"])
        for dt, values in timesteps(forecast):
            entity = ForecastTimestep.find_by_site_and_dates(site, dt, issued_date)
            if entity is None:
                entity = ForecastTimestep(site=site,
                                          forecast_datetime=dt,
                                          issued_datetime=issued_date,
                                          forecast_date=dt.date())
            # Only set feed fields that exist on the model; "missing" -> None.
            for key, value in values.items():
                attr = snake_case(key)
                if hasattr(entity, attr):
                    setattr(entity, attr, None if value == "missing" else value)
            entity.save()
    return Response(status=204)
def observation_update2(site_key):
    """Fetch current observations for a site and upsert them as individual
    ObservationTimestep entities; 404 for an unknown site, otherwise 204."""
    site = Site.get_by_key_name(site_key)
    if site is None:
        return Response(status=404)
    url = "http://www.metoffice.gov.uk/public/data/PWSCache/BestForecast/Observation/%s?format=application/json" % site_key
    result = urlfetch.fetch(url)
    if result.status_code == 200:
        observations = parse_observation(result.content)
        for dt, values in timesteps(observations):
            entity = ObservationTimestep.get_by_site_and_datetime(site, dt)
            if entity is None:
                entity = ObservationTimestep(site=site,
                                             observation_datetime=dt,
                                             observation_date=dt.date())
            for key, value in values.items():
                attr = snake_case(key)
                if hasattr(entity, attr):
                    if value == "missing":
                        value = None
                    elif attr == 'temperature':
                        # Feed values arrive as strings; temperature is numeric.
                        value = float(value)
                    setattr(entity, attr, value)
            entity.save()
    return Response(status=204)
def observation_update2(site_key):
    """Pull the site's observation feed and store each timestep as its own
    ObservationTimestep record (404 when the site id is unknown)."""
    site = Site.get_by_key_name(site_key)
    if site is None:
        return Response(status=404)
    url = "http://www.metoffice.gov.uk/public/data/PWSCache/BestForecast/Observation/%s?format=application/json" % site_key
    fetched = urlfetch.fetch(url)
    if fetched.status_code == 200:
        parsed = parse_observation(fetched.content)
        for when, fields in timesteps(parsed):
            record = ObservationTimestep.get_by_site_and_datetime(site, when)
            if record is None:
                record = ObservationTimestep(site=site,
                                             observation_datetime=when,
                                             observation_date=when.date())
            for raw_name, raw_value in fields.items():
                name = snake_case(raw_name)
                if not hasattr(record, name):
                    continue
                if raw_value == "missing":
                    raw_value = None
                elif name == 'temperature':
                    # temperature comes through as a string in the feed
                    raw_value = float(raw_value)
                setattr(record, name, raw_value)
            record.save()
    return Response(status=204)
def forecast_update(site_key):
    """Refresh ForecastDay entities for a site from the Met Office feed.
    Returns 404 when the site is unknown, 204 otherwise."""
    site = Site.get_by_key_name(site_key)
    if site is None:
        return Response(status=404)
    forecast_url = "http://www.metoffice.gov.uk/public/data/PWSCache/BestForecast/Forecast/%s?format=application/json" % site_key
    fetched = urlfetch.fetch(forecast_url)
    if fetched.status_code == 200:
        payload = parse_forecast(fetched.content)
        issued = parse_date(payload["@dataDate"])
        for day_date, day_data in days(payload):
            name = make_key_name(site, day_date)
            day_entity = ForecastDay.get_by_key_name(name)
            if day_entity is None:
                day_entity = ForecastDay(key_name=name,
                                         forecast_date=day_date,
                                         site=site)
            day_entity.site = site
            for step, fields in day_timesteps(day_data):
                weather = Forecast()
                weather.issued = issued
                for raw_name, raw_value in fields.items():
                    attr = snake_case(raw_name)
                    if hasattr(weather, attr):
                        # "missing" sentinel from the feed maps to None.
                        if raw_value == "missing":
                            raw_value = None
                        setattr(weather, attr, raw_value)
                day_entity.forecasts.add(step, weather)
            day_entity.save()
        site.save()
    return Response(status=204)
def forecast_update2(site_key):
    """Store each forecast timestep from the Met Office feed as its own
    ForecastTimestep entity (404 when the site id is unknown)."""
    site = Site.get_by_key_name(site_key)
    if site is None:
        return Response(status=404)
    forecast_url = "http://www.metoffice.gov.uk/public/data/PWSCache/BestForecast/Forecast/%s?format=application/json" % site_key
    fetched = urlfetch.fetch(forecast_url)
    if fetched.status_code == 200:
        payload = parse_forecast(fetched.content)
        issued = parse_date(payload["@dataDate"])
        for when, fields in timesteps(payload):
            record = ForecastTimestep.find_by_site_and_dates(site, when, issued)
            if record is None:
                record = ForecastTimestep(site=site,
                                          forecast_datetime=when,
                                          issued_datetime=issued,
                                          forecast_date=when.date())
            for raw_name, raw_value in fields.items():
                name = snake_case(raw_name)
                if not hasattr(record, name):
                    continue
                # The feed uses the literal string "missing" for absent data.
                setattr(record, name, None if raw_value == "missing" else raw_value)
            record.save()
    return Response(status=204)
def _tx():
    """Upsert the Site described by the enclosing `loc` dict (closure
    variable): create the entity if absent, then refresh its location,
    name and region before saving."""
    entity = Site.get_by_key_name(loc["id"])
    if entity is None:
        entity = Site(key_name=loc["id"])
    coords = loc["location"]
    entity.location = GeoPt(lat=coords[0], lon=coords[1])
    entity.name = loc["name"]
    entity.region = loc["region"]
    entity.save()
def observation_update(site_key):
    """Fetch current observations for a site and merge them into per-day
    ObservationDay entities; 404 for an unknown site, otherwise 204."""
    site = Site.get_by_key_name(site_key)
    if site is None:
        return Response(status=404)
    url = "http://www.metoffice.gov.uk/public/data/PWSCache/BestForecast/Observation/%s?format=application/json" % site_key
    obs = {}

    def get_db_observation(dt):
        # One ObservationDay per (site, calendar day); memoised in `obs`
        # so repeated timesteps within a day share the same entity.
        key_name = make_key_name(site, dt.date())
        if key_name in obs:
            return obs[key_name]
        day = ObservationDay.get_by_key_name(key_name)
        if day is None:
            day = ObservationDay(key_name=key_name)
            day.site = site
            day.observation_date = dt.date()
            day.observations = Observations()
        obs[key_name] = day
        return day

    fetched = urlfetch.fetch(url)
    if fetched.status_code == 200:
        observations = parse_observation(fetched.content)
        issue_date = parse_date(observations['@issueDate'])
        site.last_obs_issue_datetime = issue_date
        site.last_obs_update_datetime = datetime.now()
        for when, fields in timesteps(observations):
            day = get_db_observation(when)
            day.lastdata_datetime = issue_date
            w = Weather({})
            for raw_name, raw_value in fields.items():
                attr = snake_case(raw_name)
                if hasattr(w, attr):
                    if raw_value == "missing":
                        raw_value = None
                    elif attr == 'temperature':
                        # temperature arrives as a string; store it numeric
                        raw_value = float(raw_value)
                    setattr(w, attr, raw_value)
            day.observations.add(when, w)
        # Persist every day entity that was touched, then the site itself.
        for day in obs.values():
            day.save()
        site.save()
    return Response(status=204)
def observation_update(site_key):
    """Pull the site's observation feed and fold each timestep into the
    matching ObservationDay entity (404 when the site id is unknown)."""
    site = Site.get_by_key_name(site_key)
    if site is None:
        return Response(status=404)
    url = "http://www.metoffice.gov.uk/public/data/PWSCache/BestForecast/Observation/%s?format=application/json" % site_key
    obs = {}

    def get_db_observation(dt):
        # Fetch-or-create the day entity and cache it so later timesteps
        # on the same calendar day reuse it.
        key_name = make_key_name(site, dt.date())
        cached = obs.get(key_name)
        if cached is not None:
            return cached
        entity = ObservationDay.get_by_key_name(key_name)
        if entity is None:
            entity = ObservationDay(key_name=key_name)
            entity.site = site
            entity.observation_date = dt.date()
            entity.observations = Observations()
        obs[key_name] = entity
        return entity

    result = urlfetch.fetch(url)
    if result.status_code == 200:
        parsed = parse_observation(result.content)
        issued = parse_date(parsed['@issueDate'])
        site.last_obs_issue_datetime = issued
        site.last_obs_update_datetime = datetime.now()
        for when, fields in timesteps(parsed):
            day_entity = get_db_observation(when)
            day_entity.lastdata_datetime = issued
            weather = Weather({})
            for raw_name, raw_value in fields.items():
                name = snake_case(raw_name)
                if not hasattr(weather, name):
                    continue
                if raw_value == "missing":
                    raw_value = None
                elif name == 'temperature':
                    # numeric conversion; the feed serialises values as text
                    raw_value = float(raw_value)
                setattr(weather, name, raw_value)
            day_entity.observations.add(when, weather)
        for day_entity in obs.values():
            day_entity.save()
        site.save()
    return Response(status=204)
def site_graph_data(site_id):
    """JSON data series for a site's graph page.

    Optional 'day' query parameter (YYYY-MM-DD) selects the day, defaulting
    to today. Returns 404 for an unknown site.
    """
    site = Site.get_by_key_name(site_id)
    if site is None:
        return Response(status=404)
    day = date.today()
    # Fix: dict.has_key() is deprecated (and removed in Python 3);
    # use the membership test instead.
    if 'day' in request.args:
        day = parse_yyyy_mm_dd_date(request.args.get('day'))
    # obs data first
    obs = ObservationTimestep.find_by_site_and_date(site, day)
    series = [make_series('Observation temperature °C', obs,
                          'observation_datetime', 'temperature')]
    return json_response({'day': str(day), 'series': series})
def site_observations(site_id):
    """Return the ObservationDay for a site on the requested 'day'
    (YYYY-MM-DD, required query parameter).

    400 when 'day' is missing; 404 when the site or the day's record
    does not exist.
    """
    site = Site.get_by_key_name(site_id)
    if site is None:
        return Response(status=404)
    # Fix: dict.has_key() is deprecated (removed in Python 3); guard-clause
    # with a membership test instead of the inverted if/else.
    if 'day' not in request.args:
        return Response("day query parameter is required", status=400)
    day = parse_yyyy_mm_dd_date(request.args.get('day'))
    observation_day = ObservationDay.get_by_key_name(make_key_name(site, day))
    if observation_day is None:
        return Response(status=404)
    return json_response(observation_day)
def observation_import(site_id):
    """Import today's observations for a site from the metofficewatch app
    into the local ObservationDay record; 404 for an unknown site, else 204."""
    site = Site.get_by_key_name(site_id)
    if site is None:
        return Response(status=404)
    today = date.today()
    url = "http://metofficewatch.appspot.com/sites/%s/observations?day=%s" % (site_id, today.isoformat())
    fetched = urlfetch.fetch(url)
    if fetched.status_code == 200:
        payload = json.loads(fetched.content)
        obs_day = ObservationDay.get_by(site, today, not_found_return_new=True)
        obs_day.observations = Observations.from_json(payload['observations'])
        obs_day.lastdata_datetime = parse_date(payload['lastdata_datetime'])
        obs_day.save()
    return Response(status=204)
def forecast_import(site_id):
    """Import today's forecasts for a site from the metofficewatch app into
    the local ForecastDay record; 404 for an unknown site, otherwise 204."""
    site = Site.get_by_key_name(site_id)
    if site is None:
        return Response(status=404)
    today = date.today()
    url = "http://metofficewatch.appspot.com/sites/%s/forecasts?day=%s" % (site_id, today.isoformat())
    fetched = urlfetch.fetch(url)
    if fetched.status_code == 200:
        payload = json.loads(fetched.content)
        forecast_day = ForecastDay.get_by(site, today, not_found_return_new=True)
        forecast_day.forecasts = Forecasts.from_json(payload['forecasts'])
        # The upstream app may report no issue time; keep None in that case.
        raw_issued = payload['lastdata_datetime']
        forecast_day.lastdata_datetime = (
            parse_date(raw_issued) if raw_issued is not None else None)
        forecast_day.save()
    return Response(status=204)
def site_detail(site_id):
    """JSON summary of a site: the last 24 observation timesteps plus the
    forecasts spanning the same window; 404 for an unknown site."""
    site = Site.get_by_key_name(site_id)
    if site is None:
        return Response(status=404)
    obs = ObservationTimestep.find_latest_by_site(site=site, limit=24)
    forecasts = []
    if obs:
        head = first(obs)
        tail = last(obs)
        # Forecasts covering the interval between the two ends of the
        # observation list.
        forecasts = ForecastTimestep.find_by_site_between_dates(
            site=site,
            from_dt=tail.observation_datetime,
            to_dt=head.observation_datetime)
    without_site = lambda t: t.to_dict(excluding=['site'])
    body = json.dumps({
        'site': site.to_dict(),
        'observations': map(without_site, obs),
        'forecasts': map(without_site, forecasts),
    })
    return Response(body, content_type="application/json")
def site_graph(site_id):
    """Render the graph page for a site; 404 if the site does not exist."""
    site = Site.get_by_key_name(site_id)
    return (Response(status=404) if site is None
            else render_template('graph.html', site=site))
def site_graph(site_id):
    """Graph page for a single site (404 for unknown ids)."""
    found = Site.get_by_key_name(site_id)
    if found is None:
        return Response(status=404)
    return render_template('graph.html', site=found)
def site_by_id(site_id):
    """Return the Site entity serialised as JSON, or 404 when absent."""
    found = Site.get_by_key_name(site_id)
    return Response(status=404) if found is None else json_response(found)