def query_stations(self, rec):
    """Query stations.

    .. warning::

        Only `ident`, `rep_memo`, `lon` and `lat` are returned. Loading
        static data must be implemented.
    """
    # Collect the distinct end dates seen in the summary, load the matching
    # data into a throwaway in-memory db, then query the stations from it.
    end_dates = {s["datemax"] for s in self.query_summary(dballe.Record())}
    memdb = dballe.DB.connect_from_url("mem:")
    for day in end_dates:
        self.load_arkiquery_to_dbadb(dballe.Record(date=day), memdb)
    for station in memdb.query_station_data(rec):
        yield station
def query_data(self, rec):
    """Query data from the remote arkimet dataset.

    Translates `rec` into an arkimet query, fetches the matching features
    as GeoJSON from the dataset's ``/query`` endpoint and yields one
    `dballe.Record` per feature.

    :param rec: query record (`dballe.Record`)
    """
    query = self.record_to_arkiquery(rec)
    # dict.items() instead of the Python-2-only iteritems(): identical
    # behaviour here, and also valid under Python 3.
    params = {
        "style": "postprocess",
        "command": "json",
        "query": query,
    }
    url = "{}/query?{}".format(
        self.dataset,
        "&".join("{}={}".format(k, quote(v)) for k, v in params.items()))
    resp = urlopen(url)
    for feature in json.load(resp)["features"]:
        p = feature["properties"]
        yield dballe.Record(**{
            "lon": p["lon"],
            "lat": p["lat"],
            "rep_memo": str(p["network"]),
            "level": tuple(p[k] for k in ["level_t1", "level_v1",
                                          "level_t2", "level_v2"]),
            "trange": tuple(p[k] for k in ["trange_pind", "trange_p1",
                                           "trange_p2"]),
            "date": datetime.strptime(p["datetime"], "%Y-%m-%dT%H:%M:%SZ"),
            str(p["bcode"]): float(p["value"]),
        })
def get_summaries(ident=None, network=None, lon=None, lat=None, pind=None,
                  p1=None, p2=None, lt1=None, l1=None, lt2=None, l2=None,
                  bcode=None, year=None, month=None, day=None):
    """Return the list of db summaries matching the given filters."""
    query = dballe.Record()
    query["ident"] = ident
    query["lon"] = lon
    query["lat"] = lat
    query["rep_memo"] = network
    query["trange"] = (pind, p1, p2)
    # Level components go through the string parser and are set only when
    # a value was actually supplied.
    for name, value in (("leveltype1", lt1), ("l1", l1),
                        ("leveltype2", lt2), ("l2", l2)):
        if value is not None:
            query.set_from_string("%s=%s" % (name, value))
    for name, value in (("var", bcode), ("year", year),
                        ("month", month), ("day", day)):
        query[name] = value
    return list(app.config.db.query_summary(query))
def query_summary(self, rec):
    """Query the remote arkimet dataset summary.

    Yields one `dballe.Record` per summary item.

    :param rec: query record (`dballe.Record`)
    """
    query = self.record_to_arkiquery(rec)
    # dict.items() instead of the Python-2-only iteritems(): identical
    # behaviour here, and also valid under Python 3.
    params = {
        "style": "json",
        "query": query,
    }
    url = "{}/summary?{}".format(
        self.dataset,
        "&".join("{}={}".format(k, quote(v)) for k, v in params.items()))
    r = urlopen(url)
    for i in json.load(r)["items"]:
        area = i["area"]["va"]
        prod = i["product"]["va"]
        yield dballe.Record(**{
            "ident": area.get("ident"),
            "lon": area["lon"],
            "lat": area["lat"],
            "rep_memo": area["rep"],
            "var": prod["bcode"],
            "level": [prod["lt1"], prod.get("l1"),
                      prod.get("lt2"), prod.get("l2")],
            "trange": [prod["tr"], prod["p1"], prod["p2"]],
            "datemin": datetime(*i["summarystats"]["b"]),
            "datemax": datetime(*i["summarystats"]["e"]),
        })
def get_station_data(network, ident=None, lon=None, lat=None):
    """Return the stations of `network` matching the optional filters."""
    query = dballe.Record()
    query.set_station_context()
    for name, value in (("rep_memo", network), ("ident", ident),
                        ("lon", lon), ("lat", lat)):
        query[name] = value
    return list(app.config.db.query_stations(query))
def export_data(outfile, low=0, high=None, datetimemin=None):
    """Export datastore rows to a BUFR file via a temporary dballe db.

    :param outfile: path of the output BUFR file
    :param low: lower bound passed to `iter_datastore`
    :param high: upper bound passed to `iter_datastore`
    :param datetimemin: minimum datetime for the exported messages
    :return: counter of the next row to process (last seen + 1)
    """
    db = dballe.DB.connect_from_url("sqlite://:memory:")
    db.reset()
    last = low
    stations = load_stations()
    variables = load_variables()
    for rec in stations.values():
        db.insert_station_data(rec, can_add_stations=True)
    for row in iter_datastore(low=low, high=high):
        last += 1
        variable = variables.get(row["variable_id"])
        station = stations.get(row["station_id"])
        reftime = datetime.strptime(row["reftime"], "%Y-%m-%dT%H:%M:%S")
        value = row["value"]
        if variable is None:
            logger.warning("Unknown variable {}, skipping".format(
                row["variable_id"]))
            continue
        elif station is None:
            logger.warning("Unknown station {}, skipping".format(
                row["station_id"]))
            continue
        else:
            rec = dballe.Record(**{
                k: station.get(k)
                for k in ("ident", "lon", "lat", "rep_memo")
            })
            try:
                rec["date"] = reftime
                # value is rescaled by 1e-9 (unit conversion — TODO confirm
                # against the datastore's stored units)
                rec[variable["var"]] = value * 10**-9
                rec["level"] = variable["level"]
                rec["trange"] = variable["trange"]
                db.insert_data(rec)
            except Exception:
                # Was a bare `except:` that also swallowed SystemExit and
                # KeyboardInterrupt and logged without the traceback.
                logger.exception("Error encoding/write message")
    db.export_to_file(dballe.Record(datemin=datetimemin),
                      filename=outfile, format="BUFR", generic=True)
    return last + 1
def params2record(p):
    """Build a `dballe.Record` from a dict of request parameters.

    Parameter names are translated to dballe keys; keys whose value is
    ``"*"`` (or that are absent) are skipped, and values equal to ``"-"``
    are translated to None.
    """
    mapping = (
        ("ident", "ident"),
        ("lon", "lon"),
        ("lat", "lat"),
        ("network", "rep_memo"),
        ("tr", "pindicator"),
        ("p1", "p1"),
        ("p2", "p2"),
        ("lt1", "leveltype1"),
        ("lv1", "l1"),
        ("lt2", "leveltype2"),
        ("lv2", "l2"),
        ("var", "var"),
    )
    kwargs = {}
    for src, dst in mapping:
        if src not in p or p.get(src) == "*":
            continue
        value = p.get(src)
        kwargs[dst] = None if value == "-" else value
    return dballe.Record(**kwargs)
def query_summary(self, rec):
    """Query summary.

    .. warning::

        Every station is supposed to measure all the `self.measurements`
    """
    query = self.record_to_arkiquery(rec)
    # dict.items() instead of the Python-2-only iteritems(): identical
    # behaviour here, and also valid under Python 3.
    params = {
        "style": "json",
        "query": query,
    }
    url = "{}/summary?{}".format(
        self.dataset,
        "&".join("{}={}".format(k, quote(v)) for k, v in params.items()))
    r = urlopen(url)
    for i in json.load(r)["items"]:
        for m in self.measurements:
            # NOTE(review): this compares rec[k] with i.get(k), but the
            # summary items keep their metadata under nested keys ("area",
            # "product", ...), so i.get("var"/"level"/"trange") looks like
            # it is always None — verify the intended filter.
            if all([rec.get(k) == i.get(k)
                    for k in ["var", "level", "trange"] if k in rec]):
                area = i["area"]["va"]
                if "lon" in area:
                    lon = area["lon"]  # fixed station
                else:
                    lon = area["x"]  # mobile
                if "lat" in area:
                    lat = area["lat"]  # fixed station
                else:
                    lat = area["y"]  # mobile
                yield dballe.Record(**{
                    "var": m["var"],
                    "level": m["level"],
                    "trange": m["trange"],
                    "ident": i.get("proddef", {}).get("va", {}).get("id",
                                                                    None),
                    "lon": lon,
                    "lat": lat,
                    "rep_memo": i["product"]["va"]["t"],
                    "datemin": datetime(*i["summarystats"]["b"]),
                    "datemax": datetime(*i["summarystats"]["e"]),
                })
def set_cached_summary(self):
    """Build the summary from the db, store it in the cache, return it."""
    summary = []
    for entry in self.db.query_summary(dballe.Record()):
        summary.append({
            "ident": entry.get("ident"),
            "lon": entry.key("lon").enqi(),
            "lat": entry.key("lat").enqi(),
            "rep_memo": entry.get("rep_memo"),
            "level": entry.get("level"),
            "trange": entry.get("trange"),
            "bcode": entry.get("var"),
            "date": entry.date_extremes(),
        })
    self.cache.set('borinud-summary-cache-%s' % self.dsn, summary,
                   self.timeout)
    return summary
def decode(self, s):
    """Decode a JSON summary string into a tuple of `dballe.Record`."""
    # TODO: decode datemin and datemax
    jsonsumm = super(SummaryJSONDecoder, self).decode(s)
    records = []
    for i in jsonsumm:
        kwargs = {
            "ident": None if i["ident"] is None else i["ident"].encode(),
            "lon": i["lon"],
            "lat": i["lat"],
            "rep_memo": i["rep_memo"].encode(),
            "level": i["level"],
            "trange": i["trange"],
            "var": i["bcode"].encode(),
        }
        records.append(dballe.Record(**kwargs))
    return tuple(records)
def load_stations():
    """Load the station registry CSV from STATIONS_URL.

    :return: dict mapping station code (int) -> `dballe.Record`
    """
    logging.info("Loading stations from {}".format(STATIONS_URL))
    resp = urlopen(STATIONS_URL)
    reader = csv.DictReader(codecs.getreader("utf-8")(resp))
    stations = {}
    for row in reader:
        # Heights use a decimal comma in the source CSV.
        height = float(row["Altezza"].replace(",", "."))
        stations[int(row["Cod_staz"])] = dballe.Record(
            B01019=row["Stazione"],
            B07030=height,
            lon=float(row["Lon"]),
            lat=float(row["Lat"]),
            rep_memo="arpae-aq",
        )
    return stations
def get_cached_summary(self):
    """Get the cached summary."""
    summary = self.cache.get('borinud-summary-cache-%s' % self.dsn)
    if summary is None:
        # Cache miss: rebuild and store it.
        summary = self.set_cached_summary()
    records = []
    for i in summary:
        records.append(dballe.Record(**{
            "ident": None if i["ident"] is None else i["ident"],
            "lon": i["lon"],
            "lat": i["lat"],
            "rep_memo": i["rep_memo"],
            "level": tuple(i["level"]),
            "trange": tuple(i["trange"]),
            "var": i["bcode"],
            "datemin": i["date"][0],
            "datemax": i["date"][1],
        }))
    return tuple(records)
def get_network_spatialseries(network, pind, p1, p2, lt1, l1, lt2, l2,
                              bcode, year, month, day, hour):
    """Return a network's data in a one-hour window centred on `hour`."""
    from datetime import datetime, timedelta
    query = dballe.Record()
    query["rep_memo"] = network
    query["trange"] = (pind, p1, p2)
    for name, value in (("leveltype1", lt1), ("l1", l1),
                        ("leveltype2", lt2), ("l2", l2)):
        query.set_from_string("%s=%s" % (name, value))
    query["var"] = bcode
    center = datetime(year, month, day, hour)
    # +/- 30 minutes around the requested hour (asymmetric by 1s to avoid
    # overlapping windows).
    query["datemin"] = center - timedelta(seconds=1800)
    query["datemax"] = center + timedelta(seconds=1799)
    return list(app.config.db.query_data(query))
def write_cached_summary(self):
    """Write the db summary to the cache file."""
    import os
    from tempfile import NamedTemporaryFile
    # The summary is written into a temporary file in the same directory
    # and then moved onto the final path (os.rename is atomic in POSIX OS).
    cachedir = os.path.realpath(os.path.dirname(self.cachefile))
    with NamedTemporaryFile(delete=False, dir=cachedir) as tmp:
        try:
            from .codec import SummaryJSONEncoder
            json.dump(
                self.db.query_summary(dballe.Record()),
                tmp,
                cls=SummaryJSONEncoder,
            )
            # Atomic rename in POSIX OS
            os.rename(tmp.name, self.cachefile)
        except:
            # Remove the half-written temporary file, then re-raise.
            os.unlink(tmp.name)
            raise
def get_resource_timeseries(ident, network, pind, p1, p2, lt1, l1, lt2, l2,
                            bcode, year, lon=None, lat=None, month=None,
                            day=None):
    """Return the timeseries for one station/variable combination."""
    query = dballe.Record()
    # "-" is the placeholder used in URLs for fixed (ident-less) stations.
    if ident != "-":
        query["ident"] = ident
    for name, value in (("lon", lon), ("lat", lat), ("rep_memo", network)):
        query[name] = value
    query["trange"] = (pind, p1, p2)
    for name, value in (("leveltype1", lt1), ("l1", l1),
                        ("leveltype2", lt2), ("l2", l2)):
        query.set_from_string("%s=%s" % (name, value))
    query["var"] = bcode
    for name, value in (("year", year), ("month", month), ("day", day)):
        query[name] = value
    return list(app.config.db.query_data(query))
def export_data(outfile, datetimemin=None, lonmin=None, latmin=None,
                lonmax=None, latmax=None):
    """Export luftdaten data to a BUFR file via a temporary dballe db.

    :param outfile: path of the output BUFR file
    :param datetimemin: minimum datetime filter for the export
    :param lonmin: bounding box filter, minimum longitude
    :param latmin: bounding box filter, minimum latitude
    :param lonmax: bounding box filter, maximum longitude
    :param latmax: bounding box filter, maximum latitude
    """
    db = dballe.DB.connect_from_url("sqlite://:memory:")
    db.reset()
    for data in iter_datastore(DATASTORE_URL):
        try:
            lon = float(data["location"]["longitude"])
        except Exception:
            logging.warning(str(data["location"]))
            continue
        try:
            lat = float(data["location"]["latitude"])
        except Exception:
            logging.warning(str(data["location"]))
            continue
        constantdata = dballe.Record(
            B01019=str(data["location"]["id"]),
            lon=lon,
            lat=lat,
            rep_memo="luftdaten")
        try:
            db.insert_station_data(constantdata, can_add_stations=True,
                                   can_replace=True)
        except Exception as e:
            logging.exception(e)
        rec = dballe.Record(**{
            k: constantdata.get(k)
            for k in ("ident", "lon", "lat", "rep_memo")
        })
        havetowrite = False
        for sensordatavalues in data["sensordatavalues"]:
            key = sensordatavalues["value_type"]
            var = VARIABLE_BCODES.get(key)
            if var is None:
                logger.info(
                    "Var for variable {} not found, skipping".format(key))
                continue
            # BUG FIX: bcode used to be assigned inside the try, so a
            # KeyError on var["bcode"] left it unbound and the except
            # branch then raised NameError at `rec[bcode] = None`.
            bcode = var.get("bcode")
            try:
                # Linear calibration: value * a + b.
                rec[bcode] = float(
                    sensordatavalues["value"]) * var["a"] + var["b"]
                havetowrite = True
            except Exception as e:
                logging.exception(e)
                if bcode is not None:
                    rec[bcode] = None
        if havetowrite:
            rec["level"] = (103, 2000)
            rec["trange"] = (254, 0, 0)
            rec["date"] = datetime.strptime(data["timestamp"],
                                            "%Y-%m-%d %H:%M:%S")
            try:
                db.insert_data(rec, can_replace=True)
            except Exception as e:
                logging.exception(e)
                # print() form: same output for a single argument under
                # Python 2, and valid Python 3 syntax.
                print(rec)
    db.export_to_file(
        dballe.Record(datemin=datetimemin, lonmin=lonmin, latmin=latmin,
                      lonmax=lonmax, latmax=latmax),
        filename=outfile, format="BUFR", generic=True)
import dballe

# Scan every datum in the scratch db and report the state of its B33196
# (manual invalidation) attribute.
db = dballe.DB.connect_from_url("sqlite:/dev/shm/tmp.sqlite")
for rec in db.query_data(dballe.Record()):
    attrs = db.attr_query_data(rec["context_id"])
    try:
        flag = attrs.var("B33196").get()
    except KeyError:
        print("B33196 assente")
    else:
        if flag == 1:
            print("da rimuovere")
        else:
            print("B33196=", flag)