def write_message(topic, payload, outfile):
    """Parse one MQTT-style (topic, payload) pair and append it to *outfile* as BUFR.

    The payload is decoded by ``parse_message``; unparseable input is silently
    skipped.  Any other failure is printed (best-effort writer: one bad message
    must not stop the stream).
    """
    try:
        parsed = parse_message(topic, payload)
        if parsed is None:
            return
        msg = dballe.Message("generic")
        # Mobile stations carry an ident; fixed stations do not.
        if parsed["ident"] is not None:
            msg.set_named("ident", dballe.var("B01011", parsed["ident"]))
        msg.set_named("longitude", dballe.var("B06001", parsed["lon"]))
        msg.set_named("latitude", dballe.var("B05001", parsed["lat"]))
        msg.set_named("rep_memo", dballe.var("B01194", parsed["rep_memo"]))
        when = parsed["datetime"]
        # Reference time is encoded field by field (B04001..B04006).
        for field, bcode, value in (
            ("year", "B04001", when.year),
            ("month", "B04002", when.month),
            ("day", "B04003", when.day),
            ("hour", "B04004", when.hour),
            ("minute", "B04005", when.minute),
            ("second", "B04006", when.second),
        ):
            msg.set_named(field, dballe.var(bcode, value))
        measured = dballe.var(parsed["var"], parsed["value"])
        for bcode, attrvalue in parsed["attributes"].items():
            measured.seta(dballe.var(bcode, attrvalue))
        msg.set(parsed["level"], parsed["trange"], measured)
        outfile.write(dballe.Exporter(encoding="BUFR").to_binary(msg))
        outfile.flush()
    except Exception:
        import traceback
        traceback.print_exc()
def export_data(outfile, low=0, high=None, datetimemin=None):
    """Export datastore rows [low, high) to *outfile* as a BUFR file.

    Rows are staged through an in-memory dballe database: station records are
    inserted first, then each measured value, and finally everything matching
    ``datetimemin`` is re-read as messages and written out.

    :param outfile: path of the BUFR file to create (opened with ``open``)
    :param low: first datastore position to export (inclusive)
    :param high: last datastore position, or None for "to the end"
    :param datetimemin: optional lower bound on reference time of the export
    :return: position after the last row visited (resume point for next call)
    """
    db = dballe.DB.connect("mem:")
    db.reset()
    last = low
    stations = load_stations()
    variables = load_variables()
    with db.transaction() as tr:
        for rec in stations.values():
            tr.insert_station_data(rec, can_add_stations=True)
    with db.transaction() as tr:
        for row in iter_datastore(low=low, high=high):
            # Count every row visited, including skipped ones, so the caller
            # can resume from the returned position.
            last += 1
            variable = variables.get(row["variable_id"])
            station = stations.get(row["station_id"])
            reftime = datetime.strptime(row["reftime"], "%Y-%m-%dT%H:%M:%S")
            value = row["value"]
            if variable is None:
                logger.warning("Unknown variable {}, skipping".format(
                    row["variable_id"]))
                continue
            if station is None:
                logger.warning("Unknown station {}, skipping".format(
                    row["station_id"]))
                continue
            rec = {
                k: station.get(k)
                for k in ("ident", "lon", "lat", "rep_memo")
            }
            try:
                rec["year"] = reftime.year
                rec["month"] = reftime.month
                rec["day"] = reftime.day
                rec["hour"] = reftime.hour
                rec["min"] = reftime.minute
                rec["sec"] = reftime.second
                # Stored values are scaled by 1e9; convert back to SI units.
                rec[variable["var"]] = value * 10**-9
                rec["level"] = variable["level"]
                rec["trange"] = variable["trange"]
                tr.insert_data(rec)
            except Exception:
                # Was a bare except: that also swallowed SystemExit and
                # KeyboardInterrupt; narrowed, and keep the traceback.
                logger.exception("Error encoding/write message")
    exporter = dballe.Exporter("BUFR")
    # Use a distinct name for the handle so the path parameter is not shadowed.
    with open(outfile, "wb") as fp:
        with db.transaction() as tr:
            for row in tr.query_messages({"datetimemin": datetimemin}):
                fp.write(exporter.to_binary(row.message))
    return last + 1
def do_qc(input_file, output_file, preserve):
    """Copy BUFR messages from *input_file* to *output_file*, applying QC.

    Each data value's attributes are checked with ``pass_qc``.  Failing values
    are dropped, unless *preserve* is true, in which case they are kept with a
    B33007 (confidence) attribute of 0.  Station data is copied verbatim with
    its attributes.  Messages left with no data values are not written.
    """
    importer = dballe.Importer("BUFR")
    exporter = dballe.Exporter("BUFR")
    with importer.from_file(input_file) as fp:
        for msgs in fp:
            for msg in msgs:
                kept = 0
                cleaned = dballe.Message("generic")
                when = msg.datetime
                cleaned.set_named("year", when.year)
                cleaned.set_named("month", when.month)
                cleaned.set_named("day", when.day)
                cleaned.set_named("hour", when.hour)
                cleaned.set_named("minute", when.minute)
                cleaned.set_named("second", when.second)
                cleaned.set_named("rep_memo", msg.report)
                # Coordinates are stored as integer hundred-thousandths of a degree.
                cleaned.set_named("longitude", int(msg.coords[0] * 10 ** 5))
                cleaned.set_named("latitude", int(msg.coords[1] * 10 ** 5))
                if msg.ident:
                    cleaned.set_named("ident", msg.ident)
                for data in msg.query_data({"query": "attrs"}):
                    source = data["variable"]
                    is_ok = pass_qc(source.get_attrs())
                    if not is_ok and not preserve:
                        continue
                    copied = dballe.var(source.code, source.get())
                    if not is_ok:
                        # Keep the value but flag it with zero confidence.
                        copied.seta(dballe.var("B33007", 0))
                    cleaned.set(data["level"], data["trange"], copied)
                    kept += 1
                for data in msg.query_station_data({"query": "attrs"}):
                    source = data["variable"]
                    copied = dballe.var(source.code, source.get())
                    for attr in source.get_attrs():
                        copied.seta(attr)
                    cleaned.set(dballe.Level(), dballe.Trange(), copied)
                if kept > 0:
                    output_file.write(exporter.to_binary(cleaned))
def on_message(client, userdata, message):
    """MQTT on-message callback: convert the payload to BUFR and write it out.

    ``userdata`` supplies ``outfile`` (binary file object) and
    ``overwrite_date`` (replace the message's reference time with the current
    UTC time when level and trange are fully specified).  Errors are printed
    and swallowed so one bad message does not kill the client loop.
    """
    try:
        parsed = parse_message(message.topic, message.payload.decode("utf-8"))
        if parsed is None:
            return
        msg = dballe.Message("generic")
        # Mobile stations carry an ident; fixed stations do not.
        if parsed["ident"] is not None:
            msg.set_named("ident", dballe.var("B01011", parsed["ident"]))
        msg.set_named("longitude", dballe.var("B06001", parsed["lon"]))
        msg.set_named("latitude", dballe.var("B05001", parsed["lat"]))
        msg.set_named("rep_memo", dballe.var("B01194", parsed["rep_memo"]))
        has_level = parsed["level"] != (None, None, None, None)
        has_trange = parsed["trange"] != (None, None, None)
        if has_level and has_trange and userdata["overwrite_date"]:
            parsed["datetime"] = datetime.utcnow()
        when = parsed["datetime"]
        if when is not None:
            # Reference time is encoded field by field (B04001..B04006).
            for field, bcode, value in (
                ("year", "B04001", when.year),
                ("month", "B04002", when.month),
                ("day", "B04003", when.day),
                ("hour", "B04004", when.hour),
                ("minute", "B04005", when.minute),
                ("second", "B04006", when.second),
            ):
                msg.set_named(field, dballe.var(bcode, value))
        measured = dballe.var(parsed["var"], parsed["value"])
        for bcode, attrvalue in parsed["attributes"].items():
            measured.seta(dballe.var(bcode, attrvalue))
        msg.set(parsed["level"], parsed["trange"], measured)
        userdata["outfile"].write(dballe.Exporter(encoding="BUFR").to_binary(msg))
        userdata["outfile"].flush()
    except Exception:
        import traceback
        traceback.print_exc()
def test_create(self):
    """Exporter.to_binary accepts a message, sequences and generators alike."""
    msg = self.make_gts_acars_uk1_message()
    exporter = dballe.Exporter("BUFR")
    single = exporter.to_binary(msg)
    # A BUFR bulletin starts with "BUFR" and ends with "7777".
    self.assertEqual(single[:4], b"BUFR")
    self.assertEqual(single[-4:], b"7777")
    # A one-element list encodes identically to the bare message.
    self.assertEqual(exporter.to_binary([msg]), single)
    double = exporter.to_binary((msg, msg))
    self.assertNotEqual(double, single)
    self.assertEqual(double[:4], b"BUFR")
    self.assertEqual(double[-4:], b"7777")
    # Generators are accepted and encode the same as tuples.
    self.assertEqual(exporter.to_binary(msg for i in range(2)), double)
    # An empty sequence is rejected.
    with self.assertRaises(ValueError):
        exporter.to_binary([])
def export_data(outfile, datetimemin=None, lonmin=None, latmin=None,
                lonmax=None, latmax=None):
    """Export luftdaten sensor readings to *outfile* as a BUFR file.

    Readings are pulled from ``DATASTORE_URL``, staged through an in-memory
    dballe database (stations first, then data), then re-read filtered by the
    optional datetime / bounding-box parameters and written out.  Every step
    is best-effort: records with missing or malformed fields are logged and
    skipped rather than aborting the export.

    :param outfile: path of the BUFR file to create
    :param datetimemin: optional lower bound on reference time
    :param lonmin, latmin, lonmax, latmax: optional bounding box filter
    """
    db = dballe.DB.connect("mem:")
    for data in iter_datastore(DATASTORE_URL):
        # Records without usable coordinates cannot be placed; skip them.
        try:
            lon = float(data["location"]["longitude"])
        except Exception:
            logging.warning(str(data["location"]))
            continue
        try:
            lat = float(data["location"]["latitude"])
        except Exception:
            logging.warning(str(data["location"]))
            continue
        try:
            with db.transaction() as tr:
                tr.insert_station_data(
                    {
                        "lon": lon,
                        "lat": lat,
                        "report": "luftdaten",
                        "B01019": str(data["location"]["id"])
                    },
                    can_add_stations=True,
                    can_replace=True)
        except Exception as e:
            logging.exception(e)
        rec = {}
        havetowrite = False
        for sensordatavalues in data["sensordatavalues"]:
            key = sensordatavalues["value_type"]
            var = VARIABLE_BCODES.get(key)
            if var is None:
                logger.info(
                    "Var for variable {} not found, skipping".format(key))
            else:
                try:
                    rec["lon"] = lon
                    rec["lat"] = lat
                    rec["report"] = "luftdaten"
                    bcode = var["bcode"]
                    # Linear calibration: raw * a + b, per-variable.
                    rec[bcode] = float(
                        sensordatavalues["value"]) * var["a"] + var["b"]
                    rec["level"] = dballe.Level(*var["level"])
                    rec["trange"] = dballe.Trange(*var["trange"])
                    rec["datetime"] = datetime.strptime(
                        data["timestamp"], "%Y-%m-%d %H:%M:%S")
                    havetowrite = True
                except Exception as e:
                    logging.exception(e)
        if havetowrite:
            try:
                with db.transaction() as tr:
                    tr.insert_data(rec, can_replace=True)
            except Exception as e:
                logging.exception(e)
                # Was a stray debug print(rec); log the failing record instead.
                logging.error("failed to insert record: %s", rec)
    exporter = dballe.Exporter("BUFR")
    # Use a distinct name for the handle so the path parameter is not shadowed.
    with open(outfile, "wb") as fp:
        with db.transaction() as tr:
            for row in tr.query_messages({
                    "datetimemin": datetimemin,
                    "lonmin": lonmin,
                    "latmin": latmin,
                    "lonmax": lonmax,
                    "latmax": latmax
            }):
                fp.write(exporter.to_binary(row.message))