def write_bufr(args):
    """Read JSON file, encode as BUFR and write to file-handle."""
    import json
    try:
        fh_out = open(args.out_file, "wb")
    except (TypeError, OSError):
        # No usable output file given: fall back to stdout's binary buffer,
        # since the BUFR data is written as bytes.
        fh_out = sys.stdout.buffer
    multi_bul = False
    for fn_in in args.in_file:
        with open(fn_in, "r") as fh_in:
            json_data = json.load(fh_in)
        for json_data_msg in json_data:
            if "bufr" not in json_data_msg or json_data_msg["bufr"] is None:
                continue
            bufr = Bufr(tab_fmt=args.tables_type, tab_path=args.tables_path)
            bin_data = bufr.encode(json_data_msg["bufr"], load_tables=True)
            if json_data_msg["heading"] is not None:
                if multi_bul:
                    fh_out.write(b"\r\r\n\r\r\n")
                fh_out.write(("%s\r\r\n" % json_data_msg["heading"]).encode())
            fh_out.write(bin_data)
            multi_bul = True
    if fh_out is not sys.stdout.buffer:
        fh_out.close()
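# For orientation, a hypothetical sketch of the JSON input write_bufr()
# expects: a list of message objects with "heading" and "bufr" keys, i.e. the
# structure read_bufr_to_json() below writes. The exact layout of the "bufr"
# payload depends on the trollbufr version, so this is an illustration, not an
# authoritative schema:
#
#     [
#         {
#             "heading": "IEMX01 EUMP 150722",
#             "bufr": {...}   # decoded message as returned by Bufr.decode()
#         }
#     ]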
def read_bufr_desc(args):
    """Read BUFR(s), decode meta-data and descriptor list, write to file-handle."""
    try:
        fh_out = open(args.out_file, "w")
    except (TypeError, OSError):
        fh_out = sys.stdout
    for fn_in in args.in_file:
        print("FILE\t%s" % os.path.basename(fn_in), file=fh_out)
        i = 0
        for blob, size, header in load_file.next_bufr(fn_in):
            if args.bulletin is not None and i != args.bulletin:
                i += 1
                continue
            print("BUFR\t#%d (%d B)" % (i, size), file=fh_out)
            i += 1
            print("HEADER\t%s" % header, file=fh_out)
            try:
                bufr = Bufr(args.tables_type, args.tables_path)
                bufr.decode_meta(blob, load_tables=(not args.sparse))
                print("META\n%s" % bufr.get_meta_str(), file=fh_out)
                if args.sparse:
                    d = bufr.get_descr_short()
                else:
                    d = bufr.get_descr_full()
                print("DESC :\n%s" % "\n".join(d), file=fh_out)
            except Exception as e:
                print("ERROR\t%s" % e, file=fh_out)
                if logger.isEnabledFor(logging.DEBUG):
                    logger.exception(e)
    if fh_out is not sys.stdout:
        fh_out.close()
def read_bufr_to_json(args):
    """Read and decode BUFR, write as JSON formatted file."""
    bufr = Bufr(args.tables_type, args.tables_path)
    json_data = []
    bufr_i = -1
    for fn_in in args.in_file:
        for blob, _, header in load_file.next_bufr(fn_in):
            bufr_i += 1
            if args.bulletin is not None and bufr_i != args.bulletin:
                continue
            json_data_item = {
                "heading": header,
                "file": os.path.basename(fn_in),
                "index": bufr_i,
                "status": False,
                "error": None,
                "bufr": None,
            }
            try:
                json_bufr = bufr.decode(blob, load_tables=True,
                                        as_array=args.array)
            except Exception as e:
                logger.error(e, exc_info=logger.isEnabledFor(logging.DEBUG))
                json_data_item["error"] = str(e)
            else:
                json_data_item["status"] = True
                json_data_item["bufr"] = json_bufr
            finally:
                json_data.append(json_data_item)
    import json
    try:
        fh_out = open(args.out_file, "w")
    except (TypeError, OSError):
        fh_out = sys.stdout
    if args.sparse:
        json.dump(json_data, fh_out)
    else:
        json.dump(json_data, fh_out, indent=3, separators=(',', ': '))
    if fh_out is not sys.stdout:
        fh_out.close()
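# A usage sketch for read_bufr_to_json(); the file names are hypothetical and
# types.SimpleNamespace merely stands in for the argparse result the command
# line front-end would provide, using the attribute names referenced above:
#
#     from types import SimpleNamespace
#     read_bufr_to_json(SimpleNamespace(
#         in_file=["metop_mhs.bufr"],
#         out_file="metop_mhs.json",
#         tables_type="eccodes",
#         tables_path="tables",
#         bulletin=None,
#         array=False,
#         sparse=False,
#     ))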
def test_bufr_read(monkeypatch):
    """Test reading data and data quality on Metop-A MHS BUFR file."""
    monkeypatch.setenv("BUFR_TABLES", os.path.join(test_dir, "bufrtables"))
    monkeypatch.setenv("BUFR_TABLES_TYPE", "bufrdc")
    from trollbufr import load_file
    from trollbufr.bufr import Bufr
    test_file = os.path.join(test_dir, "metop_mhs.bufr")
    bufr = Bufr(os.environ["BUFR_TABLES_TYPE"], os.environ["BUFR_TABLES"])
    # load test file and iterate over BUFR
    for blob, size, header in load_file.next_bufr(test_file):
        # test header for first BUFR
        assert header == "IEMX01 EUMP 150722"
        assert size == 48598
        # decode BUFR message
        bufr.decode(blob)
        # iterate over subsets
        for report in bufr.next_subset():
            i = 0
            # iterate over all descriptor/data sets
            for k, m, (v, q) in report.next_data():
                i += 1
                if i > 4:
                    # after the first four descriptor/data sets just count
                    continue
                if i <= 3:
                    # type-marker for the first three descriptors is not None
                    assert m is not None
                    continue
                # fourth entry is a plain data element: assert descriptor,
                # data value, quality (no type-marker)
                assert m is None
                assert k == 8070
                assert v == 3
                assert q is None
                # look up and assert name and unit
                kn, ku = bufr.get_tables().lookup_elem(k)
                assert kn.strip() == "TOVS/ATOVS PRODUCT QUALIFIER"
                assert ku.strip() == "CODE TABLE 8070"
            # assert there were 88 descriptors in the subset
            assert i == 88
            # leave for-loops, all tests are done
            break
        break
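# This test runs under pytest, which provides the monkeypatch fixture, e.g.:
#
#     pytest -k test_bufr_read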
import logging
handler = logging.StreamHandler()
handler.setFormatter(
    logging.Formatter("[%(levelname)s: %(module)s] %(message)s"))
handler.setLevel(logging.WARNING)
logging.getLogger('').setLevel(logging.WARNING)
logging.getLogger('').addHandler(handler)

from trollbufr.bufr import Bufr
from trollbufr import load_file

import numpy as np

TESTFILE = 'TestBulletin_468'
PNGFILE = 'metopa_iasi_ctp_%s.png'
AREA = 'euro'

lon = []
lat = []
pres = []

bfr = Bufr("bufrdc", ".")
for blob, size, header in load_file.next_bufr(TESTFILE):
    bfr.decode(blob)
    print(header, bfr.get_meta()['datetime'])
    for subset in bfr.next_subset():
        gotit = 0
        for k, m, (v, q) in subset.next_data():
            if gotit:
                continue
            if k == 5001:
                lat.append((0, 0, v))
            if k == 6001:
                lon.append((0, 0, v))
            if k == 7004:
                pres.append((0, 0, v))
                # pressure comes after lat/lon; stop collecting for this subset
                gotit = 1
def runner(args):
    bufr = Bufr(os.environ["BUFR_TABLES_TYPE"], os.environ["BUFR_TABLES"])
    with open(args.filename[0], "rb") as fh_in:
        bufr_data = fh_in.read()
    if args.amtl:
        station_descr = (1002, )
    else:
        station_descr = (1002, 1018)
    try:
        with gzip_open("%s.geojson.gz" % args.filename[0], "wt") as fh_out:
            i = 0
            if args.jsonp:
                fh_out.write('appendData( ')
            fh_out.write('{ "type" : "FeatureCollection",\n')
            fh_out.write('"datetime_current" : "%s",\n'
                         % (datetime.utcnow().strftime("%Y-%m-%d %H:%M")))
            fh_out.write('"features" : [')
            for blob, size, header in trollbufr.load_file.next_bufr(
                    bin_data=bufr_data):
                bufr.decode_meta(blob)
                tabl = bufr.get_tables()
                for report in bufr.next_subset():
                    station_accepted = False
                    feature_set = {
                        "type": "Feature",
                        "geometry": {"type": "Point", "coordinates": []},
                        "properties": {}
                    }
                    feature_coordinates = [0, 0, 0]
                    feature_properties = {"abbreviated_heading": header}
                    try:
                        j = 0
                        for descr_entry in report.next_data():
                            if descr_entry.mark is not None:
                                continue
                            # latitude descriptors
                            if descr_entry.descr in (5001, 5002, 27001, 27002):
                                feature_coordinates[1] = descr_entry.value
                                continue
                            # longitude descriptors
                            if descr_entry.descr in (6001, 6002, 28001, 28002):
                                feature_coordinates[0] = descr_entry.value
                                continue
                            # height descriptors
                            if descr_entry.descr in (7001, 7002, 7007, 7030,
                                                     10007) and descr_entry.value:
                                feature_coordinates[2] = descr_entry.value
                                continue
                            if (descr_entry.descr in station_descr
                                    and descr_entry.value is not None):
                                station_accepted = True
                            # d_name, d_unit, d_typ
                            d_info = tabl.lookup_elem(descr_entry.descr)
                            if d_info.unit.upper() in ("CCITT IA5", "NUMERIC",
                                                       "CODE TABLE", "FLAG TABLE"):
                                d_unit = None
                            else:
                                d_unit = d_info.unit
                            if descr_entry.value is None or d_info.type in (
                                    TabBType.NUMERIC, TabBType.LONG,
                                    TabBType.DOUBLE):
                                d_value = descr_entry.value
                            elif d_info.type in (TabBType.CODE, TabBType.FLAG
                                                 ) and descr_entry.value is not None:
                                d_value = tabl.lookup_codeflag(
                                    descr_entry.descr, descr_entry.value)
                            elif isinstance(descr_entry.value, bytes):
                                # byte strings from the BUFR are decoded;
                                # plain str has no decode() in Python 3
                                d_value = descr_entry.value.decode("latin1")
                            else:
                                d_value = str(descr_entry.value)
                            feature_properties["data_%03d" % (j)] = {
                                "name": d_info.name,
                                "value": d_value
                            }
                            if d_info.shortname is not None:
                                feature_properties["data_%03d" % (j)][
                                    "shortname"] = d_info.shortname
                            if d_unit is not None:
                                feature_properties["data_%03d" % (j)][
                                    "unit"] = str(d_unit)
                            j += 1
                    except Exception as e:
                        station_accepted = False
                        if "Unknown descriptor" not in str(e):
                            raise
                    if station_accepted:
                        if i:
                            fh_out.write(",\n")
                        i += 1
                        feature_set["geometry"]["coordinates"] = feature_coordinates
                        feature_set["properties"] = feature_properties
                        dump(feature_set, fh_out, indent=3,
                             separators=(',', ': '))
            fh_out.write(']\n}\n')
            if args.jsonp:
                fh_out.write(');\n')
    except Exception as e:
        logger.info(e, exc_info=1)
    return 0
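# For illustration, the shape of one emitted GeoJSON feature; all values are
# made-up placeholders, the real properties depend on the decoded BUFR:
#
#     {
#         "type": "Feature",
#         "geometry": {"type": "Point", "coordinates": [11.2, 48.1, 515]},
#         "properties": {
#             "abbreviated_heading": "ISMN01 EDZW 150600",
#             "data_000": {"name": "WMO STATION NUMBER", "value": 147}
#         }
#     }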
def read_synop(file, params, min=None, max=None):
    """Read BUFR file(s) with synoptical station data and provide a
    dictionary of weather data, e.g. cloud base height and visibility.
    The results are subsequently filtered by the given thresholds.

    Arguments:
        file    BUFR file with synop reports
        params  List of parameter names that will be extracted
        min     Threshold for minimum value of parameter
        max     Threshold for maximum value of parameter

    Returns a dictionary mapping observation times to lists of station data
    within the given thresholds.
    """
    result = {}
    bfr = Bufr("libdwd", os.getenv("BUFR_TABLES"))
    for blob, size, header in load_file.next_bufr(file):
        bfr.decode(blob)
        try:
            for subset in bfr.next_subset():
                stationdict = {}
                for k, m, (v, q) in subset.next_data():
                    if k == 1015:  # Station name
                        stationdict['name'] = v.strip()
                    elif k == 5001:  # Latitude
                        stationdict['lat'] = v
                    elif k == 6001:  # Longitude
                        stationdict['lon'] = v
                    elif k == 7030:  # Altitude
                        stationdict['altitude'] = v
                    elif k == 4001:  # Year
                        stationdict['year'] = v
                    elif k == 4002:  # Month
                        stationdict['month'] = v
                    elif k == 4003:  # Day
                        stationdict['day'] = v
                    elif k == 4004:  # Hour
                        stationdict['hour'] = v
                    elif k == 4005:  # Minute
                        stationdict['minute'] = v
                    elif k == 20003:  # Present weather
                        # Values from 40 to 49 refer to fog and ice fog;
                        # patchy fog or fog edges have value 11 or 12
                        stationdict['present weather'] = v
                    elif k == 20004:  # Past weather
                        # Values from 40 to 49 refer to fog and ice fog;
                        # patchy fog or fog edges have value 11 or 12
                        stationdict['past weather'] = v
                    elif k == 20013:  # Cloud base height
                        if v is not None:
                            # keep the lowest reported cloud base height
                            if (stationdict.get('cbh') is None
                                    or stationdict['cbh'] > v):
                                stationdict['cbh'] = v
                        else:
                            stationdict.setdefault('cbh', None)
                    elif k == 2001:  # Auto/manual measurement
                        # 1 - 3 : Manual human observations. Manned stations
                        # 0, 4 - 7 : Only automatic observations
                        stationdict['type'] = v
                    elif k == 20001:  # Visibility
                        stationdict['visibility'] = v
                    elif k == 12101:  # Mean air temperature in K
                        stationdict['air temperature'] = v
                    elif k == 12103:  # Dew point temperature in K
                        stationdict['dew point'] = v
                    elif k == 20010:  # Cloud cover in %
                        stationdict['cloudcover'] = v
                    elif k == 13003:  # Relative humidity in %
                        stationdict['relative humidity'] = v
                    elif k == 11001:  # Wind direction in degree
                        stationdict['wind direction'] = v
                    elif k == 11002:  # Wind speed in m s-1
                        stationdict['wind speed'] = v
                    elif k == 1002:  # WMO station number
                        stationdict['wmo'] = v
                # Apply thresholds
                stationtime = datetime(
                    stationdict['year'],
                    stationdict['month'],
                    stationdict['day'],
                    stationdict['hour'],
                    stationdict['minute'],
                ).strftime("%Y%m%d%H%M%S")
                paralist = []
                if not isinstance(params, list):
                    params = [params]
                for param in params:
                    # test for None first: comparing None against a threshold
                    # raises TypeError in Python 3
                    if param not in stationdict:
                        res = None
                    elif stationdict[param] is None:
                        res = None
                    elif min is not None and stationdict[param] < min:
                        res = None
                    elif max is not None and stationdict[param] >= max:
                        res = None
                    else:
                        res = stationdict[param]
                    paralist.append(res)
                if all(p is None for p in paralist):
                    continue
                # Add station data to result list
                station_data = [
                    stationdict['name'],
                    stationdict['altitude'],
                    stationdict['lat'],
                    stationdict['lon'],
                ] + paralist
                result.setdefault(stationtime, []).append(station_data)
        except Exception as e:
            print("ERROR: Unresolved station request: {}".format(e))
    return result
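# A usage sketch for read_synop(), with a hypothetical file name and the
# BUFR_TABLES environment variable assumed to point at "libdwd"-style tables.
# Each station entry is [name, altitude, lat, lon] plus one value per
# requested parameter:
#
#     synop = read_synop("synop.bufr", ["visibility", "cbh"], max=1000)
#     for obs_time, stations in sorted(synop.items()):
#         for name, altitude, lat, lon, visibility, cbh in stations:
#             print(obs_time, name, visibility, cbh)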
import logging
handler = logging.StreamHandler()
handler.setFormatter(
    logging.Formatter("[%(levelname)s: %(module)s] %(message)s"))
handler.setLevel(logging.WARNING)
logging.getLogger('').setLevel(logging.WARNING)
logging.getLogger('').addHandler(handler)

from trollbufr.bufr import Bufr
from trollbufr import load_file

import sys

if len(sys.argv) != 2:
    print("SYNTAX:", sys.argv[0], "<bufr>")
    sys.exit(1)
testfile = sys.argv[1]

bfr = Bufr("eccodes", "tables")
for blob, size, header in load_file.next_bufr(testfile):
    bfr.decode(blob)
    print("\n", testfile, header, "\n", bfr.get_meta_str())
    for subset in bfr.next_subset():
        for k, m, (v, q) in subset.next_data():
            print(k, m, v)
        break
    # blob is bytes, so the bulletin is written in binary mode
    with open(header.replace(" ", "_"), "wb") as fh:
        fh.write(blob)
def read_bufr_data(args):
    """Read BUFR(s), decode data section and write to file-handle.

    Depending on command argument "--array", either process the subsets in
    sequence, which is ideal for un-compressed BUFR, or process each
    descriptor over all subsets at once, which improves performance for
    compressed BUFR.
    """
    try:
        fh_out = open(args.out_file, "w")
    except (TypeError, OSError):
        fh_out = sys.stdout
    bufr = Bufr(args.tables_type, args.tables_path)
    for fn_in in args.in_file:
        print("FILE\t%s" % os.path.basename(fn_in), file=fh_out)
        i = 0
        for blob, size, header in load_file.next_bufr(fn_in):
            if args.bulletin is not None and i != args.bulletin:
                i += 1
                continue
            print("BUFR\t#%d (%d B)" % (i, size), file=fh_out)
            i += 1
            print("HEADER\t%s" % header, file=fh_out)
            try:
                bufr.decode_meta(blob, load_tables=False)
                tabl = bufr.load_tables()
                print("META:\n%s" % bufr.get_meta_str(), file=fh_out)
                for report in bufr.next_subset(args.array and bufr.is_compressed):
                    print("SUBSET\t#%d/%d" % report.subs_num, file=fh_out)
                    if args.sparse or (args.array and bufr.is_compressed):
                        for descr_entry in report.next_data():
                            if descr_entry.mark is not None:
                                if isinstance(descr_entry.value, list):
                                    descr_value = "".join(
                                        str(x) for x in descr_entry.value)
                                else:
                                    descr_value = descr_entry.value
                                print(" ", descr_entry.mark, descr_value,
                                      end="", file=fh_out)
                                print(file=fh_out)
                                continue
                            if descr_entry.value is None:
                                print("%06d: ///" % (descr_entry.descr),
                                      file=fh_out)
                            elif descr_entry.quality is not None:
                                print("%06d: %s (%s)"
                                      % (descr_entry.descr,
                                         str(descr_entry.value),
                                         descr_entry.quality),
                                      file=fh_out)
                            else:
                                print("%06d: %s"
                                      % (descr_entry.descr,
                                         str(descr_entry.value)),
                                      file=fh_out)
                    else:
                        for descr_entry in report.next_data():
                            if descr_entry.mark is not None:
                                if isinstance(descr_entry.value, list):
                                    descr_value = "".join(
                                        str(x) for x in descr_entry.value)
                                else:
                                    descr_value = descr_entry.value
                                print(" ", descr_entry.mark, descr_value,
                                      end="", file=fh_out)
                                print(file=fh_out)
                                continue
                            descr_info = tabl.lookup_elem(descr_entry.descr)
                            if descr_info.type in (TabBType.CODE, TabBType.FLAG):
                                if descr_entry.value is None:
                                    print("%06d %-40s = Missing value"
                                          % (descr_entry.descr, descr_info.name),
                                          file=fh_out)
                                else:
                                    v = tabl.lookup_codeflag(descr_entry.descr,
                                                             descr_entry.value)
                                    print("%06d %-40s = %s"
                                          % (descr_entry.descr, descr_info.name,
                                             str(v)),
                                          file=fh_out)
                            else:
                                if descr_info.unit in ("CCITT IA5", "Numeric"):
                                    dinf_unit = ""
                                else:
                                    dinf_unit = descr_info.unit
                                if descr_entry.value is None:
                                    print("%06d %-40s = /// %s"
                                          % (descr_entry.descr, descr_info.name,
                                             dinf_unit),
                                          file=fh_out)
                                elif descr_entry.quality is not None:
                                    print("%06d %-40s = %s %s (%s)"
                                          % (descr_entry.descr, descr_info.name,
                                             str(descr_entry.value), dinf_unit,
                                             descr_entry.quality),
                                          file=fh_out)
                                else:
                                    print("%06d %-40s = %s %s"
                                          % (descr_entry.descr, descr_info.name,
                                             str(descr_entry.value), dinf_unit),
                                          file=fh_out)
            except Exception as e:
                print("ERROR\t%s" % e, file=fh_out)
                if logger.isEnabledFor(logging.DEBUG):
                    logger.exception(e)
                else:
                    logger.warning(e)
    if fh_out is not sys.stdout:
        fh_out.close()
from trollbufr.bufr import Bufr
from trollbufr import load_file

import numpy as np
import sys

if len(sys.argv) != 3:
    print("SYNTAX:", sys.argv[0], "<bufr> <png>")
    sys.exit(1)
testfile = sys.argv[1]
pngfile = sys.argv[2]

lon = []
lat = []
pres = []

bfr = Bufr("eccodes", "tables")
for blob, size, header in load_file.next_bufr(testfile):
    bfr.decode(blob)
    print(header, bfr.get_meta()['datetime'])
    for subset in bfr.next_subset():
        gotit = 0
        for k, m, (v, q) in subset.next_data():
            if gotit:
                continue
            if k == 5001:
                lat.append((0, 0, v))
            if k == 6001:
                lon.append((0, 0, v))
            if k == 7004:
                pres.append((0, 0, v))
                # pressure comes after lat/lon; stop collecting for this subset
                gotit = 1
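# The loop above only collects (0, 0, value) triples; one minimal way to turn
# them into the PNG named on the command line is a matplotlib scatter plot.
# This is a sketch, not part of the original script, which presumably used its
# own plotting code:
import matplotlib
matplotlib.use("Agg")  # render off-screen, no display needed
import matplotlib.pyplot as plt

xs = [p[2] for p in lon]    # longitudes, deg
ys = [p[2] for p in lat]    # latitudes, deg
cs = [p[2] for p in pres]   # pressure (descriptor 007004), Pa
plt.scatter(xs, ys, c=cs, s=4)
plt.colorbar(label="pressure [Pa]")
plt.savefig(pngfile)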
import sys
import glob
import logging
handler = logging.StreamHandler()
handler.setFormatter(
    logging.Formatter("[%(levelname)s: %(module)s] %(message)s"))
# handler.setLevel(logging.DEBUG)
# logging.getLogger('').setLevel(logging.DEBUG)
handler.setLevel(logging.WARNING)
logging.getLogger('').setLevel(logging.WARNING)
logging.getLogger('').addHandler(handler)

from trollbufr.bufr import Bufr
from trollbufr import load_file

fp = sys.argv[1]
bfr = Bufr("eccodes", "tables")
for fn in glob.glob(fp):
    print(fn)
    i = 0
    for blob, size, header in load_file.next_bufr(fn):
        try:
            bfr.decode(blob)
            lon = lat = 0
            for subset in bfr.next_subset():
                for k, m, (v, q) in subset.next_data():
                    if k == 5001:
                        lat = v
                    if k == 6001:
                        lon = v
                # only the first subset is evaluated
                break
        except Exception as e:
            # the original snippet ends before the except clause; a minimal
            # handler keeps the loop over files and bulletins going
            print("ERROR", e)