def read_bufr_desc(args):
    """Read BUFR(s), decode meta-data and descriptor list, write to file-handle.

    Opens ``args.out_file`` for writing, falling back to stdout when the
    file cannot be created.  For every input file, each BUFR bulletin is
    announced with its index and size, then its meta-data and descriptor
    list are printed.  Decoding errors are reported per-bulletin and do
    not abort processing of the remaining bulletins.
    """
    try:
        fh_out = open(args.out_file, "w")
    except (IOError, OSError):
        # BUG FIX: was a bare "except:"; catch only the I/O errors open()
        # can raise, so e.g. KeyboardInterrupt is not swallowed.
        fh_out = sys.stdout
    try:
        for fn_in in args.in_file:
            print("FILE\t%s" % os.path.basename(fn_in), file=fh_out)
            i = 0
            for blob, size, header in load_file.next_bufr(fn_in):
                # When a specific bulletin index was requested, skip all others.
                if args.bulletin is not None and i != args.bulletin:
                    i += 1
                    continue
                print("BUFR\t#%d (%d B)" % (i, size), file=fh_out)
                i += 1
                print("HEADER\t%s" % header, file=fh_out)
                try:
                    bufr = Bufr(args.tables_type, args.tables_path)
                    # Sparse output does not need the full tables loaded.
                    bufr.decode_meta(blob, load_tables=(not args.sparse))
                    print("META\n%s" % bufr.get_meta_str(), file=fh_out)
                    if args.sparse:
                        d = bufr.get_descr_short()
                    else:
                        d = bufr.get_descr_full()
                    print("DESC :\n%s" % "\n".join(d), file=fh_out)
                except Exception as e:
                    # Report the error for this bulletin, keep going with the next.
                    print("ERROR\t%s" % e, file=fh_out)
                    if logger.isEnabledFor(logging.DEBUG):
                        logger.exception(e)
    finally:
        # BUG FIX: close the output file even when an exception escapes the
        # loop (previously the handle leaked on any uncaught error).
        if fh_out is not sys.stdout:
            fh_out.close()
def json(data, full=True):
    """Decode all BUFR bulletins in *data* and return a Flask JSON response.

    Each bulletin yields one dict with keys ``heading``, ``index``,
    ``status``, ``error`` and ``bufr``.  When *full* is false, the decoded
    BUFR content is omitted (only the status/heading survive).
    Per-bulletin decode errors are recorded in that bulletin's dict;
    an error while iterating the input appends one final error entry.

    NOTE(review): this view function shadows the stdlib ``json`` module
    name — kept as-is since it is the public endpoint name.
    """
    from flask import jsonify
    bufr_obj = bufr.Bufr(BUFR_TABLES_TYPE, BUFR_TABLES_DIR)
    decoded_list = []
    try:
        for blob_obj, _, header in load_file.next_bufr(bin_data=data):
            json_dict = {
                "heading": header,
                "index": len(decoded_list),
                "status": False,
                "error": None,
                "bufr": None,
            }
            try:
                json_obj = bufr_obj.decode(blob_obj, as_array=True)
                if full:
                    json_dict["bufr"] = json_obj
                json_dict["status"] = True
            except StandardError as e:
                json_dict["error"] = str(e)
            decoded_list.append(json_dict)
    except (Warning, StandardError) as broke:
        # BUG FIX: was "except Warning or StandardError", which evaluates to
        # just "Warning" — StandardError was never caught here.  Multiple
        # exception types must be given as a tuple.
        decoded_list.append({
            "index": len(decoded_list),
            "status": False,
            "bufr": None,
            "heading": None,
            "error": str(broke),
        })
    return jsonify(decoded_list)
def test_bufr_read(monkeypatch):
    """Test reading data and data quality on Metop-A MHS BUFR file."""
    # Point trollbufr at the test tables shipped next to this test.
    monkeypatch.setenv("BUFR_TABLES", os.path.join(test_dir, "bufrtables"))
    monkeypatch.setenv("BUFR_TABLES_TYPE", "bufrdc")
    from trollbufr import load_file
    from trollbufr.bufr import Bufr
    test_file = os.path.join(test_dir, "metop_mhs.bufr")
    bufr = Bufr(os.environ["BUFR_TABLES_TYPE"], os.environ["BUFR_TABLES"])
    # laod test file and iterate over BUFR
    for blob, size, header in load_file.next_bufr(test_file):
        # test header for first BUFR
        assert header == "IEMX01 EUMP 150722"
        assert size == 48598
        # decode BUFR message
        bufr.decode(blob)
        # iterate over subsets
        for report in bufr.next_subset():
            i = 0
            # iterate over all descriptor/data sets
            for k, m, (v, q) in report.next_data():
                i += 1
                if i >= 4:
                    # after first 3 descriptor/data sets just count
                    continue
                if i <= 3:
                    # type-marker for first 3 descriptor is not None
                    assert m is not None
                    continue
                # NOTE(review): the two guards above cover every value of i,
                # so the assertions below (k == 8070 etc.) are unreachable
                # dead code — the guard conditions likely need adjusting
                # (e.g. "i > 4" in the first) so that one iteration reaches
                # them; the intended index cannot be determined from here.
                # assert descriptor, data value, quality
                assert m is not None
                assert k == 8070
                assert v == 3
                assert q is None
                # look-up and assert name and unit
                kn, ku = bufr.get_tables().lookup_elem(k)
                assert kn.strip() == "TOVS/ATOVS PRODUCT QUALIFIER"
                assert ku.strip() == "CODE TABLE 8070"
            # assert there were 88 descriptors in the subset
            assert i == 88
            # leave for-loops, all tests are done
            break
        break
def human(data):
    """Decode all BUFR bulletins in *data* and render them as HTML markup.

    Builds a table of contents (one anchor link per bulletin heading)
    followed by each bulletin pretty-printed inside a ``<pre>`` block.
    Returns a :class:`Markup` object containing the assembled HTML.
    """
    decoded_list = []
    decoded_ahl = []
    idx = 1
    try:
        for blob_obj, _, header in load_file.next_bufr(bin_data=data):
            # Bulletins without a heading get a synthetic one by index.
            head = header or "BUFR #%s" % str(idx)
            idx += 1
            decoded_ahl.append(head)
            decoded_list.extend(
                ("<h3><a name='", head, "'>", head, "</a></h3>"))
            decoded_list.append("<pre>")
            decoded_list.append(pretty(blob_obj))
            decoded_list.append("</pre>")
    except (Warning, StandardError) as broke:
        # BUG FIX: was "except Warning or StandardError", which evaluates to
        # just "Warning" — StandardError was never caught.  Multiple
        # exception types must be given as a tuple.
        decoded_list.extend(("<b>", str(broke), "</b>"))
    cont_lst = ["<ul>"]
    for head in decoded_ahl:
        cont_lst.extend(("<li><a href='#", head, "'>", head, "</a></li>"))
    cont_lst.append("</ul>")
    return Markup("{}<p>{}".format("".join(cont_lst), "".join(decoded_list)))
def read_bufr_to_json(args):
    """Read and decode BUFR, write as JSON formatted file.

    Collects one dict per bulletin (heading, source file, index, status,
    error, decoded content) and dumps the list to ``args.out_file`` —
    compact when ``args.sparse`` is set, otherwise indented.
    """
    bufr = Bufr(args.tables_type, args.tables_path)
    json_data = []
    bufr_i = -1
    for fn_in in args.in_file:
        for blob, _, header in load_file.next_bufr(fn_in):
            bufr_i += 1
            # When a specific bulletin index was requested, skip all others.
            if args.bulletin is not None and bufr_i != args.bulletin:
                continue
            json_data_item = {
                "heading": header,
                "file": os.path.basename(fn_in),
                "index": bufr_i,
                "status": False,
                "error": None,
                "bufr": None,
            }
            try:
                json_bufr = bufr.decode(blob,
                                        load_tables=True,
                                        as_array=args.array)
            except Exception as e:
                # BUG FIX: was "exc_info=1 and logger.isEnabledFor(...)" —
                # a confusing way to spell the boolean; include the
                # traceback only at DEBUG level.
                logger.error(e, exc_info=logger.isEnabledFor(logging.DEBUG))
                json_data_item["error"] = str(e)
            else:
                json_data_item["status"] = True
                json_data_item["bufr"] = json_bufr
            finally:
                json_data.append(json_data_item)
    import json
    # BUG FIX: was "open(...) or sys.stdout" — open() never returns a falsy
    # value, it raises on failure, so the stdout fallback was dead code; and
    # the subsequent "with" would have closed sys.stdout.
    try:
        fh_out = open(args.out_file, "w")
    except (IOError, OSError):
        fh_out = sys.stdout
    try:
        if args.sparse:
            json.dump(json_data, fh_out)
        else:
            json.dump(json_data, fh_out, indent=3, separators=(',', ': '))
    finally:
        if fh_out is not sys.stdout:
            fh_out.close()
# Script fragment: read a Metop-A IASI BUFR bulletin and collect the
# latitude / longitude / pressure values for later plotting (PNG per AREA).
# NOTE: uses Python 2 "print" statement syntax.
logging.getLogger('').setLevel(logging.WARNING)
logging.getLogger('').addHandler(handler)
from trollbufr.bufr import Bufr
from trollbufr import load_file
import numpy as np
TESTFILE = 'TestBulletin_468'
PNGFILE = 'metopa_iasi_ctp_%s.png'
AREA = 'euro'
# Accumulators: one (0, 0, value) triple per subset for each quantity.
lon = []
lat = []
pres = []
bfr = Bufr("bufrdc", ".")
for blob, size, header in load_file.next_bufr(TESTFILE):
    bfr.decode(blob)
    print header, bfr.get_meta()['datetime']
    for subset in bfr.next_subset():
        # Take only the first 7004 (pressure) value per subset.
        gotit = 0
        for k, m, (v, q) in subset.next_data():
            if gotit:
                continue
            if k == 5001:
                # descriptor 0-05-001: latitude (high accuracy)
                lat.append((0, 0, v))
            if k == 6001:
                # descriptor 0-06-001: longitude (high accuracy)
                lon.append((0, 0, v))
            if k == 7004:
                # descriptor 0-07-004: pressure — stop collecting afterwards
                pres.append((0, 0, v))
                gotit = 1
lons = np.concatenate(lon)
def read_synop(file, params, min=None, max=None):
    """Read BUFR file(s) with synoptical station reports.

    Extracts per-station weather data (cloud base height, visibility,
    temperatures, wind, ...) and filters the requested parameters by the
    given thresholds.

    Arguments:
        file    Bufr file with synop reports
        params  List of parameter names that will be extracted
        min     Threshold for minimum value of parameter
        max     Threshold for maximum value of parameter

    Returns a dict mapping observation time ("%Y%m%d%H%M%S") to a list of
    station records ``[name, altitude, lat, lon] + parameter values``.

    NOTE(review): the parameters ``file``/``min``/``max`` shadow builtins;
    kept unchanged since they are part of the public signature.
    """
    result = {}
    bfr = Bufr("libdwd", os.getenv("BUFR_TABLES"))
    for blob, size, header in load_file.next_bufr(file):
        bfr.decode(blob)
        try:
            for subset in bfr.next_subset():
                stationdict = {}
                for (k, m, v, q) in subset.next_data():
                    if k == 1015:
                        # Station name
                        stationdict['name'] = v.strip()
                    if k == 5001:
                        # Latitude
                        stationdict['lat'] = v
                    if k == 6001:
                        # Longitude
                        stationdict['lon'] = v
                    if k == 7030:
                        # Altitude
                        stationdict['altitude'] = v
                    elif k == 4001:
                        # Year
                        stationdict['year'] = v
                    elif k == 4002:
                        # Month
                        stationdict['month'] = v
                    elif k == 4003:
                        # Day
                        stationdict['day'] = v
                    elif k == 4004:
                        # Hour
                        stationdict['hour'] = v
                    elif k == 4005:
                        # Minute
                        stationdict['minute'] = v
                    elif k == 20003:
                        # Present weather
                        stationdict['present weather'] = v
                        # Values from 40 to 49 are refering to fog and ice fog
                        # Patchy fog or fog edges value 11 or 12
                    elif k == 20004:
                        # Past weather
                        stationdict['past weather'] = v
                        # Values from 40 to 49 are refering to fog and ice fog
                        # Patchy fog or fog edges value 11 or 12
                    elif k == 20013:
                        # Cloud base height: keep the lowest reported value.
                        if v is not None:
                            if ('cbh' in stationdict.keys() and
                                    stationdict["cbh"] is not None):
                                if stationdict['cbh'] > v:
                                    stationdict['cbh'] = v
                            else:
                                stationdict['cbh'] = v
                        else:
                            stationdict['cbh'] = None
                    elif k == 2001:
                        # Auto/manual measurement
                        # 1 - 3 : Manual human observations. Manned stations
                        # 0, 4 - 7 : Only automatic observations
                        stationdict['type'] = v
                    elif k == 20001:
                        # Visibility
                        stationdict['visibility'] = v
                    elif k == 12101:
                        # Mean air temperature in K
                        stationdict['air temperature'] = v
                    elif k == 12103:
                        # Dew point temperature in K
                        stationdict['dew point'] = v
                    elif k == 20010:
                        # Cloud cover in %
                        stationdict['cloudcover'] = v
                    elif k == 13003:
                        # Relative humidity in %
                        stationdict['relative humidity'] = v
                    elif k == 11001:
                        # Wind direction in degree
                        stationdict['wind direction'] = v
                    elif k == 11002:
                        # Wind speed in m s-1
                        stationdict['wind speed'] = v
                    elif k == 1002:
                        # WMO station number
                        stationdict['wmo'] = v
                # Apply thresholds
                stationtime = datetime(
                    stationdict['year'],
                    stationdict['month'],
                    stationdict['day'],
                    stationdict['hour'],
                    stationdict['minute'],
                ).strftime("%Y%m%d%H%M%S")
                paralist = []
                if not isinstance(params, list):
                    params = [params]
                for param in params:
                    # A parameter is dropped (None) when missing or outside
                    # the [min, max) thresholds.
                    if param not in stationdict:
                        res = None
                    elif min is not None and stationdict[param] < min:
                        res = None
                    elif max is not None and stationdict[param] >= max:
                        res = None
                    elif stationdict[param] is None:
                        res = None
                    else:
                        res = stationdict[param]
                    paralist.append(res)
                # Skip stations where no requested parameter survived.
                if all([i is None for i in paralist]):
                    continue
                # Add station data to result list
                if stationtime in result.keys():
                    result[stationtime].append([
                        stationdict['name'],
                        stationdict['altitude'],
                        stationdict['lat'],
                        stationdict['lon']
                    ] + paralist)
                else:
                    result[stationtime] = [[
                        stationdict['name'],
                        stationdict['altitude'],
                        stationdict['lat'],
                        stationdict['lon']
                    ] + paralist]
        except DummyException as e:
            # BUG FIX: the handler body was a bare string literal — a no-op
            # that silently discarded the error.  Actually emit the message.
            print("ERROR: Unresolved station request: {}".format(e))
    return (result)
# Script: decode the BUFR file given on the command line, print its
# meta-data and the first descriptor/value of each subset, then save the
# raw bulletin under a filename derived from its heading.
# NOTE: uses Python 2 "print" statement syntax.
import logging
handler = logging.StreamHandler()
handler.setFormatter(
    logging.Formatter("[%(levelname)s: %(module)s] %(message)s"))
handler.setLevel(logging.WARNING)
logging.getLogger('').setLevel(logging.WARNING)
logging.getLogger('').addHandler(handler)
from trollbufr.bufr import Bufr
from trollbufr import load_file
import sys
if len(sys.argv) != 2:
    print "SYNTAX:", sys.argv[0], "<bufr>"
    sys.exit(1)
testfile = sys.argv[1]
bfr = Bufr("eccodes", "tables")
for blob, size, header in load_file.next_bufr(testfile):
    bfr.decode(blob)
    print "\n", testfile, header, "\n", bfr.get_meta_str()
    for subset in bfr.next_subset():
        for k, m, (v, q) in subset.next_data():
            # Only the first descriptor/value per subset is of interest.
            print k, m, v
            break
    # Save the raw bulletin; spaces in the heading become underscores.
    with open(header.replace(" ", "_"), "w") as fh:
        fh.write(blob[0:])
def read_bufr_data(args):
    """Read BUFR(s), decode data section and write to file-handle.

    Depending on command argument "--array", either process the subsets in
    sequence, which is ideal for un-compressed BUFR, or process each
    descriptor per all subsets at once, which improves performance for
    compressed BUFR.

    Output goes to ``args.out_file``, falling back to stdout when the file
    cannot be opened.  Per-bulletin errors are reported and do not abort
    the remaining bulletins.
    """
    try:
        fh_out = open(args.out_file, "w")
    except (IOError, OSError):
        # BUG FIX: was a bare "except:"; catch only the I/O errors open()
        # can raise, so e.g. KeyboardInterrupt is not swallowed.
        fh_out = sys.stdout
    bufr = Bufr(args.tables_type, args.tables_path)
    try:
        for fn_in in args.in_file:
            print("FILE\t%s" % os.path.basename(fn_in), file=fh_out)
            i = 0
            for blob, size, header in load_file.next_bufr(fn_in):
                # When a specific bulletin index was requested, skip all others.
                if args.bulletin is not None and i != args.bulletin:
                    i += 1
                    continue
                print("BUFR\t#%d (%d B)" % (i, size), file=fh_out)
                i += 1
                print("HEADER\t%s" % header, file=fh_out)
                try:
                    bufr.decode_meta(blob, load_tables=False)
                    tabl = bufr.load_tables()
                    print("META:\n%s" % bufr.get_meta_str(), file=fh_out)
                    # Iterate per-subset; for compressed BUFR with --array the
                    # iterator yields each descriptor across all subsets at once.
                    for report in bufr.next_subset(args.array
                                                   and bufr.is_compressed):
                        # subs_num is expected to be a (current, total) pair.
                        print("SUBSET\t#%d/%d" % report.subs_num, file=fh_out)
                        if args.sparse or (args.array and bufr.is_compressed):
                            # Sparse output: descriptor number and raw value only.
                            for descr_entry in report.next_data():
                                if descr_entry.mark is not None:
                                    # Marker entries (loop/replication markers).
                                    if isinstance(descr_entry.value, list):
                                        descr_value = "".join(
                                            [str(x) for x in descr_entry.value])
                                    else:
                                        descr_value = descr_entry.value
                                    print(" ", descr_entry.mark, descr_value,
                                          end="", file=fh_out)
                                    print(file=fh_out)
                                    continue
                                if descr_entry.value is None:
                                    print("%06d: ///" % (descr_entry.descr),
                                          file=fh_out)
                                elif descr_entry.quality is not None:
                                    print("%06d: %s (%s)"
                                          % (descr_entry.descr,
                                             str(descr_entry.value),
                                             descr_entry.quality),
                                          file=fh_out)
                                else:
                                    print("%06d: %s"
                                          % (descr_entry.descr,
                                             str(descr_entry.value)),
                                          file=fh_out)
                        else:
                            # Full output: resolve descriptor names, units and
                            # code/flag table entries via the loaded tables.
                            for descr_entry in report.next_data():
                                if descr_entry.mark is not None:
                                    if isinstance(descr_entry.value, list):
                                        descr_value = "".join(
                                            [str(x) for x in descr_entry.value])
                                    else:
                                        descr_value = descr_entry.value
                                    print(" ", descr_entry.mark, descr_value,
                                          end="", file=fh_out)
                                    print(file=fh_out)
                                    continue
                                descr_info = tabl.lookup_elem(descr_entry.descr)
                                if descr_info.type in (TabBType.CODE,
                                                       TabBType.FLAG):
                                    if descr_entry.value is None:
                                        print("%06d %-40s = Missing value"
                                              % (descr_entry.descr,
                                                 descr_info.name),
                                              file=fh_out)
                                    else:
                                        v = tabl.lookup_codeflag(
                                            descr_entry.descr,
                                            descr_entry.value)
                                        print("%06d %-40s = %s"
                                              % (descr_entry.descr,
                                                 descr_info.name,
                                                 str(v)),
                                              file=fh_out)
                                else:
                                    # Suppress unit text for plain text/number
                                    # descriptors.
                                    if descr_info.unit in ("CCITT IA5",
                                                           "Numeric"):
                                        dinf_unit = ""
                                    else:
                                        dinf_unit = descr_info.unit
                                    if descr_entry.value is None:
                                        print("%06d %-40s = /// %s"
                                              % (descr_entry.descr,
                                                 descr_info.name,
                                                 dinf_unit),
                                              file=fh_out)
                                    elif descr_entry.quality is not None:
                                        print("%06d %-40s = %s %s (%s)"
                                              % (descr_entry.descr,
                                                 descr_info.name,
                                                 str(descr_entry.value),
                                                 dinf_unit,
                                                 descr_entry.quality),
                                              file=fh_out)
                                    else:
                                        print("%06d %-40s = %s %s"
                                              % (descr_entry.descr,
                                                 descr_info.name,
                                                 str(descr_entry.value),
                                                 dinf_unit),
                                              file=fh_out)
                except Exception as e:
                    print("ERROR\t%s" % e, file=fh_out)
                    if logger.isEnabledFor(logging.DEBUG):
                        logger.exception(e)
                    else:
                        logger.warning(e)
    finally:
        # BUG FIX: close the output file even when an exception escapes the
        # loop (previously the handle leaked on any uncaught error).
        if fh_out is not sys.stdout:
            fh_out.close()
# Script fragment: scan BUFR files matching the glob from argv[1], take the
# first lat/lon of each bulletin's first subset, and highlight IEDX
# bulletins whose position falls inside a rough European box.
# NOTE: uses Python 2 "print" statement syntax; the fragment may continue
# past this excerpt.
handler = logging.StreamHandler()
handler.setFormatter(
    logging.Formatter("[%(levelname)s: %(module)s] %(message)s"))
# handler.setLevel(logging.DEBUG)
# logging.getLogger('').setLevel(logging.DEBUG)
handler.setLevel(logging.WARNING)
logging.getLogger('').setLevel(logging.WARNING)
logging.getLogger('').addHandler(handler)
fp = sys.argv[1]
bfr = Bufr("eccodes", "tables")
for fn in glob.glob(fp):
    print fn
    i = 0
    for blob, size, header in load_file.next_bufr(fn):
        try:
            bfr.decode(blob)
            lon = lat = 0
            for subset in bfr.next_subset():
                for k, m, (v, q) in subset.next_data():
                    # descriptor 0-05-001 latitude / 0-06-001 longitude
                    if k == 5001:
                        lat = v
                    if k == 6001:
                        lon = v
                # Only the first subset is inspected.
                break
            if header.startswith("IEDX"):
                print i, header, lon, lat,
                # Rough bounding box for Europe: lon (-10, 30), lat (50, 70).
                if lon > -10 and lon < 30 and lat > 50 and lat < 70:
                    print "<------"