def message_set(self, item: str, value: T.Any) -> None:
    """Set key *item* on the underlying ecCodes handle.

    Array-like values (numpy arrays, sequences) are written with
    ``codes_set_array``; scalars with ``codes_set``.
    """
    # str AND bytes are Sequences but must be treated as scalars here,
    # otherwise ecCodes would try to encode them element-wise.  The
    # original excluded only str; bytes is now excluded too, matching
    # the sibling message_set implementation in this file.
    arr = isinstance(value, (np.ndarray, T.Sequence)) and not isinstance(
        value, (str, bytes)
    )
    if arr:
        eccodes.codes_set_array(self.codes_id, item, value)
    else:
        eccodes.codes_set(self.codes_id, item, value)
def message_set(self, item, value):
    # type: (str, T.Any) -> None
    """Write *value* under key *item*, choosing scalar vs array encoding."""
    # Text-like values are scalars even though they are Sequences.
    is_scalar = isinstance(value, (str, bytes)) or not isinstance(
        value, T.Sequence
    )
    if is_scalar:
        eccodes.codes_set(self.codes_id, item, value)
    else:
        eccodes.codes_set_array(self.codes_id, item, value)
def copy_needed_field(gid, fout):
    """Copy the needed field"""
    # Grid geometry of the incoming message.
    ni = ecc.codes_get(gid, 'Ni')
    nj = ecc.codes_get(gid, 'Nj')
    lat_first = ecc.codes_get(gid, 'latitudeOfFirstGridPointInDegrees')
    dlat = ecc.codes_get(gid, 'jDirectionIncrementInDegrees')
    filter_north = 0
    # Number of latitude rows to keep down to filter_north.
    rows_kept = int((lat_first - filter_north) / dlat) + 1
    grid = np.reshape(ecc.codes_get_values(gid), (nj, ni))
    trimmed = grid[:rows_kept, :]
    # Clone the message, shrink it to the kept rows and write it out.
    clone_id = ecc.codes_clone(gid)
    ecc.codes_set(clone_id, 'latitudeOfLastGridPointInDegrees', (filter_north))
    ecc.codes_set(clone_id, 'Nj', rows_kept)
    ecc.codes_set_values(clone_id, trimmed.flatten())
    ecc.codes_write(clone_id, fout)
    ecc.codes_release(clone_id)
def test_grib_codes_set_missing():
    """Keys flagged with codes_set_missing must report as missing."""
    gid = eccodes.codes_grib_new_from_samples("reduced_rotated_gg_ml_grib2")
    eccodes.codes_set(gid, "typeOfFirstFixedSurface", "sfc")
    eccodes.codes_set_missing(gid, "scaleFactorOfFirstFixedSurface")
    eccodes.codes_set_missing(gid, "scaledValueOfFirstFixedSurface")
    # BUG FIX: was eccodes.eccodes.codes_is_missing (double module
    # attribute), which raises AttributeError at runtime.
    assert eccodes.codes_is_missing(gid, "scaleFactorOfFirstFixedSurface")
    # Release the handle like the other tests in this file do.
    eccodes.codes_release(gid)
def get_array(self, key):
    """Get all data from file for the given BUFR key."""
    with open(self.filename, "rb") as fh:
        count = 0
        while True:
            bufr = ec.codes_bufr_new_from_file(fh)
            if bufr is None:
                break
            ec.codes_set(bufr, 'unpack', 1)
            values = ec.codes_get_array(bufr, key, float)
            # Scalar keys are expanded so every message contributes a row.
            if len(values) == 1:
                values = np.repeat(values, 120)
            chunk = da.from_array([values], chunks=CHUNK_SIZE)
            # First message starts the array; later ones are appended.
            if count == 0:
                arr = chunk
            else:
                arr = da.concatenate((arr, chunk), axis=0)
            count += 1
            ec.codes_release(bufr)
    if arr.size == 1:
        arr = arr[0]
    return arr
def test_grib_write(tmpdir):
    """Write a modified GRIB2 sample message to a temporary file."""
    gid = eccodes.codes_grib_new_from_samples("GRIB2")
    eccodes.codes_set(gid, "backgroundProcess", 44)
    target = tmpdir.join("test_grib_write.grib")
    with open(str(target), "wb") as fout:
        eccodes.codes_write(gid, fout)
    eccodes.codes_release(gid)
def test_grib_set_error():
    """Setting keys with wrongly-typed values must raise."""
    gid = eccodes.codes_grib_new_from_samples("regular_ll_sfc_grib1")
    # A string value is rejected by the typed setters.
    for typed_setter in (eccodes.codes_set_long, eccodes.codes_set_double):
        with pytest.raises(TypeError):
            typed_setter(gid, "centre", "kwbc")
    # An empty list is rejected by the generic setter.
    with pytest.raises(eccodes.CodesInternalError):
        eccodes.codes_set(gid, "centre", [])
def seviri_l2_bufr_test(self, ):
    """Test the SEVIRI BUFR handler."""
    from satpy.readers.seviri_l2_bufr import SeviriL2BufrFileHandler
    import eccodes as ec
    # Build a synthetic BUFR message from the local-satellite sample.
    buf1 = ec.codes_bufr_new_from_samples('BUFR4_local_satellite')
    # NOTE(review): 'unpac' 'k' is implicit string-literal concatenation,
    # equivalent to 'unpack' — consider writing it as a single literal.
    ec.codes_set(buf1, 'unpac' 'k', 1)
    # Random but plausible brightness temperatures and coordinates.
    samp1 = np.random.uniform(low=250, high=350, size=(128, ))
    samp2 = np.random.uniform(low=-60, high=60, size=(128, ))
    samp3 = np.random.uniform(low=10, high=60, size=(128, ))
    # write the bufr test data twice as we want to read in and the concatenate the data in the reader
    ec.codes_set_array(buf1, '#1#brightnessTemperature', samp1)
    ec.codes_set_array(buf1, '#1#brightnessTemperature', samp1)
    ec.codes_set_array(buf1, 'latitude', samp2)
    ec.codes_set_array(buf1, 'latitude', samp2)
    ec.codes_set_array(buf1, 'longitude', samp3)
    ec.codes_set_array(buf1, 'longitude', samp3)
    m = mock.mock_open()
    # Patch the MPEF header read so the handler receives our fake header.
    with mock.patch(
            'satpy.readers.seviri_l2_bufr.np.fromfile') as fromfile:
        fromfile.return_value = MPEF_PRODUCT_HEADER
        with mock.patch('satpy.readers.seviri_l2_bufr.recarray2dict'
                        ) as recarray2dict:
            recarray2dict.side_effect = (lambda x: x)
            fh = SeviriL2BufrFileHandler(None, {}, FILETYPE_INFO)
            fh.mpef_header = MPEF_PRODUCT_HEADER
            with mock.patch('satpy.readers.seviri_l2_bufr.open', m,
                            create=True):
                # Each read pass returns two messages then EOF (None);
                # three passes are queued for the three dataset reads.
                with mock.patch('eccodes.codes_bufr_new_from_file',
                                side_effect=[
                                    buf1, buf1, None, buf1, buf1, None,
                                    buf1, buf1, None
                                ]) as ec1:
                    ec1.return_value = ec1.side_effect
                    with mock.patch('eccodes.codes_set') as ec2:
                        ec2.return_value = 1
                        with mock.patch('eccodes.codes_release') as ec5:
                            ec5.return_value = 1
                            z = fh.get_dataset(None, DATASET_INFO)
                            # concatenate the original test arrays as
                            # get dataset will have read and concatented the data
                            x1 = np.concatenate((samp1, samp1), axis=0)
                            x2 = np.concatenate((samp2, samp2), axis=0)
                            x3 = np.concatenate((samp3, samp3), axis=0)
                            np.testing.assert_array_equal(z.values, x1)
                            np.testing.assert_array_equal(
                                z.coords['latitude'].values, x2)
                            np.testing.assert_array_equal(
                                z.coords['longitude'].values, x3)
                            # Metadata should come through unchanged.
                            self.assertEqual(
                                z.attrs['platform_name'],
                                DATASET_ATTRS['platform_name'])
                            self.assertEqual(z.attrs['ssp_lon'],
                                             DATASET_ATTRS['ssp_lon'])
                            self.assertEqual(z.attrs['seg_size'],
                                             DATASET_ATTRS['seg_size'])
def __init__(self, filename, with_adef=False, rect_lon='default'):
    """Initialize by mocking test data for testing the SEVIRI L2 BUFR reader."""
    import eccodes as ec
    from satpy.readers.seviri_l2_bufr import SeviriL2BufrFileHandler
    # Synthetic message built from the local-satellite BUFR sample.
    self.buf1 = ec.codes_bufr_new_from_samples('BUFR4_local_satellite')
    ec.codes_set(self.buf1, 'unpack', 1)
    # write the bufr test data twice as we want to read in and the concatenate the data in the reader
    # 55 id corresponds to METEOSAT 8`
    ec.codes_set(self.buf1, 'satelliteIdentifier', 55)
    ec.codes_set_array(self.buf1, 'latitude', LAT)
    ec.codes_set_array(self.buf1, 'latitude', LAT)
    ec.codes_set_array(self.buf1, 'longitude', LON)
    ec.codes_set_array(self.buf1, 'longitude', LON)
    ec.codes_set_array(self.buf1, '#1#brightnessTemperature', DATA)
    ec.codes_set_array(self.buf1, '#1#brightnessTemperature', DATA)
    self.m = mock.mock_open()
    # only our offline product contain MPEF product headers so we get the metadata from there
    if ('BUFRProd' in filename):
        with mock.patch(
                'satpy.readers.seviri_l2_bufr.np.fromfile') as fromfile:
            fromfile.return_value = MPEF_PRODUCT_HEADER
            with mock.patch('satpy.readers.seviri_l2_bufr.recarray2dict'
                            ) as recarray2dict:
                recarray2dict.side_effect = (lambda x: x)
                self.fh = SeviriL2BufrFileHandler(
                    filename, FILENAME_INFO2, FILETYPE_INFO,
                    with_area_definition=with_adef,
                    rectification_longitude=rect_lon)
                self.fh.mpef_header = MPEF_PRODUCT_HEADER
    else:
        # No Mpef Header so we get the metadata from the BUFR messages
        with mock.patch('satpy.readers.seviri_l2_bufr.open', self.m,
                        create=True):
            # Each read pass yields one message then EOF (None);
            # three passes are queued.
            with mock.patch('eccodes.codes_bufr_new_from_file',
                            side_effect=[
                                self.buf1, None, self.buf1, None,
                                self.buf1, None
                            ]) as ec1:
                ec1.return_value = ec1.side_effect
                with mock.patch('eccodes.codes_set') as ec2:
                    ec2.return_value = 1
                    with mock.patch('eccodes.codes_release') as ec5:
                        ec5.return_value = 1
                        self.fh = SeviriL2BufrFileHandler(
                            filename, FILENAME_INFO, FILETYPE_INFO,
                            with_area_definition=with_adef,
                            rectification_longitude=rect_lon)
def test_grib_uuid_get_set():
    # ECC-1167
    """UUID keys default to zeros and round-trip lower-cased hex."""
    gid = eccodes.codes_grib_new_from_samples("GRIB2")
    eccodes.codes_set(gid, "gridType", "unstructured_grid")
    assert (eccodes.codes_get_string(gid, "uuidOfHGrid")
            == "00000000000000000000000000000000")
    # Mixed-case input is normalised to lower case on read-back.
    eccodes.codes_set_string(gid, "uuidOfHGrid",
                             "DEfdBEef10203040b00b1e50001100FF")
    assert (eccodes.codes_get_string(gid, "uuidOfHGrid")
            == "defdbeef10203040b00b1e50001100ff")
    eccodes.codes_release(gid)
def test_bufr_multi_element_constant_arrays():
    """Constant-array mode controls the reported size of constant keys."""
    # Off: a constant key is reported exactly once.
    eccodes.codes_bufr_multi_element_constant_arrays_off()
    handle = eccodes.codes_bufr_new_from_samples("BUFR3_local_satellite")
    eccodes.codes_set(handle, "unpack", 1)
    assert eccodes.codes_get_size(handle, "satelliteIdentifier") == 1
    eccodes.codes_release(handle)
    # On: the constant key is replicated once per subset.
    eccodes.codes_bufr_multi_element_constant_arrays_on()
    handle = eccodes.codes_bufr_new_from_samples("BUFR3_local_satellite")
    eccodes.codes_set(handle, "unpack", 1)
    subsets = eccodes.codes_get(handle, "numberOfSubsets")
    assert eccodes.codes_get_size(handle, "satelliteIdentifier") == subsets
    eccodes.codes_release(handle)
def clone_with_new_values(self, values):
    """Return a new instance backed by a clone of this GRIB holding *values*."""
    fd, path = tempfile.mkstemp(suffix=".tmp.grib")
    with os.fdopen(fd, "wb") as out, load_grib(self.path) as gid:
        cloned = codes_clone(gid)
        # Use single-precision floating-point representation
        codes_set(cloned, "bitsPerValue", 32)
        codes_set_values(cloned, values)
        codes_write(cloned, out)
        codes_release(cloned)
    return type(self)(path)
def test_codes_bufr_key_is_header():
    """Header keys are recognised before unpack; data keys only after."""
    bid = eccodes.codes_bufr_new_from_samples("BUFR4_local_satellite")
    for header_key in ("edition", "satelliteID", "unexpandedDescriptors"):
        assert eccodes.codes_bufr_key_is_header(bid, header_key)
    # Data-section keys are unknown until the message is unpacked.
    with pytest.raises(eccodes.KeyValueNotFoundError):
        eccodes.codes_bufr_key_is_header(bid, "satelliteSensorIndicator")
    eccodes.codes_set(bid, "unpack", 1)
    for data_key in ("satelliteSensorIndicator", "#6#brightnessTemperature"):
        assert not eccodes.codes_bufr_key_is_header(bid, data_key)
def extract_msg_date_extremes(self, bufr, date_min=None, date_max=None):
    """Extract the minimum and maximum dates from a single bufr message."""
    ec.codes_set(bufr, 'unpack', 1)
    size = ec.codes_get(bufr, 'numberOfSubsets')
    # Each component is resized so scalar keys cover every subset.
    components = [
        np.resize(ec.codes_get_array(bufr, name), size)
        for name in ('year', 'month', 'day', 'hour', 'minute', 'second')
    ]
    for year, month, day, hour, minute, second in zip(*components):
        stamp = datetime(year, month, day, hour, minute, second)
        date_min = stamp if not date_min else min(date_min, stamp)
        date_max = stamp if not date_max else max(date_max, stamp)
    return date_min, date_max
def save_test_data(path):
    """Save the test file to the indicated directory."""
    import eccodes as ec
    filepath = os.path.join(path, FILENAME)
    with open(filepath, "wb") as f:
        for msg in [MSG]:
            buf = ec.codes_bufr_new_from_samples('BUFR4_local_satellite')
            # Scalars and arrays need different ecCodes setters.
            for key, val in msg.items():
                if np.isscalar(val):
                    ec.codes_set(buf, key, val)
                else:
                    ec.codes_set_array(buf, key, val)
            ec.codes_set(buf, 'pack', 1)
            ec.codes_write(buf, f)
            ec.codes_release(buf)
    return filepath
def get_attribute(self, key):
    """Get BUFR attributes.

    Reads every message in the file and returns the value of *key*
    from the last message (breaking out of the loop early has caused
    problems in the past, so the whole file is scanned).
    Returns None if the file contains no messages.
    """
    # This function is inefficient as it is looping through the entire
    # file to get 1 attribute.
    # FIX: use a context manager so the file handle is closed even if
    # codes_get raises; previously an exception leaked the handle.
    attr = None
    with open(self.filename, "rb") as fh:
        while True:
            # get handle for message
            bufr = ec.codes_bufr_new_from_file(fh)
            if bufr is None:
                break
            ec.codes_set(bufr, 'unpack', 1)
            attr = ec.codes_get(bufr, key)
            ec.codes_release(bufr)
    return attr
def test_grib_multi_field_write(tmpdir):
    """Append a range of steps into one multi-field GRIB file."""
    # Note: eccodes.codes_grib_multi_new() calls codes_grib_multi_support_on()
    # hence the 'finally' block
    try:
        gid = eccodes.codes_grib_new_from_samples("GRIB2")
        mgid = eccodes.codes_grib_multi_new()
        section_num = 4
        for step in range(12, 132, 12):
            eccodes.codes_set(gid, "step", step)
            eccodes.codes_grib_multi_append(gid, section_num, mgid)
        target = tmpdir.join("test_grib_multi_field_write.grib2")
        with open(str(target), "wb") as fout:
            eccodes.codes_grib_multi_write(mgid, fout)
        eccodes.codes_grib_multi_release(mgid)
        eccodes.codes_release(gid)
    finally:
        eccodes.codes_grib_multi_support_off()
def get_bufr_data(self, key):
    """Get BUFR data by key."""
    result = np.array([])
    with open(self.filename, 'rb') as fh:
        while True:
            # get handle for message
            bufr = ec.codes_bufr_new_from_file(fh)
            if bufr is None:
                break
            ec.codes_set(bufr, 'unpack', 1)
            values = ec.codes_get_array(bufr, key, float)
            # A scalar key is replicated once per subset.
            if len(values) == 1:
                subsets = ec.codes_get(bufr, 'numberOfSubsets')
                values = np.resize(values, subsets)
            result = np.append(result, values)
            ec.codes_release(bufr)
    return result
def decode_file(filename, report_type, field_lists):
    '''Returns a list of tuples, where each tuple is (message_data,
    report_count). message_data is dictionary of name: values where the name
    is the field and the values is a masked numpy array for all reports.
    Note the length of these arrays is not necessarily the same as the number
    of reports. Accumulated fields will have names like rain@60min or
    maxtemp@6h'''
    if not os.path.exists(filename):
        LOGGER.error('decode_file could not find file: %s', filename)
        return []
    if report_type == 'metar':
        # metar is so specialised that it has it's own decoding methods
        return decode_metar_file(filename, field_lists)
    # open bufr file
    # FIX: BUFR is a binary format — the file must be opened in 'rb',
    # otherwise newline translation corrupts reads on some platforms.
    result = []
    with open(filename, 'rb') as bufr_file:
        # loop through the messages in the file
        for each_message in range(eccodes.codes_count_in_file(bufr_file)):
            # get handle for message
            msgid = eccodes.codes_bufr_new_from_file(bufr_file)
            if msgid is None:
                break  # decoded all of the messages in the file
            try:
                # we need to instruct ecCodes to expand all the BUFR descriptors
                try:
                    eccodes.codes_set(msgid, 'unpack', 1)
                except eccodes.CodesInternalError:
                    LOGGER.warning('failed to unpack message in %s', filename)
                    break
                message_data, message_report_count = process_message(
                    msgid, field_lists)
                if message_report_count > 0:
                    # special temp decoding, sorry...
                    if report_type == 'temp':
                        message_data = decode_temp_report(
                            message_data, message_report_count)
                    result.append((message_data, message_report_count))
                LOGGER.debug('found %i reports in message[%d] of %s',
                             message_report_count, each_message,
                             os.path.basename(filename))
            finally:
                # release the handle for this message
                eccodes.codes_release(msgid)
    return result
def production_step(idx, values, fout):
    '''Compute z at half & full level for the given level, based on t/q/sp'''
    # We integrate up into the atmosphere starting at the ground, so we
    # begin at the lowest level (highest number) and accumulate height
    # as we go.  See the IFS documentation, part III.
    # For speed and file I/O the computations use numpy vectors rather
    # than fieldsets.
    z_h = values['z']
    for lev in range(values['nlevels'], 0, -1):
        z_h, z_f = compute_z_level(idx, lev, values, z_h)
        # store the result (z_f) in a field and add to the output
        if values['levelist'] == '' or str(lev) in values['levelist']:
            codes_set(values['sample'], 'level', lev)
            codes_set_values(values['sample'], z_f)
            codes_write(values['sample'], fout)
def get_next_msg(self):
    # #[
    """
    step to the next BUFR message in the open file
    """
    print('getting next message')
    if self.msg_loaded < self.num_msgs:
        self.msg_loaded += 1
        # get an instance of the eccodes bufr class
        self.bufr_id = eccodes.codes_bufr_new_from_file(self.fd)
        print('self.bufr_id = ', self.bufr_id)
        # Unexpected end of file: stop iterating.
        if self.bufr_id is None:
            raise StopIteration
    else:
        # All messages consumed: reset state and stop iterating.
        self.msg_loaded = -1
        self.bufr_id = -1
        raise StopIteration
    # unpack this bufr message
    # (only reached when a message was successfully loaded above)
    eccodes.codes_set(self.bufr_id, 'unpack', 1)
def test_bufr_keys_iterator():
    """Walk BUFR keys before and after unpacking the data section."""
    bid = eccodes.codes_bufr_new_from_samples("BUFR3_local_satellite")
    iterid = eccodes.codes_bufr_keys_iterator_new(bid)
    # Header keys only: none of them carry a '#rank#' prefix.
    header_count = 0
    while eccodes.codes_bufr_keys_iterator_next(iterid):
        assert "#" not in eccodes.codes_bufr_keys_iterator_get_name(iterid)
        header_count += 1
    # assert count == 54
    eccodes.codes_set(bid, "unpack", 1)
    eccodes.codes_bufr_keys_iterator_rewind(iterid)
    # After unpack the iterator also visits data-section keys.
    data_count = 0
    while eccodes.codes_bufr_keys_iterator_next(iterid):
        eccodes.codes_bufr_keys_iterator_get_name(iterid)
        data_count += 1
    # assert count == 157
    eccodes.codes_bufr_keys_iterator_rewind(iterid)
    eccodes.codes_bufr_keys_iterator_delete(iterid)
    eccodes.codes_release(bid)
def __setitem__(self, key, value):
    """
    Set value associated with key.

    Iterables and scalars are handled intelligently.
    """
    if not isinstance(key, str):
        # Parallel key/value arrays: encode them as "k=v" pairs.
        if len(key) != len(value):
            raise ValueError(
                "Key array must have same size as value array")
        pairs = ",".join(
            str(k) + "=" + str(v) for k, v in zip(key, value))
        eccodes.codes_set_key_vals(self.codes_id, pairs)
    elif hasattr(value, "__iter__") and not isinstance(value, str):
        eccodes.codes_set_array(self.codes_id, key, value)
    else:
        eccodes.codes_set(self.codes_id, key, value)
def generate_grib(target, **kwargs):
    """Expand a keyword request over a dummy GRIB and write all fields."""
    import eccodes
    # Normalise every request value to a list for iterate_request.
    for key, val in list(kwargs.items()):
        if not isinstance(val, (list, tuple)):
            kwargs[key] = [val]
    handle = None
    try:
        sample = os.path.join(os.path.dirname(__file__), "dummy.grib")
        with open(sample, "rb") as f:
            handle = eccodes.codes_new_from_file(f, eccodes.CODES_PRODUCT_GRIB)
        with open(target, "wb") as f:
            for request in iterate_request(kwargs):
                for key, val in request.items():
                    eccodes.codes_set(handle, key, val)
                eccodes.codes_write(handle, f)
    finally:
        # Release the handle even if writing fails part-way.
        if handle is not None:
            eccodes.codes_release(handle)
def test_bufr_set_float():
    """numpy float32/float16 scalars are accepted by codes_set."""
    ibufr = eccodes.codes_bufr_new_from_samples("BUFR4")
    eccodes.codes_set(ibufr, "unpack", 1)
    for precip in (np.float32(1.26e04), np.float16(1.27e04)):
        eccodes.codes_set(ibufr, "totalPrecipitationPast24Hours", precip)
    eccodes.codes_release(ibufr)
def get_start_end_date(self):
    """Get the first and last date from the bufr file.

    Returns a (start_time, end_time) tuple of datetimes taken from the
    first and last messages; both are None if the file has no messages.
    """
    # FIX: use a context manager (previously the handle leaked on
    # exception) and release every message handle (previously only the
    # first message was released).
    start_time = None
    end_time = None
    with open(self.filename, "rb") as fh:
        while True:
            # get handle for message
            bufr = ec.codes_bufr_new_from_file(fh)
            if bufr is None:
                break
            ec.codes_set(bufr, 'unpack', 1)
            obs_time = datetime(year=ec.codes_get(bufr, 'year'),
                                month=ec.codes_get(bufr, 'month'),
                                day=ec.codes_get(bufr, 'day'),
                                hour=ec.codes_get(bufr, 'hour'),
                                minute=ec.codes_get(bufr, 'minute'),
                                second=ec.codes_get(bufr, 'second'))
            if start_time is None:
                start_time = obs_time
            end_time = obs_time
            ec.codes_release(bufr)
    return start_time, end_time
def repack(input_file, outfile, packing_type):
    """Repack infile with packing_type, write result to outfile."""
    # FIX: GRIB is a binary format — both the input and output files
    # must be opened in binary mode ('rb'/'wb'/'ab'); text mode corrupts
    # the stream on platforms with newline translation.
    with open(input_file, "rb") as infile:
        i = 1
        while True:
            in_gid = codes_grib_new_from_file(infile)
            if in_gid is None:
                break
            info("Repacking GRIB #{}".format(i))
            payload = codes_get_values(in_gid)
            clone_id = codes_clone(in_gid)
            codes_set(clone_id, "packingType", packing_type)
            codes_set_values(clone_id, payload)
            # First message truncates the output; later ones append.
            mode = "wb" if i == 1 else "ab"
            with open(outfile, mode) as output:
                codes_write(clone_id, output)
            codes_release(clone_id)
            codes_release(in_gid)
            i += 1
    if not confirm_packing_type(outfile, packing_type):
        raise EncodingError("Reencoding silently failed.")
def test_bufr_read_write(tmpdir):
    """Round-trip: unpack a sample BUFR, modify a value, pack and write."""
    bid = eccodes.codes_new_from_samples("BUFR4", eccodes.CODES_PRODUCT_BUFR)
    eccodes.codes_set(bid, "unpack", 1)
    assert eccodes.codes_get(bid, "typicalYear") == 2012
    assert eccodes.codes_get(bid, "centre", str) == "ecmf"
    eccodes.codes_set(bid, "totalSunshine", 13)
    eccodes.codes_set(bid, "pack", 1)
    target = tmpdir.join("test_bufr_write.bufr")
    with open(str(target), "wb") as fout:
        eccodes.codes_write(bid, fout)
    # The modified value survives packing.
    assert eccodes.codes_get(bid, "totalSunshine") == 13
    eccodes.codes_release(bid)
def test_bufr_set_string_array():
    """A string array round-trips through set_array / pack / get_string_array."""
    ibufr = eccodes.codes_bufr_new_from_samples("BUFR3_local_satellite")
    eccodes.codes_set(ibufr, "numberOfSubsets", 3)
    eccodes.codes_set(ibufr, "unexpandedDescriptors", 307022)
    inputVals = ("ARD2-LPTR", "EPFL-LPTR", "BOU2-LPTR")
    eccodes.codes_set_array(ibufr, "stationOrSiteName", inputVals)
    eccodes.codes_set(ibufr, "pack", 1)
    outputVals = eccodes.codes_get_string_array(ibufr, "stationOrSiteName")
    # Same length, same order, same content.
    assert tuple(outputVals) == inputVals
    eccodes.codes_release(ibufr)
def test_bufr_codes_is_missing():
    """codes_is_missing reflects missing/present state before and after set."""
    # BUG FIX: was eccodes.eccodes.codes_bufr_new_from_samples (double
    # module attribute), which raises AttributeError at runtime.
    bid = eccodes.codes_bufr_new_from_samples("BUFR4_local")
    eccodes.codes_set(bid, "unpack", 1)
    assert eccodes.codes_is_missing(bid, "heightOfBarometerAboveMeanSeaLevel") == 1
    assert eccodes.codes_is_missing(bid, "blockNumber") == 1
    assert eccodes.codes_is_missing(bid, "stationOrSiteName") == 1
    assert eccodes.codes_is_missing(bid, "unexpandedDescriptors") == 0
    assert eccodes.codes_is_missing(bid, "ident") == 0
    # After assigning a value the key is no longer missing.
    eccodes.codes_set(bid, "stationOrSiteName", "Barca")
    eccodes.codes_set(bid, "pack", 1)
    assert eccodes.codes_is_missing(bid, "stationOrSiteName") == 0
    eccodes.codes_release(bid)
def __init__(self, bufrfile, variables, valid_dtg, valid_range, lonrange=None, latrange=None, label=""):
    # Purpose: read SYNOP-style reports for *variables* from *bufrfile*,
    # keep those inside the lon/lat ranges and the time window around
    # valid_dtg, and initialise the parent ObservationSet with them.
    if lonrange is None:
        lonrange = [-180, 180]
    if latrange is None:
        latrange = [-90, 90]
    if eccodes is None:
        raise Exception("ECCODES not found. Needed for bufr reading")
    # open bufr file
    # NOTE(review): BUFR is binary; this opens in text mode — it should
    # probably be open(bufrfile, "rb"). Confirm before changing.
    f = open(bufrfile)
    # define the keys to be printed
    keys = [
        # 'blockNumber',
        # 'stationNumber',
        'latitude',
        'longitude',
        'year',
        'month',
        'day',
        'hour',
        'minute',
        'heightOfStationGroundAboveMeanSeaLevel',
        'heightOfStation',
        'stationNumber',
        'blockNumber'
    ]
    # Per-variable counters for the various rejection reasons.
    nerror = {}
    ntime = {}
    nundef = {}
    ndomain = {}
    nobs = {}
    for var in variables:
        if var == "relativeHumidityAt2M":
            # RH is derived from T2m and Td2m rather than read directly.
            keys.append("airTemperatureAt2M")
            keys.append("dewpointTemperatureAt2M")
        else:
            keys.append(var)
        nerror.update({var: 0})
        ntime.update({var: 0})
        nundef.update({var: 0})
        ndomain.update({var: 0})
        nobs.update({var: 0})
    # The cloud information is stored in several blocks in the
    # SYNOP message and the same key means a different thing in different
    # parts of the message. In this example we will read the first
    # cloud block introduced by the key
    # verticalSignificanceSurfaceObservations=1.
    # We know that this is the first occurrence of the keys we want to
    # read so in the list above we used the # (occurrence) operator
    # accordingly.
    print("Reading " + bufrfile)
    print("Looking for keys: " + str(keys))
    cnt = 0
    observations = list()
    # loop for the messages in the file
    # nerror = 0
    # ndomain = 0
    # nundef = 0
    # ntime = 0
    not_decoded = 0
    # removed = 0
    while 1:
        # get handle for message
        bufr = eccodes.codes_bufr_new_from_file(f)
        if bufr is None:
            break
        # print("message: %s" % cnt)
        # we need to instruct ecCodes to expand all the descriptors
        # i.e. unpack the data values
        try:
            eccodes.codes_set(bufr, 'unpack', 1)
            decoded = True
        except eccodes.CodesInternalError as err:
            not_decoded = not_decoded + 1
            print('Error with key="unpack" : %s' % err.msg)
            decoded = False
        # print the values for the selected keys from the message
        if decoded:
            # Defaults mark each field as "not found" until read.
            lat = np.nan
            lon = np.nan
            value = np.nan
            elev = np.nan
            year = -1
            month = -1
            day = -1
            hour = -1
            minute = -1
            stid = "NA"
            station_number = -1
            block_number = -1
            t2m = np.nan
            td2m = np.nan
            # rh2m = np.nan
            sd = np.nan
            # all_found = True
            for key in keys:
                try:
                    val = eccodes.codes_get(bufr, key)
                    # if val != CODES_MISSING_DOUBLE:
                    #     print(' %s: %s' % (key,val))
                    if val == eccodes.CODES_MISSING_DOUBLE or val == eccodes.CODES_MISSING_LONG:
                        val = np.nan
                    if key == "latitude":
                        lat = val
                    if key == "longitude":
                        lon = val
                    if key == "year":
                        year = val
                    if key == "month":
                        month = val
                    if key == "day":
                        day = val
                    if key == "hour":
                        hour = val
                    if key == "minute":
                        minute = val
                    if key == "heightOfStation":
                        elev = val
                    if key == "heightOfStationGroundAboveMeanSeaLevel":
                        elev = val
                    if key == "stationNumber":
                        station_number = val
                    if key == "blockNumber":
                        block_number = val
                    if key == "airTemperatureAt2M":
                        t2m = val
                    if key == "dewpointTemperatureAt2M":
                        td2m = val
                    if key == "totalSnowDepth":
                        sd = val
                except eccodes.CodesInternalError:
                    pass
                    # all_found = False
                    # print('Report does not contain key="%s" : %s' % (key, err.msg))
            # Assign value to var
            for var in variables:
                if var == "relativeHumidityAt2M":
                    if not np.isnan(t2m) and not np.isnan(td2m):
                        # Convert dewpoint/temperature to RH fraction.
                        value = self.td2rh(td2m, t2m)
                        value = value * 0.01
                elif var == "airTemperatureAt2M":
                    value = t2m
                elif var == "totalSnowDepth":
                    value = sd
                else:
                    raise NotImplementedError("Var " + var + " is not coded! Please do it!")
            # A report is usable only if every required field was present.
            all_found = True
            if np.isnan(lat):
                all_found = False
            if np.isnan(lon):
                all_found = False
            if year == -1:
                all_found = False
            if month == -1:
                all_found = False
            if day == -1:
                all_found = False
            if hour == -1:
                all_found = False
            if minute == -1:
                all_found = False
            if np.isnan(elev):
                all_found = False
            if np.isnan(value):
                all_found = False
            if not all_found:
                nerror.update({var: nerror[var] + 1})
            # print(lon, lonrange[0], lonrange[1], lat, latrange[0],latrange[1])
            if latrange[0] <= lat <= latrange[1] and lonrange[0] <= lon <= lonrange[1]:
                obs_dtg = datetime(year=year, month=month, day=day, hour=hour, minute=minute)
                # print(value)
                if not np.isnan(value):
                    if self.inside_window(obs_dtg, valid_dtg, valid_range):
                        # print(valid_dtg, lon, lat, value, elev, stid)
                        # WMO station id = block number * 1000 + station number.
                        if station_number > 0 and block_number > 0:
                            stid = str((block_number * 1000) + station_number)
                        observations.append(surfex.obs.Observation(obs_dtg, lon, lat, value,
                                                                   elev=elev, stid=stid,
                                                                   varname=var))
                        nobs.update({var: nobs[var] + 1})
                    else:
                        ntime.update({var: ntime[var] + 1})
                else:
                    nundef.update({var: nundef[var] + 1})
            else:
                ndomain.update({var: ndomain[var] + 1})
            cnt += 1
            # Progress indicator every 1000 messages.
            if (cnt % 1000) == 0:
                print('.', end='')
                sys.stdout.flush()
        # delete handle
        eccodes.codes_release(bufr)
    print("\nFound " + str(len(observations)) + "/" + str(cnt))
    print("Not decoded: " + str(not_decoded))
    for var in variables:
        print("\nObservations for var=" + var + ": " + str(nobs[var]))
        print("Observations removed because of domain check: " + str(ndomain[var]))
        print("Observations removed because of not being defined/found: " + str(nundef[var]))
        print("Observations removed because of time window: " + str(ntime[var]))
        print("Messages not containing information on all keys: " + str(nerror[var]))
    # close the file
    f.close()
    surfex.obs.ObservationSet.__init__(self, observations, label=label)
def readBufrFile(bufrFile, var, lonrange, latrange):
    """Read observations of *var* from a BUFR file.

    Returns a list of ``observation`` objects for reports that fall
    inside the given lon/lat ranges and have a defined value.
    """
    # open bufr file
    # FIX: BUFR is binary — open in 'rb' (was text mode).
    f = open(bufrFile, "rb")
    # define the keys to be printed
    keys = [
        # 'blockNumber',
        # 'stationNumber',
        'latitude',
        'longitude',
        'heightOfStation'
    ]
    # 'airTemperatureAt2M',
    # 'relativeHumidity',
    # 'totalSnowDepth'
    keys.append(var)
    # The cloud information is stored in several blocks in the
    # SYNOP message and the same key means a different thing in different
    # parts of the message. In this example we will read the first
    # cloud block introduced by the key
    # verticalSignificanceSurfaceObservations=1.
    # We know that this is the first occurrence of the keys we want to
    # read so in the list above we used the # (occurrence) operator
    # accordingly.
    print("Reading " + bufrFile)
    print("Looking for keys: " + str(keys))
    cnt = 0
    observations = list()
    # loop for the messages in the file
    not_found = 0
    while 1:
        # get handle for message
        bufr = codes_bufr_new_from_file(f)
        if bufr is None:
            break
        # we need to instruct ecCodes to expand all the descriptors
        # i.e. unpack the data values
        codes_set(bufr, 'unpack', 1)
        # print the values for the selected keys from the message
        lat = np.nan
        lon = np.nan
        value = np.nan
        elev = np.nan
        for key in keys:
            try:
                val = codes_get(bufr, key)
                # Missing values are normalised to NaN.
                if val == CODES_MISSING_DOUBLE or val == CODES_MISSING_LONG:
                    val = np.nan
                if key == "latitude":
                    lat = val
                if key == "longitude":
                    lon = val
                if key == "heightOfStation":
                    elev = val
                if key == var:
                    value = val
                    if var == "relativeHumidity":
                        # BUG FIX: previously assigned to 'values' (typo),
                        # so the 100 % cap never took effect.
                        if value > 100:
                            value = 100.
                    elif var == "airTemperatureAt2M":
                        # Kelvin -> Celsius.
                        value = value - 273.15
            except CodesInternalError:
                if key == var:
                    not_found = not_found + 1
        # Keep only reports inside the domain with a defined value.
        if lat > latrange[0] and lat < latrange[1] and lon > lonrange[0] and lon < lonrange[1]:
            if not np.isnan(value):
                observations.append(observation(lon, lat, value, elev))
        cnt += 1
        # delete handle
        codes_release(bufr)
    print("Found " + str(len(observations)) + "/" + str(cnt))
    print("Not encoded for " + str(var) + ": " + str(not_found))
    # close the file
    f.close()
    return observations