def test_grib_dump(tmp_path):
    """Dump a GRIB2 sample handle to a text file, plain and in debug mode."""
    handle = eccodes.codes_grib_new_from_samples("GRIB2")
    dump_file = tmp_path / "dump.txt"
    with open(dump_file, "w") as out:
        eccodes.codes_dump(handle, out)
        eccodes.codes_dump(handle, out, "debug")
        eccodes.codes_release(handle)
def get_dataset(self, dataset_id, dataset_info):
    """Get dataset using the parameter_number key in dataset_info.

    In a previous version of the reader, the attributes (nrows, ncols,
    ssp_lon) and projection information (pdict and area_dict) were computed
    while initializing the file handler. Also the code would break out from
    the While-loop below as soon as the correct parameter_number was found.

    This has now been revised because the reader would sometimes give corrupt
    information about the number of messages in the file and the dataset
    dimensions within a given message if the file was only partly read
    (not looping over all messages) in an earlier instance.
    """
    logger.debug(
        'Reading in file to get dataset with parameter number %d.',
        dataset_info['parameter_number'])

    xarr = None
    message_found = False
    with open(self.filename, 'rb') as fh:
        # Iterate over all messages and fetch data when the correct parameter number is found
        while True:
            gid = ec.codes_grib_new_from_file(fh)

            if gid is None:
                if not message_found:
                    # Could not obtain a valid message ID from the grib file
                    logger.warning(
                        "Could not find parameter_number %d in GRIB file, no valid Dataset created",
                        dataset_info['parameter_number'])
                break

            # Check if the parameter number in the GRIB message corresponds to the required key
            parameter_number = self._get_from_msg(gid, 'parameterNumber')

            if parameter_number == dataset_info['parameter_number']:
                self._res = dataset_id.resolution
                self._read_attributes(gid)

                # Read the missing value
                missing_value = self._get_from_msg(gid, 'missingValue')

                # Retrieve values and metadata from the GRIB message, masking the values equal to missing_value
                xarr = self._get_xarray_from_msg(gid)
                xarr.data = da.where(xarr.data == missing_value, np.nan, xarr.data)

                ec.codes_release(gid)

                # Combine all metadata into the dataset attributes and break out of the loop
                xarr.attrs.update(dataset_info)
                xarr.attrs.update(self._get_attributes())
                message_found = True

            else:
                # The parameter number is not the correct one, release gid and skip to next message
                ec.codes_release(gid)

    return xarr
def test_grib_ecc_1042():
    # Issue ECC-1042: Python3 interface writes integer arrays incorrectly
    handle = eccodes.codes_grib_new_from_samples("regular_ll_sfc_grib2")

    # Write with an inferred (integer) dtype, then read back.
    eccodes.codes_set_values(handle, np.array([1, 2, 3]))
    values = eccodes.codes_get_values(handle)
    length = len(values)
    assert values[0] == 1
    assert values[length - 1] == 3

    # Write again with an explicit float dtype, then read back.
    eccodes.codes_set_values(handle, np.array([1, 2, 3], dtype=float))
    values = eccodes.codes_get_values(handle)
    assert values[0] == 1
    assert values[length - 1] == 3

    eccodes.codes_release(handle)
def test_embedded_level_name(file_path, modelvar_file_path):
    """
    embedded level type:

    * pl
    * surface
    * ml
    """
    def check(path, cases):
        # Every query must resolve to a message in the given file.
        for case in cases:
            handle = load_message_from_file(path, **asdict(case.query))
            assert handle is not None
            eccodes.codes_release(handle)

    check(file_path, [
        TestCase(query=QueryOption(parameter="t", level_type="pl", level=850)),
        TestCase(query=QueryOption(parameter="t", level_type="sfc", level=None)),
    ])
    check(modelvar_file_path, [
        TestCase(query=QueryOption(parameter="u", level_type="ml", level=10)),
    ])
def test_grib_keys_iterator_skip():
    """Iterate GRIB keys with skip filters applied and check the key counts."""
    gid = eccodes.codes_grib_new_from_samples("reduced_gg_pl_1280_grib1")

    iterid = eccodes.codes_keys_iterator_new(gid, "ls")
    eccodes.codes_skip_computed(iterid)
    # codes_skip_coded(iterid)
    eccodes.codes_skip_edition_specific(iterid)
    eccodes.codes_skip_duplicates(iterid)
    eccodes.codes_skip_read_only(iterid)
    eccodes.codes_skip_function(iterid)
    count = 0
    while eccodes.codes_keys_iterator_next(iterid):
        name = eccodes.codes_keys_iterator_get_name(iterid)
        value = eccodes.codes_get_string(gid, name)
        assert len(value) > 0
        count += 1
    # centre, level and dataType
    assert count == 3
    eccodes.codes_keys_iterator_delete(iterid)

    iterid = eccodes.codes_keys_iterator_new(gid)
    eccodes.codes_skip_coded(iterid)
    count = 0
    while eccodes.codes_keys_iterator_next(iterid):
        count += 1
    assert count == 141
    eccodes.codes_keys_iterator_delete(iterid)

    eccodes.codes_release(gid)
def scan_grib(self, **kwargs):
    """Collect the GRIB handles matching the given selection criteria."""
    gribs = []
    v_selected = kwargs['shortName']
    v_pert = kwargs.get('perturbationNumber', -1)
    if not utils.is_container(v_selected):
        v_selected = [v_selected]

    def collect(next_gid):
        # Pull messages until the source is exhausted, keeping matches.
        while True:
            gid = next_gid()
            if gid is None:
                break
            if GRIBReader._find(gid, **kwargs):
                gribs.append(gid)
            else:
                # release unused grib
                codes_release(gid)

    if self._grbindx:
        for v in v_selected:
            codes_index_select(self._grbindx, 'shortName', str(v))
            if v_pert != -1:
                codes_index_select(self._grbindx, 'perturbationNumber', int(v_pert))
            collect(lambda: codes_new_from_index(self._grbindx))
    elif self._file_handler:
        collect(lambda: codes_new_from_file(self._file_handler,
                                            product_kind=CODES_PRODUCT_GRIB))
    return gribs
def decode_metar_file(filename, field_lists):
    '''Decoded metar reports are made to look like decoded BUFR reports'''
    result = []
    message_count = 0
    _, timestamp, _, _ = tools.parse_filename(filename)
    with open(filename) as metar_file:
        # loop through the messages in the file
        while True:
            msgid = eccodes.codes_metar_new_from_file(metar_file)
            if msgid is None:
                # decoded all of the messages in the file
                return result
            try:
                message_data, report_count = process_metar_message(
                    msgid, field_lists, timestamp)
                if report_count:
                    message_data = add_station_details(message_data)
                    result.append((message_data, report_count))
                if report_count == 0:
                    LOGGER.info('found no metar in message[%d]', message_count)
                elif report_count == 1:
                    LOGGER.info('found 1 metar for %s in message[%d]',
                                message_data['CCCC'][0], message_count)
                else:
                    LOGGER.info('found %s metars in message[%d]',
                                report_count, message_count)
                message_count += 1
            finally:
                # release the handle for this message
                eccodes.codes_release(msgid)
    return result
def copy_needed_field(gid, fout):
    """Copy the needed field"""
    # Grid geometry of the incoming message.
    nx = ecc.codes_get(gid, 'Ni')
    ny = ecc.codes_get(gid, 'Nj')
    first_lat = ecc.codes_get(gid, 'latitudeOfFirstGridPointInDegrees')
    north_south_step = ecc.codes_get(gid, 'jDirectionIncrementInDegrees')

    # Keep only the rows from the first latitude down to filter_north (0 deg).
    filter_north = 0
    new_ny = int((first_lat - filter_north) / north_south_step) + 1

    grid = np.reshape(ecc.codes_get_values(gid), (ny, nx))
    trimmed = grid[:new_ny, :]

    clone_id = ecc.codes_clone(gid)
    ecc.codes_set(clone_id, 'latitudeOfLastGridPointInDegrees', (filter_north))
    ecc.codes_set(clone_id, 'Nj', new_ny)
    ecc.codes_set_values(clone_id, trimmed.flatten())
    ecc.codes_write(clone_id, fout)
    ecc.codes_release(clone_id)
def test_grib_write(tmpdir):
    """Write a modified GRIB2 sample message to disk."""
    handle = eccodes.codes_grib_new_from_samples("GRIB2")
    eccodes.codes_set(handle, "backgroundProcess", 44)
    target = tmpdir.join("test_grib_write.grib")
    with open(str(target), "wb") as fout:
        eccodes.codes_write(handle, fout)
    eccodes.codes_release(handle)
def get_array(self, key):
    """Get all data from file for the given BUFR key.

    Returns a dask array with one row per BUFR message (squeezed to a
    scalar when the file holds a single value).

    Raises:
        ValueError: if the file contains no BUFR messages.  (Previously
        this path crashed with an opaque NameError because the result
        array was never bound.)
    """
    rows = []
    with open(self.filename, "rb") as fh:
        while True:
            bufr = ec.codes_bufr_new_from_file(fh)
            if bufr is None:
                break
            ec.codes_set(bufr, 'unpack', 1)
            values = ec.codes_get_array(bufr, key, float)
            # Scalar keys are broadcast to 120 elements, matching the
            # per-message row width used elsewhere in this reader.
            if len(values) == 1:
                values = np.repeat(values, 120)
            rows.append(da.from_array([values], chunks=CHUNK_SIZE))
            ec.codes_release(bufr)

    if not rows:
        raise ValueError("No BUFR messages found in %s" % self.filename)

    # Concatenate once at the end instead of repeatedly inside the loop
    # (the original rebuilt the array on every message).
    arr = da.concatenate(rows, axis=0) if len(rows) > 1 else rows[0]
    if arr.size == 1:
        arr = arr[0]
    return arr
def get_grib_info(self, select_args):
    """Build a GRIBInfo summary from the messages matching select_args."""
    _gribs_for_utils = self._get_gids(**select_args)
    if len(_gribs_for_utils) == 0:
        # no messages found
        raise ApplicationException.get_exc(NO_MESSAGES, details="using " + str(select_args))

    # instant, avg, cumul. get last stepType available because first one is sometimes misleading
    type_of_step = codes_get(_gribs_for_utils[-1], 'stepType')
    self._mv = codes_get_double(_gribs_for_utils[0], 'missingValue')
    start_grib, end_grib, self._step_grib, self._step_grib2, self._change_step_at = \
        self._find_start_end_steps(_gribs_for_utils)
    self._log("Grib input step %d [type of step: %s]" % (self._step_grib, type_of_step))
    self._log('Gribs from %d to %d' % (start_grib, end_grib))
    for g in _gribs_for_utils:
        codes_release(g)
    _gribs_for_utils = None
    del _gribs_for_utils
    return GRIBInfo(input_step=self._step_grib, input_step2=self._step_grib2,
                    change_step_at=self._change_step_at, type_of_param=type_of_step,
                    start=start_grib, end=end_grib, mv=self._mv)
def test_bufr_encode(tmpdir):
    """Encode a BUFR3 satellite sample and write it out."""
    ibufr = eccodes.codes_bufr_new_from_samples("BUFR3_local_satellite")
    eccodes.codes_set_array(ibufr, "inputDelayedDescriptorReplicationFactor", (4,))
    header_keys = (
        ("masterTableNumber", 0),
        ("bufrHeaderSubCentre", 0),
        ("bufrHeaderCentre", 98),
        ("updateSequenceNumber", 0),
        ("dataCategory", 12),
        ("dataSubCategory", 139),
        ("masterTablesVersionNumber", 13),
        ("localTablesVersionNumber", 1),
        ("numberOfSubsets", 492),
        ("localNumberOfObservations", 492),
        ("satelliteID", 4),
        ("observedData", 1),
        ("compressedData", 1),
        ("unexpandedDescriptors", 312061),
    )
    for key, value in header_keys:
        eccodes.codes_set(ibufr, key, value)
    eccodes.codes_set(ibufr, "pixelSizeOnHorizontal1", 1.25e04)
    eccodes.codes_set(ibufr, "orbitNumber", 31330)
    eccodes.codes_set(ibufr, "#1#beamIdentifier", 1)
    eccodes.codes_set(ibufr, "#4#likelihoodComputedForSolution",
                      eccodes.CODES_MISSING_DOUBLE)
    eccodes.codes_set(ibufr, "pack", 1)
    target = tmpdir.join("test_bufr_encode.bufr")
    with open(str(target), "wb") as fout:
        eccodes.codes_write(ibufr, fout)
    eccodes.codes_release(ibufr)
def test_type_of_level(file_path):
    """Query messages by a variety of typeOfLevel values."""
    queries = [
        QueryOption(parameter="t", level_type="isobaricInhPa", level=850),
        QueryOption(parameter="t", level_type="isobaricInPa", level=50),
        QueryOption(parameter="asnow", level_type="surface", level=None),
        QueryOption(parameter="tmax", level_type="heightAboveGround", level=2),
        QueryOption(parameter="lcc", level_type="nominalTop", level=None),
        QueryOption(parameter="tciwv", level_type="atmosphere", level=None),
        QueryOption(parameter="prmsl", level_type="meanSea", level=None),
        QueryOption(parameter="t", level_type="depthBelowLandLayer",
                    level={"first_level": 0, "second_level": 0.1}),
    ]
    for case in (TestCase(query=q) for q in queries):
        handle = load_message_from_file(file_path, **asdict(case.query))
        assert handle is not None
        eccodes.codes_release(handle)
def get_point_index(path, point, model):
    # custom version of the function in container_information_point
    def _read_values(filename):
        # Read the 'values' array of the first message in a grid file.
        with open(path['base'] + path['grid'] + filename, 'rb') as gribfile:
            grib_id = eccodes.codes_grib_new_from_file(gribfile)
            data = eccodes.codes_get_array(grib_id, 'values')
            eccodes.codes_release(grib_id)
        return data

    # get clat and clon 1D arrays
    clat = _read_values('icon-eu-eps_europe_icosahedral_time-invariant_2018121000_clat.grib2')
    clon = _read_values('icon-eu-eps_europe_icosahedral_time-invariant_2018121000_clon.grib2')

    # read out index of native point
    filter_distance = get_latlon_filter_distance(model)
    lat_near = list(np.where(abs(clat - point['lat']) < filter_distance)[0])
    lon_near = list(np.where(abs(clon - point['lon']) < filter_distance)[0])
    id_near = sorted(set(lat_near).intersection(lon_near))

    # planar distance approximation, 1 deg latitude ~ 111.2 km
    distances = np.sqrt(
        np.square(abs(clat[id_near] - point['lat']) * 111.2)
        + np.square(abs(clon[id_near] - point['lon']) * 111.2
                    * np.cos(point['lat'] * np.pi / 180)))
    return id_near[np.argmin(distances)]
def get_message(
        data_path,
        parameter,
        level,
        level_type="isobaricInhPa",
):
    """Load a message; the pseudo-parameter "wind" is synthesized as sqrt(u^2 + v^2)."""
    if parameter != "wind":
        return load_message_from_file(file_path=data_path,
                                      parameter=parameter,
                                      level_type=level_type,
                                      level=level)

    u_message, v_message = get_wind_message(
        data_path=data_path,
        level=level,
        level_type=level_type,
    )
    # Clone the u-component message so grid metadata carries over.
    message = eccodes.codes_clone(u_message)
    u = eccodes.codes_get_double_array(u_message, "values")
    v = eccodes.codes_get_double_array(v_message, "values")
    speed = np.sqrt(u * u + v * v)
    eccodes.codes_release(u_message)
    eccodes.codes_release(v_message)
    eccodes.codes_set_double_array(message, "values", speed)
    return message
def cli(file_path):
    """Create a GRIB handle from file_path; exit with -1 when that fails."""
    with open(file_path, 'rb') as grib_file:
        handle = eccodes.codes_grib_new_from_file(grib_file, headers_only=False)
        if handle is None:
            print("ERROR: unable to create handle from file " + file_path)
            sys.exit(-1)
        eccodes.codes_release(handle)
def test_bufr_set_float():
    """Set a BUFR key from numpy float32/float16 scalar values."""
    handle = eccodes.codes_bufr_new_from_samples("BUFR4")
    eccodes.codes_set(handle, "unpack", 1)
    for value in (np.float32(1.26e04), np.float16(1.27e04)):
        eccodes.codes_set(handle, "totalPrecipitationPast24Hours", value)
    eccodes.codes_release(handle)
def test_short_name(file_path):
    """A query by short name should resolve to a message in the file."""
    case = TestCase(query=QueryOption(parameter="t", level_type="pl", level=850))
    handle = load_message_from_file(file_path, **asdict(case.query))
    assert handle is not None
    eccodes.codes_release(handle)
def test_grib_clone():
    """Cloning a GRIB handle yields an independent, valid message."""
    original = eccodes.codes_grib_new_from_samples("GRIB2")
    duplicate = eccodes.codes_clone(original)
    assert original
    assert duplicate
    assert eccodes.codes_get(duplicate, "identifier") == "GRIB"
    assert eccodes.codes_get(duplicate, "totalLength") == 179
    eccodes.codes_release(original)
    eccodes.codes_release(duplicate)
def test_grib_get_array():
    """codes_get_array supports default, int and str result types."""
    handle = eccodes.codes_grib_new_from_samples("reduced_gg_pl_160_grib1")
    pl = eccodes.codes_get_array(handle, "pl")
    assert pl[0] == 18
    assert np.array_equal(pl, eccodes.codes_get_array(handle, "pl", int))
    assert eccodes.codes_get_array(handle, "centre", str) == ["ecmf"]
    eccodes.codes_release(handle)
def close(self):
    # #[
    """Close the underlying file, first releasing any active BUFR handle."""
    bufr_handle = self.bufr_id
    # -1 is the sentinel for "no BUFR message currently open".
    if bufr_handle != -1:
        eccodes.codes_release(bufr_handle)
    self.fd.close()
def keep_only_first_grib(fname: str):
    """Truncate fname in place so that only its first GRIB message remains."""
    with open(fname, "r+b") as infd:
        gid = eccodes.codes_grib_new_from_file(infd)
        try:
            offset = eccodes.codes_get_message_offset(gid)
            if offset != 0:
                raise RuntimeError(
                    f"{fname}: first grib does not start at offset 0")
            # Cut the file right after the first message's last byte.
            infd.truncate(eccodes.codes_get_message_size(gid))
        finally:
            eccodes.codes_release(gid)
def scan_for_gid(filename, short_name):
    """Return the handle of the first message whose shortName matches.

    Exits the process (status 1) when no matching message is found,
    mirroring the original behavior.
    """
    # Context manager guarantees the file is closed even when an eccodes
    # call raises; the original leaked the file handle in that case.
    with open(filename, "rb") as grib_file:
        for _ in range(ec.codes_count_in_file(grib_file)):
            gid = ec.codes_grib_new_from_file(grib_file, headers_only=True)
            if ec.codes_get(gid, "shortName") == short_name:
                return gid
            ec.codes_release(gid)
    exit(1)
def close(self):
    """Release all selected GRIB handles and tear down index/file resources."""
    self._log(f'Closing gribs messages from {self._grib_file}')
    for handle in self._selected_grbs:
        codes_release(handle)
    self._selected_grbs = None
    if self._grbindx:
        codes_index_release(self._grbindx)
        self._grbindx = None
    if self._file_handler:
        self._file_handler.close()
        self._file_handler = None
def scan_for_gid(filename, short_name, time_since_init, level):
    """Return the first message handle matching shortName, forecastTime and level.

    Exits the process (status 1) when no matching message is found,
    mirroring the original behavior.
    """
    # Context manager guarantees the file is closed even when an eccodes
    # call raises (the original leaked the handle in that case); range()
    # replaces np.arange for a plain integer loop.
    with open(filename, "rb") as grib_file:
        for _ in range(ec.codes_count_in_file(grib_file)):
            gid = ec.codes_grib_new_from_file(grib_file, headers_only=True)
            if (ec.codes_get(gid, "shortName") == short_name
                    and ec.codes_get(gid, "forecastTime") == time_since_init
                    and ec.codes_get(gid, "level") == level):
                return gid
            ec.codes_release(gid)
    exit(1)
def test_grib_key(modelvar_file_path):
    """Query using a raw GRIB key mapping as the level type."""
    case = TestCase(query=QueryOption(
        parameter="u",
        level_type={"typeOfFirstFixedSurface": 131},
        level=10))
    handle = load_message_from_file(modelvar_file_path, **asdict(case.query))
    assert handle is not None
    eccodes.codes_release(handle)
def test_grib_nearest_multiple():
    """Find nearest grid points for several lat/lon pairs at once."""
    handle = eccodes.codes_new_from_samples("reduced_gg_ml_grib2",
                                            eccodes.CODES_PRODUCT_GRIB)
    inlats = (30, 13)
    inlons = (-20, 234)
    nearest = eccodes.codes_grib_find_nearest_multiple(handle, False,
                                                       inlats, inlons)
    eccodes.codes_release(handle)
    for point, expected in zip(nearest, (1770, 2500)):
        assert point.index == expected
def test_grib_uuid_get_set():
    # ECC-1167
    gid = eccodes.codes_grib_new_from_samples("GRIB2")
    eccodes.codes_set(gid, "gridType", "unstructured_grid")
    assert eccodes.codes_get_string(gid, "uuidOfHGrid") == \
        "00000000000000000000000000000000"
    new_uuid = "DEfdBEef10203040b00b1e50001100FF"
    eccodes.codes_set_string(gid, "uuidOfHGrid", new_uuid)
    # The UUID reads back normalized to lower case.
    assert eccodes.codes_get_string(gid, "uuidOfHGrid") == new_uuid.lower()
    eccodes.codes_release(gid)
def test_bufr_read_write(tmpdir):
    """Read a BUFR sample, modify a value and write it back out."""
    bid = eccodes.codes_new_from_samples("BUFR4", eccodes.CODES_PRODUCT_BUFR)
    eccodes.codes_set(bid, "unpack", 1)
    assert eccodes.codes_get(bid, "typicalYear") == 2012
    assert eccodes.codes_get(bid, "centre", str) == "ecmf"
    eccodes.codes_set(bid, "totalSunshine", 13)
    eccodes.codes_set(bid, "pack", 1)
    target = tmpdir.join("test_bufr_write.bufr")
    with open(str(target), "wb") as fout:
        eccodes.codes_write(bid, fout)
    assert eccodes.codes_get(bid, "totalSunshine") == 13
    eccodes.codes_release(bid)
def confirm_packing_type(gribfile, packing_type):
    """Confirm that gribfile contains only GRIBs with specified packingType.

    Returns a list of booleans, one per message, True where the message's
    packingType equals *packing_type* (empty list for an empty file).
    """
    comparisons = []
    # GRIB is a binary format: open in binary mode ("rb"); the original
    # text-mode open can fail to decode or corrupt reads.
    with open(gribfile, "rb") as infile:
        while True:
            gid = codes_grib_new_from_file(infile)
            if gid is None:
                break
            encoded_type = codes_get(gid, "packingType")
            codes_release(gid)
            comparisons.append(encoded_type == packing_type)
    return comparisons
def _load_message_from_file_by_count(file_path, count):
    """Return the count-th (1-based) GRIB message in file_path, or None."""
    with open(file_path, "rb") as f:
        seen = 0
        while True:
            message_id = eccodes.codes_grib_new_from_file(f)
            if message_id is None:
                # Fewer than `count` messages in the file.
                return None
            seen += 1
            if seen == count:
                return message_id
            eccodes.codes_release(message_id)
def read_data(grid):
    """Read one raw GRIB message located by *grid* and return its values.

    *grid* must provide 'file_name', 'offset' and 'length' keys locating a
    single message inside the file.  Returns the message's values as a
    numpy array.
    """
    # The explicit fp.close() inside the with-block was redundant and has
    # been removed; the context manager closes the file.
    with open(grid['file_name'], "rb") as fp:
        fp.seek(grid['offset'], 0)
        buff = fp.read(grid['length'])
    gid = ecc.codes_new_from_message(buff)
    values = np.array(ecc.codes_get_values(gid))
    ecc.codes_release(gid)
    return values
def repack(input_file, outfile, packing_type):
    """Repack input_file with packing_type, write result to outfile.

    Raises:
        EncodingError: when the re-encoded output does not verify.
    """
    # GRIB data is binary: input and output must use binary modes
    # ("rb"/"wb"/"ab"); the original text-mode opens break codes_write
    # under Python 3.
    with open(input_file, "rb") as infile:
        i = 1
        while True:
            in_gid = codes_grib_new_from_file(infile)
            if in_gid is None:
                break
            info("Repacking GRIB #{}".format(i))
            payload = codes_get_values(in_gid)
            clone_id = codes_clone(in_gid)
            codes_set(clone_id, "packingType", packing_type)
            codes_set_values(clone_id, payload)
            # Truncate on the first message, append afterwards.
            mode = "wb" if i == 1 else "ab"
            with open(outfile, mode) as output:
                codes_write(clone_id, output)
            codes_release(clone_id)
            codes_release(in_gid)
            i += 1
    # The original `if not confirm_packing_type(...)` only fired on an
    # empty result list — a list containing False is truthy, so failed
    # messages were silently accepted.  Check both emptiness and content.
    results = confirm_packing_type(outfile, packing_type)
    if not results or not all(results):
        raise EncodingError("Reencoding silently failed.")
def release_ecc_gids(gids):
    """Release every ecCodes handle in *gids*."""
    for handle in gids:
        ecc.codes_release(handle)