def _ignore_satellites(dset: "Dataset", orb_flag: list) -> None:
    """Remove GNSS observations with unavailable apriori satellite orbits from Dataset

    The remover can be used for precise and broadcast ephemeris and also for the GNSS and SISRE technique/analysis.

    - GNSS:  The apriori orbit is chosen via the configuration. The available satellites of the apriori orbits are
             compared against the satellites given in the GNSS observation files.
    - SISRE: Both apriori orbits, broadcast and precise, have to be checked against a set of satellites defined in
             the configuration file.

    Args:
        dset:      A Dataset containing model data, which is decimated by unavailable satellite observations.
        orb_flag:  List with used orbit types (e.g. 'broadcast' and/or 'precise' orbit)
    """
    for orb in orb_flag:
        orbit = apriori.get(
            "orbit",
            apriori_orbit=orb,
            rundate=dset.rundate,
            station=dset.vars["station"],
            day_offset=0,  # Check satellite availability only for the current rundate and not in addition for the
                           # days before/after rundate.
                           # TODO: This does not work for broadcast ephemeris at the moment.
        )
        not_available_sat = set(dset.satellite) - set(orbit.dset_raw.satellite)
        file_paths = orbit.dset_raw.meta["parser"]["file_path"]

        if not_available_sat:
            log.warn(
                f"The following satellites are not given in apriori {orb} orbit file {', '.join(file_paths)}: "
                f"{', '.join(sorted(not_available_sat))}"
            )
            cleaners.apply_remover("ignore_satellite", dset, satellites=not_available_sat)
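# --- Illustration (not part of the module) -------------------------------------------------------------------------
# A minimal, self-contained sketch of the availability check above: satellites present in the observations but
# missing from the apriori orbit are collected via a set difference and turned into a boolean "keep" mask. The
# satellite lists and the helper are invented for illustration only; in the module the removal is done by
# apriori.get() and the 'ignore_satellite' remover.
import numpy as np

def _sketch_ignore_satellites() -> np.ndarray:
    obs_satellites = np.array(["G01", "G02", "E01", "E02", "E02", "G03"])  # per-observation satellite IDs
    orbit_satellites = {"G01", "G02", "G03", "E01"}                        # satellites available in the orbit file

    not_available = set(obs_satellites) - orbit_satellites                 # -> {"E02"}
    keep = ~np.isin(obs_satellites, list(not_available))                   # False for observations to be removed
    return keep                                                            # [ True  True  True False False  True]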
def gnss_clean_orbit(dset: "Dataset", orbit_flag: str) -> np.ndarray:
    """Remove GNSS observations with unavailable apriori satellite orbits or which do not fulfill requirements

    Args:
        dset:        A Dataset containing model data.
        orbit_flag:  Specification of which apriori orbit is used ("broadcast" or "precise")

    Returns:
        numpy.ndarray: Array containing False for observations to throw away.
    """
    check_nav_validity_length = config.tech[_SECTION].check_nav_validity_length.bool
    ignore_unhealthy_satellite = config.tech[_SECTION].ignore_unhealthy_satellite.bool

    # Keep all observations, if none of the following checks rejects observations
    keep_idx = np.ones(dset.num_obs, dtype=bool)

    # GNSS observations are rejected from Dataset 'dset', if apriori satellite orbits are not given
    _ignore_satellites(dset, [orbit_flag])

    # Remove unhealthy satellites
    if (orbit_flag == "broadcast") and ignore_unhealthy_satellite:
        cleaners.apply_remover("gnss_ignore_unhealthy_satellite", dset)

    # Remove GNSS observations which exceed the validity length of broadcast ephemeris
    if (orbit_flag == "broadcast") and check_nav_validity_length:
        keep_idx = _ignore_epochs_exceeding_validity(dset)

    # Remove GNSS observations which exceed the interpolation boundaries
    if orbit_flag == "precise":
        keep_idx = _ignore_epochs_exceeding_interpolation_boundaries(dset)

    return keep_idx
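# --- Illustration (not part of the module) -------------------------------------------------------------------------
# A minimal sketch of the keep_idx convention used by gnss_clean_orbit: each check returns a boolean array with
# False for observations to throw away, starting from an all-True mask. In the function above only one check
# applies per orbit type; the two invented masks are combined here with a logical AND purely to show the
# convention.
import numpy as np

num_obs = 6
keep_idx = np.ones(num_obs, dtype=bool)                            # start by keeping everything
keep_validity = np.array([True, True, False, True, True, True])    # e.g. validity length check (broadcast)
keep_boundary = np.array([True, False, True, True, True, True])    # e.g. interpolation boundary check (precise)

keep_idx &= keep_validity & keep_boundary
# keep_idx -> [ True False False  True  True  True]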
    def _calculate(self, dset_out: "Dataset", dset_in: "Dataset", time: str = "time") -> None:
        """Calculate precise orbits and satellite clock correction for given observation epochs

        As a first step, observations of unavailable satellites and observations exceeding the interpolation
        boundaries are removed. The input Dataset contains observation epochs for which the precise orbits and
        satellite clock correction should be determined. The satellite position is determined for each observation
        epoch by interpolating within the given SP3 orbit time entries. The satellite velocities are calculated based
        on satellite positions 0.5 seconds before and after the observation epoch.

        Args:
            dset_out (Dataset):  Dataset representing calculated precise orbits with following fields:

        ========================  ===============  =======  ========================================================
         Field                     Type             Unit     Description
        ========================  ===============  =======  ========================================================
         gnss_satellite_clock      numpy.ndarray    m        Satellite clock correction
         gnss_relativistic_clock   numpy.ndarray    m        Relativistic clock correction due to orbit eccentricity
         sat_posvel                PosVelTable      m        Satellite position and velocity
         satellite                 numpy.ndarray             Satellite numbers
         system                    numpy.ndarray             GNSS identifiers
         time                      TimeTable                 Observation epochs
        ========================  ===============  =======  ========================================================

            dset_in:   Input Dataset containing model data for which precise orbits should be determined.
            time:      Define time fields to be used. It can be for example 'time' or 'sat_time'. 'time' is related
                       to observation time and 'sat_time' to satellite transmission time.
        """
        # Clean orbits by removing unavailable satellites and checking interpolation boundaries
        cleaners.apply_remover("gnss_clean_orbit", dset_in, orbit_flag="precise")

        # TODO: Another solution has to be found for satellites not given in SP3 file, e.g. use of broadcast
        #       ephemeris.
        log.info(
            f"Calculating satellite position/velocity (precise) based on SP3 precise orbit file "
            f"{', '.join(self.dset_edit.meta['parser']['file_path'])}"
        )

        sat_pos = np.zeros((dset_in.num_obs, 3))
        sat_vel = np.zeros((dset_in.num_obs, 3))
        ref_time = dset_in[time][0]  # Reference epoch used for interpolation

        # Loop over all given satellites
        for sat in set(dset_in.satellite):

            log.debug(f"Interpolation for satellite: {sat}")

            # Get arrays indicating for which epochs observations and orbit entries are available for the given
            # satellite (indicated by True)
            idx = dset_in.filter(satellite=sat)
            orb_idx = self.dset_edit.filter(satellite=sat)

            if np.min(dset_in[time][idx].gps.mjd) < np.min(self.dset_edit.time[orb_idx].mjd):
                log.fatal(
                    f"Interpolation range is exceeded by satellite {sat} ({np.min(dset_in[time][idx].gps.datetime)} "
                    f"[epoch] < {np.min(self.dset_edit.time[orb_idx].gps.datetime)} [precise orbit])"
                )

            if np.max(dset_in[time][idx].gps.mjd) > np.max(self.dset_edit.time[orb_idx].mjd):
                log.fatal(
                    f"Interpolation range is exceeded by satellite {sat} ({np.max(dset_in[time][idx].gps.datetime)} "
                    f"[epoch] > {np.max(self.dset_edit.time[orb_idx].gps.datetime)} [precise orbit])"
                )

            # Interpolation for given observation epochs (transmission time)
            diff_time_points = ref_time.gps - self.dset_edit.time.gps[orb_idx]
            diff_time_obs = ref_time.gps - dset_in[time].gps[idx]
            sat_pos[idx], sat_vel[idx] = interpolation.interpolate_with_derivative(
                # self.dset_edit.time[orb_idx].gps.sec_to_reference(ref_time),
                diff_time_points.seconds,
                self.dset_edit.sat_pos.trs[orb_idx],
                # dset_in[time][idx].gps.sec_to_reference(ref_time),
                diff_time_obs.seconds,
                kind="lagrange",
                window=10,
                dx=0.5,
            )

            if np.isnan(np.sum(sat_pos[idx])) or np.isnan(np.sum(sat_vel[idx])):
                log.fatal(
                    f"NaN occurred during determination of precise satellite position and velocity for satellite {sat}"
                )

        # Copy fields from model data Dataset
        dset_out.num_obs = dset_in.num_obs
        dset_out.add_text("satellite", val=dset_in.satellite)
        dset_out.add_text("system", val=dset_in.system)
        dset_out.add_time("time", val=dset_in[time])
        dset_out.vars["orbit"] = self.name

        # Add float fields
        dset_out.add_float(
            "gnss_relativistic_clock", val=self.relativistic_clock_correction(sat_pos, sat_vel), unit="meter"
        )
        dset_out.add_float(
            "gnss_satellite_clock", val=self.satellite_clock_correction(dset_in, time=time), unit="meter"
        )

        # Add satellite position and velocity to Dataset
        dset_out.add_posvel("sat_posvel", time=dset_out.time, system="trs", val=np.hstack((sat_pos, sat_vel)))
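# --- Illustration (not part of the module) -------------------------------------------------------------------------
# A minimal sketch of what interpolate_with_derivative(kind="lagrange", window=10, dx=0.5) is expected to do for one
# coordinate: fit a Lagrange polynomial through the 10 SP3 epochs closest to the requested time and estimate the
# velocity from the interpolated positions 0.5 s before and after that time. This is a simplified stand-in for the
# midgard interpolation routine, using a synthetic 900 s SP3 grid; function name and orbit values are invented.
import numpy as np
from scipy.interpolate import lagrange

def sketch_interpolate_with_derivative(t_points, y_points, t_obs, window=10, dx=0.5):
    """Interpolate y(t_obs) and dy/dt(t_obs) with a windowed Lagrange polynomial and central difference."""
    y_obs = np.zeros_like(t_obs, dtype=float)
    dy_obs = np.zeros_like(t_obs, dtype=float)
    for i, t in enumerate(t_obs):
        # Select the 'window' tabulated epochs nearest to the observation epoch
        idx = np.sort(np.argsort(np.abs(t_points - t))[:window])

        # Work in scaled time relative to the observation epoch to keep the polynomial well conditioned
        scale = np.mean(np.diff(t_points[idx]))
        poly = lagrange((t_points[idx] - t) / scale, y_points[idx])

        y_obs[i] = poly(0.0)
        dy_obs[i] = (poly(dx / scale) - poly(-dx / scale)) / (2 * dx)  # central difference over +/- 0.5 s
    return y_obs, dy_obs

# Synthetic example: one coordinate sampled every 900 s from a smooth, roughly orbit-like function
t_sp3 = np.arange(0.0, 86400.0, 900.0)
x_sp3 = 26_560e3 * np.cos(2 * np.pi * t_sp3 / 43_082.0)  # crude circular-orbit x-coordinate [m]
x_obs, vx_obs = sketch_interpolate_with_derivative(t_sp3, x_sp3, np.array([12_345.6]))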
    def _get_brdc_block_idx(self, dset: "Dataset", time: str = "time") -> List[int]:
        """Get GNSS broadcast ephemeris block indices for given observation epochs

        The indices relate the observation epoch to the correct set of broadcast ephemeris. First the time difference
        between the observation epoch and a selected time is calculated to determine the correct broadcast ephemeris
        block. The selected time can be either the navigation epoch (time of clock (TOC)), the time of ephemeris (TOE)
        or the transmission time. Afterwards the broadcast block with the smallest time difference is selected.

        The following options can be chosen for the configuration file option 'brdc_block_nearest_to':

        ============================  ==============================================================================
         Option                        Description
        ============================  ==============================================================================
         toc                           Broadcast block for given observation epoch is selected nearest to navigation
                                       epoch (time of clock (TOC)).
         toc:positive                  Same as 'toc' option, but the difference between observation epoch and TOC
                                       has to be positive.
         toe                           Broadcast block for given observation epoch is selected nearest to time of
                                       ephemeris (TOE).
         toe:positive                  Same as 'toe' option, but the difference between observation epoch and TOE
                                       has to be positive.
         transmission_time             Broadcast block for given observation epoch is selected nearest to
                                       transmission time.
         transmission_time:positive    Same as 'transmission_time' option, but the difference between observation
                                       epoch and transmission time has to be positive.
        ============================  ==============================================================================

        Args:
            dset:  A Dataset containing model data.
            time:  Define time fields to be used. It can be for example 'time' or 'sat_time'. 'time' is related to
                   observation time and 'sat_time' to satellite transmission time.

        Returns:
            Broadcast ephemeris block indices for given observation epochs.
        """
        brdc_block_nearest_to_options = [
            "toc",
            "toc:positive",
            "toe",
            "toe:positive",
            "transmission_time",
            "transmission_time:positive",
        ]
        brdc_idx = list()

        # Get configuration option
        brdc_block_nearest_to = config.tech.get("brdc_block_nearest_to", default="toe:positive").str.rsplit(":", 1)
        if ":".join(brdc_block_nearest_to) not in brdc_block_nearest_to_options:
            log.fatal(
                f"Unknown value {':'.join(brdc_block_nearest_to)!r} for configuration option 'brdc_block_nearest_to'. "
                f"The following values can be selected: {', '.join(brdc_block_nearest_to_options)}"
            )

        time_key = brdc_block_nearest_to[0]
        positive = "positive" in brdc_block_nearest_to

        log.debug(f"Broadcast block is selected nearest to '{'+' if positive else '+/-'}{time_key}' time.")
        # Check if broadcast orbits are available
        not_available_sat = sorted(set(dset.satellite) - set(self.dset_edit.satellite))
        if not_available_sat:
            log.warn(
                f"The following satellites are not given in apriori broadcast orbit file "
                f"{', '.join(self.dset_edit.meta['parser']['file_path'])}: {', '.join(not_available_sat)}"
            )
            cleaners.apply_remover("ignore_satellite", dset, satellites=not_available_sat)

        # Determine broadcast ephemeris block index for a given satellite and observation epoch
        for sat, obs_time in zip(dset.satellite, dset[time]):

            idx = self.dset_edit.filter(satellite=sat)
            diff = obs_time.gps.mjd - self.dset_edit[time_key].gps.mjd[idx]
            if positive:
                nearest_idx = np.array([99999 if v < 0 else v for v in diff]).argmin()
            else:
                nearest_idx = np.abs(diff).argmin()

            brdc_idx.append(idx.nonzero()[0][nearest_idx])

        return brdc_idx
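# --- Illustration (not part of the module) -------------------------------------------------------------------------
# A minimal sketch of the block selection above with invented numbers: for one satellite the broadcast block whose
# TOE (or TOC/transmission time) lies nearest to the observation epoch is chosen; with the ':positive' option only
# blocks issued before the observation epoch are considered, which is achieved by masking negative differences with
# a large value before taking argmin (np.where is used here in place of the list comprehension above).
import numpy as np

obs_epoch_mjd = 59000.40
toe_mjd = np.array([59000.00, 59000.25, 59000.50, 59000.75])  # TOEs of the satellite's broadcast blocks

diff = obs_epoch_mjd - toe_mjd                                 # [ 0.40  0.15 -0.10 -0.35]
nearest = np.abs(diff).argmin()                                # -> 2 (block 0.1 day after the epoch)
nearest_positive = np.where(diff < 0, 99999, diff).argmin()    # -> 1 (nearest block issued before the epoch)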
    def _calculate(self, dset_out: "Dataset", dset_in: "Dataset", time: str = "time") -> None:
        """Calculate broadcast ephemeris and satellite clock correction for given observation epochs

        As a first step, observations of unavailable and unhealthy satellites are removed, together with observations
        exceeding the validity length of the navigation records. The input Dataset contains observation epochs for
        which the broadcast ephemeris and satellite clock correction should be determined.

        Args:
            dset_out:  Output Dataset representing calculated broadcast ephemeris with following fields:

        ========================  ===============  =======  ========================================================
         Field                     Type             Unit     Description
        ========================  ===============  =======  ========================================================
         gnss_satellite_clock      numpy.ndarray    m        Satellite clock correction
         gnss_relativistic_clock   numpy.ndarray    m        Relativistic clock correction due to orbit eccentricity
         sat_posvel                PosVelTable      m        Satellite position and velocity
         satellite                 numpy.ndarray             Satellite numbers
         system                    numpy.ndarray             GNSS identifiers
         time                      TimeTable                 Observation epochs
         used_iode                 numpy.ndarray             IODE of selected broadcast ephemeris block
         used_transmission_time    TimeTable                 Transmission time of selected broadcast ephemeris block
         used_toe                  TimeTable                 Time of ephemeris (TOE) of selected broadcast ephemeris
                                                             block
        ========================  ===============  =======  ========================================================

            dset_in:   Input Dataset containing model data for which broadcast ephemeris should be determined.
            time:      Define time fields to be used. It can be for example 'time' or 'sat_time'. 'time' is related
                       to observation time and 'sat_time' to satellite transmission time.
        """
        # Clean orbits by removing unavailable satellites, unhealthy satellites and checking validity length of
        # navigation records
        cleaners.apply_remover("gnss_clean_orbit", dset_in, orbit_flag="broadcast")

        not_implemented_sys = set(dset_in.system) - set("EG")
        if not_implemented_sys:
            log.warn(
                f"At the moment Where can provide broadcast ephemeris for GNSS 'E' and 'G', "
                f"but not for {', '.join(not_implemented_sys)}."
            )
            cleaners.apply_remover("gnss_ignore_system", dset_in, systems=not_implemented_sys)

        log.info(
            f"Calculating satellite position/velocity (broadcast) based on RINEX navigation file "
            f"{', '.join(self.dset_edit.meta['parser']['file_path'])}"
        )

        # Get correct navigation block for given observation times by determining the indices to broadcast ephemeris
        # Dataset
        dset_brdc_idx = self._get_brdc_block_idx(dset_in, time=time)

        # Loop over all observations
        # TODO: Generation of vectorized solution, if possible?
        # BUG:  Use of GPSSEC does not work for GPS WEEK crossovers. MJD * Unit.day2second() would be a better
        #       solution. The problem is that use of GPSSEC compared to MJD * Unit.day2second() is not consistent!!!!
        sat_pos = np.zeros((dset_in.num_obs, 3))
        sat_vel = np.zeros((dset_in.num_obs, 3))

        for obs_idx, (time_gpsweek, time_gpssec, brdc_idx, sys) in enumerate(
            zip(dset_in[time].gps.gpsweek, dset_in[time].gps.gpssec, dset_brdc_idx, dset_in.system)
        ):
            # TODO: get_row() function needed for brdc -> brdc.get_row(kk)
            sat_pos[obs_idx], sat_vel[obs_idx] = self._get_satellite_position_velocity(
                time_gpsweek, time_gpssec, brdc_idx, sys
            )

            # +DEBUG
            # print("DEBUG: {} obs_idx: {:>5d} brdc_idx: {:>5d} toc: {:>5.0f} {:>6.0f} toe: {:>6.0f} trans_time: {:>6.0f}"
            #       " tk: {:>16.10f} iode: {:>3d} sqrt_a: {:>17.10f} sat_pos: {:>21.10f} {:>21.10f} {:>21.10f} "
            #       "sat_vel: {:>17.10f} {:>17.10f} {:>17.10f} sat_clk_bias: {:>17.10f}, sat_clk_drft: {:>17.10f} "
            #       "".format(self.dset_edit.satellite[brdc_idx], obs_idx, brdc_idx,
            #                 dset_in[time].gps.jd_frac[obs_idx] * 86400,
            #                 dset_in[time].gps.gpssec[obs_idx],
            #                 self.dset_edit.toe.gps.gpssec[brdc_idx],
            #                 self.dset_edit.transmission_time.gps.gpssec[brdc_idx],
            #                 dset_in[time].gps.jd_frac[obs_idx] - self.dset_edit.toe.gps.gpssec[brdc_idx],
            #                 int(self.dset_edit.iode[brdc_idx]),
            #                 self.dset_edit.sqrt_a[brdc_idx],
            #                 sat_pos[obs_idx][0], sat_pos[obs_idx][1], sat_pos[obs_idx][2],
            #                 sat_vel[obs_idx][0], sat_vel[obs_idx][1], sat_vel[obs_idx][2],
            #                 self.dset_edit.sat_clock_bias[brdc_idx],
            #                 self.dset_edit.sat_clock_drift[brdc_idx]))
            # -DEBUG

        # Copy fields from model data Dataset
        dset_out.num_obs = dset_in.num_obs
        dset_out.add_text("satellite", val=dset_in.satellite)
        dset_out.add_text("system", val=dset_in.system)
        dset_out.add_time("time", val=dset_in.time, scale=dset_in.time.scale)
        dset_out.vars["orbit"] = self.name

        # Add time fields
        dset_out.add_time(
            "used_transmission_time",
            val=self.dset_edit.transmission_time[dset_brdc_idx],
            scale=self.dset_edit.transmission_time.scale,
        )
        dset_out.add_time("used_toe", val=self.dset_edit.toe[dset_brdc_idx], scale=self.dset_edit.toe.scale)

        # Add float fields
        for field in ["bgd_e1_e5a", "bgd_e1_e5b", "tgd", "tgd_b1_b3", "tgd_b2_b3"]:
            if field in self.dset_edit.fields:
                dset_out.add_float(field, val=self.dset_edit[field][dset_brdc_idx])

        dset_out.add_float(
            "gnss_relativistic_clock", val=self.relativistic_clock_correction(sat_pos, sat_vel), unit="meter"
        )
        dset_out.add_float(
            "gnss_satellite_clock", val=self.satellite_clock_correction(dset_in, time=time), unit="meter"
        )
        dset_out.add_float("used_iode", val=self.dset_edit.iode[dset_brdc_idx])

        # Add satellite position and velocity to Dataset
        dset_out.add_posvel("sat_posvel", time="time", itrs=np.hstack((sat_pos, sat_vel)))
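# --- Illustration (not part of the module) -------------------------------------------------------------------------
# A minimal sketch of the relativistic (orbit eccentricity) clock correction in meters, assuming that
# relativistic_clock_correction() implements the standard formula -2 * (r . v) / c (cf. IS-GPS-200). The position
# and velocity arrays correspond to the (num_obs x 3) arrays computed above; the numbers below are invented and
# deliberately simple, so the resulting value is larger than the roughly +/-13 m seen for real GPS orbits.
import numpy as np

C = 299_792_458.0  # speed of light [m/s]

def sketch_relativistic_clock_correction(sat_pos: np.ndarray, sat_vel: np.ndarray) -> np.ndarray:
    """Relativistic clock correction per observation in meters from ECEF position [m] and velocity [m/s]."""
    return -2.0 * np.sum(sat_pos * sat_vel, axis=1) / C

sat_pos = np.array([[20_000e3, 10_000e3, 15_000e3]])
sat_vel = np.array([[-1_000.0, 2_500.0, 100.0]])
# sketch_relativistic_clock_correction(sat_pos, sat_vel) -> array([-43.36...]) [m]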
def read(stage, dset):
    """Read the GNSS RINEX data.

    Args:
        stage (str):      Name of current stage.
        dset (Dataset):   A dataset containing the data.
    """
    dset.vars.update(file_vars())
    station = dset.vars["station"]
    sampling_rate = config.tech.sampling_rate.float

    # Read GNSS observation data either from Android raw file or RINEX file
    # TODO: Maybe a gnss.py 'obs' module should be added to ./where/obs?
    if config.tech.format.str == "android":
        parser = parsers.parse_key("gnss_android_raw_data", rundate=dset.analysis["rundate"], station=station)
    else:
        version, file_path = gnss.get_rinex_file_version("gnss_rinex_obs")
        log.info(f"Read RINEX file {file_path} with format version {version}.")
        if version.startswith("2"):
            parser = parsers.parse_file("rinex2_obs", file_path=file_path, sampling_rate=sampling_rate)
        elif version.startswith("3"):
            parser = parsers.parse_file("rinex3_obs", file_path=file_path, sampling_rate=sampling_rate)
        else:
            log.fatal(f"Unknown RINEX format {version} is used in file {file_path}")

    dset.update_from(parser.as_dataset())

    # Select GNSS observations to process
    cleaners.apply_remover("gnss_select_obs", dset)

    # Overwrite station coordinates given in RINEX header
    # TODO: Should be an apriori function, where a station coordinate can be selected for a given station.
    #       "check_coordinate"/"limit" -> station coordinate given in RINEX header and "database" could be checked
    #       -> warning could be given
    p = parsers.parse_key(parser_name="gnss_bernese_crd", file_key="gnss_station_crd")
    sta_crd = p.as_dict()

    if station in sta_crd:
        pos = np.array([sta_crd[station]["pos_x"], sta_crd[station]["pos_y"], sta_crd[station]["pos_z"]])

        # Check station coordinates against RINEX header station coordinates
        limit = 10
        diff = pos - dset.site_pos.trs[0].val
        if not (np.abs(diff) < limit).all():
            log.warn(
                f"Difference between station database (xyz: {pos[0]:.3f} m, {pos[1]:.3f} m, {pos[2]:.3f} m) "
                f"and RINEX header (xyz: {dset.site_pos.trs.x[0]:.3f} m, {dset.site_pos.trs.y[0]:.3f} m, "
                f"{dset.site_pos.trs.z[0]:.3f} m) station coordinates exceeds the limit of {limit} m "
                f"(xyz: {diff[0]:.3f} m, {diff[1]:.3f} m, {diff[2]:.3f} m)."
            )

        # pos = apriori.get("gnss_station_coord", rundate=dset.analysis["rundate"], station=station)
        dset.site_pos[:] = np.repeat(pos[None, :], dset.num_obs, axis=0)
        dset.meta["pos_x"] = sta_crd[station]["pos_x"]
        dset.meta["pos_y"] = sta_crd[station]["pos_y"]
        dset.meta["pos_z"] = sta_crd[station]["pos_z"]

    # Write dataset to file
    dset.write_as(stage=stage)
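# --- Illustration (not part of the module) -------------------------------------------------------------------------
# A minimal sketch of the coordinate consistency check above with invented numbers: the absolute per-axis difference
# between the station database coordinates and the RINEX header coordinates is compared against a 10 m limit, and a
# warning would be issued if any component exceeds it.
import numpy as np

limit = 10  # [m]
pos_database = np.array([3275753.912, 321110.865, 5445041.880])  # invented XYZ coordinates [m]
pos_rinex = np.array([3275753.512, 321098.365, 5445041.780])

diff = pos_database - pos_rinex                                   # [ 0.4  12.5  0.1]
exceeds_limit = not (np.abs(diff) < limit).all()                  # -> True, the y-component differs by 12.5 m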