Example #1
def ignore_epochs(dset):
    """Edits data based on data quality

    Args:
        dset:     A Dataset containing model data.

    Returns:
        Array containing False for observations to throw away
    """
    intervals = config.tech[_SECTION].intervals.as_list(split_re=", *")

    keep_idx = np.ones(dset.num_obs, dtype=bool)
    for interval in intervals:
        interval = interval.split()
        start_time = Time(" ".join(interval[-4:-2]), scale="utc", format="iso")
        end_time = Time(" ".join(interval[-2:]), scale="utc", format="iso")
        # station name may contain spaces
        station = " ".join(interval[:-4])

        remove_idx = np.logical_and(start_time < dset.time,
                                    dset.time < end_time)
        if station:  # apply the station filter only when a station is given
            remove_idx = dset.filter(station=station, idx=remove_idx)
        keep_idx = np.logical_and(keep_idx, np.logical_not(remove_idx))

    return keep_idx
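The interval strings above are plain whitespace-separated config entries. Below is a minimal, hypothetical sketch (not part of the library) of how one entry splits into station, start, and end; it assumes the format `<station> <start date> <start time> <end date> <end time>` implied by the slicing:

# Hypothetical interval entry; the station part may itself contain spaces.
tokens = "NYALES20 2015-01-23 05:36:00 2015-01-23 16:53:00".split()

start = " ".join(tokens[-4:-2])    # "2015-01-23 05:36:00"
end = " ".join(tokens[-2:])        # "2015-01-23 16:53:00"
station = " ".join(tokens[:-4])    # "NYALES20"
print(station, start, end)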
Example #2
    def _calculate_pos_itrs(self, site):
        """Calculate positions for the given time epochs

        The positions are calculated as simple linear offsets based on the reference epoch.

        Args:
            site (String):    Key saying which site to calculate position for.

        Returns:
            Array:  Positions, one 3-vector for each time epoch.
        """
        station_info = self.data[site]
        ref_epoch = Time(station_info["ref_epoch"], scale="utc")

        pos = np.zeros((self.time.size, 3))
        for pv in station_info["pos_vel"].values():
            idx = np.logical_and(self.time.utc.datetime >= pv["start"],
                                 self.time.utc.datetime < pv["end"])
            if idx.size == 1:
                idx = np.array([idx])
            if not any(idx):
                continue
            ref_pos = np.array([pv["STAX"], pv["STAY"], pv["STAZ"]])
            ref_vel = np.array([pv["VELX"], pv["VELY"], pv["VELZ"]])
            interval_years = (self.time - ref_epoch).jd * Unit.day2julian_years
            if isinstance(interval_years, float):
                interval_years = np.array([interval_years])
            pos[idx, :] = ref_pos + interval_years[idx,
                                                   None] * ref_vel[None, :]

        if self.time.size == 1:
            pos = pos[0, :]
        return pos
Example #3
    def get_events(self, *event_types):
        """Retrieve events from the datasets

        One or several `event_types` can be specified, and only events of those types will be retrieved. If no event type
        is specified, all events are returned.

        TODO: Possible speed-up by creating one Time array instead of one object per event?
        TODO: Using set to remove duplicate events, how can these be handled so that duplicates are not written?

        Args:
            event_types (List):  Strings with event types like 'clock_break', 'cycle_slip', etc.

        Returns:
            List:  3-tuples describing each event, timestamp (Time), event_type (String), description (String).
        """
        all_events = self.meta.get("__events__", dict())
        if not event_types:
            event_types = all_events.keys()

        return sorted(
            set(
                [
                    (Time(e[0], format="isot", scale="utc"), k, e[1])
                    for k, v in all_events.items()
                    for e in v
                    if k in event_types
                ]
            )
        )
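Judging from the comprehension above, the events live in dset.meta["__events__"] as lists of (timestamp, description) pairs keyed by event type. A small self-contained sketch of the dedup-and-sort pattern, using plain ISO strings instead of the library's Time class:

# Hypothetical contents of dset.meta["__events__"]
all_events = {
    "clock_break": [("2015-01-23T05:36:00", "SVETLOE"),
                    ("2015-01-23T05:36:00", "SVETLOE")],   # duplicate entry
    "cycle_slip": [("2015-01-23T12:00:00", "G20")],
}

# Remove duplicates with a set and sort chronologically
events = sorted({(t, k, d) for k, v in all_events.items() for t, d in v})
print(events)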
Example #4
def parse_clock_breaks(dset, clock_breaks):
    """Parses the clock breaks string from the edit file

    Examples:
        > parse_clock_breaks(dset, '')
        (OrderedDict(), 0)
        > parse_clock_breaks(dset, 'SVETLOE 2015/01/23 05:36:00,
                                    SVETLOE 2015/01/23 16:53:00,
                                    SVETLOE 2015/01/23 12:30:00')
        (OrderedDict([(5, [106560.0, 131400.0, 147180.0])]), 3)

    Args:
        dset:             A Dataset containing model data.
        clock_breaks:     List of clock break strings ('STATION YYYY/MM/DD hh:mm:ss').

    Returns:
        Tuple of lists: stations with clock breaks and the corresponding (time_start, time_end) intervals.
    """
    # Parse clock breaks from file and store in the station_breaks dictionary
    station_breaks = {
        s:
        [min(dset.time.utc),
         max(dset.time.utc) + TimeDelta(1, format="sec")]
        for s in dset.unique("station")
    }
    if clock_breaks:
        log.info("Applying clock breaks: {}", ", ".join(clock_breaks))

    for cb in clock_breaks:
        # Station names may contain spaces
        cb = cb.split()
        cb_date = cb[-2:]
        cb_station = " ".join(cb[:-2])
        # cb_station, *cb_date = cb.split()
        cb_time = Time(" ".join(cb_date), scale="utc", format="iso")
        if cb_station not in station_breaks:
            log.warn(
                "Station '{}' with clock break unknown. Available options are {}",
                cb_station,
                ", ".join(station_breaks.keys()),
            )
            continue
        station_breaks[cb_station].append(cb_time)
        dset.add_event(cb_time, "clock_break", cb_station)

    # Convert the station_breaks dict to lists of (station, (time_start, time_end))-tuples
    stations = list()
    time_intervals = list()
    for station in sorted(station_breaks.keys(),
                          key=lambda s: (len(station_breaks[s]), s),
                          reverse=True):
        station_times = sorted(station_breaks[station])
        for t_start, t_end in zip(station_times[:-1], station_times[1:]):
            stations.append(station)
            time_intervals.append((t_start, t_end))

    return stations, time_intervals
Example #5
    def calculate_leap_second_offset(self):
        """Calculate leap second offsets for each day

        Use the difference between UTC and TAI as a proxy for the leap second offset. The leap second offset is
        calculated and stored to the EOP data-dictionary. This is used to correct for the leap second jumps when
        interpolating the UT1 - UTC values.
        """
        days = Time(np.array(list(self.data.keys())), format="mjd", scale="utc")
        leap_offset = np.round((days.utc.mjd - days.tai.mjd) * unit.day2seconds)
        daily_offset = {int(d): lo for d, lo in zip(days.mjd, leap_offset)}

        for d, lo in daily_offset.items():
            self.data[d]["leap_offset"] = lo
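Since TAI runs ahead of UTC by an integer number of leap seconds, rounding the UTC - TAI difference recovers that offset. A short sketch with astropy's Time (assumed here to behave like the library's Time wrapper) for two days straddling the 2015-07-01 leap second:

import numpy as np
from astropy.time import Time

days = Time([57203, 57204], format="mjd", scale="utc")  # 2015-06-30 and 2015-07-01
leap_offset = np.round((days.utc.mjd - days.tai.mjd) * 86400)  # 86400 = seconds per day
print(leap_offset)  # expected roughly [-35. -36.]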
Example #6
def date_vars(date):
    """Construct a dict of date variables

    From a given date, construct a dict containing all relevant date variables. This dict can be used to for instance
    replace variables in file names.

    Examples:
        >>> from datetime import date
        >>> date_vars = date_vars(date(2009, 11, 2))
        >>> sorted(date_vars.items())    # doctest: +NORMALIZE_WHITESPACE
        [('MMM', 'NOV'), ('ce', '20'), ('d', '2'), ('date', '20091102'), ('dd', '02'), ('dow', '1'), ('doy', '306'),
         ('gpsweek', '1556'), ('hh', '00'), ('m', '11'), ('mm', '11'), ('mmm', 'nov'), ('yy', '09'), ('yyyy', '2009')]

    Args:
        date (Date/Datetime):      The date.

    Returns:
        Dict: Dictionary with date variables for the given date.
    """
    if date is None:
        return dict()

    # Import Time locally to avoid circular imports
    from where.lib.time import Time

    month = [
        "jan", "feb", "mar", "apr", "may", "jun", "jul", "aug", "sep", "oct",
        "nov", "dec"
    ]

    # Create the dict of date variables
    return dict(
        date=date.strftime("%Y%m%d"),
        yyyy=date.strftime("%Y"),
        ce=date.strftime("%Y")[:2],
        yy=date.strftime("%y"),
        m=str(date.month),
        mm=date.strftime("%m"),
        mmm=month[date.month - 1].lower(),
        MMM=month[date.month - 1].upper(),
        d=str(date.day),
        dd=date.strftime("%d"),
        hh=date.strftime("%H"),
        doy=date.strftime("%j"),
        dow=date.strftime("%w"),
        gpsweek=str(
            int(
                Time(date.strftime("%Y-%m-%d %H:%M:%S"),
                     format="iso").gpsweek)),
    )
Example #7
    def remove_low_frequency_tides(self):
        """Remove the effect of low frequency tides.

        Tidal variations in the Earth's rotation with periods from 5 days to 18.6 years is present in the UT1-UTC time
        series as described in the IERS Conventions 2010 chapter 8.1. To improve the interpolation of the UT1-UTC time
        series this effect can be removed. In that case the effect needs to be added again to the final interpolated
        values.
        """
        for mjd in self.data.keys():
            # Julian centuries since J2000
            t = Time(mjd, format="mjd")
            t_julian_centuries = (t.tt.jd - 2_451_545.0) / 36525
            dut1_corr = iers.rg_zont2(t_julian_centuries)[0]
            self.data[mjd]["ut1_utc"] -= dut1_corr
Example #8
def _check_first_epoch_sample_point(dset: "Dataset", precise, epoch_interval):
    """Keep first observation epoch depending on existing precise orbit sample points

    Precise orbit sample points are needed to carry out interpolation of precise orbits for the first observation
    epoch. If no precise orbit sample point is available before the first satellite observation epoch, then this
    epoch will be removed for this satellite.

    Args:
        dset (Dataset):            A Dataset containing model data.
        precise (PreciseOrbit):    Precise orbit object with precise orbit information.
        epoch_interval (float):    Epoch interval of precise orbit sample points

    Returns:
        tuple: Tuple with array containing False for first observations to throw away and indices indicating first
               observation epoch.
    """

    # Get indices for first observation epochs
    first_idx = 0
    first_epoch_idx = dset.time.gps.mjd == dset.time.gps.mjd[first_idx]

    # Get set with satellite and time entries for getting corresponding precise orbit sample points
    satellites = dset.satellite[first_epoch_idx]
    time = Time(
        val=dset.time.gps.datetime[first_epoch_idx], format="datetime", scale=dset.time.data.scale
    ) - TimeDelta(epoch_interval, format="sec")
    precise_idx = precise._get_nearest_sample_point(satellites, np.full(len(satellites), time))

    # Keep observations epochs, where a precise orbit sample point exists before the first observation epoch
    diff_time = (dset.time.gps.mjd[first_epoch_idx] - precise.dset_edit.time.gps.mjd[precise_idx]) * Unit.day2second
    keep_idx = np.logical_and(diff_time < (epoch_interval + 1), diff_time > 0)

    removed_entries = "DEBUG: ".join(
        [
            f"{s} {t.strftime('  %Y-%m-%d %H:%M:%S (GPS)')}, dt = {dt:8.2f} s (0 < dt < {epoch_interval + 1})\n"
            for s, t, dt in zip(
                satellites[np.logical_not(keep_idx)],
                dset.time.gps.datetime[first_epoch_idx][np.logical_not(keep_idx)],
                diff_time[np.logical_not(keep_idx)],
            )
        ]
    )
    log.info(f"Following first epoch entries are removed: \nDEBUG: {removed_entries}")

    return keep_idx, first_epoch_idx
Example #9
def ignore_observation(dset):
    """Removes given observations

    Args:
        dset:     A Dataset containing model data.

    Returns:
        Array containing False for observations to throw away
    """
    observations = config.tech[_SECTION].observations.as_list(split_re=", *")
    keep_idx = np.ones(dset.num_obs, dtype=bool)

    for observation in observations:
        date, time, *stations = observation.split()
        epoch = Time(f"{date} {time}", scale="utc", format="iso")
        stations = (" ".join(stations)).split("/")  # station names may contain spaces, split at slash instead

        remove_idx = dset.time.utc.iso == epoch.utc.iso
        for station in stations:
            remove_idx = dset.filter(station=station, idx=remove_idx)
        keep_idx[remove_idx] = False

    return keep_idx
Example #10
    def _calculate_pos_itrs(self, site):
        """Calculate positions for the given time epochs

        The positions are calculated as simple linear offsets based on the reference epoch. Makes sure to pick out the
        correct time interval to use.

        Args:
            site:   Key saying which site to calculate position for; the type might depend on the Trf.

        Returns:
            Array:  Positions, one 3-vector for each time epoch.
        """
        station_info = self.data[site]
        ref_epoch = Time(float(station_info["ref_epoch"]),
                         format="decimalyear",
                         scale="utc")
        pos = np.full((self.time.size, 3), fill_value=np.nan)

        ref_pos = np.array(station_info["pos"])
        ref_vel = np.array(station_info["vel"])
        interval_years = (self.time - ref_epoch).jd * unit.day2julian_years
        pos[:, :] = ref_pos + interval_years[:, None] * ref_vel[None, :]

        return pos
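The extrapolation is a plain linear motion model, position = reference position + elapsed years * velocity, evaluated for all epochs at once via numpy broadcasting. A self-contained sketch with made-up numbers:

import numpy as np

ref_pos = np.array([4119502.13, -4553595.23, -1722855.13])  # hypothetical reference position [m]
ref_vel = np.array([-0.0221, 0.0067, 0.0097])                # hypothetical velocity [m/year]
interval_years = np.array([-2.5, 0.0, 2.5])                  # epochs relative to the reference epoch

# (n, 1) * (1, 3) broadcasts to (n, 3): one extrapolated 3-vector per epoch
pos = ref_pos + interval_years[:, None] * ref_vel[None, :]
print(pos)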
Example #11
    def write_to_dataset(self, dset):
        """Store SLR data in a dataset

        Args:
           dset: The Dataset where data are stored.
        """
        dset.num_obs = len(self.meta["time"])
        dset.add_time("time",
                      val=Time(val=self.rundate.isoformat()).mjd,
                      val2=self.meta.pop("time"),
                      scale="utc",
                      format="mjd")
        for field, value in self.meta.items():
            dset.add_text(field, val=value)

        # Positions
        trf = apriori.get("trf", time=dset.time)
        for station in dset.unique("station"):
            trf_site = trf[station]
            station_pos = trf_site.pos.itrs
            log.debug(
                "Station position for {} ({}) according to ITRF is (x,y,z) = {}",
                station,
                trf_site.name,
                station_pos.mean(axis=0),
            )
            domes = trf_site.meta["domes"]

            if False:  # TODO: Add these missing stations to trf-file
                domes = "00000"
                log.warn("No information about station {} on ITRF file",
                         station)
                if station == "7407":
                    station_pos = np.repeat(
                        [[4119502.13, -4553595.23, -1722855.13]],
                        dset.num_obs,
                        axis=0)
                elif station == "1889":
                    station_pos = np.repeat(
                        [[3451136.221, 3060335.064, 4391970.241]],
                        dset.num_obs,
                        axis=0)
                elif station == "1888":
                    station_pos = np.repeat(
                        [[2730139.097, 1562328.629, 5529998.585]],
                        dset.num_obs,
                        axis=0)
                elif station == "1891":
                    station_pos = np.repeat(
                        [[-968340.32, 3794415.10, 5018178.10]],
                        dset.num_obs,
                        axis=0)
                elif station == "1887":
                    station_pos = np.repeat(
                        [[2001873.3346, 3987633.3547, 4542477.6716]],
                        dset.num_obs,
                        axis=0)
                elif station == "1886":
                    station_pos = np.repeat(
                        [[3466773.394, 3059757.864, 4381456.782]],
                        dset.num_obs,
                        axis=0)
                elif station == "1874":
                    station_pos = np.repeat(
                        [[2844591.641, 2161111.997, 5266356.839]],
                        dset.num_obs,
                        axis=0)
                elif station == "1890":
                    station_pos = np.repeat(
                        [[-838299.699, 3865738.865, 4987640.921]],
                        dset.num_obs,
                        axis=0)
                else:
                    log.error("Unknown station {}", station)
                    station_pos = np.zeros((dset.num_obs, 3))
                log.warn("Using coordinates {} for {}",
                         np.mean(station_pos, axis=0), station)

            self.data["pos_" + station] = station_pos
            self.data["station-other_" + station] = dict(domes=domes,
                                                         cdp=station,
                                                         site_id=station)

        dset.add_position("site_pos",
                          time="time",
                          itrs=np.array([
                              self.data["pos_" + s][idx]
                              for idx, s in enumerate(dset.station)
                          ]))

        # Station data
        sta_fields = set().union(*[
            v.keys() for k, v in self.data.items() if k.startswith("station_")
        ])
        for field in sta_fields:
            dset.add_float(field,
                           val=np.array([
                               float(self.data["station_" + s][field])
                               for s in dset.station
                           ]))
        sta_fields = set().union(*[
            v.keys() for k, v in self.data.items()
            if k.startswith("station-other_")
        ])
        for field in sta_fields:
            dset.add_text(field,
                          val=[
                              self.data["station-other_" + s][field]
                              for s in dset.station
                          ])

        # Satellite data
        sat_fields = set().union(*[
            v.keys() for k, v in self.data.items()
            if k.startswith("satellite_")
        ])
        for field in sat_fields:
            dset.add_float(field,
                           val=np.array([
                               float(self.data["satellite_" + s][field])
                               for s in dset.satellite
                           ]))

        # Observations
        for field, values in self.data["obs"].items():
            dset.add_float(field, val=np.array(values))

        for field, values in self.data["obs_str"].items():
            dset.add_text(field, val=values)

        # Meteorological data
        met_fields = set().union(
            *[v.keys() for k, v in self.data.items() if k.startswith("met_")])
        for field in met_fields:
            dset.add_float(field,
                           val=np.diag([
                               self.data["met_" + s][field]
                               for s in dset.station
                           ]))
Example #12
    def _calculate_pos_itrs(self, site):
        """Calculate positions for the given time epochs

        The positions are calculated as simple linear offsets based on the reference epoch. If there is a post-seismic
        deformations model for a station the motion due to that model is added to the linear velocity model. Makes sure
        to pick out the correct time interval to use.

        Args:
            site (String):    Key saying which site to calculate position for.

        Returns:
            Array:  Positions, one 3-vector for each time epoch.
        """
        station_info = self.data[site]
        ref_epoch = Time(station_info["ref_epoch"], scale="utc")

        pos = np.zeros((self.time.size, 3))
        for pv in station_info["pos_vel"].values():
            idx = np.logical_and(self.time.utc.datetime >= pv["start"],
                                 self.time.utc.datetime < pv["end"])
            if self.time.size == 1:
                idx = np.array([idx])
            if not any(idx):
                continue
            ref_pos = np.array([pv["STAX"], pv["STAY"], pv["STAZ"]])
            ref_vel = np.array([pv["VELX"], pv["VELY"], pv["VELZ"]])
            interval_years = (self.time - ref_epoch).jd * Unit.day2julian_years
            if isinstance(interval_years, float):
                interval_years = np.array([interval_years])
            pos[idx, :] = ref_pos + interval_years[idx,
                                                   None] * ref_vel[None, :]

        # Post-seismic deformations, see Appendix C in :cite:`itrf2014`
        if "psd" in station_info:
            llh = sofa.vectorized_llh(pos)
            psd = station_info["psd"]
            denu = dict(H=np.zeros(self.time.size),
                        E=np.zeros(self.time.size),
                        N=np.zeros(self.time.size))
            for param in psd.values():
                t_0 = Time(param["epoch"], format="datetime", scale="utc")
                delta_t = (self.time - t_0).jd * Unit.day2julian_years
                if isinstance(delta_t, float):
                    delta_t = np.array([delta_t])
                idx = delta_t > 0
                for L in "ENH":
                    aexp = np.array(param.get("AEXP_" + L, list()))
                    texp = np.array(param.get("TEXP_" + L, list()))
                    for a, t in zip(aexp, texp):
                        denu[L][idx] += a * (1 - np.exp(-delta_t[idx] / t))
                    alog = np.array(param.get("ALOG_" + L, list()))
                    tlog = np.array(param.get("TLOG_" + L, list()))
                    for a, t in zip(alog, tlog):
                        denu[L][idx] += a * np.log(1 + delta_t[idx] / t)

            rot = rotation.enu2trs(llh[:, 0], llh[:, 1])
            denu = np.vstack((denu["E"], denu["N"], denu["H"])).T
            dxyz = (rot @ denu[:, :, None])[:, :, 0]
            pos += dxyz

        if self.time.size == 1:
            pos = pos[0, :]
        return pos
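The post-seismic correction above sums exponential terms A*(1 - exp(-dt/tau)) and logarithmic terms A*log(1 + dt/tau) per east/north/up component, with dt in years after the earthquake (ITRF2014, Appendix C). A sketch of one component with made-up amplitudes and relaxation times:

import numpy as np

delta_t = np.array([-0.5, 0.5, 2.0])   # years since the event; negative = before the event
aexp, texp = [0.012], [0.8]            # hypothetical exponential amplitude [m] / relaxation time [years]
alog, tlog = [0.005], [1.5]            # hypothetical logarithmic amplitude [m] / relaxation time [years]

d = np.zeros_like(delta_t)
idx = delta_t > 0                      # the correction only applies after the event
for a, tau in zip(aexp, texp):
    d[idx] += a * (1 - np.exp(-delta_t[idx] / tau))
for a, tau in zip(alog, tlog):
    d[idx] += a * np.log(1 + delta_t[idx] / tau)
print(d)                               # zero before the event, growing afterwards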
Example #13
def _check_last_epoch_sample_point(dset, precise, epoch_interval):
    """Keep last observation epoch depending on existing precise orbit sample points

    Precise orbit sample points are needed to carry out interpolation of precise orbits for the last observation
    epochs. If no precise orbit sample point is available after the last satellite observation epochs, then these
    epochs will be removed for this satellite.

    The time difference between the last observation epochs and the next precise orbit sample point is determined. 'Last
    observation epoch' + 'sampling rate' is chosen as reference time for the selection of the nearest orbit sample
    point, which normally corresponds to 0:00 GPS time. The observation epochs are rejected if the time difference
    falls outside the following interval:
                       -(precise orbit epoch interval + 1) < time difference < 0

    Args:
        dset (Dataset):            A Dataset containing model data.
        precise (PreciseOrbit):    Precise orbit object with precise orbit information.
        epoch_interval (float):    Epoch interval of precise orbit sample points

    Returns:
        tuple: Tuple with array containing False for last observations to throw away and indices indicating last
               observation epoch.
    """
    sampling_rate = config.tech.sampling_rate.float

    # Get indices for last observation epochs
    last_idx = -1
    last_epoch_idx = (
        dset.time.gps.mjd
        >= dset.time.gps.mjd[last_idx] - (epoch_interval - sampling_rate) * Unit.second2day
    )

    # Get set with satellite and time entries for getting corresponding precise orbit sample points
    # Note: Sample point reference time is 'last observation epoch' + 'sampling rate', which corresponds normally to
    #       0:00 GPS time.
    satellites = dset.satellite[last_epoch_idx]
    time = Time(val=dset.time.gps.datetime[last_idx], format="datetime", scale=dset.time.data.scale) + TimeDelta(
        sampling_rate, format="sec"
    )
    precise_idx = precise._get_nearest_sample_point(satellites, np.full(len(satellites), time))

    # Keep observations epochs, where a precise orbit sample point exists after the last observation epoch
    diff_time = (dset.time.gps.mjd[last_epoch_idx] - precise.dset_edit.time.gps.mjd[precise_idx]) * Unit.day2second
    keep_idx = np.logical_and(diff_time > -(epoch_interval + 1), diff_time < 0)

    removed_entries = "DEBUG: ".join(
        [
            f"{s} {t.strftime('  %Y-%m-%d %H:%M:%S (GPS)')}, dt = {dt:8.2f} s ({-(epoch_interval + 1)} < dt < 0)\n"
            for s, t, dt in zip(
                satellites[np.logical_not(keep_idx)],
                dset.time.gps.datetime[last_epoch_idx][np.logical_not(keep_idx)],
                diff_time[np.logical_not(keep_idx)],
            )
        ]
    )
    log.info(f"Following last epoch entries are removed: \nDEBUG: {removed_entries}")

    return keep_idx, last_epoch_idx
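The acceptance test above keeps an epoch only when the time difference to the nearest following orbit sample point lies within -(epoch_interval + 1) < dt < 0 seconds. A tiny numeric illustration with made-up values:

import numpy as np

epoch_interval = 900.0                                   # e.g. 15 min precise orbit sampling
diff_time = np.array([-50.0, -899.0, -1200.0, 10.0])     # hypothetical 'observation - sample point' [s]

keep_idx = np.logical_and(diff_time > -(epoch_interval + 1), diff_time < 0)
print(keep_idx)  # [ True  True False False]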
Example #14
def detect_clockbreaks(dset):
    """Try to detect a clock break

    The suspected clock breaks are added to the dataset as events. They will not be corrected automatically.

    TODO: Clean up code / better variable names, remove "magic" numbers, possibly put settings in config?
          Handle gaps in observations better, these are often misinterpreted as clock breaks
          These might be made faster by using a filter to test all observations at the same time?

    Args:
        dset (Dataset):  Information about model run.
    """
    log.info("Looking for clock breaks")

    clock_breaks = list()
    order_of_polynomial = config.tech.get("order_of_polynomial", section="vlbi_clock_correction", default=2).int

    for station in dset.unique("station"):
        # Merge together data where station is station 1 and station 2
        idx_1 = dset.filter(station_1=station)
        idx_2 = dset.filter(station_2=station)
        time = np.hstack((dset.time.utc.mjd[idx_1], dset.time.utc.mjd[idx_2])) - dset.time.utc.mjd[0]
        residual = np.hstack((dset.residual[idx_1], -dset.residual[idx_2]))

        # Make sure data are chronological
        idx_sort = np.argsort(time)
        time = time[idx_sort]
        residual = residual[idx_sort]

        # Add fields to dset for debug
        idx_site = np.hstack((np.where(idx_1)[0], np.where(idx_2)[0]))[idx_sort]
        dset.add_float(f"cb_{station}_residual", write_level="operational")
        dset[f"cb_{station}_residual"][idx_site] = residual
        dset.add_float(f"cb_{station}_value", write_level="detail")
        dset.add_float(f"cb_{station}_limit", write_level="detail")
        dset.add_float(f"cb_{station}_pred", write_level="detail")
        dset.add_float(f"cb_{station}_ratio", write_level="detail")

        # Test each observation for clock break
        start_obs = 0
        for obs in range(len(time) - 25):
            if obs - start_obs < 25:  # Need some observations to do polyfit
                continue

            # Fit a polynomial to the given data
            idx_fit = slice(np.maximum(start_obs, obs - 500), obs)  # Possibly better to limit on time instead of obs?
            p = np.polyfit(time[idx_fit], residual[idx_fit], order_of_polynomial)  # Same degree as clock correction
            poly = np.polyval(p, time[idx_fit])
            res = residual[idx_fit] - poly
            std_lim = 2 * np.std(res) * (1 + 4 * np.exp(-10 * ((obs - np.maximum(start_obs, obs - 500)) / 500) ** 2))
            # Gives higher limit when there are fewer observations in res

            # Test next observations
            model = np.polyval(
                p, time[obs + 1 : obs + 26]
            )  # Use many (=25) observations to avoid problems with outliers
            obs_res = residual[obs + 1 : obs + 26]
            dset[f"cb_{station}_value"][idx_site[obs]] = np.min(np.abs(obs_res - model))
            dset[f"cb_{station}_limit"][idx_site[obs]] = std_lim
            dset[f"cb_{station}_pred"][idx_site[obs]] = model[0]
            dset[f"cb_{station}_ratio"][idx_site[obs]] = np.min(np.abs(obs_res - model)) / std_lim

            # Register possible clock break
            if np.all(np.abs(obs_res - model) > std_lim):
                start_obs = np.min(np.where(time > time[obs])[0])  # Next epoch with observations
                time_cb = Time(dset.time.utc.mjd[0] + (time[obs] + time[start_obs]) / 2, format="mjd", scale="utc")
                clock_breaks.append((np.min(np.abs(obs_res - model)) / std_lim, time_cb, station))

    # Only add the biggest clock breaks, because a big clock break creates smaller false ones
    ratio_lim = max(cb[0] for cb in clock_breaks) / 3 if clock_breaks else 1
    for ratio, time, station in clock_breaks:
        if ratio > ratio_lim:
            dset.add_event(time, "suspected_clock_break", station)
            log.check(
                "Found possible clock break for {} at {} ({})",
                station,
                time.datetime.strftime(config.FMT_datetime),
                "*" * int(np.ceil(np.log2(ratio))),
            )
Example #15
def data_handling(dset):
    """Edits data based on SLR handling file

    Args:
        dset:     A Dataset containing model data.

    Note:
        The dataset is edited in place: bias fields and estimation flags are updated. Nothing is returned.
    """
    handling = apriori.get("slr_handling_file", time=dset.time)

    for station in dset.unique("station"):
        # Estimate range bias E
        intervals = handling.get(station, {}).get("E", [])
        for interval, info in intervals:
            start_x, end_x = interval
            int_idx = dset.filter(station=station) & (dset.time >= start_x) & (
                dset.time <= end_x)
            if np.any(int_idx):
                log.info(
                    f"ILRS handling: Estimating range bias for station {station} in interval {start_x}-{end_x}"
                )
                log.dev(
                    "ILRS Data Handling: What if there is a break in the middle of a pass?"
                )
                dset.estimate_range[:] = np.logical_or(int_idx,
                                                       dset.estimate_range)
        # Apply range bias R
        intervals = handling.get(station, {}).get("R", [])
        for interval, info in intervals:
            start_x, end_x = interval
            int_idx = dset.filter(station=station) & (dset.time >= start_x) & (
                dset.time <= end_x)
            if np.any(int_idx):
                log.info(
                    f"ILRS handling: Applying range bias for station {station} in interval {start_x}-{end_x}"
                )
                RB = info["e_value"]
                if info["unit"] == "mm":
                    dset.range_bias[:] += int_idx * RB * Unit.mm2m
                elif info["unit"] == "ms":
                    dset.range_bias[:] += int_idx * RB * Unit.millisec2seconds * constant.c
                else:
                    log.fatal(
                        "Unknown unit on ILRS Data handling file for range bias applied"
                    )
        # Estimate time bias U
        intervals = handling.get(station, {}).get("U", [])
        for interval, info in intervals:
            start_x, end_x = interval
            int_idx = dset.filter(station=station) & (dset.time >= start_x) & (
                dset.time <= end_x)
            if np.any(int_idx):
                log.warn(
                    f"ILRS handling: Estimating time bias for station {station} in interval {start_x}-{end_x}"
                )
                dset.estimate_time |= int_idx
        # Apply time bias T
        intervals = handling.get(station, {}).get("T", [])
        for interval, info in intervals:
            start_x, end_x = interval
            int_idx = dset.filter(station=station) & (dset.time >= start_x) & (
                dset.time <= end_x)
            if np.any(int_idx):
                log.info(
                    f"ILRS handling: Applying time bias for station {station} in interval {start_x}-{end_x}"
                )
                t_midInterval = Time(start_x + 1 / 2 * (end_x - start_x),
                                     format="datetime")
                TB = info["e_value"]
                drift = info["e_rate"]
                if info["unit"] == "us":
                    time_drifted = (dset.time - t_midInterval).jd * drift
                    dset.time_bias[:] += int_idx * (
                        -np.repeat(TB, dset.num_obs) -
                        time_drifted) * Unit.microsec2sec
                else:
                    log.fatal(
                        "Unknown unit on ILRS Data handling file for time bias applied"
                    )
        # Apply pressure bias P
        intervals = handling.get(station, {}).get("P", [])
        for interval, info in intervals:
            start_x, end_x = interval
            int_idx = dset.filter(station=station) & (dset.time >= start_x) & (
                dset.time <= end_x)
            if np.any(int_idx):
                log.fatal(f"ILRS handling: TODO: Implement pressure bias!")
        # Target signature bias C
        intervals = handling.get(station, {}).get("P", [])
        for interval, info in intervals:
            start_x, end_x = interval
            int_idx = dset.filter(station=station) & (dset.time >= start_x) & (
                dset.time <= end_x)
            if np.any(int_idx):
                log.fatal(
                    f"ILRS handling: TODO: Implement target signature bias!")
    return
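The range-bias units above reduce to simple conversions: a bias in millimetres becomes metres directly, while a bias in milliseconds is a time offset turned into a distance via the speed of light. A sketch with plain constants instead of the library's Unit and constant modules:

c = 299_792_458.0        # speed of light [m/s]
rb_mm = 12.0             # hypothetical range bias [mm]
rb_ms = 0.5              # hypothetical range bias [ms]

bias_from_mm = rb_mm * 1e-3        # 0.012 m
bias_from_ms = rb_ms * 1e-3 * c    # about 1.5e5 m
print(bias_from_mm, bias_from_ms)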
Example #16
    def setUp(self):
        """

        The first test setup is based on the bc_velo.c program, which is published in :cite:`remondi2004`, and the
        following RINEX navigation file sample:

        /* Sample Broadcast Message in unit of radians, seconds, meters.
        20 01  7 23  2  0  0.0 -.857324339449D-04 -.272848410532D-11  .000000000000D+00
             .200000000000D+02  .886875000000D+02  .465376527657D-08  .105827953357D+01
             .457651913166D-05  .223578442819D-02  .177137553692D-05  .515379589081D+04
             .936000000000D+05  .651925802231D-07  .164046615454D+01 -.856816768646D-07
             .961685061380D+00  .344968750000D+03  .206374037770D+01 -.856928551657D-08
             .342514267094D-09  .000000000000D+00  .112400000000D+04  .000000000000D+00
             .200000000000D+01  .000000000000D+00 -.651925802231D-08  .276000000000D+03
             .865800000000D+05  .000000000000D+00  .000000000000D+00  .000000000000D+00
        */


        The second test setup compares results from Where against the gLAB solution for satellite G20 and epoch
        2016-03-01 00:00:00.0.

        /* Sample Broadcast Message in unit of radians, seconds, meters for satellite G20 and
        /  epoch 2016-03-01 00:00:00.0
        20 16  3  1  0  0  0.0 0.396233052015D-03 0.261479726760D-11 0.000000000000D+00
            0.100000000000D+02-0.231562500000D+02 0.530236372187D-08 0.253477496869D+00
           -0.111199915409D-05 0.483385741245D-02 0.810064375401D-05 0.515369705963D+04
            0.172800000000D+06-0.141561031342D-06 0.304306271006D+00 0.372529029846D-08
            0.926615731710D+00 0.207250000000D+03 0.133849764271D+01-0.843427989304D-08
           -0.164292557730D-09 0.100000000000D+01 0.188600000000D+04 0.000000000000D+00
            0.200000000000D+01 0.000000000000D+00-0.838190317154D-08 0.100000000000D+02
            0.172770000000D+06 0.400000000000D+01 0.000000000000D+00 0.000000000000D+00

        The third test setup compares results from Where against the gLAB solution for satellite E11 and epoch
        2016-03-01 00:00:00.0.

        /* Sample Broadcast Message in unit of radians, seconds, meters for satellite E11 and
        /  epoch 2016-03-01 00:00:00.0
        E11 2016 03 01 00 00 00 6.643886445090e-05 1.097077984014e-11 0.000000000000e+00
             3.200000000000e+01-3.243750000000e+01 3.015839907552e-09 2.397505637802e+00
            -1.462176442146e-06 3.306962316856e-04 8.240342140198e-06 5.440621692657e+03
             1.728000000000e+05 9.313225746155e-09-1.259905024101e+00 5.960464477539e-08
             9.679475503522e-01 1.663125000000e+02-6.590241211713e-01-5.572732126661e-09
             2.775115594704e-10 2.580000000000e+02 1.886000000000e+03                   
             3.120000000000e+00 0.000000000000e+00-2.328306436539e-08 0.000000000000e+00
             1.735000000000e+05 

        """

        # Get GNSS ephemeris data for testing
        if TEST == "test_1":
            file_key = "test_apriori_orbit_broadcast_1"
            year = 2001
            month = 7
            day = 23
            hour = 2
            minute = 0
            second = 0.0
            satellite = "G20"
            self.system = "G"  # GNSS identifier

            # Satellite transmission time
            self.t_sat = 86400.00

        elif TEST == "test_2":
            file_key = "test_apriori_orbit_broadcast_2"
            year = 2016
            month = 3
            day = 1
            hour = 0
            minute = 0
            second = 0.0
            satellite = "G20"
            self.system = "G"  # GNSS identifier

            # Satellite transmission time
            self.t_sat = 172799.92312317

        elif TEST == "test_3":
            file_key = "test_apriori_orbit_broadcast_3"
            year = 2016
            month = 3
            day = 1
            hour = 0
            minute = 0
            second = 0.0
            satellite = "E11"
            self.system = "E"  # GNSS identifier

            # Satellite transmission time
            self.t_sat = 173699.999

        rundate = datetime(year, month, day, hour, minute)
        time = Time(
            [
                (
                    "{year}-{month:02d}-{day:02d}T{hour:02d}:{minute:02d}:{second:010.7f}"
                    "".format(year=year, month=month, day=day, hour=hour, minute=minute, second=second)
                )
            ],
            scale="gps",
        )

        self.brdc = apriori.get(
            "orbit",
            apriori_orbit="broadcast",
            rundate=rundate,
            time=time,
            satellite=tuple({satellite}),
            system=tuple({self.system}),
            station="test",
            file_key=file_key,
        )

        self.idx = 0  # Broadcast ephemeris index
Example #17
def _get_time(dset):
    """Determine time field
    """
    # TODO hjegei: Workaround -> better would it be if Time object can handle gpsweek as input format!!!
    jd_day, jd_frac = gnss.gpssec2jd(dset.gpsweek, dset.gpssec)
    return Time(val=jd_day, val2=jd_frac, format="jd", scale="gps")
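GPS time counts weeks and seconds of week from JD 2444244.5 (1980-01-06 00:00 GPS). A rough sketch of the relation that gnss.gpssec2jd presumably implements; the real function may split the day and fraction differently:

GPS_EPOCH_JD = 2_444_244.5   # 1980-01-06 00:00:00 in the GPS time scale

def gpssec2jd_sketch(gpsweek, gpssec):
    days, frac = divmod(gpssec, 86400.0)
    return GPS_EPOCH_JD + gpsweek * 7 + days, frac / 86400.0

print(gpssec2jd_sketch(1886, 172800.0))  # (2457448.5, 0.0), i.e. 2016-03-01 00:00 GPS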
Example #18
    def setUp(self):

        # TODO: Configuration has to be defined? How? where.set_config(2016, 3, 1, 'gps')?
        time = Time(2457448.5, format="jd",
                    scale="tdb")  # Julian Day in TDB time scale
        self.eph = apriori.get("ephemerides", time=time, ephemerides="de430")