Example #1
def ignore_epochs(dset):
    """Edits data based on data quality

    Args:
        dset:     A Dataset containing model data.

    Returns:
        Array containing False for observations to throw away
    """
    intervals = config.tech[_SECTION].intervals.as_list(split_re=", *")

    keep_idx = np.ones(dset.num_obs, dtype=bool)
    for interval in intervals:
        interval = interval.split()
        start_time = Time(" ".join(interval[-4:-2]), scale="utc", fmt="iso")
        end_time = Time(" ".join(interval[-2:]), scale="utc", fmt="iso")
        # station name may contain spaces
        station = " ".join(interval[:-4])

        remove_idx = np.logical_and(start_time < dset.time,
                                    dset.time < end_time)
        if len(interval) > 4:  # a station or baseline is given in addition to the interval
            if "/" in station:
                remove_idx = dset.filter(baseline=station, idx=remove_idx)
            else:
                remove_idx = dset.filter(station=station, idx=remove_idx)
        keep_idx = np.logical_and(keep_idx, np.logical_not(remove_idx))

    return keep_idx
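A minimal sketch of the interval string this editor parses, inferred from the code above: only the trailing four tokens (two ISO "date time" pairs) are fixed; everything before them is the station or baseline name. The station name used here is hypothetical.

interval = "NYALES20 2015-01-23 05:36:00 2015-01-23 16:53:00".split()
start = " ".join(interval[-4:-2])  # "2015-01-23 05:36:00"
end = " ".join(interval[-2:])      # "2015-01-23 16:53:00"
station = " ".join(interval[:-4])  # "NYALES20" (may contain spaces)
print(station, start, end)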
Example #2
File: vascc.py Project: uasau/where
    def _calculate_pos_trs(self, site):
        """Calculate positions for the given time epochs

        The positions are calculated as simple linear offsets based on the reference epoch. Makes sure to pick out the
        correct time interval to use.

        Args:
            site:   Key saying which site to calculate position for, type might depend on the Trf.

        Returns:
            Array:  Positions, one 3-vector for each time epoch.
        """
        station_info = self.data[site]
        ref_epoch = Time(float(station_info["ref_epoch"]),
                         fmt="decimalyear",
                         scale="utc")
        pos = np.full((self.time.size, 3), fill_value=np.nan)

        ref_pos = np.array(station_info["pos"])
        ref_vel = np.array(station_info["vel"])
        interval_years = (self.time - ref_epoch).jd * Unit.day2julian_years
        pos[:, :] = ref_pos + interval_years[:, None] * ref_vel[None, :]

        ell = ellipsoid.get("GRS80")
        pos_trs = Position(pos, system="trs", ellipsoid=ell, time=self.time)

        return np.squeeze(pos_trs)
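A standalone numpy sketch of the linear motion model applied above: position(t) = reference position + elapsed Julian years * velocity. The coordinates and velocity below are illustrative, not values from a real reference frame solution.

import numpy as np

ref_pos = np.array([3370658.0, 711877.0, 5349787.0])  # reference position (m)
ref_vel = np.array([-0.016, 0.010, 0.007])            # station velocity (m/year)
interval_years = np.array([0.0, 2.5, 5.0])            # years since ref_epoch

pos = ref_pos + interval_years[:, None] * ref_vel[None, :]
print(pos.shape)  # (3, 3): one 3-vector per time epoch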
Example #3
File: slrf.py Project: uasau/where
    def _calculate_pos_trs(self, site):
        """Calculate positions for the given time epochs

        The positions are calculated as simple linear offsets based on the reference epoch.

        Args:
            site (String):    Key saying which site to calculate position for.

        Returns:
            Array:  Positions, one 3-vector for each time epoch.
        """
        station_info = self.data[site]
        ref_epoch = Time(station_info["ref_epoch"], scale="utc", fmt="datetime")

        pos = np.zeros((self.time.size, 3))
        for pv in station_info["pos_vel"].values():
            idx = np.logical_and(self.time.utc.datetime >= pv["start"], self.time.utc.datetime < pv["end"])
            if idx.ndim == 0:  # self.time is a single epoch
                idx = np.array([idx])
            if not any(idx):
                continue
            ref_pos = np.array([pv["STAX"], pv["STAY"], pv["STAZ"]])
            ref_vel = np.array([pv["VELX"], pv["VELY"], pv["VELZ"]])
            interval_years = (self.time - ref_epoch).jd * Unit.day2julian_years
            if isinstance(interval_years, float):
                interval_years = np.array([interval_years])
            pos[idx, :] = ref_pos + interval_years[idx, None] * ref_vel[None, :]

        ell = ellipsoid.get(config.tech.reference_ellipsoid.str.upper())
        pos_trs = Position(pos, system="trs", ellipsoid=ell, time=self.time)

        return np.squeeze(pos_trs)
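A small sketch of the interval masking used above: each (position, velocity) record is valid from pv["start"] (inclusive) to pv["end"] (exclusive), and only the epochs inside that window are updated. The dates are illustrative.

import numpy as np
from datetime import datetime

epochs = np.array([datetime(2014, 6, 1), datetime(2016, 6, 1)])
start, end = datetime(2010, 1, 1), datetime(2015, 1, 1)

idx = np.logical_and(epochs >= start, epochs < end)
print(idx)  # [ True False]: only the first epoch uses this record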
Example #4
def _check_last_epoch_sample_point(dset, precise, epoch_interval):
    """Keep last observation epoch depending on existing precise orbit sample points

    Precise orbit sample points are needed to carry out interpolation of precise orbits for the last observation
    epochs. If no precise orbit sample point is available after the last satellite observation epochs, then these
    epochs will be removed for this satellite.

    The time difference between the last observation epochs and the next precise orbit sample point is determined.
    'Last observation epoch' + 'sampling rate' is chosen as reference time for the selection of the nearest orbit
    sample point, which normally corresponds to 0:00 GPS time. If the time difference lies outside the following
    interval, the observation epochs are rejected:
                       -(precise orbit epoch interval + 1) < time difference < 0

    Args:
        dset (Dataset):            A Dataset containing model data.
        precise (PreciseOrbit):    Precise orbit object with precise orbit information.
        epoch_interval (float):    Epoch interval of precise orbit sample points

    Returns:
        tuple: Tuple with array containing False for last observations to throw away and indices indicating last
               observation epoch.
    """
    sampling_rate = config.tech.sampling_rate.float

    # Get indices for last observation epochs
    last_idx = -1
    last_epoch_idx = (dset.time.gps.mjd >= dset.time.gps.mjd[last_idx] -
                      (epoch_interval - sampling_rate) * Unit.second2day)

    # Get set with satellite and time entries for getting corresponding precise orbit sample points
    # Note: Sample point reference time is 'last observation epoch' + 'sampling rate', which corresponds normally to
    #       0:00 GPS time.
    satellites = dset.satellite[last_epoch_idx]
    time = Time(val=dset.time.gps.datetime[last_idx],
                fmt="datetime",
                scale=dset.time.scale) + TimeDelta(
                    sampling_rate, fmt="seconds", scale=dset.time.scale)
    precise_idx = precise._get_nearest_sample_point(satellites, time)

    # Keep observations epochs, where a precise orbit sample point exists after the last observation epoch
    diff_time = (dset.time.gps.mjd[last_epoch_idx] -
                 precise.dset_edit.time.gps.mjd[precise_idx]) * Unit.day2second
    keep_idx = np.logical_and(diff_time > -(epoch_interval + 1), diff_time < 0)

    removed_entries = "DEBUG: ".join([
        f"{s} {t.strftime('  %Y-%m-%d %H:%M:%S (GPS)')}, dt = {dt:8.2f} s ({-(epoch_interval + 1)} < dt < 0)\n"
        for s, t, dt in zip(
            satellites[np.logical_not(keep_idx)],
            dset.time.gps.datetime[last_epoch_idx][np.logical_not(keep_idx)],
            diff_time[np.logical_not(keep_idx)],
        )
    ])
    log.debug(f"Following last epoch entries are removed: \n{removed_entries}")

    return keep_idx, last_epoch_idx
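A numeric sketch of the acceptance test above, assuming a precise orbit file with 900 s sample spacing: an epoch is kept only if the nearest sample point lies after it, and no further away than epoch_interval + 1 seconds.

epoch_interval = 900.0
for diff_time in (-600.0, -901.0, 30.0):  # observation minus sample point (s)
    keep = -(epoch_interval + 1) < diff_time < 0
    print(f"dt = {diff_time:7.1f} s -> keep: {keep}")
# dt = -600 s is kept; dt = -901 s and dt = +30 s are rejected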
Example #5
def date_vars(date):
    """Construct a dict of date variables

    From a given date, construct a dict containing all relevant date variables. This dict can be used to for instance
    replace variables in file names.

    Examples:
        >>> from datetime import date
        >>> date_vars = date_vars(date(2009, 11, 2))
        >>> sorted(date_vars.items())    # doctest: +NORMALIZE_WHITESPACE
        [('MMM', 'NOV'), ('ce', '20'), ('d', '2'), ('date', '20091102'), ('dd', '02'), ('dow', '1'), ('doy', '306'),
         ('gpsweek', '1556'), ('hh', '00'), ('m', '11'), ('mm', '11'), ('mmm', 'nov'), ('yy', '09'), ('yyyy', '2009')]

    Args:
        date (Date/Datetime):      The date.

    Returns:
        Dict: Dictionary with date variables for the given date.
    """
    if date is None:
        return dict()

    # Import Time locally to avoid circular imports
    from where.data.time import Time

    month = [
        "jan", "feb", "mar", "apr", "may", "jun", "jul", "aug", "sep", "oct",
        "nov", "dec"
    ]

    # Create the dict of date variables
    try:
        gpsweek = str(
            int(
                Time(date.strftime("%Y-%m-%d %H:%M:%S"),
                     fmt="iso",
                     scale="utc").gps.gps_ws.week))
    except ValueError:
        # gps-scale is not defined for 1970-01-01, which is used by timeseries
        gpsweek = ""
    return dict(
        date=date.strftime("%Y%m%d"),
        yyyy=date.strftime("%Y"),
        ce=date.strftime("%Y")[:2],
        yy=date.strftime("%y"),
        m=str(date.month),
        mm=date.strftime("%m"),
        mmm=month[date.month - 1].lower(),
        MMM=month[date.month - 1].upper(),
        d=str(date.day),
        dd=date.strftime("%d"),
        hh=date.strftime("%H"),
        doy=date.strftime("%j"),
        dow=date.strftime("%w"),
        gpsweek=gpsweek,
    )
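A standalone sketch of how such a dict can drive file name templates; the template string below is hypothetical.

from datetime import date

d = date(2009, 11, 2)
vars_ = dict(yyyy=d.strftime("%Y"), mm=d.strftime("%m"),
             dd=d.strftime("%d"), doy=d.strftime("%j"))
print("orbit_{yyyy}{mm}{dd}_{doy}.sp3".format(**vars_))  # orbit_20091102_306.sp3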
Example #6
File: eop.py Project: yxw027/where
    def remove_low_frequency_tides(self):
        """Remove the effect of low frequency tides.

        Tidal variations in the Earth's rotation with periods from 5 days to 18.6 years are present in the UT1-UTC time
        series as described in the IERS Conventions 2010 chapter 8.1. To improve the interpolation of the UT1-UTC time
        series this effect can be removed. In that case the effect needs to be added again to the final interpolated
        values.
        """
        for mjd in self.data.keys():
            t = Time(mjd, fmt="mjd", scale="utc")
            self.data[mjd]["ut1_utc"] -= iers.rg_zont2(t)[0]
Example #7
File: eop.py Project: mfkiwl/where
    def calculate_leap_second_offset(self):
        """Calculate leap second offsets for each day

        Use the difference between UTC and TAI as a proxy for the leap second offset. The leap second offset is
        calculated and stored to the EOP data-dictionary. This is used to correct for the leap second jumps when
        interpolating the UT1 - UTC values.
        """
        days = Time(np.array(list(self.data.keys())), fmt="mjd", scale="utc")
        leap_offset = np.round((days.utc.mjd - days.tai.mjd) * Unit.day2seconds)
        daily_offset = {int(d): lo for d, lo in zip(days.mjd, leap_offset)}

        for d, lo in daily_offset.items():
            self.data[d]["leap_offset"] = lo
Example #8
def _check_first_epoch_sample_point(dset: "Dataset", precise, epoch_interval):
    """Keep first observation epoch depending on existing precise orbit sample points

    Precise orbit sample points are needed to carry out interpolation of precise orbits for the first observation
    epoch. If no precise orbit sample point is available before the first satellite observation epoch, then this
    epoch will be removed for this satellite.

    Args:
        dset (Dataset):            A Dataset containing model data.
        precise (PreciseOrbit):    Precise orbit object with precise orbit information.
        epoch_interval (float):    Epoch interval of precise orbit sample points

    Returns:
        tuple: Tuple with array containing False for first observations to throw away and indices indicating first
               observation epoch.
    """

    # Get indices for first observation epochs
    first_idx = 0
    first_epoch_idx = dset.time.gps.mjd == dset.time.gps.mjd[first_idx]

    # Get set with satellite and time entries for getting corresponding precise orbit sample points
    satellites = dset.satellite[first_epoch_idx]
    time = Time(val=dset.time.gps.datetime[first_epoch_idx],
                fmt="datetime",
                scale=dset.time.scale) - TimeDelta(
                    epoch_interval, fmt="seconds", scale=dset.time.scale)
    precise_idx = precise._get_nearest_sample_point(satellites, time)

    # Keep observations epochs, where a precise orbit sample point exists before the first observation epoch
    diff_time = (dset.time.gps.mjd[first_epoch_idx] -
                 precise.dset_edit.time.gps.mjd[precise_idx]) * Unit.day2second
    keep_idx = np.logical_and(diff_time < (epoch_interval + 1), diff_time > 0)

    removed_entries = "DEBUG: ".join([
        f"{s} {t.strftime('  %Y-%m-%d %H:%M:%S (GPS)')}, dt = {dt:8.2f} s (0 < dt < {epoch_interval + 1})\n"
        for s, t, dt in zip(
            satellites[np.logical_not(keep_idx)],
            dset.time.gps.datetime[first_epoch_idx][np.logical_not(keep_idx)],
            diff_time[np.logical_not(keep_idx)],
        )
    ])
    log.info(
        f"Following first epoch entries are removed: \nDEBUG: {removed_entries}"
    )

    return keep_idx, first_epoch_idx
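A sketch of a nearest-sample-point lookup in the spirit of precise._get_nearest_sample_point; the real method also matches on satellite, so this time-only version is a simplification.

import numpy as np

sample_mjd = np.array([57023.0000, 57023.0104, 57023.0208])  # orbit samples
reference_mjd = 57023.0150   # observation epoch shifted by epoch_interval
idx = np.argmin(np.abs(sample_mjd - reference_mjd))
print(idx, sample_mjd[idx])  # 1 57023.0104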
Example #9
def parse_clock_breaks(dset, clock_breaks):
    """Parses the clock breaks string from the edit file

    Examples:
        > parse_clock_breaks(dset, '')
        (OrderedDict(), 0)
        > parse_clock_breaks(dset, 'SVETLOE 2015/01/23 05:36:00,
                                    SVETLOE 2015/01/23 16:53:00,
                                    SVETLOE 2015/01/23 12:30:00')
        (OrderedDict([(5, [106560.0, 131400.0, 147180.0])]), 3)

    Args:
        dset:                A Dataset containing model data.
        clock_breaks_str:    A string with clock break information

    Returns:
        OrderedDict with clock breaks and total number of clock breaks
     """
    # Parse clock breaks from file and store in the station_breaks dictionary
    station_breaks = {
        s: [min(dset.time.utc), max(dset.time.utc) + TimeDelta(1, fmt="seconds", scale="utc")]
        for s in dset.unique("station")
    }
    if clock_breaks:
        log.info(f"Applying clock breaks: {', '.join(clock_breaks)}")

    for cb in clock_breaks:
        # Station names may contain spaces
        cb = cb.split()
        cb_date = cb[-2:]
        cb_station = " ".join(cb[:-2])
        cb_time = Time(" ".join(cb_date), scale="utc", fmt="iso")
        if cb_station not in station_breaks:
            log.warn(
                f"Station {cb_station} with clock break unknown. Available options are {', '.join(station_breaks)}"
            )
            continue
        station_breaks[cb_station].append(cb_time)
        dset.meta.add_event(cb_time, "clock_break", cb_station)

    # Convert the station_breaks dict to lists of (station, (time_start, time_end))-tuples
    stations = list()
    time_intervals = list()
    for station in sorted(station_breaks.keys(), key=lambda s: (len(station_breaks[s]), s), reverse=True):
        station_times = sorted(station_breaks[station])
        for t_start, t_end in zip(station_times[:-1], station_times[1:]):
            stations.append(station)
            time_intervals.append((t_start, t_end))

    return stations, time_intervals
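A sketch of how the sorted break epochs become piecewise intervals: the session start and end (plus one second) act as outer bounds, and each clock break splits the station's time line. Plain datetimes stand in for the Time objects.

from datetime import datetime, timedelta

session = [datetime(2015, 1, 23, 0, 0, 0),
           datetime(2015, 1, 23, 23, 59, 59) + timedelta(seconds=1)]
breaks = [datetime(2015, 1, 23, 5, 36), datetime(2015, 1, 23, 16, 53)]

times = sorted(session + breaks)
for t_start, t_end in zip(times[:-1], times[1:]):
    print(t_start, "->", t_end)  # three intervals for two clock breaks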
Example #10
def parse_clock_breaks(dset):
    """Parses the clock breaks string from the edit file

    Args:
        dset:                A Dataset containing model data.
        clock_breaks_str:    A string with clock break information

    Returns:
        OrderedDict with clock breaks and total number of clock breaks
     """
    station_breaks = {
        s: [min(dset.time.utc), max(dset.time.utc) + TimeDelta(1, fmt="seconds", scale="utc")]
        for s in dset.unique("station")
    }
    
    clock_breaks = config.tech.get("clock_breaks", section=MODEL).as_list(split_re=", *")
    
    if clock_breaks:
        log.info(f"Applying clock breaks: {', '.join(clock_breaks)}")

    for cb in clock_breaks:
        # Station names may contain spaces
        cb = cb.split()
        cb_date = cb[-2:]
        cb_station = " ".join(cb[:-2])
        cb_time = Time(" ".join(cb_date), scale="utc", fmt="iso")
        if cb_station not in station_breaks:
            log.warn(
                f"Station {cb_station} with clock break unknown. Available options are {', '.join(station_breaks)}"
            )
            continue
        station_breaks[cb_station].append(cb_time)
        dset.meta.add_event(cb_time, "clock_break", cb_station)

    # Convert the station_breaks dict to lists of (station, (time_start, time_end))-tuples
    stations = list()
    time_intervals = list()
    for station in sorted(station_breaks.keys(), key=lambda s: (len(station_breaks[s]), s), reverse=True):
        station_times = sorted(station_breaks[station])
        for t_start, t_end in zip(station_times[:-1], station_times[1:]):
            stations.append(station)
            time_intervals.append((t_start, t_end))

    return stations, time_intervals
Example #11
def ignore_observation(dset):
    """Removes given observations

    Args:
        dset:     A Dataset containing model data.

    Returns:
        Array containing False for observations to throw away
    """
    observations = config.tech[_SECTION].observations.as_list(split_re=", *")
    keep_idx = np.ones(dset.num_obs, dtype=bool)

    for observation in observations:
        date, time, *stations = observation.split()
        epoch = Time(f"{date} {time}", scale="utc", fmt="iso")
        stations = (" ".join(stations)).split(
            "/")  # station names may contain spaces, split at slash instead

        remove_idx = dset.time.utc.iso == epoch.utc.iso
        for station in stations:
            remove_idx = dset.filter(station=station, idx=remove_idx)
        keep_idx[remove_idx] = False

    return keep_idx
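A sketch of the observation string format this editor expects, inferred from the parsing above: an ISO date and time followed by one or more station names separated by a slash. The station names are hypothetical.

observation = "2015-01-23 05:36:00 NYALES20/WETTZELL"
date, time, *stations = observation.split()
stations = " ".join(stations).split("/")
print(date, time, stations)  # 2015-01-23 05:36:00 ['NYALES20', 'WETTZELL']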
Example #12
def detect_clockbreaks(dset):
    """Try to detect a clock break

    The suspected clock breaks are added to the dataset as events. They will not be corrected automatically.

    TODO: Clean up code / better variable names, remove "magic" numbers, possibly put settings in config?
          Handle gaps in observations better, these are often misinterpreted as clock breaks
          These might be made faster by using a filter to test all observations at the same time?

    Args:
        dset (Dataset):  Information about model run.
    """
    log.info("Looking for clock breaks")

    clock_breaks = list()
    order_of_polynomial = config.tech.get("order_of_polynomial", section="vlbi_clock_poly", default=2).int

    for station in dset.unique("station"):
        # Merge together data where station is station 1 and station 2
        idx_1 = dset.filter(station_1=station)
        idx_2 = dset.filter(station_2=station)
        time = np.hstack((dset.time.utc.mjd[idx_1], dset.time.utc.mjd[idx_2])) - dset.time.utc.mjd[0]
        residual = np.hstack((dset.residual[idx_1], -dset.residual[idx_2]))

        # Make sure data are chronological
        idx_sort = np.argsort(time)
        time = time[idx_sort]
        residual = residual[idx_sort]

        # Add fields to dset for debug
        idx_site = np.hstack((np.where(idx_1)[0], np.where(idx_2)[0]))[idx_sort]
        dset.add_float(f"cb_{station}_residual", np.zeros(dset.num_obs), write_level="operational")
        dset[f"cb_{station}_residual"][idx_site] = residual
        dset.add_float(f"cb_{station}_value", np.zeros(dset.num_obs), write_level="detail")
        dset.add_float(f"cb_{station}_limit", np.zeros(dset.num_obs), write_level="detail")
        dset.add_float(f"cb_{station}_pred", np.zeros(dset.num_obs), write_level="detail")
        dset.add_float(f"cb_{station}_ratio", np.zeros(dset.num_obs), write_level="detail")

        # Test each observation for clock break
        start_obs = 0
        for obs in range(len(time) - 25):
            if obs - start_obs < 25:  # Need some observations to do polyfit
                continue

            # Fit a polynomial to the given data
            idx_fit = slice(np.maximum(start_obs, obs - 500), obs)  # Possibly better to limit on time instead of obs?
            p = np.polyfit(time[idx_fit], residual[idx_fit], order_of_polynomial)  # Same degree as clock correction
            poly = np.polyval(p, time[idx_fit])
            res = residual[idx_fit] - poly
            std_lim = 2 * np.std(res) * (1 + 4 * np.exp(-10 * ((obs - np.maximum(start_obs, obs - 500)) / 500) ** 2))
            # Gives higher limit when there are fewer observations in res

            # Test next observations
            model = np.polyval(
                p, time[obs + 1 : obs + 26]
            )  # Use many (=25) observations to avoid problems with outliers
            obs_res = residual[obs + 1 : obs + 26]
            dset[f"cb_{station}_value"][idx_site[obs]] = np.min(np.abs(obs_res - model))
            dset[f"cb_{station}_limit"][idx_site[obs]] = std_lim
            dset[f"cb_{station}_pred"][idx_site[obs]] = model[0]
            dset[f"cb_{station}_ratio"][idx_site[obs]] = np.min(np.abs(obs_res - model)) / std_lim

            # Register possible clock break
            if np.all(np.abs(obs_res - model) > std_lim):
                start_obs = np.min(np.where(time > time[obs])[0])  # Next epoch with observations
                time_cb = Time(dset.time.utc.mjd[0] + (time[obs] + time[start_obs]) / 2, fmt="mjd", scale="utc")
                clock_breaks.append((np.min(np.abs(obs_res - model)) / std_lim, time_cb, station))

    # Only actually add the biggest clock breaks, because big clock breaks create smaller false clock breaks
    ratio_lim = max(cb[0] for cb in clock_breaks) / 3 if clock_breaks else 1
    for ratio, time, station in clock_breaks:
        if ratio > ratio_lim:
            dset.meta.add_event(time, "suspected_clock_break", station)
            cb_time = time.datetime.strftime(config.FMT_datetime)
            stars = "*" * int(np.ceil(np.log2(ratio)))
            log.check(f"Found possible clock break for {station} at {cb_time} ({stars})")
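A minimal sketch of the detection idea: fit a low-order polynomial to the residuals seen so far, then flag a break when a whole window of upcoming residuals departs from the prediction by more than a std-based limit. The data and window sizes are synthetic and much smaller than the 25/500-observation windows above.

import numpy as np

rng = np.random.default_rng(42)
time = np.arange(40.0)
residual = 0.001 * time + rng.normal(0, 0.01, 40)
residual[25:] += 0.5                       # synthetic clock jump at obs 25

obs = 24
p = np.polyfit(time[:obs], residual[:obs], 2)
model = np.polyval(p, time[obs + 1:obs + 6])
std_lim = 2 * np.std(residual[:obs] - np.polyval(p, time[:obs]))
print(np.all(np.abs(residual[obs + 1:obs + 6] - model) > std_lim))  # True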
Example #13
    def setUp(self):

        # TODO: Configuration has to be defined? How? where.set_config(2016, 3, 1, 'gps')?
        time = Time(2457448.5, fmt="jd", scale="tdb")  # Julian Day in TDB time scale
        self.eph = apriori.get("ephemerides", time=time, ephemerides="de430")
Example #14
    def setUp(self):
        """

        The first test setup is based on the bc_velo.c program, which is published in :cite:`remondi2004`, and the
        following RINEX navigation file sample:

        /* Sample Broadcast Message in unit of radians, seconds, meters.
        20 01  7 23  2  0  0.0 -.857324339449D-04 -.272848410532D-11  .000000000000D+00
             .200000000000D+02  .886875000000D+02  .465376527657D-08  .105827953357D+01
             .457651913166D-05  .223578442819D-02  .177137553692D-05  .515379589081D+04
             .936000000000D+05  .651925802231D-07  .164046615454D+01 -.856816768646D-07
             .961685061380D+00  .344968750000D+03  .206374037770D+01 -.856928551657D-08
             .342514267094D-09  .000000000000D+00  .112400000000D+04  .000000000000D+00
             .200000000000D+01  .000000000000D+00 -.651925802231D-08  .276000000000D+03
             .865800000000D+05  .000000000000D+00  .000000000000D+00  .000000000000D+00
        */


        The second test setup compares results from Where against the gLAB solution for satellite G20 and epoch
        2016-03-01 00:00:00.0.

        /* Sample Broadcast Message in unit of radians, seconds, meters for satellite G20 and
        /  epoch 2016-03-01 00:00:00.0
        20 16  3  1  0  0  0.0 0.396233052015D-03 0.261479726760D-11 0.000000000000D+00
            0.100000000000D+02-0.231562500000D+02 0.530236372187D-08 0.253477496869D+00
           -0.111199915409D-05 0.483385741245D-02 0.810064375401D-05 0.515369705963D+04
            0.172800000000D+06-0.141561031342D-06 0.304306271006D+00 0.372529029846D-08
            0.926615731710D+00 0.207250000000D+03 0.133849764271D+01-0.843427989304D-08
           -0.164292557730D-09 0.100000000000D+01 0.188600000000D+04 0.000000000000D+00
            0.200000000000D+01 0.000000000000D+00-0.838190317154D-08 0.100000000000D+02
            0.172770000000D+06 0.400000000000D+01 0.000000000000D+00 0.000000000000D+00

        The third test setup compares results from Where against the gLAB solution for satellite E11 and epoch
        2016-03-01 00:00:00.0.

        /* Sample Broadcast Message in unit of radians, seconds, meters for satellite E11 and
        /  epoch 2016-03-01 00:00:00.0
        E11 2016 03 01 00 00 00 6.643886445090e-05 1.097077984014e-11 0.000000000000e+00
             3.200000000000e+01-3.243750000000e+01 3.015839907552e-09 2.397505637802e+00
            -1.462176442146e-06 3.306962316856e-04 8.240342140198e-06 5.440621692657e+03
             1.728000000000e+05 9.313225746155e-09-1.259905024101e+00 5.960464477539e-08
             9.679475503522e-01 1.663125000000e+02-6.590241211713e-01-5.572732126661e-09
             2.775115594704e-10 2.580000000000e+02 1.886000000000e+03                   
             3.120000000000e+00 0.000000000000e+00-2.328306436539e-08 0.000000000000e+00
             1.735000000000e+05 

        """

        # Get GNSS ephemeris data for testing
        if TEST == "test_1":
            file_key = "test_apriori_orbit_broadcast_1"
            year = 2001
            month = 7
            day = 23
            hour = 2
            minute = 0
            second = 0.0
            satellite = "G20"
            self.system = "G"  # GNSS identifier

            # Satellite transmission time
            self.t_sat = 86400.00

        elif TEST == "test_2":
            file_key = "test_apriori_orbit_broadcast_2"
            year = 2016
            month = 3
            day = 1
            hour = 0
            minute = 0
            second = 0.0
            satellite = "G20"
            self.system = "G"  # GNSS identifier

            # Satellite transmission time
            self.t_sat = 172799.92312317

        elif TEST == "test_3":
            file_key = "test_apriori_orbit_broadcast_3"
            year = 2016
            month = 3
            day = 1
            hour = 0
            minute = 0
            second = 0.0
            satellite = "E11"
            self.system = "E"  # GNSS identifier

            # Satellite transmission time
            self.t_sat = 173699.999

        rundate = datetime(year, month, day, hour, minute)
        time = Time(
            [("{year}-{month:02d}-{day:02d}T{hour:02d}:{minute:02d}:{second:010.7f}"
              "".format(year=year,
                        month=month,
                        day=day,
                        hour=hour,
                        minute=minute,
                        second=second))],
            fmt="isot",
            scale="gps",
        )

        self.brdc = apriori.get(
            "orbit",
            apriori_orbit="broadcast",
            rundate=rundate,
            time=time,
            satellite=tuple({satellite}),
            system=tuple({self.system}),
            station="test",
            file_key=file_key,
        )

        self.idx = 0  # Broadcast ephemeris index
Example #15
File: itrf.py Project: mfkiwl/where
    def _calculate_pos_trs(self, site):
        """Calculate positions for the given time epochs

        The positions are calculated as simple linear offsets based on the reference epoch. If there is a post-seismic
        deformations model for a station the motion due to that model is added to the linear velocity model. Makes sure
        to pick out the correct time interval to use.

        Args:
            site (String):    Key saying which site to calculate position for.

        Returns:
            Array:  Positions, one 3-vector for each time epoch.
        """
        station_info = self.data[site]
        ref_epoch = Time(station_info["ref_epoch"],
                         scale="utc",
                         fmt="datetime")

        pos = np.zeros((self.time.size, 3))
        for pv in station_info["pos_vel"].values():
            idx = np.logical_and(self.time.utc.datetime >= pv["start"],
                                 self.time.utc.datetime < pv["end"])
            if idx.ndim == 0:
                idx = np.array([idx])
            if not any(idx):
                continue
            ref_pos = np.array([pv["STAX"], pv["STAY"], pv["STAZ"]])
            ref_vel = np.array([pv["VELX"], pv["VELY"], pv["VELZ"]])
            interval_years = (self.time - ref_epoch).jd * Unit.day2julian_years
            if isinstance(interval_years, float):
                interval_years = np.array([interval_years])
            pos[idx, :] = ref_pos + interval_years[idx,
                                                   None] * ref_vel[None, :]

        ell = ellipsoid.get(config.tech.reference_ellipsoid.str.upper())
        pos_trs = Position(np.squeeze(pos),
                           system="trs",
                           ellipsoid=ell,
                           time=self.time)

        # Post-seismic deformations, see Appendix C in :cite:`itrf2014`
        if "psd" in station_info:
            psd = station_info["psd"]
            denu = dict(H=np.zeros(self.time.size),
                        E=np.zeros(self.time.size),
                        N=np.zeros(self.time.size))
            for param in psd.values():
                t_0 = Time(param["epoch"], fmt="datetime", scale="utc")
                delta_t = (self.time - t_0).jd * Unit.day2julian_years
                if isinstance(delta_t, float):
                    delta_t = np.array([delta_t])
                idx = delta_t > 0
                for L in "ENH":
                    aexp = np.array(param.get("AEXP_" + L, list()))
                    texp = np.array(param.get("TEXP_" + L, list()))
                    for a, t in zip(aexp, texp):
                        denu[L][idx] += a * (1 - np.exp(-delta_t[idx] / t))
                    alog = np.array(param.get("ALOG_" + L, list()))
                    tlog = np.array(param.get("TLOG_" + L, list()))
                    for a, t in zip(alog, tlog):
                        denu[L][idx] += a * np.log(1 + delta_t[idx] / t)

            denu = np.vstack((denu["E"], denu["N"], denu["H"])).T

            pos_delta = PositionDelta(np.squeeze(denu),
                                      system="enu",
                                      ellipsoid=ell,
                                      ref_pos=pos_trs,
                                      time=self.time)
            pos_trs += pos_delta.trs

        return np.squeeze(pos_trs)
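A standalone sketch of the post-seismic deformation terms evaluated above (Appendix C of the ITRF2014 paper): exponential and logarithmic decay in each local component, applied only to epochs after the event. Amplitudes and relaxation times are illustrative.

import numpy as np

delta_t = np.array([0.5, 2.0, 5.0])  # years since the seismic event
a_exp, t_exp = 0.010, 1.2            # amplitude (m) and relaxation time (yr)
a_log, t_log = 0.005, 0.8

d_east = (a_exp * (1 - np.exp(-delta_t / t_exp))
          + a_log * np.log(1 + delta_t / t_log))
print(d_east)  # deformation grows and levels off with time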
Example #16
def _get_time(dset):
    """Determine time field
    """
    # TODO hjegei: Workaround -> it would be better if the Time object could handle gpsweek as an input format!
    jd_day, jd_frac = gnss.gpssec2jd(dset.gpsweek, dset.gpssec)
    return Time(val=jd_day, val2=jd_frac, fmt="jd", scale="gps")
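A sketch of the week/second to Julian Day conversion behind gnss.gpssec2jd, assuming the standard GPS epoch 1980-01-06 00:00:00 UTC (JD 2444244.5); the exact day/fraction split used by the real function is an assumption. Splitting the result preserves precision, which is why Time takes val and val2 separately.

gpsweek, gpssec = 1556, 123630.0  # example epoch (week, seconds of week)
jd = 2444244.5 + gpsweek * 7 + gpssec / 86400.0
jd_day = float(int(jd))           # one possible day/fraction split
jd_frac = jd - jd_day
print(jd_day, jd_frac)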
Example #17
File: slr.py Project: mfkiwl/where
def calculate(stage, dset):
    """
    Integrate differential equation of motion of the satellite

    Args:
        stage:  Name of current stage
        dset:   Dataset containing the data
    """

    iterations = config.tech.iterations.int

    # Run models adjusting station positions
    site.calculate_site("site", dset)
    delta_pos = site.add("site", dset)
    dset.site_pos[:] = (dset.site_pos.gcrs + delta_pos[0].gcrs).trs

    dset.add_float("obs",
                   val=dset.time_of_flight * constant.c / 2,
                   unit="meter")
    dset.add_float("calc", np.zeros(dset.num_obs), unit="meter")
    dset.add_float("residual", np.zeros(dset.num_obs), unit="meter")
    dset.add_float("up_leg", np.zeros(dset.num_obs), unit="second")
    dset.add_posvel("sat_pos",
                    np.zeros((dset.num_obs, 6)),
                    system="gcrs",
                    time=dset.time)
    arc_length = config.tech.arc_length.float

    dset.site_pos.other = dset.sat_pos

    # First guess for up_leg:
    dset.up_leg[:] = dset.time_of_flight / 2

    for iter_num in itertools.count(start=1):
        log.blank()
        log.info(f"Calculating model corrections for iteration {iter_num}")

        sat_time_list = dset.obs_time + dset.time_bias + dset.up_leg
        apriori_orbit_provider = config.tech.apriori_orbit.str
        sat_name = dset.vars["sat_name"]

        rundate = dset.analysis["rundate"]

        if apriori_orbit_provider:
            version = config.tech.apriori_orbit_version.str
            log.info(
                f"Using external orbits from {apriori_orbit_provider}, version {version}"
            )
            apriori_orbit = apriori.get(
                "orbit",
                rundate=rundate + timedelta(days=arc_length),
                time=None,
                day_offset=6,
                satellite=sat_name,
                apriori_orbit="slr",
                file_key="slr_external_orbits",
            )
            dset_external = apriori_orbit._read(dset, apriori_orbit_provider,
                                                version)

            sat_pos = dset_external.sat_pos.gcrs_pos
            t_sec = TimeDelta(
                dset_external.time -
                Time(datetime(rundate.year, rundate.month, rundate.day),
                     scale="utc",
                     fmt="datetime"),
                fmt="seconds",
            )
            t_sec = t_sec.value
        else:
            sat_pos, sat_vel, t_sec = orbit.calculate_orbit(
                datetime(rundate.year, rundate.month, rundate.day),
                sat_name,
                sat_time_list,
                return_full_table=True)

        sat_pos_ip, sat_vel_ip = interpolation.interpolate_with_derivative(
            np.array(t_sec),
            sat_pos,
            sat_time_list,
            kind="interpolated_univariate_spline")
        dset.sat_pos.gcrs[:] = np.concatenate((sat_pos_ip, sat_vel_ip), axis=1)
        delay.calculate_delay("kinematic_models", dset)

        # We observe the time when an observation is done, and the time of flight of the laser pulse. We estimate
        # the up-leg time with Newton's method applied to equation (8.84) of :cite:`beutler2005` Gerhard Beutler:
        # Methods of Celestial Mechanics, Vol I., 2005.
        for j in range(0, 4):
            reflect_time = dset.time + TimeDelta(
                dset.time_bias + dset.up_leg, fmt="seconds", scale="utc")
            site_pos_reflect_time = (rotation.trs2gcrs(reflect_time)
                                     @ dset.site_pos.trs.val[:, :, None])[:, :,
                                                                          0]
            sta_sat_vector = dset.sat_pos.gcrs.pos.val - site_pos_reflect_time
            unit_vector = sta_sat_vector / np.linalg.norm(sta_sat_vector,
                                                          axis=1)[:, None]

            rho12 = (np.linalg.norm(sta_sat_vector, axis=1) +
                     delay.add("kinematic_models", dset)) / constant.c
            correction = (-dset.up_leg + rho12) / (
                np.ones(dset.num_obs) - np.sum(
                    unit_vector / constant.c * dset.sat_pos.vel.val, axis=1))
            dset.up_leg[:] += correction
            sat_time_list = dset.obs_time + dset.time_bias + dset.up_leg
            sat_pos_ip, sat_vel_ip = interpolation.interpolate_with_derivative(
                np.array(t_sec),
                sat_pos,
                sat_time_list,
                kind="interpolated_univariate_spline")

            dset.sat_pos.gcrs[:] = np.concatenate((sat_pos_ip, sat_vel_ip),
                                                  axis=1)

        delay.calculate_delay("satellite_models", dset)
        dset.calc[:] = delay.add("satellite_models", dset)
        dset.residual[:] = dset.obs - dset.calc
        log.info(
            f"{dset.num_obs} observations, residual = {dset.rms('residual'):.4f}"
        )
        if not apriori_orbit_provider:
            orbit.update_orbit(sat_name, dset.site_pos.gcrs, dset.sat_pos.pos,
                               dset.sat_pos.vel, dset.residual, dset.bin_rms)

        dset.write_as(stage=stage, label=iter_num, sat_name=sat_name)
        if iter_num >= iterations:
            break
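A one-dimensional sketch of the Newton iteration used above for the up-leg time: solve up_leg = rho(up_leg) / c, where rho is the station-satellite range at the reflection epoch. The geometry (a target receding at constant radial velocity) is synthetic, and the update mirrors the correction formula in the code.

c = 299792458.0           # speed of light (m/s)
rho0, v_r = 7.0e6, 4.0e3  # range at t=0 (m) and radial velocity (m/s)

up_leg = 0.0
for _ in range(4):
    rho = rho0 + v_r * up_leg                     # range at reflection epoch
    up_leg += (-up_leg + rho / c) / (1 - v_r / c)
print(up_leg, rho0 / (c - v_r))                   # matches the analytic solution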
Example #18
def data_handling(dset):
    """Edits data based on SLR handling file

    Args:
        dset:     A Dataset containing model data. The bias and estimation flag fields of the
                  dataset are updated in place.
    """
    handling = apriori.get("slr_handling_file", time=dset.time)

    for station in dset.unique("station"):
        # Estimate range bias E
        intervals = handling.get(station, {}).get("E", [])
        for interval, info in intervals:
            start_x, end_x = interval
            int_idx = dset.filter(station=station) & (
                dset.time.datetime >= start_x) & (dset.time.datetime <= end_x)

            if np.any(int_idx):
                log.info(
                    f"ILRS handling: Will estimate range bias for station {station} in interval {start_x}-{end_x} in estimate stage"
                )
                dset.estimate_range[:] = np.logical_or(int_idx,
                                                       dset.estimate_range)
        # Apply range bias R
        intervals = handling.get(station, {}).get("R", [])
        for interval, info in intervals:
            start_x, end_x = interval
            int_idx = dset.filter(station=station) & (
                dset.time.datetime >= start_x) & (dset.time.datetime <= end_x)
            if np.any(int_idx):
                log.info(
                    f"ILRS handling: Applying range bias for station {station} in interval {start_x}-{end_x}"
                )
                RB = info["e_value"]
                if info["unit"] == "mm":
                    dset.range_bias[:] += int_idx * RB * Unit.mm2m
                elif info["unit"] == "ms":
                    dset.range_bias[:] += int_idx * RB * Unit.millisec2seconds * constant.c
                else:
                    log.fatal(
                        "Unknown unit in ILRS data handling file for applied range bias"
                    )
        # Estimate time bias U
        intervals = handling.get(station, {}).get("U", [])
        for interval, info in intervals:
            start_x, end_x = interval
            int_idx = dset.filter(station=station) & (
                dset.time.datetime >= start_x) & (dset.time.datetime <= end_x)
            if np.any(int_idx):
                log.info(
                    f"ILRS handling: Will estimate time bias for station {station} in interval {start_x}-{end_x} in estimate stage"
                )
                dset.estimate_time[:] = np.logical_or(int_idx,
                                                      dset.estimate_time)
        # Apply time bias T
        intervals = handling.get(station, {}).get("T", [])
        for interval, info in intervals:
            start_x, end_x = interval
            int_idx = dset.filter(station=station) & (
                dset.time.datetime >= start_x) & (dset.time.datetime <= end_x)
            if np.any(int_idx):
                log.info(
                    f"ILRS handling: Applying time bias for station {station} in interval {start_x}-{end_x}"
                )
                t_midInterval = Time(start_x + 1 / 2 * (end_x - start_x),
                                     fmt="datetime",
                                     scale="utc")
                TB = info["e_value"]
                drift = info["e_rate"]
                if info["unit"] == "us":
                    time_drifted = (dset.time.datetime -
                                    t_midInterval).jd * drift
                    dset.time_bias[:] += int_idx * (
                        -np.repeat(TB, dset.num_obs) -
                        time_drifted) * Unit.microsec2sec
                else:
                    log.fatal(
                        "Unknown unit in ILRS data handling file for applied time bias"
                    )
        # Apply pressure bias P
        intervals = handling.get(station, {}).get("P", [])
        for interval, info in intervals:
            start_x, end_x = interval
            int_idx = dset.filter(station=station) & (
                dset.time.datetime >= start_x) & (dset.time.datetime <= end_x)
            if np.any(int_idx):
                log.info(
                    f"ILRS handling: Applying pressure bias for station {station} in interval {start_x}-{end_x}"
                )
                PB = info["e_value"]
                if info["unit"] == "mB":
                    dset.pressure[:] += int_idx * PB
                else:
                    log.fatal(
                        "Unknown unit in ILRS data handling file for applied pressure bias"
                    )
        # Target signature bias C
        intervals = handling.get(station, {}).get("C", [])
        for interval, info in intervals:
            start_x, end_x = interval
            int_idx = dset.filter(station=station) & (
                dset.time.datetime >= start_x) & (dset.time.datetime <= end_x)
            if np.any(int_idx):
                log.fatal(
                    "ILRS handling: TODO: Implement target signature bias!")