Code example #1
File: gnss_clean_orbit.py  Project: yxw027/where
def _check_last_epoch_sample_point(dset, precise, epoch_interval):
    """Keep last observation epoch depending on existing precise orbit sample points

    Precise orbit sample points are needed to carry out interpolation of precise orbits for the last observation
    epochs. If no precise orbit sample point is available after the last satellite observation epochs, then these
    epochs will be removed for this satellite.

    The time difference between the last observation epochs and the next precise orbit sample point is determined. 'Last
    observation epoch' + 'sampling rate' is chosen as reference time for the selection of the nearest orbit sample
    point, which normally corresponds to 0:00 GPS time. If the time difference lies outside the following interval, then the
    observation epochs are rejected:
                       -(precise orbit epoch interval + 1) < time difference < 0

    Args:
        dset (Dataset):            A Dataset containing model data.
        precise (PreciseOrbit):    Precise orbit object with precise orbit information.
        epoch_interval (float):    Epoch interval of precise orbit sample points

    Returns:
        tuple: Tuple with array containing False for last observations to throw away and indices indicating last
               observation epoch.
    """
    sampling_rate = config.tech.sampling_rate.float

    # Get indices for last observation epochs
    last_idx = -1
    last_epoch_idx = np.ones(dset.num_obs, dtype=bool)
    last_epoch_idx = (dset.time.gps.mjd >= dset.time.gps.mjd[last_idx] -
                      (epoch_interval - sampling_rate) * Unit.second2day)

    # Get set with satellite and time entries for getting corresponding precise orbit sample points
    # Note: Sample point reference time is 'last observation epoch' + 'sampling rate', which corresponds normally to
    #       0:00 GPS time.
    satellites = dset.satellite[last_epoch_idx]
    time = Time(val=dset.time.gps.datetime[last_idx],
                fmt="datetime",
                scale=dset.time.scale) + TimeDelta(
                    sampling_rate, fmt="seconds", scale=dset.time.scale)
    precise_idx = precise._get_nearest_sample_point(satellites, time)

    # Keep observations epochs, where a precise orbit sample point exists after the last observation epoch
    diff_time = (dset.time.gps.mjd[last_epoch_idx] -
                 precise.dset_edit.time.gps.mjd[precise_idx]) * Unit.day2second
    keep_idx = np.logical_and(diff_time > -(epoch_interval + 1), diff_time < 0)

    removed_entries = "DEBUG: ".join([
        f"{s} {t.strftime('  %Y-%m-%d %H:%M:%S (GPS)')}, dt = {dt:8.2f} s ({-(epoch_interval + 1)} < dt < 0)\n"
        for s, t, dt in zip(
            satellites[np.logical_not(keep_idx)],
            dset.time.gps.datetime[last_epoch_idx][np.logical_not(keep_idx)],
            diff_time[np.logical_not(keep_idx)],
        )
    ])
    log.debug(f"Following last epoch entries are removed: \n{removed_entries}")

    return keep_idx, last_epoch_idx
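
As a rough illustration of the keep criterion used above (not from the project), assume a precise orbit interval of 900 s, i.e. typical 15 min SP3 sampling: a last observation epoch lying 120 s before the next orbit sample point gives a time difference of -120 s and is kept, while epochs with no sample point after them fall outside the window and are rejected.

import numpy as np

# Illustration with assumed numbers, mirroring the keep criterion in the function above
epoch_interval = 900.0                        # assumed precise orbit sample spacing in seconds
diff_time = np.array([-120.0, -950.0, 30.0])  # observation epoch minus nearest orbit sample point [s]
keep_idx = np.logical_and(diff_time > -(epoch_interval + 1), diff_time < 0)
print(keep_idx)                               # [ True False False]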
Code example #2
File: gnss_clean_orbit.py  Project: uasau/where
def _check_first_epoch_sample_point(dset: "Dataset", precise, epoch_interval):
    """Keep first observation epoch depending on existing precise orbit sample points

    Precise orbit sample points are needed to carry out interpolation of precise orbits for the first observation
    epoch. If no precise orbit sample point is available before the first satellite observation epoch, then this
    epoch will be removed for this satellite.

    Args:
        dset (Dataset):            A Dataset containing model data.
        precise (PreciseOrbit):    Precise orbit object with precise orbit information.
        epoch_interval (float):    Epoch interval of precise orbit sample points

    Returns:
        tuple: Tuple with array containing False for first observations to throw away and indices indicating first
               observation epoch.
    """

    # Get indices for first observation epochs
    first_idx = 0
    first_epoch_idx = np.ones(dset.num_obs, dtype=bool)
    first_epoch_idx = dset.time.gps.mjd == dset.time.gps.mjd[first_idx]

    # Get set with satellite and time entries for getting corresponding precise orbit sample points
    satellites = dset.satellite[first_epoch_idx]
    time = Time(val=dset.time.gps.datetime[first_epoch_idx],
                fmt="datetime",
                scale=dset.time.scale) - TimeDelta(
                    epoch_interval, fmt="seconds", scale=dset.time.scale)
    precise_idx = precise._get_nearest_sample_point(satellites, time)

    # Keep observations epochs, where a precise orbit sample point exists before the first observation epoch
    diff_time = (dset.time.gps.mjd[first_epoch_idx] -
                 precise.dset_edit.time.gps.mjd[precise_idx]) * Unit.day2second
    keep_idx = np.logical_and(diff_time < (epoch_interval + 1), diff_time > 0)

    removed_entries = "DEBUG: ".join([
        f"{s} {t.strftime('  %Y-%m-%d %H:%M:%S (GPS)')}, dt = {dt:8.2f} s (0 < dt < {epoch_interval + 1})\n"
        for s, t, dt in zip(
            satellites[np.logical_not(keep_idx)],
            dset.time.gps.datetime[first_epoch_idx][np.logical_not(keep_idx)],
            diff_time[np.logical_not(keep_idx)],
        )
    ])
    log.info(
        f"Following first epoch entries are removed: \nDEBUG: {removed_entries}"
    )

    return keep_idx, first_epoch_idx
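
A minimal usage sketch (not taken from the project) of how the two checks above would typically be folded into a single observation mask; it assumes that `dset`, `precise` and `epoch_interval` are available exactly as inside the functions:

import numpy as np

# Hypothetical sketch: start with all observations kept, then overwrite the entries of the
# first and last observation epochs with the keep flags returned by the two checks.
keep_idx = np.ones(dset.num_obs, dtype=bool)

keep_first, first_epoch_idx = _check_first_epoch_sample_point(dset, precise, epoch_interval)
keep_idx[first_epoch_idx] = keep_first

keep_last, last_epoch_idx = _check_last_epoch_sample_point(dset, precise, epoch_interval)
keep_idx[last_epoch_idx] = keep_last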
Code example #3
File: vlbi_clock_poly.py  Project: yxw027/where
def parse_clock_breaks(dset, clock_breaks):
    """Parses the clock breaks string from the edit file

    Examples:
        > parse_clock_breaks(dset, [])
        > parse_clock_breaks(dset, ['SVETLOE 2015/01/23 05:36:00',
                                    'SVETLOE 2015/01/23 16:53:00',
                                    'SVETLOE 2015/01/23 12:30:00'])

    Args:
        dset:                A Dataset containing model data.
        clock_breaks:        List of strings with clock break information

    Returns:
        Tuple of lists: stations and corresponding (time_start, time_end) intervals between clock breaks
    """
    # Parse clock breaks from file and store in the station_breaks dictionary
    station_breaks = {
        s: [min(dset.time.utc), max(dset.time.utc) + TimeDelta(1, fmt="seconds", scale="utc")]
        for s in dset.unique("station")
    }
    if clock_breaks:
        log.info(f"Applying clock breaks: {', '.join(clock_breaks)}")

    for cb in clock_breaks:
        # Station names may contain spaces
        cb = cb.split()
        cb_date = cb[-2:]
        cb_station = " ".join(cb[:-2])
        # cb_station, *cb_date = cb.split()
        cb_time = Time(" ".join(cb_date), scale="utc", fmt="iso")
        if cb_station not in station_breaks:
            log.warn(
                f"Station {cb_station} with clock break unknown. Available options are {', '.join(station_breaks)}"
            )
            continue
        station_breaks[cb_station].append(cb_time)
        dset.meta.add_event(cb_time, "clock_break", cb_station)

    # Convert the station_breaks dict to lists of (station, (time_start, time_end))-tuples
    stations = list()
    time_intervals = list()
    for station in sorted(station_breaks.keys(), key=lambda s: (len(station_breaks[s]), s), reverse=True):
        station_times = sorted(station_breaks[station])
        for t_start, t_end in zip(station_times[:-1], station_times[1:]):
            stations.append(station)
            time_intervals.append((t_start, t_end))

    return stations, time_intervals
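
The splitting of a clock break entry can be shown in isolation; the station name below is purely hypothetical, the point being that everything except the last two whitespace-separated tokens is treated as the station name:

# Illustration only: parsing one clock break entry whose station name contains a space
entry = "STATION A 2015/01/23 05:36:00"   # hypothetical entry
parts = entry.split()
cb_station = " ".join(parts[:-2])          # "STATION A"
cb_date = " ".join(parts[-2:])             # "2015/01/23 05:36:00"
print(cb_station, "|", cb_date)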
Code example #4
File: slr_site_pos.py  Project: yxw027/where
def site_pos(dset):
    """Calculate the partial derivative of the site position for each station

    Args:
        dset:     A Dataset containing model data.

    Returns:
        Tuple:    Array of partial derivatives, list of their names, and their unit
    """
    # Remove stations that should be fixed
    stations = np.asarray(dset.unique("station"))
    fix_stations = config.tech[PARAMETER].fix_stations.list
    if fix_stations:
        log.info(f"Not estimating stations {fix_stations}")

    # Remove stations with less than 10 observations
    stations_few_obs = []
    for station in stations:
        number_of_observations = np.sum(dset.filter(station=station))
        if number_of_observations < 10:
            stations_few_obs.append(station)

    fix_stations += stations_few_obs
    if stations_few_obs:
        log.info(
            f"Not estimating stations {stations_few_obs}, less than 10 observations"
        )

    fix_idx = np.in1d(stations, fix_stations)
    if fix_idx.any():
        stations = stations[np.logical_not(fix_idx)]

    reflect_time = dset.time + TimeDelta(
        dset.time_bias + dset.up_leg, fmt="seconds", scale="utc")
    i2g = rotation.trs2gcrs(reflect_time)
    site_pos_reflect_time = (i2g @ dset.site_pos.val[:, :, None])[:, :, 0]

    unit_vector = dset.sat_pos.pos.val[:] - site_pos_reflect_time[:]
    unit_vector = unit_vector / np.linalg.norm(unit_vector, axis=1)[:, None]
    # Calculate partials
    all_partials = -unit_vector[:, None, :] @ i2g
    partials = np.zeros((dset.num_obs, len(stations) * 3))
    for idx, station in enumerate(stations):
        filter_1 = dset.filter(station=station)
        partials[filter_1, idx * 3:idx * 3 + 3] = all_partials[filter_1][:, 0]

    column_names = [s + "_" + xyz for s in stations for xyz in "xyz"]

    return partials, column_names, "dimensionless"
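
The batched matrix product used for the partials relies on numpy matmul broadcasting over stacked rotation matrices; a small self-contained shape check with random stand-in data makes the dimensions explicit:

import numpy as np

# (num_obs, 1, 3) @ (num_obs, 3, 3) -> (num_obs, 1, 3): one row vector rotated per observation
num_obs = 5
unit_vector = np.random.rand(num_obs, 3)                  # stand-in line-of-sight unit vectors
i2g = np.repeat(np.eye(3)[None, :, :], num_obs, axis=0)   # stand-in for rotation.trs2gcrs(...)
all_partials = -unit_vector[:, None, :] @ i2g
assert all_partials.shape == (num_obs, 1, 3)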
Code example #5
File: vlbi_clock_poly.py  Project: mfkiwl/where
def parse_clock_breaks(dset):
    """Parses the clock breaks string from the edit file

    Args:
        dset:                A Dataset containing model data.

    Returns:
        Tuple of lists: stations and corresponding (time_start, time_end) intervals between clock breaks
    """
    station_breaks = {
        s: [min(dset.time.utc), max(dset.time.utc) + TimeDelta(1, fmt="seconds", scale="utc")]
        for s in dset.unique("station")
    }
    
    clock_breaks = config.tech.get("clock_breaks", section=MODEL).as_list(split_re=", *")
    
    if clock_breaks:
        log.info(f"Applying clock breaks: {', '.join(clock_breaks)}")

    for cb in clock_breaks:
        # Station names may contain spaces
        cb = cb.split()
        cb_date = cb[-2:]
        cb_station = " ".join(cb[:-2])
        cb_time = Time(" ".join(cb_date), scale="utc", fmt="iso")
        if cb_station not in station_breaks:
            log.warn(
                f"Station {cb_station} with clock break unknown. Available options are {', '.join(station_breaks)}"
            )
            continue
        station_breaks[cb_station].append(cb_time)
        dset.meta.add_event(cb_time, "clock_break", cb_station)

    # Convert the station_breaks dict to lists of (station, (time_start, time_end))-tuples
    stations = list()
    time_intervals = list()
    for station in sorted(station_breaks.keys(), key=lambda s: (len(station_breaks[s]), s), reverse=True):
        station_times = sorted(station_breaks[station])
        for t_start, t_end in zip(station_times[:-1], station_times[1:]):
            stations.append(station)
            time_intervals.append((t_start, t_end))

    return stations, time_intervals
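
The sort key used when building the interval lists orders stations by their number of break epochs (descending), with ties broken by reverse-alphabetical station name; a short illustration with made-up break lists:

# Illustration only: the ordering produced by the sort key in the function above
station_breaks = {"SVETLOE": [1, 2, 3, 4], "WETTZELL": [1, 2], "NYALES20": [1, 2]}
order = sorted(station_breaks.keys(), key=lambda s: (len(station_breaks[s]), s), reverse=True)
print(order)   # ['SVETLOE', 'WETTZELL', 'NYALES20']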
Code example #6
def get_initial_flight_time(dset, sat_clock_corr=None, rel_clock_corr=None):
    r"""Get initial flight time of GNSS signal between satellite and receiver

    In the following it is described how the satellite transmission time is determined. The GNSS receiver
    registers the observation time, i.e. when the satellite signal is tracked by the receiver. In addition the
    pseudorange :math:`P_r^s` between the satellite and the receiver is observed by the GNSS receiver. The first guess
    of time of transmission :math:`t^s` can be determined if we subtract from the receiver time :math:`t_r` the time of
    flight of the GNSS signal based on the pseudorange as follows:

    .. math::
          t_0^s  = t_r - \frac{P_r^s}{c}

    with the speed of light :math:`c` and the flight time of the GNSS signal from the satellite to the receiver
    :math:`\frac{P_r^s}{c}`, which is determined in this function.

    The time of satellite transmission has to be corrected like:

    .. math::
        t^s = t_0^s - \Delta t_{sv} - \Delta t_r,

    with the satellite clock correction :math:`\Delta t_{sv}`:

    .. math::
         \Delta t_{sv} = a_0 + a_1 (t_0^s) + a_2 (t_0^s)^2,

    and the relativistic correction due to orbit eccentricity :math:`\Delta t_r`.

    The satellite clock correction and the relativistic eccentricity correction are applied, if this information is
    already available by the routine call.

    Args:
        dset (Dataset):                   Model data.
        sat_clock_corr (numpy.ndarray):   Satellite clock correction
        rel_clock_corr (numpy.ndarray):   Relativistic clock correction due to orbit eccentricity corrections for each
                                          observation
    Returns:
       TimeDelta: Flight time of GNSS signal between satellite and receiver
    """
    # Note: It can be that the observation table 'obs' is not given. For example if different orbit solutions are
    #       compared, it is not necessary to read GNSS observation data. In this case the Dataset time entries
    #       are not corrected for time of flight determined based on pseudorange observations. Instead the given
    #       Dataset time entries are directly used.
    flight_time = np.zeros(dset.num_obs)
    if "obs" in dset.fields:
        for sys in dset.unique("system"):

            # Get code observation type defined by given observation and observation type priority list
            # Note: First element of GNSS observation type list should be used.
            obstype = dset.meta["obstypes"][sys][0]
            log.debug(
                f"Code observation '{obstype}' for GNSS '{sys}' is selected for determination of initial flight time."
            )

            idx = dset.filter(system=sys)
            flight_time[idx] = dset.obs[obstype][idx] / constant.c

    if sat_clock_corr is not None:
        flight_time += sat_clock_corr / constant.c

    if rel_clock_corr is not None:
        flight_time += rel_clock_corr / constant.c

    return TimeDelta(flight_time, fmt="seconds", scale="gps")
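
An order-of-magnitude check of the first guess :math:`P_r^s / c` (illustration only, with an assumed pseudorange):

# A pseudorange of roughly 22,000 km corresponds to a signal flight time of about 73 ms
c = 299_792_458.0        # speed of light in m/s
pseudorange = 22.0e6     # assumed GPS pseudorange in meters
print(pseudorange / c)   # ~0.0734 s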
Code example #7
def vlbi_grav_delay(dset):
    """Calculate the gravitational delay

    The implementation is described in IERS Conventions [1]_, section 11.1, in particular equation (11.9).

    Args:
        dset:     A Dataset containing model data.

    Returns:
        Numpy array: Gravitational delay in meters for each observation.
    """
    eph = apriori.get("ephemerides", time=dset.time)
    grav_delay = np.zeros(dset.num_obs)

    # List of celestial bodies. Major moons are also recommended, like Titan, Ganymede, ...
    bodies = [
        "mercury barycenter",
        "venus barycenter",
        "earth",
        "moon",
        "mars barycenter",
        "jupiter barycenter",
        "saturn barycenter",
        "uranus barycenter",
        "neptune barycenter",
        "pluto barycenter",
        "sun",
    ]

    bcrs_vel_earth = eph.vel_bcrs("earth")

    baseline_gcrs = dset.site_pos_2.gcrs.pos - dset.site_pos_1.gcrs.pos
    src_dot_baseline = (dset.src_dir.unit_vector[:, None, :] @ baseline_gcrs.mat)[:, 0, 0]

    # Equation 11.6
    bcrs_site1 = eph.pos_bcrs("earth") + dset.site_pos_1.gcrs.pos.val
    bcrs_site2 = eph.pos_bcrs("earth") + dset.site_pos_2.gcrs.pos.val

    for body in bodies:
        try:
            GM_name = "GM" if body == "earth" else f"GM_{body.split()[0]}"
            GM_body = constant.get(GM_name, source=eph.ephemerides)
        except KeyError:
            log.warn(
                f"The GM value of {body.split()[0].title()} is not defined for {eph.ephemerides}. "
                f"Correction set to zero."
            )
            continue
        bcrs_body_t1 = eph.pos_bcrs(body)

        # Equation 11.3
        delta_t = TimeDelta(
            np.maximum(0, dset.src_dir.unit_vector[:, None, :] @ (bcrs_body_t1 - bcrs_site1)[:, :, None])[:, 0, 0]
            * Unit.second2day
            / constant.c,
            fmt="jd",
            scale="tdb",
        )
        time_1J = dset.time.tdb - delta_t

        # Equation 11.4
        bcrs_body_t1J = eph.pos_bcrs(body, time=time_1J)
        vector_body_site1 = bcrs_site1 - bcrs_body_t1J

        # Equation 11.5
        vector_body_site2 = bcrs_site2 - bcrs_body_t1J - bcrs_vel_earth / constant.c * src_dot_baseline[:, None]

        # Needed for equation 11.1
        norm_body_site1 = np.linalg.norm(vector_body_site1, axis=1)
        src_dot_vector_body_site1 = (dset.src_dir.unit_vector[:, None, :] @ vector_body_site1[:, :, None])[:, 0, 0]
        nomJ = norm_body_site1 + src_dot_vector_body_site1
        denomJ = (
            np.linalg.norm(vector_body_site2, axis=1)
            + (dset.src_dir.unit_vector[:, None, :] @ vector_body_site2[:, :, None])[:, 0, 0]
        )

        # Main correction (equation 11.1)
        grav_delay += 2 * GM_body / constant.c ** 2 * np.log(nomJ / denomJ)

        # Higher order correction  (equation 11.14)
        baseline_dot_vector_body_site1 = (baseline_gcrs.val[:, None, :] @ vector_body_site1[:, :, None])[:, 0, 0]
        grav_delay += (
            4
            * GM_body ** 2
            / constant.c ** 4
            * (baseline_dot_vector_body_site1 / norm_body_site1 + src_dot_baseline)
            / (norm_body_site1 + src_dot_vector_body_site1) ** 2
        )

    # Denominator (equation 11.9)
    denominator = (
        1
        + (
            (bcrs_vel_earth + dset.site_pos_2.gcrs.vel.val)[:, None, :]
            @ dset.src_dir.unit_vector[:, :, None]
            / constant.c
        )[:, 0, 0]
    )

    return grav_delay / denominator
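
For reference, the per-body term accumulated in the loop is the logarithmic (Shapiro) delay of equation (11.1) in the IERS Conventions, which for body :math:`J` can be written as

.. math::
    \Delta T_{\mathrm{grav},J} = \frac{2 G M_J}{c^3}
    \ln \frac{|\vec{R}_{1J}| + \vec{K} \cdot \vec{R}_{1J}}{|\vec{R}_{2J}| + \vec{K} \cdot \vec{R}_{2J}}

Since the function returns the delay in meters rather than seconds, the code uses the factor :math:`2 G M_J / c^2` instead of :math:`2 G M_J / c^3`.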
Code example #8
File: slr.py  Project: mfkiwl/where
def calculate(stage, dset):
    """
    Integrate differential equation of motion of the satellite

    Args:
        stage:  Name of current stage
        dset:   Dataset containing the data
    """

    iterations = config.tech.iterations.int

    # Run models adjusting station positions
    site.calculate_site("site", dset)
    delta_pos = site.add("site", dset)
    dset.site_pos[:] = (dset.site_pos.gcrs + delta_pos[0].gcrs).trs

    dset.add_float("obs",
                   val=dset.time_of_flight * constant.c / 2,
                   unit="meter")
    dset.add_float("calc", np.zeros(dset.num_obs), unit="meter")
    dset.add_float("residual", np.zeros(dset.num_obs), unit="meter")
    dset.add_float("up_leg", np.zeros(dset.num_obs), unit="second")
    dset.add_posvel("sat_pos",
                    np.zeros((dset.num_obs, 6)),
                    system="gcrs",
                    time=dset.time)
    arc_length = config.tech.arc_length.float

    dset.site_pos.other = dset.sat_pos

    # First guess for up_leg:
    dset.up_leg[:] = dset.time_of_flight / 2

    for iter_num in itertools.count(start=1):
        log.blank()
        log.info(f"Calculating model corrections for iteration {iter_num}")

        sat_time_list = dset.obs_time + dset.time_bias + dset.up_leg
        apriori_orbit_provider = config.tech.apriori_orbit.str
        sat_name = dset.vars["sat_name"]

        rundate = dset.analysis["rundate"]

        if apriori_orbit_provider:
            version = config.tech.apriori_orbit_version.str
            log.info(
                f"Using external orbits from {apriori_orbit_provider}, version {version}"
            )
            apriori_orbit = apriori.get(
                "orbit",
                rundate=rundate + timedelta(days=arc_length),
                time=None,
                day_offset=6,
                satellite=sat_name,
                apriori_orbit="slr",
                file_key="slr_external_orbits",
            )
            dset_external = apriori_orbit._read(dset, apriori_orbit_provider,
                                                version)

            sat_pos = dset_external.sat_pos.gcrs_pos
            t_sec = TimeDelta(
                dset_external.time -
                Time(datetime(rundate.year, rundate.month, rundate.day),
                     scale="utc",
                     fmt="datetime"),
                fmt="seconds",
            )
            t_sec = t_sec.value
        else:
            sat_pos, sat_vel, t_sec = orbit.calculate_orbit(
                datetime(rundate.year, rundate.month, rundate.day),
                sat_name,
                sat_time_list,
                return_full_table=True)

        sat_pos_ip, sat_vel_ip = interpolation.interpolate_with_derivative(
            np.array(t_sec),
            sat_pos,
            sat_time_list,
            kind="interpolated_univariate_spline")
        dset.sat_pos.gcrs[:] = np.concatenate((sat_pos_ip, sat_vel_ip), axis=1)
        delay.calculate_delay("kinematic_models", dset)

        # We observe the time when an observation is done, and the time of flight of the laser pulse. We estimate
        # the up-leg time with Newton's method applied to the equation (8.84) of :cite:'beutler2005' Gerhard Beutler:
        # Methods of Celestial Mechanics, Vol I., 2005.
        for j in range(0, 4):
            reflect_time = dset.time + TimeDelta(
                dset.time_bias + dset.up_leg, fmt="seconds", scale="utc")
            site_pos_reflect_time = (rotation.trs2gcrs(reflect_time)
                                     @ dset.site_pos.trs.val[:, :, None])[:, :,
                                                                          0]
            sta_sat_vector = dset.sat_pos.gcrs.pos.val - site_pos_reflect_time
            unit_vector = sta_sat_vector / np.linalg.norm(sta_sat_vector,
                                                          axis=1)[:, None]

            rho12 = (np.linalg.norm(sta_sat_vector, axis=1) +
                     delay.add("kinematic_models", dset)) / constant.c
            correction = (-dset.up_leg + rho12) / (
                np.ones(dset.num_obs) - np.sum(
                    unit_vector / constant.c * dset.sat_pos.vel.val, axis=1))
            dset.up_leg[:] += correction
            sat_time_list = dset.obs_time + dset.time_bias + dset.up_leg
            sat_pos_ip, sat_vel_ip = interpolation.interpolate_with_derivative(
                np.array(t_sec),
                sat_pos,
                sat_time_list,
                kind="interpolated_univariate_spline")

            dset.sat_pos.gcrs[:] = np.concatenate((sat_pos_ip, sat_vel_ip),
                                                  axis=1)

        delay.calculate_delay("satellite_models", dset)
        dset.calc[:] = delay.add("satellite_models", dset)
        dset.residual[:] = dset.obs - dset.calc
        log.info(
            f"{dset.num_obs} observations, residual = {dset.rms('residual'):.4f}"
        )
        if not apriori_orbit_provider:
            orbit.update_orbit(sat_name, dset.site_pos.gcrs, dset.sat_pos.pos,
                               dset.sat_pos.vel, dset.residual, dset.bin_rms)

        dset.write_as(stage=stage, label=iter_num, sat_name=sat_name)
        if iter_num >= iterations:
            break
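
The Newton step for the up-leg time can be isolated in a one-dimensional toy problem (all numbers below are assumed): for a satellite receding at constant radial velocity v, the same correction formula as in the inner loop above converges to rho0 / (c - v).

# Toy illustration of the up-leg Newton iteration; not part of the project code
c, v, rho0 = 299_792_458.0, 3.0e3, 7.0e6    # assumed speed of light [m/s], radial velocity [m/s], range [m]
up_leg = rho0 / c                           # first guess for the up-leg light time
for _ in range(4):
    rho12 = (rho0 + v * up_leg) / c         # light time to the satellite at reflection time
    correction = (-up_leg + rho12) / (1.0 - v / c)
    up_leg += correction
print(up_leg, rho0 / (c - v))               # both ~0.02335 s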