Example #1
def calculate(stage, dset):

    # CALCULATE
    # -----------
    # Correction of station position in GCRS due to loading and tide effects
    site.calculate_site("site", dset)
    delta_pos = site.add("site", dset)
    dset.site_pos[:] = (dset.site_pos.gcrs + delta_pos[0].gcrs).trs

    # Initialize models given in configuration file by adding model fields to Dataset
    delay.calculate_delay("delay", dset)
    delta_delay = delay.add("delay", dset)

    if "observed" in dset.fields:
        dset.observed[:] = gnss.get_code_observation(dset)
    else:
        dset.add_float("observed",
                       val=gnss.get_code_observation(dset),
                       unit="meter")

    # Get model corrections
    if "calc" in dset.fields:
        dset.calc[:] = delta_delay
    else:
        dset.add_float("calc",
                       val=delta_delay,
                       unit="meter",
                       write_level="operational")

    if "residual" in dset.fields:
        dset.residual[:] = dset.observed - dset.calc
    else:
        dset.add_float("residual", val=dset.observed - dset.calc, unit="meter")

    # Store calculate results
    log.info(
        f"{dset.num_obs} observations, residual = {dset.rms('residual'):.4f}")
    dset.write_as(stage="calculate", dataset_id=0)
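This and the following examples share the same observed/calc/residual bookkeeping: the code observation or observed delay goes into one field, the summed model corrections into another, and their difference is stored as the residual and reported as an RMS. A minimal stand-alone sketch of that arithmetic in plain NumPy (made-up values, not the Where Dataset API):

import numpy as np

# Made-up values in meters, purely for illustration
observed = np.array([20183.41, 20179.88, 20190.02])   # e.g. code observations
calc = np.array([20183.05, 20180.10, 20189.60])       # sum of modelled corrections

residual = observed - calc
rms = np.sqrt(np.mean(residual ** 2))                  # root-mean-square of the residuals
print(f"{residual.size} observations, residual = {rms:.4f}")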
Example #2
File: vlbi.py  Project: uasau/where
def calculate(stage, dset):
    """Estimate model parameters

    Args:
        rundate (Datetime):  The model run date.
        session (String):    Name of session.
        prev_stage (String): Name of previous stage.
        stage (String):      Name of current stage.
    """
    # Run models adjusting station positions
    log.info(f"Calculating station displacements")
    site.calculate_site("site", dset)
    delta_pos = site.add("site", dset)

    dset.site_pos_1[:] = (dset.site_pos_1.gcrs + delta_pos[0].gcrs).trs
    dset.site_pos_2[:] = (dset.site_pos_2.gcrs + delta_pos[1].gcrs).trs
    log.blank()

    # Run models for each term of the observation equation
    log.info(f"Calculating theoretical delays")
    delay.calculate_delay("delay", dset)

    delta_delay = delay.add("delay", dset)
    dset.add_float("obs",
                   val=dset.observed_delay,
                   unit="meter",
                   write_level="operational")
    dset.add_float("calc",
                   val=delta_delay,
                   unit="meter",
                   write_level="operational")
    dset.add_float("residual",
                   val=dset.obs - dset.calc,
                   unit="meter",
                   write_level="operational")
    log.blank()

    # Estimate clock polynomial
    log.info(f"Calculating clock polynomials")
    max_iterations = config.tech.calculate_max_iterations.int
    outlier_limit = config.tech.calculate_outlier_limit.float
    store_outliers = config.tech.store_outliers.bool

    for iter_num in itertools.count(start=1):
        delay.calculate_delay("delay_corr", dset, dset)
        delta_correction = delay.add("delay_corr", dset)

        dset.calc[:] = dset.calc + delta_correction
        dset.residual[:] = dset.obs - dset.calc
        rms = dset.rms("residual")
        log.info(f"{dset.num_obs} observations, residual = {rms:.4f}")

        # Store results
        dset.write_as(stage=stage, label=iter_num - 1)

        # Detect and remove extreme outliers
        idx = np.abs(dset.residual) < outlier_limit * rms
        if iter_num > max_iterations or idx.all():
            break

        if store_outliers:
            bad_idx = np.logical_not(idx)
            log.info(
                f"Adding {np.sum(bad_idx)} observations to ignore_observation")
            bad_obs = np.char.add(np.char.add(dset.time.utc.iso[bad_idx], " "),
                                  dset.baseline[bad_idx]).tolist()
            with config.update_tech_config(
                    dset.analysis["rundate"],
                    pipeline,
                    session=dset.vars["session"]) as cfg:
                current = cfg.ignore_observation.observations.as_list(", *")
                updated = ", ".join(sorted(current + bad_obs))
                cfg.update("ignore_observation",
                           "observations",
                           updated,
                           source=util.get_program_name())

        dset.subset(idx)
        log.info(
            f"Removing {sum(~idx)} observations with residuals bigger than {outlier_limit * rms}"
        )
        log.blank()

    # Try to detect clock breaks
    if config.tech.detect_clockbreaks.bool:
        writers.write_one("vlbi_detect_clockbreaks", dset=dset)
        dset.write()
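The outlier screening inside the clock-polynomial loop above is a plain threshold on the residuals: observations with |residual| >= outlier_limit * rms are optionally recorded under ignore_observation in the configuration and then removed with dset.subset before the next iteration. A rough stand-alone sketch of just that screening logic (NumPy only, hypothetical helper name; in the real loop the residuals are recomputed after each clock re-estimation, which is skipped here):

import numpy as np

def screen_outliers(residual, outlier_limit, max_iterations):
    """Return a boolean mask of observations to keep (illustrative sketch only)."""
    keep = np.ones(residual.size, dtype=bool)
    for _ in range(max_iterations):
        rms = np.sqrt(np.mean(residual[keep] ** 2))
        within = np.abs(residual) < outlier_limit * rms
        if within[keep].all():
            break                      # no remaining outliers among the kept observations
        keep &= within                 # drop the extreme residuals, then re-check
    return keep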
Example #3
File: slr.py  Project: mfkiwl/where
def calculate(stage, dset):
    """
    Integrate differential equation of motion of the satellite

    Args:
        stage:  Name of current stage
        dset:   Dataset containing the data
    """

    iterations = config.tech.iterations.int

    # Run models adjusting station positions
    site.calculate_site("site", dset)
    delta_pos = site.add("site", dset)
    dset.site_pos[:] = (dset.site_pos.gcrs + delta_pos[0].gcrs).trs

    dset.add_float("obs",
                   val=dset.time_of_flight * constant.c / 2,
                   unit="meter")
    dset.add_float("calc", np.zeros(dset.num_obs), unit="meter")
    dset.add_float("residual", np.zeros(dset.num_obs), unit="meter")
    dset.add_float("up_leg", np.zeros(dset.num_obs), unit="second")
    dset.add_posvel("sat_pos",
                    np.zeros((dset.num_obs, 6)),
                    system="gcrs",
                    time=dset.time)
    arc_length = config.tech.arc_length.float

    dset.site_pos.other = dset.sat_pos

    # First guess for up_leg:
    dset.up_leg[:] = dset.time_of_flight / 2

    for iter_num in itertools.count(start=1):
        log.blank()
        log.info(f"Calculating model corrections for iteration {iter_num}")

        sat_time_list = dset.obs_time + dset.time_bias + dset.up_leg
        apriori_orbit_provider = config.tech.apriori_orbit.str
        sat_name = dset.vars["sat_name"]

        rundate = dset.analysis["rundate"]

        if apriori_orbit_provider:
            version = config.tech.apriori_orbit_version.str
            log.info(
                f"Using external orbits from {apriori_orbit_provider}, version {version}"
            )
            apriori_orbit = apriori.get(
                "orbit",
                rundate=rundate + timedelta(days=arc_length),
                time=None,
                day_offset=6,
                satellite=sat_name,
                apriori_orbit="slr",
                file_key="slr_external_orbits",
            )
            dset_external = apriori_orbit._read(dset, apriori_orbit_provider,
                                                version)

            sat_pos = dset_external.sat_pos.gcrs_pos
            t_sec = TimeDelta(
                dset_external.time -
                Time(datetime(rundate.year, rundate.month, rundate.day),
                     scale="utc",
                     fmt="datetime"),
                fmt="seconds",
            )
            t_sec = t_sec.value
        else:
            sat_pos, sat_vel, t_sec = orbit.calculate_orbit(
                datetime(rundate.year, rundate.month, rundate.day),
                sat_name,
                sat_time_list,
                return_full_table=True)

        sat_pos_ip, sat_vel_ip = interpolation.interpolate_with_derivative(
            np.array(t_sec),
            sat_pos,
            sat_time_list,
            kind="interpolated_univariate_spline")
        dset.sat_pos.gcrs[:] = np.concatenate((sat_pos_ip, sat_vel_ip), axis=1)
        delay.calculate_delay("kinematic_models", dset)

        # The observed quantities are the epoch of each observation and the time of flight of the laser pulse.
        # The up-leg time is estimated with Newton's method applied to equation (8.84) of :cite:`beutler2005`
        # (Gerhard Beutler: Methods of Celestial Mechanics, Vol. I, 2005).
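        #
        # A sketch of the update applied below (notation added here, not from the source): with t_u the
        # up-leg time, rho12 the modelled up-leg range converted to seconds, u the station-to-satellite
        # unit vector and v the satellite velocity, the root of
        #     f(t_u) = t_u - rho12(t_u),    f'(t_u) ~= 1 - (u . v) / c
        # gives the Newton step
        #     t_u <- t_u + (rho12 - t_u) / (1 - (u . v) / c),
        # which is the `correction` added to dset.up_leg in each of the four passes.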
        for j in range(0, 4):
            reflect_time = dset.time + TimeDelta(
                dset.time_bias + dset.up_leg, fmt="seconds", scale="utc")
            site_pos_reflect_time = (
                rotation.trs2gcrs(reflect_time) @ dset.site_pos.trs.val[:, :, None]
            )[:, :, 0]
            sta_sat_vector = dset.sat_pos.gcrs.pos.val - site_pos_reflect_time
            unit_vector = sta_sat_vector / np.linalg.norm(sta_sat_vector,
                                                          axis=1)[:, None]

            rho12 = (np.linalg.norm(sta_sat_vector, axis=1) +
                     delay.add("kinematic_models", dset)) / constant.c
            correction = (-dset.up_leg + rho12) / (
                np.ones(dset.num_obs) - np.sum(
                    unit_vector / constant.c * dset.sat_pos.vel.val, axis=1))
            dset.up_leg[:] += correction
            sat_time_list = dset.obs_time + dset.time_bias + dset.up_leg
            sat_pos_ip, sat_vel_ip = interpolation.interpolate_with_derivative(
                np.array(t_sec),
                sat_pos,
                sat_time_list,
                kind="interpolated_univariate_spline")

            dset.sat_pos.gcrs[:] = np.concatenate((sat_pos_ip, sat_vel_ip),
                                                  axis=1)

        delay.calculate_delay("satellite_models", dset)
        dset.calc[:] = delay.add("satellite_models", dset)
        dset.residual[:] = dset.obs - dset.calc
        log.info(
            f"{dset.num_obs} observations, residual = {dset.rms('residual'):.4f}"
        )
        if not apriori_orbit_provider:
            orbit.update_orbit(sat_name, dset.site_pos.gcrs, dset.sat_pos.pos,
                               dset.sat_pos.vel, dset.residual, dset.bin_rms)

        dset.write_as(stage=stage, label=iter_num, sat_name=sat_name)
        if iter_num >= iterations:
            break
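The interpolation.interpolate_with_derivative call with kind="interpolated_univariate_spline" presumably fits a spline to each coordinate of the tabulated orbit and takes the velocity from the spline's analytic derivative. A rough stand-alone equivalent of that behaviour using SciPy (an assumed reading, not the Where implementation):

import numpy as np
from scipy.interpolate import InterpolatedUnivariateSpline

t_table = np.linspace(0.0, 600.0, 61)                # tabulated epochs [s]
pos_table = np.stack([np.cos(t_table / 100.0),       # toy 3-D trajectory
                      np.sin(t_table / 100.0),
                      t_table / 100.0], axis=1)
t_obs = np.array([12.3, 250.0, 333.3])               # epochs to interpolate to

pos = np.empty((t_obs.size, 3))
vel = np.empty_like(pos)
for i in range(3):
    spline = InterpolatedUnivariateSpline(t_table, pos_table[:, i])
    pos[:, i] = spline(t_obs)                        # interpolated position
    vel[:, i] = spline.derivative()(t_obs)           # velocity from the spline derivative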
Example #4
def calculate_estimate(stage, dset):
    """Calculate model parameters and estimate

    Args:
        stage (str):          Name of current stage.
        dset (Dataset):       A dataset containing the data.
    """
    max_iterations = config.tech.max_iterations.int

    for iter_num in itertools.count(start=1):

        # CALCULATE
        # -----------
        # Correction of station position in GCRS due to loading and tide effects
        site.calculate_site("site", dset, shape=(3, ))
        delta_pos = np.sum(dset.get_table("site").reshape(
            (dset.num_obs, -1, 3)),
                           axis=1)
        dset.site_pos.add_to_gcrs(delta_pos)

        # Initialize models given in configuration file by adding model fields to Dataset
        delay.calculate_delay("calc_models",
                              dset,
                              write_levels=dict(gnss_range="operational"))
        if "obs" in dset.fields:
            dset.obs[:] = gnss.get_code_observation(dset)
        else:
            dset.add_float("obs",
                           val=gnss.get_code_observation(dset),
                           unit="meter")

        # Get model corrections
        if "calc" in dset.fields:
            dset.calc[:] = np.sum(dset.get_table("calc_models"), axis=1)
        else:
            dset.add_float("calc",
                           val=np.sum(dset.get_table("calc_models"), axis=1),
                           unit="meter")

        if "residual" in dset.fields:
            dset.residual[:] = dset.obs - dset.calc
        else:
            dset.add_float("residual", val=dset.obs - dset.calc, unit="meter")

        # Store calculate results
        log.info(
            f"{dset.num_obs} observations, residual = {dset.rms('residual'):.4f}"
        )
        dset.write_as(stage="calculate", dataset_id=iter_num)
        dset.read()  # TODO: workaround because caching does not work correctly

        # ESTIMATE
        # ----------
        partial_vectors = estimation.partial_vectors(dset, "estimate_method")

        log.blank()  # Space between iterations for clarity
        log.info(f"Estimating parameters for iteration {iter_num}")
        estimation.call("estimate_method",
                        dset=dset,
                        partial_vectors=partial_vectors,
                        obs_noise=np.ones(dset.num_obs))
        rms = dset.rms("residual")
        log.info(f"{dset.num_obs} observations, postfit residual = {rms:.4f}")

        dset.write_as(stage="estimate", dataset_id=iter_num - 1)
        dset.read()  # TODO: workaround because caching does not work correctly

        # Detect and remove outliers based on residuals
        keep_idx = estimation.detect_outliers("estimate_outlier_detection",
                                              dset)

        if dset.meta["estimate_convergence_status"] and keep_idx.all():
            log.info(
                f"Estimation convergence limit of {config.tech.convergence_limit.float:.3e} is fulfilled."
            )
            break
        if iter_num >= max_iterations:
            break

        dset.subset(keep_idx)
        log.blank()