Example #1
    def satellite_clock_correction(self):
        """Determine satellite clock correction based on precise satellite clock product

        The GNSS satellite clock bias is read from RINEX clock files. Afterwards the satellite clock bias is
        interpolated to the observation epochs via cubic interpolation.

        TODO:
            * Beware of the extrapolation (bounds_error=False in interpolate).
            * Check for satellite clock interpolation in:
              "Hesselbarth, A.: Statische und kinematische GNSS-Auswertung mittels PPP, 2011"

        Returns:
            numpy.ndarray:    GNSS satellite clock corrections for each observation
        """
        correction = np.zeros(self.dset.num_obs)
        sat_transmission_time = self.dset.time.gps.gpssec

        # Get precise GNSS satellite clock values
        clock_product = config.tech.get("clock_product", default="clk").str
        if clock_product == "sp3":
            all_sat_clk = self.dset_edit
        elif clock_product == "clk":

            # TODO: File path information has to be improved, because 3 consecutive days are read.
            log.info(
                "Calculating satellite clock correction (precise) based on RINEX clock file {}.",
                files.path(file_key="gnss_rinex_clk"),
            )

            all_sat_clk = data.Dataset(rundate=self.dset.rundate,
                                       tech=None,
                                       stage=None,
                                       dataset_name="gnss_sat_clk",
                                       dataset_id=0,
                                       empty=True)
            parser = parsers.parse("rinex_clk", rundate=self.dset.rundate)
            parser.write_to_dataset(
                all_sat_clk
            )  # TODO Read RINEX clock file, from day before and day after.
            #     Needed for interpolation. Add handling if these clk-files
            #     are not available. Remove first and last observations?
            #     If precise clock products are not available broadcast
            #     ephemeris should be used.
        else:
            log.fatal(
                "Unknown clock product '{}'. Configuration option 'clock_product' can only be 'sp3' or 'clk'.",
                clock_product,
            )

        # Loop over all satellites given in configuration file
        for sat in self.dset.unique("satellite"):

            # Skip satellites that are not given in the RINEX clock file
            if sat not in all_sat_clk.unique("satellite"):
                # TODO: Maybe satellite is available in SP3 file, which includes also
                #      satellite clock bias, but only for every 15 min instead of
                #      every 5 min (or 30 s by use of igs<wwwwd>.clk_30s).
                continue

            idx = self.dset.filter(satellite=sat)
            clk_idx = all_sat_clk.filter(satellite=sat)

            # Interpolation of GNSS precise satellite clock values
            # TODO: Check if interpolation method is ok.
            sat_clock_bias_ip = interpolate.interp1d(
                all_sat_clk.time.gps.gpssec[clk_idx],
                all_sat_clk.sat_clock_bias[clk_idx],
                axis=0,
                kind="cubic",
                bounds_error=False,
                fill_value=all_sat_clk.sat_clock_bias[clk_idx][-1],
            )
            correction[idx] = sat_clock_bias_ip(sat_transmission_time[idx])

        return correction
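
The bounds_error=False / fill_value combination above means observation epochs outside the clock file's time span are not truly extrapolated but clamped to the last bias value, which is what the TODO warns about. A minimal, self-contained sketch of that behavior, with invented epochs and bias values:

import numpy as np
from scipy import interpolate

# Invented clock epochs [s of GPS week] and satellite clock biases [s]
t_clk = np.array([0.0, 300.0, 600.0, 900.0, 1200.0])
bias = np.array([1.00e-4, 1.10e-4, 1.05e-4, 1.20e-4, 1.15e-4])

# Cubic interpolator; epochs outside [0, 1200] fall back to the last bias
# instead of raising an error, mirroring the call above
ip = interpolate.interp1d(t_clk, bias, kind="cubic", bounds_error=False, fill_value=bias[-1])

t_obs = np.array([150.0, 450.0, 1500.0])  # the last epoch is out of range
print(ip(t_obs))  # the third value is simply bias[-1], not an extrapolation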
Example #2
def main(date: "datedoy", tech: "pipeline", items: "option",
         specifier: "option"):
    log.init(log_level="info")
    dsets = dict()

    # Additional options
    stage = util.read_option_value("--stage")
    writer_names = util.read_option_value("--writers").replace(",",
                                                               " ").split()
    items_ = [s.strip() for s in items.split(",")]

    # Get optional options
    dataset_id = util.read_option_value("--dset_id", default="last")
    dataset_id = "last" if dataset_id == "last" else int(dataset_id)
    dataset_name = util.read_option_value("--dset_name", default="")
    session = util.read_option_value("--session", default="")
    id_ = util.read_option_value("--id", default="")
    id_ = f"-{id_}" if id_ else ""

    # Read datasets for given specifier
    if specifier == "id":
        for id_ in items_:
            dset = data.Dataset(rundate=date,
                                tech=tech,
                                stage=stage,
                                dataset_name=dataset_name,
                                dataset_id=dataset_id,
                                id="-" + id_)
            if dset.num_obs == 0:
                log.warn(f"Dataset '{id_}' is empty.")
                continue
            dsets.update({id_: dset})

    elif specifier == "session":
        for session in items_:
            dset = data.Dataset(rundate=date,
                                tech=tech,
                                stage=stage,
                                dataset_name=session,
                                dataset_id=dataset_id,
                                id=id_)
            if dset.num_obs == 0:
                log.warn(f"Dataset '{session}' is empty.")
                continue
            dsets.update({session: dset})

    elif specifier == "stage":
        for stage in items_:
            dset = data.Dataset(rundate=date,
                                tech=tech,
                                stage=stage,
                                dataset_name=dataset_name,
                                dataset_id=dataset_id,
                                id=id_)
            if dset.num_obs == 0:
                log.warn(f"Dataset '{stage}' is empty.")
                continue
            dsets.update({stage: dset})
    else:
        log.fatal(
            f"Specifier {specifier} is not defined. It should be either 'id', 'session' or 'stage'."
        )

    if len(dsets) == 0:
        log.fatal(f"All given datasets are empty [{', '.join(dsets.keys())}].")
    elif len(dsets) == 1:
        log.warn(
            f"Nothing to compare. Only dataset '{list(dsets.keys())[0]}' is available."
        )

    # Loop over writers
    for writer in writer_names:
        write(writer, dset=dsets)
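
For reference, the two option-parsing conventions in this example behave slightly differently: the --writers value tolerates both comma and whitespace separators, while the items value is split on commas only and then stripped. A standalone illustration with invented option values:

# Invented values standing in for the parsed command-line options
writers_raw = "gnss_report, gnss_comparison"
items_raw = " brdc , precise "

writer_names = writers_raw.replace(",", " ").split()
items_ = [s.strip() for s in items_raw.split(",")]

print(writer_names)  # ['gnss_report', 'gnss_comparison']
print(items_)        # ['brdc', 'precise']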
Example #3
# Definition of dummy date
year = 2002
month = 1
day = 1
hour = 0
minute = 0
second = 0
rundate = datetime(year, month, day, hour, minute, second)
file_vars = config.date_vars(rundate)

# Define 15 min dataset
file_path = files.path(file_key="test_gnss_orbit_interpolation_15min",
                       file_vars=file_vars)
orb_15min = data.Dataset(rundate,
                         tech=None,
                         stage=None,
                         dataset_name="gnss_precise_orbit_15min",
                         dataset_id=0,
                         empty=True)
parser = parsers.parse(parser_name="orbit_sp3c",
                       file_path=file_path,
                       rundate=rundate)
parser.write_to_dataset(orb_15min)

# Define 5 min control dataset
file_path = files.path(file_key="test_gnss_orbit_interpolation_5min",
                       file_vars=file_vars)
orb_5min = data.Dataset(rundate,
                        tech=None,
                        stage=None,
                        dataset_name="gnss_precise_orbit_5min",
                        dataset_id=0,
                        empty=True)
parser = parsers.parse(parser_name="orbit_sp3c",
                       file_path=file_path,
                       rundate=rundate)
parser.write_to_dataset(orb_5min)
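
A hypothetical continuation of this test could interpolate the 15 min positions to the 5 min control epochs and inspect the error, one satellite at a time. The satellite identifier and the sat_pos.itrs field name are assumptions about the Dataset produced by the orbit_sp3c parser, not confirmed by the snippet above:

import numpy as np
from scipy import interpolate

sat = "G01"  # invented satellite identifier
idx_15 = orb_15min.filter(satellite=sat)
idx_5 = orb_5min.filter(satellite=sat)

# Cubic interpolation of the 15 min positions to the 5 min control epochs
ip = interpolate.interp1d(
    orb_15min.time.gps.gpssec[idx_15],
    orb_15min.sat_pos.itrs[idx_15],
    axis=0,
    kind="cubic",
)
diff = ip(orb_5min.time.gps.gpssec[idx_5]) - orb_5min.sat_pos.itrs[idx_5]
print(f"max interpolation error: {np.max(np.abs(diff)):.3f} m")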
Example #4
def add_to_full_timeseries(dset):
    """Write some key variables to the full timeseries

    Args:
        dset:  Dataset, data for a model run.
    """
    dset_id = int(config.tech.timeseries.dataset_id.str.format(**dset.vars))
    dset_name = config.tech.timeseries.dataset_name.str.format(**dset.vars)
    dset_ts = data.Dataset(date(1970, 1, 1),
                           dset.vars["tech"],
                           "timeseries",
                           dset_name,
                           dset_id,
                           session="")
    dset_session = data.Dataset.anonymous()

    # Add data to dset_session
    idx_fields = config.tech[WRITER].index.list
    field_values = [["all"] + dset.unique(f) for f in idx_fields]
    idx_values = dict(zip(idx_fields, zip(*itertools.product(*field_values))))
    # TODO: Remove combinations where filter leaves 0 observations

    # All index fields have the same length, so use the first one
    num_obs = len(idx_values[idx_fields[0]])
    mean_epoch = dset.time.mean.utc
    rundate_str = dset.rundate.strftime(config.FMT_date)
    session = dset.dataset_name
    status = dset.meta.get("analysis_status", "unchecked")
    session_type = dset.meta.get("input", dict()).get("session_type", "")

    dset_session.num_obs = num_obs
    dset_session.add_time("time", val=[mean_epoch] * num_obs, scale="utc")
    dset_session.add_time("timestamp",
                          val=[datetime.now()] * num_obs,
                          scale="utc")
    dset_session.add_text("rundate", val=[rundate_str] * num_obs)
    dset_session.add_text("session", val=[session] * num_obs)
    dset_session.add_text("status", val=[status] * num_obs)
    dset_session.add_text("session_type", val=[session_type] * num_obs)

    for field, value in idx_values.items():
        dset_session.add_text(field, val=value)

    default_dset_str = f"{dset.vars['stage']}/{int(dset.dataset_id)}"
    dsets = {default_dset_str: dset}
    for method, cfg_entry in config.tech[WRITER].items():
        try:
            method_func = getattr(sys.modules[__name__],
                                  "method_{}".format(method))
        except AttributeError:
            log.warn("Method '{}' is unknown", method)
            continue

        for field_cfg in cfg_entry.as_list(split_re=", *"):
            field_out = re.sub("[ -/:]", "_", field_cfg)
            func, _, field_dset = field_cfg.rpartition(":")
            field_in, _, dset_str = field_dset.partition("-")
            func = func if func else field_in
            dset_str = dset_str if dset_str else default_dset_str
            if dset_str not in dsets:
                stage, _, dset_id = dset_str.partition("/")
                dset_id = int(dset_id) if dset_id else "last"
                dsets[dset_str] = data.Dataset(
                    dset.rundate,
                    tech=dset.vars["tech"],
                    stage=stage,
                    dataset_name=dset.dataset_name,
                    dataset_id=dset_id,
                )

            val, adder, unit = method_func(dsets[dset_str], field_in,
                                           idx_values, func)
            if adder:
                add_func = getattr(dset_session, adder)
                if val.ndim > 1:
                    add_func(field_out,
                             val=val,
                             shape=val.shape[1:],
                             unit=unit)
                else:
                    add_func(field_out, val=val, unit=unit)

    # hack to get solved neq data into the time series:
    # TODO: unhack this :P Add as a method_neq instead?
    if "normal equation" in dset.meta:
        _add_solved_neq_fields(dset, dset_session, idx_values)

    # Filter timeseries dataset to remove any previous data for this rundate and session
    keep_idx = np.logical_not(
        dset_ts.filter(rundate=rundate_str, session=session))
    dset_ts.subset(keep_idx)

    # Extend timeseries dataset with dset_session and write to disk
    if dset_ts.num_obs:
        dset_ts.extend(dset_session)
    else:
        dset_ts.copy_from(dset_session)

    log.info("Updating timeseries dataset '{}'", dset_ts.description)
    dset_ts.write()
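
The field_cfg entries parsed in the writer loop above follow a "[func:]field[-stage/dataset_id]" pattern. A standalone illustration of the rpartition/partition logic, with an invented entry and default:

field_cfg = "rms:residual-estimate/2"  # invented example entry
default_dset_str = "calculate/0"       # invented default stage/dataset_id

func, _, field_dset = field_cfg.rpartition(":")
field_in, _, dset_str = field_dset.partition("-")
func = func if func else field_in
dset_str = dset_str if dset_str else default_dset_str

print(func, field_in, dset_str)  # rms residual estimate/2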
Example #5
    def setUp(self):

        # Definition of dummy date
        year = 2016
        month = 3
        day = 1
        hour = 0
        minute = 0
        second = 0

        rundate = datetime(year, month, day, hour, minute, second)
        time = [(
            "{year}-{month:02d}-{day:02d}T{hour:02d}:{minute:02d}:{second:010.7f}"
            "".format(year=year,
                      month=month,
                      day=day,
                      hour=hour,
                      minute=minute,
                      second=second))]

        # Define 1st Dataset
        # ------------------
        self.dset = data.Dataset(rundate,
                                 tech=None,
                                 stage=None,
                                 dataset_name="test_where",
                                 dataset_id=0,
                                 empty=True)

        # Add 'sat_posvel' field to Dataset
        itrs_pos = np.array([10000000.0, 40000000.0, -5000000.0])  # [m]
        itrs_vel = np.array([-1500.0, 1000.0, -100.0])  # [m/s]
        itrs = np.hstack((itrs_pos, itrs_vel)).reshape(1, 6)
        self.dset.num_obs = 1
        self.dset.add_time("time", val=time, scale="gps")
        self.dset.add_posvel("sat_posvel", time="time", itrs=itrs)

        # Add 'kepler' array
        self.kepler = np.array([[
            25015181.018465,  # a [m]
            0.707977170873199,  # e
            0.121662175957290,  # i [rad]
            3.024483909022929,  # Omega [rad]
            1.597899323919624,  # omega [rad]
            2.772570719534964,  # E [rad]
        ]])

        # Define 2nd Dataset
        # ------------------
        self.dset2 = data.Dataset(rundate,
                                  tech=None,
                                  stage=None,
                                  dataset_name="test_where2",
                                  dataset_id=0,
                                  empty=True)

        # Add 'sat_posvel' field to Dataset
        itrs_pos = np.array(
            [-5499233.574326, -14614718.575397, 21564674.490672])  # [m]
        itrs_vel = np.array([3228.525601, -2037.522778, -599.319641])  # [m/s]
        itrs = np.hstack((itrs_pos, itrs_vel)).reshape(1, 6)
        self.dset2.num_obs = 1
        self.dset2.add_time("time", val=time, scale="gps")
        self.dset2.add_posvel("sat_posvel", time="time", itrs=itrs)

        # Define 3rd Dataset
        # ------------------
        self.dset3 = data.Dataset(rundate,
                                  tech=None,
                                  stage=None,
                                  dataset_name="test_where3",
                                  dataset_id=0,
                                  empty=True)

        # Add 'sat_posvel' field to Dataset
        itrs_pos = np.array(
            [-5499488.571916, -14614558.207892, 21564722.457044])  # [m]
        itrs_vel = np.array([0, 0, 0])  # [m/s]
        itrs = np.hstack((itrs_pos, itrs_vel)).reshape(1, 6)
        self.dset3.num_obs = 1
        self.dset3.add_time("time", val=time, scale="tdb")
        self.dset3.add_posvel("sat_posvel", time="time", itrs=itrs)
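
As a quick sanity check on the kepler arrays above, the semi-major axis implies the orbital period via Kepler's third law. A short sketch, using the WGS84 value for Earth's gravitational constant:

import numpy as np

GM = 3.986005e14  # WGS84 Earth gravitational constant [m^3/s^2]
a = 25015181.018465  # semi-major axis a from self.kepler above [m]

# Kepler's third law: T = 2 * pi * sqrt(a^3 / GM)
period = 2 * np.pi * np.sqrt(a**3 / GM)
print(f"orbital period: {period / 3600.0:.2f} h")  # roughly 10.9 h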
Example #6
    def _read(self, dset_raw):
        """Read RINEX navigation file data and save it in a Dataset

        One RINEX navigation file is normally written for each GNSS. The navigation file extension depends on the GNSS
        (GPS: *.n, Galileo: *.l, GLONASS: *.g, ...). Therefore the navigation file name is defined for each GNSS in
        the Where configuration file `files.conf`. In addition, mixed navigation files exist, which include
        navigation messages of different GNSS. We use the following file keys in `files.conf`:
            =========  ==================
             System     File key
            =========  ==================
             Galileo    gnss_rinex_nav_E
             GLONASS    gnss_rinex_nav_R
             GPS        gnss_rinex_nav_G
             Mixed      gnss_rinex_nav_M
            =========  ==================

        Depending on the configuration options `systems` and `use_mixed_brdc_file`, the following navigation files are read:

         ======================  ==================  =======================================
          Option                  File key            What kind of navigation file is read?
         ======================  ==================  =======================================
          systems = G             gnss_rinex_nav_G    Only the GPS navigation file
          systems = G E           gnss_rinex_nav_G    GPS and Galileo navigation files
                                  gnss_rinex_nav_E
          use_mixed_brdc_file     gnss_rinex_nav_M    Mixed GNSS navigation file
         ======================  ==================  =======================================

        Args:
            dset_raw (Dataset):     Dataset representing raw data from RINEX navigation file
        """
        use_mixed_brdc_file = config.tech.get("use_mixed_brdc_file",
                                              default=False).bool
        systems = {"M"} if use_mixed_brdc_file == True else set(self.system)
        file_paths = list()

        for sys in systems:

            # Generate temporary Dataset with orbit file data
            dset_temp = data.Dataset(dset_raw.rundate,
                                     dset_raw.vars["tech"],
                                     "temporary",
                                     "",
                                     0,
                                     empty=True)
            parser = parsers.parse(
                file_key=self.file_key.format(system=sys),
                rundate=dset_raw.rundate,
                file_vars=dict(dset_raw.vars,
                               file_key=self.file_key.format(system=sys)),
            )
            parser.write_to_dataset(dset_temp)
            file_paths.append(str(parser.file_path))

            # Extend Dataset dset_raw with temporary Dataset
            if dset_raw.num_obs:

                # Merge meta data information
                # TODO: Handle meta data information correctly. Meta data information based on different GNSS navigation
                #       message files has to be merged correctly together. What to do with 'sat_sys' and 'leap_seconds'?
                for date in dset_temp.meta.keys():
                    for key in ["iono_para", "time_sys_corr"]:
                        dset_temp.meta[date][key].update(
                            dset_raw.meta[date][key])

                dset_raw.extend(dset_temp)
            else:
                dset_raw.copy_from(dset_temp)
            dset_raw.add_to_meta("parser", "file_path", file_paths)
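
The docstring tables map GNSS identifiers to file keys. A minimal standalone sketch of that selection logic, assuming a file_key template of the form "gnss_rinex_nav_{system}":

file_key = "gnss_rinex_nav_{system}"  # assumed template, cf. the docstring

def nav_file_keys(systems, use_mixed_brdc_file=False):
    """Return the navigation file keys to read for the given GNSS identifiers"""
    selected = {"M"} if use_mixed_brdc_file else set(systems)
    return [file_key.format(system=sys) for sys in sorted(selected)]

print(nav_file_keys(["G", "E"]))  # ['gnss_rinex_nav_E', 'gnss_rinex_nav_G']
print(nav_file_keys(["G", "E"], use_mixed_brdc_file=True))  # ['gnss_rinex_nav_M']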