Example #1
0
def delta_ut1_utc(time: "TimeArray", models=None) -> "np_float":
    """Return the UT1 - UTC offset in days for the given epochs.

    Args:
        time:    Epochs, either in the ut1 or the utc time scale.
        models:  EOP models passed on to the apriori EOP lookup.

    Returns:
        UT1 - UTC offset converted from seconds to days (negative when
        converting from ut1 back to utc).
    """
    if time.scale != "ut1":
        # time scale is utc
        eop = apriori.get("eop", time=time, models=models)
        return eop.ut1_utc * Unit.second2day

    # pretend that ut1 is utc to get an approximate delta value
    eop = apriori.get("eop",
                      time=Time(time.mjd, fmt="mjd", scale="utc"),
                      models=models)
    approx_utc_mjd = time.mjd - eop.ut1_utc * Unit.second2day

    # get a better delta value with a new computed utc value
    eop = apriori.get("eop",
                      time=Time(approx_utc_mjd, fmt="mjd", scale="utc"),
                      models=models)
    return -eop.ut1_utc * Unit.second2day
Example #2
0
File: dataset.py  Project: uasau/midgard
    def get_events(self, *event_types: str):
        """Get events from the metaset

        Args:
            event_types: Event types used to filter the events. All event
                         types are returned when none are given.

        Returns:
            Sorted list of unique (time, event_type, description) tuples,
            where time is a Time object in the utc scale.
        """
        # Each value in "__events__" is a list of (timestamp, description) pairs
        all_events = self.get("__events__", dict())
        if not event_types:
            event_types = all_events.keys()

        # Set comprehension removes duplicate events before sorting.
        # Note: the varargs annotation is `str` — each positional argument is
        # one event type, not a list of them.
        return sorted(
            {(Time(ts, scale="utc", fmt="isot"), k, desc)
             for k, v in all_events.items() for (ts, desc) in v
             if k in event_types})
Example #3
0
    def ref_epoch(self) -> "UtcTime":
        """ Get reference epoch of site coordinate from SINEX file

        Returns:
            Reference epoch of site coordinate
        """
        # The parsed SINEX info carries the epoch as a datetime object
        epoch = self._info["ref_epoch"]
        return Time(val=epoch, scale="utc", fmt="datetime")
Example #4
0
    def _determine_pseudorange(self) -> None:
        """Determine pseudorange based on ION 2016 tutorial "Raw GNSS Measurements from Android Phones".

        Computes receiver/satellite signal times referenced to the GPS week and
        stores 'week', 'tRxNanos', 'time', 'sat_time' and 'pseudorange' (in
        meters) in self.data.
        """

        # Determine GPS week
        # -FullBiasNanos converts the receiver clock reading to GPS time;
        # 604800 is the number of seconds in one GPS week.
        week = np.floor(-np.array(self.data["FullBiasNanos"]) *
                        Unit.nanosecond2second / 604800)

        # GNSS signal arriving time at measurement time (GPS time) referenced to GPS week
        # (TimeNanos + TimeOffsetNanos) is the receiver measurement time; subtracting
        # (FullBiasNanos + BiasNanos) shifts it to GPS time, and subtracting the
        # whole weeks (604800e9 ns per week) leaves seconds-of-week in nanoseconds.
        tRxNanos = ((np.array(self.data["TimeNanos"], dtype=float) +
                     np.array(self.data["TimeOffsetNanos"], dtype=float)) -
                    (np.array(self.data["FullBiasNanos"], dtype=float) +
                     np.array(self.data["BiasNanos"], dtype=float)) -
                    (week * 604800e9))

        # Sanity checks: tRxNanos must lie within one week [0, 604800e9) ns.
        # NOTE(review): np.all only aborts when EVERY epoch is out of range;
        # np.any looks like the intended check — confirm against the tutorial.
        if np.all(tRxNanos >= 604800e9):
            log.fatal("tRxNanos should be <= GPS nanoseconds.")
        if np.all(tRxNanos <= 0.0):
            log.fatal("tRxNanos should be >= 0.")

        self.data["week"] = week
        self.data["tRxNanos"] = tRxNanos
        # Receiver epoch as (GPS week, seconds of week)
        self.data["time"] = Time(val=week,
                                 val2=tRxNanos * Unit.nanosecond2second,
                                 fmt="gps_ws",
                                 scale="gps")

        # GNSS satellite transmission time at measurement time (GPS time) referenced to GPS week
        tTxNanos = np.array(self.data["ReceivedSvTimeNanos"], dtype=float)

        self.data["sat_time"] = Time(val=week,
                                     val2=tTxNanos * Unit.nanosecond2second,
                                     fmt="gps_ws",
                                     scale="gps")
        # TODO: Check GPS week rollover (see ProcessGnssMeas.m)

        # Pseudorange = signal travel time (reception - transmission) times the
        # speed of light
        self.data["pseudorange"] = (
            tRxNanos -
            tTxNanos) * Unit.nanosecond2second * constant.c  # in meters
Example #5
0
    def as_dataset(self) -> "Dataset":
        """Return the parsed data as a Dataset

        Returns:
            Midgard Dataset where station coordinates and belonging information are stored with following fields:

       |  Field                   | Type           | Description                                                      |
       |--------------------------|----------------|------------------------------------------------------------------|
       | domes                    | numpy.ndarray  | Domes number                                                     |
       | flag                     | numpy.ndarray  | Station flag (see section 24.7.1 of Bernese GNSS software        |
       |                          |                | version 5.2, November 2015)                                      |
       | station                  | numpy.ndarray  | Station name                                                     |
       | site_pos                 | PositionTable  | Station coordinates given as PositionTable object                |

            and following Dataset `meta` data:

       |  Entry              | Type  | Description                                                                    |
       |---------------------|-------|--------------------------------------------------------------------------------|
       | \__data_path__      | str   | File path                                                                      |
        """

        # One observation per parsed station
        dset = dataset.Dataset(num_obs=len(self.data["station"]))
        dset.meta = self.meta.copy()

        # Remove unnecessary fields in meta
        del dset.meta["__params__"]
        del dset.meta["__parser_name__"]

        # Add text fields to dataset; station names are normalized to lower case
        dset.add_text("domes", val=self.data["domes"])
        dset.add_text("flag", val=self.data["flag"])
        dset.add_text("station", val=[name.lower() for name in self.data["station"]])

        # All coordinates share the single reference epoch stored in meta
        epochs = [dset.meta["epoch"]] * dset.num_obs
        coords = np.stack(
            (np.array(self.data["pos_x"]),
             np.array(self.data["pos_y"]),
             np.array(self.data["pos_z"])),
            axis=1)
        dset.add_position(
            "site_pos",
            time=Time(epochs, scale="gps", fmt="datetime"),
            system="trs",
            val=coords,
        )

        return dset
Example #6
0
 def _append_empty(self, num_obs, memo):
     """Append num_obs "empty" epochs (datetime.min placeholders) to the data."""
     # datetime.min serves as the sentinel value for an empty epoch
     filler = Time([datetime.min] * num_obs, scale="utc", fmt="datetime")
     filler_id = id(filler)
     self.data = TimeArray.insert(self.data, self.num_obs, filler, memo)
     # The temporary filler must not linger in the deep-copy memo bookkeeping
     memo.pop(filler_id, None)
Example #7
0
    def as_dataset(self) -> "Dataset":
        """Store Gipsy time dependent parameter data in a dataset

        Returns:
            Midgard Dataset where time dependent parameter data are stored with following fields:


       | Field               | Type              | Description                                                        |
       |---------------------|-------------------|--------------------------------------------------------------------|
       | receiver_clock      | numpy.ndarray     | Receiver clock parameter                                           |
       | satellite           | numpy.ndarray     | Satellite PRN number together with GNSS identifier (e.g. G07)      |
       | satellite_clock     | numpy.ndarray     | Satellite clock parameter                                          |
       | satellite_ant_pco   | PositionTable     | Satellite antenna phase center offset                              |
       | site_posvel         | PosVel            | Station coordinates and velocities                                 |
       | source_id           | numpy.ndarray     | Source ID                                                          |
       | station             | numpy.ndarray     | Station name list                                                  |
       | system              | numpy.ndarray     | GNSS identifier (e.g. G or E)                                      |
       | time                | Time              | Parameter time given as TimeTable object                           |
       | troposphere_zhd     | numpy.ndarray     | Zenith hydrostatic troposphere delay parameter                     |
       | troposphere_zwd     | numpy.ndarray     | Zenith wet troposphere delay parameter                             |
       | troposphere_ge      | numpy.ndarray     | Horizontal delay gradient in the East direction                    |
       | troposphere_gn      | numpy.ndarray     | Horizontal delay gradient in the North direction                   |

       The fields above are given for 'apriori', 'value' and 'sigma' Dataset collections.

        """
        # TODO: Handling of unit. Should be added to dataset fields.

        # Map TDP parameter names to Dataset field name, category and data type
        field = {
            "Clk Bias":
            DatasetField(
                None, None,
                "float"),  # can be either receiver or satellite clock bias
            "Antennas Antenna1 MapCenterOffset All Z":
            DatasetField("satellite_ant_pco", "Satellite", "position"),
            "State Pos Z":
            DatasetField("site_posvel", "Station", "posvel"),
            "Source":
            DatasetField("source_id", "Source", "float"),
            "Trop GradEast":
            DatasetField("troposphere_ge", "Station", "float"),
            "Trop GradNorth":
            DatasetField("troposphere_gn", "Station", "float"),
            "Trop DryZ":
            DatasetField("troposphere_zhd", "Station", "float"),
            "Trop WetZ":
            DatasetField("troposphere_zwd", "Station", "float"),
        }

        # X/Y components are folded into the Z-keyed position/posvel fields below
        not_used_parameter = [
            "Antennas Antenna1 MapCenterOffset All X",
            "Antennas Antenna1 MapCenterOffset All Y",
            "State Pos X",
            "State Pos Y",
            "State Vel X",
            "State Vel Y",
            "State Vel Z",
        ]

        dset = dataset.Dataset(num_obs=len(self.data["time_past_j2000"]))
        dset.meta.update(self.meta)

        # Note: GipsyX uses continuous seconds past Jan. 1, 2000 11:59:47 UTC time format in TDP files. That means,
        #       GipsyX does not follow convention of J2000:
        #           1.01.2000 12:00:00     TT  (TT = GipsyX(t) + 13s)
        #           1.01.2000 11:59:27.816 TAI (TAI = TT - 32.184s)
        #           1.01.2000 11:58:55.816 UTC (UTC = TAI + leap_seconds = TAI - 32s)
        #           1.01.2000 11:59:08.816 GPS (GPS = TAI - 19s)
        #
        #       Therefore Time object initialized with TT time scale has to be corrected about 13 seconds.
        #
        # TODO: Introduce j2000 = 2451545.0 as constant or unit?
        dset.add_time(
            "time",
            val=Time((self.data["time_past_j2000"] + 13.0) * Unit.second2day +
                     2451545.0,
                     scale="tt",
                     fmt="jd").gps,
        )
        keep_idx = np.ones(dset.num_obs, dtype=bool)
        collections = ["apriori", "value", "sigma"]

        # Loop over all existing parameter names
        for name in set(self.data["name"]):
            category, identifier, parameter = name.replace(
                ".", " ").split(maxsplit=2)

            if parameter in not_used_parameter:
                continue

            # Add station and satellite field to Dataset by first occurence
            if "Satellite" in category:
                if "satellite" not in dset.fields:
                    dset.add_text("satellite",
                                  val=np.repeat(None, dset.num_obs))
                    dset.add_text("system", val=np.repeat(None, dset.num_obs))

            if "Station" in category:
                if "station" not in dset.fields:
                    dset.add_text("station",
                                  val=np.repeat(identifier.lower(),
                                                dset.num_obs))

            if "Source" in category:
                idx = name == self.data["name"]

                for collection in collections:
                    field_name = f"{collection}.{field['Source'].name}"
                    dset.add_float(field_name,
                                   val=np.full(dset.num_obs, np.NaN))
                    # NOTE(review): 'value' is used for all collections here
                    # ('apriori' and 'sigma' included) — confirm whether
                    # self.data[collection] was intended, as in the float branch.
                    dset[field_name][idx] = self.data["value"][idx]
                continue

            # Add parameter solution to Dataset
            if parameter in field.keys():

                idx = name == self.data["name"]

                if category == "Satellite":
                    sys = enums.get_value("gnss_3digit_id_to_id",
                                          identifier[0:3])
                    dset.system[idx] = sys
                    dset.satellite[idx] = sys + identifier[3:5]

                # Loop over 'apriori', 'value' and 'sigma' solutions, which are saved in separated Dataset collections
                for collection in collections:
                    field_name = f"{collection}.{field[parameter].name}"
                    log.debug(
                        f"Add dataset field '{field_name}' for parameter '{parameter}' and identifier '{identifier}'."
                    )

                    # Add float fields to Dataset
                    if field[parameter].dtype == "float":

                        # Note: "Clk Bias" parameter exists for receiver and satellite, therefore it has to be
                        #       distinguished based on the length of the 'identifier' (e.g. USNO or GPS64).
                        if parameter == "Clk Bias":
                            field_name = (f"{collection}.satellite_clock"
                                          if len(identifier) == 5 else
                                          f"{collection}.receiver_clock")

                        if field_name not in dset.fields:
                            dset.add_float(field_name,
                                           val=np.full(dset.num_obs, np.NaN))

                        # Fill field with data. This must happen also when the
                        # field already exists, so that later parameter names
                        # writing to the same field (e.g. the clock of a second
                        # satellite) are not silently dropped.
                        dset[field_name][idx] = self.data[collection][idx]

                    # Add position fields to Dataset
                    elif field[parameter].dtype == "position":

                        if field_name not in dset.fields:
                            dset.add_position(field_name,
                                              time=dset.time,
                                              system="trs",
                                              val=np.full((dset.num_obs, 3),
                                                          np.NaN))

                        # Fill position field with data
                        tmp_sol = dict()

                        for item in [".X", ".Y", ".Z"]:
                            idx_item = name.replace(".Z",
                                                    item) == self.data["name"]
                            tmp_sol[item] = self.data["value"][idx_item]
                            # Note: Only .Z dataset indices are used for saving position field in Dataset. .X and .Y are
                            #       not necessary anymore and are removed from Dataset by using "keep_idx" variable.
                            if not item == ".Z":
                                keep_idx[idx_item] = False

                        dset[field_name][idx] = np.vstack(
                            (tmp_sol[".X"], tmp_sol[".Y"], tmp_sol[".Z"])).T

                    # Add posvel fields to Dataset
                    elif field[parameter].dtype == "posvel":

                        if field_name not in dset.fields:
                            dset.add_posvel(field_name,
                                            time=dset.time,
                                            system="trs",
                                            val=np.full((dset.num_obs, 6),
                                                        np.NaN))

                        # Fill position field with data
                        tmp_sol = dict()
                        for item in [
                                "State.Pos.X",
                                "State.Pos.Y",
                                "State.Pos.Z",
                                "State.Vel.X",
                                "State.Vel.Y",
                                "State.Vel.Z",
                        ]:
                            idx_item = name.replace("State.Pos.Z",
                                                    item) == self.data["name"]
                            tmp_sol[item] = self.data["value"][idx_item]
                            if not item == "State.Pos.Z":
                                keep_idx[idx_item] = False

                        dset[field_name][idx] = np.vstack((
                            tmp_sol["State.Pos.X"],
                            tmp_sol["State.Pos.Y"],
                            tmp_sol["State.Pos.Z"],
                            tmp_sol["State.Vel.X"],
                            tmp_sol["State.Vel.Y"],
                            tmp_sol["State.Vel.Z"],
                        )).T

            else:
                log.fatal(f"Parameter {parameter} is not defined.")

        dset.subset(
            keep_idx)  # Remove unnecessary entries (e.g. '.X' and '.Y' )

        return dset
Example #8
0
    def as_dataset(self) -> "Dataset":
        """Store Gipsy residual data in a dataset

        Returns:
            Midgard Dataset where residual data are stored with following fields:


       | Field               | Type              | Description                                                        |
       |---------------------|-------------------|--------------------------------------------------------------------|
       | azimuth             | numpy.ndarray     | Azimuth from receiver                                              |
       | azimuth_sat         | numpy.ndarray     | Azimuth from satellite                                             |
       | elevation           | numpy.ndarray     | Elevation from receiver                                            |
       | elevation_sat       | numpy.ndarray     | Elevation from satellite                                           |
       | data_type           | numpy.ndarray     | Data type (e.g. IonoFreeC_1P_2P, IonoFreeL_1P_2P)                  |
       | residual            | numpy.ndarray     | Post-fit residual                                                  |
       | satellite           | numpy.ndarray     | Satellite PRN number together with GNSS identifier (e.g. G07)      |
       | station             | numpy.ndarray     | Station name list                                                  |
       | system              | numpy.ndarray     | GNSS identifier (e.g. G or E)                                      |
       | time                | Time              | Parameter time given as TimeTable object                           |

        """
        # TODO: Handling of unit. Should be added to dataset fields.

        dset = dataset.Dataset(num_obs=len(self.data["time_past_j2000"]))
        dset.meta.update(self.meta)

        # Note: GipsyX uses continuous seconds past Jan. 1, 2000 11:59:47 UTC time format in TDP files. That means,
        #       GipsyX does not follow convention of J2000:
        #           1.01.2000 12:00:00     TT  (TT = GipsyX(t) + 13s)
        #           1.01.2000 11:59:27.816 TAI (TAI = TT - 32.184s)
        #           1.01.2000 11:58:55.816 UTC (UTC = TAI + leap_seconds = TAI - 32s)
        #           1.01.2000 11:59:08.816 GPS (GPS = TAI - 19s)
        #
        #       Therefore Time object initialized with TT time scale has to be corrected about 13 seconds.
        #
        # TODO: Introduce j2000 = 2451545.0 as constant or unit?
        dset.add_time(
            "time",
            val=Time((np.array(self.data["time_past_j2000"]) + 13.0) *
                     Unit.second2day + 2451545.0,
                     scale="tt",
                     fmt="jd").gps,
        )

        # Add every remaining parsed column as a Dataset field
        for name in self.data.keys():
            if name == "time_past_j2000":
                continue  # already handled above

            if name == "satellite":
                # Satellite names like "GPS07" are split into system ("G") and
                # PRN ("07") based on the 3-character GNSS identifier
                dset.add_text("satellite", val=np.repeat(None, dset.num_obs))
                dset.add_text("system", val=np.repeat(None, dset.num_obs))
                all_sats = np.array(self.data["satellite"])
                for sat in set(self.data["satellite"]):
                    idx = sat == all_sats
                    sys = enums.get_value("gnss_3digit_id_to_id", sat[0:3])
                    dset.system[idx] = sys
                    dset.satellite[idx] = sys + sat[3:5]
            elif name in ("data_type", "station"):
                dset.add_text(name, val=self.data[name])
            elif name == "deleted":
                dset.add_bool(name, val=self.data[name])
            else:
                dset.add_float(name, val=self.data[name])

        return dset
Example #9
0
    def as_dataset(self) -> "Dataset":
        """Store GipsyX estimates and covariance information in a dataset

        Returns:
            Midgard Dataset where time dependent parameter data are stored with following fields:


       | Field               | Type              | Description                                                        |
       |---------------------|-------------------|--------------------------------------------------------------------|
       | correlation_x       | numpy.ndarray     | Correlation for x station coordinate                               |
       | correlation_y       | numpy.ndarray     | Correlation for y station coordinate                               |
       | correlation_z       | numpy.ndarray     | Correlation for z station coordinate                               |
       | sigma_x             | numpy.ndarray     | Standard deviation for x station coordinate                        |
       | sigma_y             | numpy.ndarray     | Standard deviation for y station coordinate                        |
       | sigma_z             | numpy.ndarray     | Standard deviation for z station coordinate                        |
       | site_pos            | Position          | x, y and z station coordinates                                     |
       | station             | numpy.ndarray     | Station name list                                                  |
       | time                | Time              | Parameter time given as TimeTable object                           |

       The fields above are given for 'apriori', 'value' and 'sigma' Dataset collections.

        """
        # Boolean masks selecting the x, y and z coordinate parameters
        idx_x = self.data["parameter"] == "STA.X"
        idx_y = self.data["parameter"] == "STA.Y"
        idx_z = self.data["parameter"] == "STA.Z"

        # One observation per x-coordinate entry (x, y, z come in triplets)
        dset = dataset.Dataset(
            num_obs=len(self.data["time_past_j2000"][idx_x]))
        dset.meta.update(self.meta)

        # Note: GipsyX uses continuous seconds past Jan. 1, 2000 11:59:47 UTC time format in TDP files. That means,
        #       GipsyX does not follow convention of J2000:
        #           1.01.2000 12:00:00     TT  (TT = GipsyX(t) + 13s)
        #           1.01.2000 11:59:27.816 TAI (TAI = TT - 32.184s)
        #           1.01.2000 11:58:55.816 UTC (UTC = TAI + leap_seconds = TAI - 32s)
        #           1.01.2000 11:59:08.816 GPS (GPS = TAI - 19s)
        #
        #       Therefore Time object initialized with TT time scale has to be corrected about 13 seconds.
        #
        gps_epochs = Time(
            (np.array(self.data["time_past_j2000"][idx_x]) + 13.0) *
            Unit.second2day + 2451545.0,
            scale="tt",
            fmt="jd",
        ).gps
        dset.add_time("time", val=gps_epochs)

        dset.add_text("station", val=self.data["station"][idx_x])
        dset.add_float("sigma_x", val=self.data["sigma"][idx_x], unit="meter")
        dset.add_float("sigma_y", val=self.data["sigma"][idx_y], unit="meter")
        dset.add_float("sigma_z", val=self.data["sigma"][idx_z], unit="meter")

        # Stack the per-axis estimates into one (num_obs, 3) coordinate array
        coords = np.vstack(
            (self.data["estimate"][idx_x], self.data["estimate"][idx_y],
             self.data["estimate"][idx_z])).T
        dset.add_position("site_pos", time=dset.time, system="trs", val=coords)

        # TODO: how to use dset.add_sigma? how to save correlation?
        # tmp = dict()
        # for num_sta, station in enumerate(self.data["station"]):
        #    tmp.setdefault("correlation_x", list()).append()
        #    tmp.setdefault("correlation_y", list()).append()
        #    tmp.setdefault("correlation_z", list()).append()

        #   |1
        #   |2  3
        #   ------
        #    4  5  6
        #    7  8  9 |10
        #   11 12 13 |14 15
        #            --------
        #   16 17 18  19 20 21
        #   22 23 24  25 26 27 |28
        #   29 30 31  32 33 34 |35 36
        #                      -------

        return dset