Example #1
def test_difference_2():
    _dset1 = dataset.Dataset(3)
    _dset1.add_float("numbers", [1, 2, 3], unit="meter")
    _dset1.add_time(
        "t",
        [datetime(2009, 1, 1),
         datetime(2009, 1, 2),
         datetime(2009, 2, 2)],
        scale="utc",
        fmt="datetime")
    _dset1.add_bool("flag", [True, False, True])
    _dset1.add_text("letters", list("abc"))
    _dset1.add_position("pos", [[1, 2, 3], [3, 2, 1], [2, 2, 2]], system="trs")
    _dset1.add_float("group.more", [5, 5, 5])
    _dset1.add_text("group.even_more", list("fgh"))

    _dset2 = dataset.Dataset(4)
    _dset2.add_float("numbers", [3, 4, 5, 6], unit="meter")
    _dset2.add_time(
        "t",
        [
            datetime(2009, 1, 1),
            datetime(2009, 2, 1),
            datetime(2009, 2, 2),
            datetime(2009, 3, 3)
        ],
        scale="utc",
        fmt="datetime",
    )
    _dset2.add_bool("flag", [False, False, True, True])
    _dset2.add_text("letters", list("cdef"))
    _dset2.add_position("pos", [[4, 5, 6], [6, 5, 4], [7, 8, 9], [2, 3, 4]],
                        system="trs")
    _dset2.add_float("group.more", [4, 3, 4, 3])
    _dset2.add_text("group.even_more", list("jilh"))

    _dset3 = _dset1.difference(_dset2, index_by="t")

    assert np.equal(_dset3.numbers, [-2, -2]).all()
    assert np.equal(np.asarray(_dset3.pos), [[-3, -3, -3], [-5, -6, -7]]).all()
    assert np.equal(_dset3.group.more, [1, 1]).all()
    assert np.equal(
        _dset3.t,
        [datetime(2009, 1, 1), datetime(2009, 2, 2)]).all()

    _dset4 = _dset1.difference(_dset2, index_by="t, flag")
    assert np.equal(_dset4.numbers, [-2]).all()
    assert np.equal(np.asarray(_dset4.pos), [[-5, -6, -7]]).all()
    assert np.equal(_dset4.group.more, [1]).all()
    assert np.equal(_dset4.t, [datetime(2009, 2, 2)]).all()
    assert np.equal(_dset4.flag, [True]).all()

    _dset5 = _dset1.difference(_dset2,
                               index_by="t, flag",
                               copy_self_on_error=True,
                               copy_other_on_error=True)
    assert np.char.equal(_dset5.letters_self, list("c")).all()
    assert np.char.equal(_dset5.letters_other, list("e")).all()
    assert np.char.equal(_dset5.group.even_more_self, list("h")).all()
    assert np.char.equal(_dset5.group.even_more_other, list("l")).all()
Example #2
def test_difference_1():
    _dset1 = dataset.Dataset(2)
    _dset1.add_float("numbers", [1, 2], unit="meter")
    _dset1.add_time(
        "t", [datetime(2009, 1, 1), datetime(2009, 2, 1)],
        scale="utc",
        fmt="datetime")
    _dset1.add_bool("flag", [True, False])
    _dset1.add_text("letters", list("ab"))
    _dset1.add_position("pos", [[1, 2, 3], [3, 2, 1]], system="trs")
    _dset1.add_float("group.more", [5, 5])

    _dset2 = dataset.Dataset(2)
    _dset2.add_float("numbers", [4, 5], unit="meter")
    _dset2.add_time(
        "t", [datetime(2009, 1, 1), datetime(2009, 2, 2)],
        scale="utc",
        fmt="datetime")
    _dset2.add_bool("flag", [False, True])
    _dset2.add_text("letters", list("cd"))
    _dset2.add_position("pos", [[4, 5, 6], [6, 5, 4]], system="trs")
    _dset2.add_float("group.more", [4, 3])

    _dset3 = _dset1.difference(_dset2)
    assert np.equal(_dset3.numbers, [-3, -3]).all()
    assert np.equal(np.asarray(_dset3.pos), [[-3, -3, -3], [-3, -3, -3]]).all()
    assert np.equal(_dset3.group.more, [1, 2]).all()
Example #3
def test_unit():
    _dset1 = dataset.Dataset(3)
    _dset1.add_float("numbers_1", [1, 2, 3], unit="meter")
    _dset1.add_float("numbers_2", [1, 2, 3], unit="second")
    _dset1.add_bool("idx", [True, False, True])
    _dset1.add_float("group.numbers", [1, 2, 3], unit="watt")
    _dset1.add_sigma("sigma", [1, 2, 3], sigma=[0.1, 0.2, 0.3], unit="meter")

    assert _dset1.unit("numbers_1") == ("meter", )
    assert _dset1.unit("numbers_2") == ("second", )
    assert _dset1.unit("group.numbers") == ("watt", )
    with pytest.raises(exceptions.UnitError):
        _dset1.unit("idx")
    assert _dset1.unit_short("numbers_1") == ("m", )
    assert _dset1.unit_short("numbers_2") == ("s", )
    with pytest.raises(exceptions.UnitError):
        _dset1.unit_short("idx")
    assert _dset1.unit_short("group.numbers") == ("W", )
    assert _dset1.unit("sigma") == ("meter", )
    assert _dset1.unit("sigma.sigma") == ("meter", )
    assert _dset1.unit_short("sigma") == ("m", )
    assert _dset1.unit_short("sigma.sigma") == ("m", )

    _dset1.set_unit("sigma", "seconds")

    assert _dset1.unit("sigma") == ("seconds", )
    assert _dset1.unit("sigma.sigma") == ("seconds", )
    assert _dset1.unit_short("sigma") == ("s", )
    assert _dset1.unit_short("sigma.sigma") == ("s", )
Example #4
def dset_meta_error_msg(type_):
    # Saving a too-complex datatype to meta should raise an error on write

    _dset = dataset.Dataset()
    _dset.meta.add("a", type_)
    with pytest.raises(TypeError):
        _dset.write("test.hdf5")
Example #5
    def as_dataset(self):
        """Store GNSS data in a dataset

        Returns:
            Midgard Dataset where data are stored.
        """
        dset = dataset.Dataset(num_obs=len(self.data["pseudorange"]))

        dset.add_float("C1C", val=self.data["pseudorange"])

        # TODO: why does dset.add_time("time", val=self.data["time"]) not work?
        dset.add_time("time",
                      val=self.data["time"].gps.datetime,
                      scale="gps",
                      fmt="datetime")
        # dset.add_time('sat_time', val=self.data["sat_time"].gps.datetime, scale="gps", fmt="datetime")

        dset.add_text("station",
                      val=np.full(dset.num_obs, "android", dtype=str))
        dset.add_text("satellite", val=self.data["satellite"])
        dset.add_text("satnum", val=self.data["Svid"])
        dset.add_text("system", val=self.data["system"])
        dset.add_position("site_pos",
                          time=dset.time,
                          val=self.data["site_pos"])

        return dset
Example #6
    def as_dataset(self) -> "Dataset":
        """Return the parsed data as a Dataset

        Returns:
            dset (Dataset): The Dataset where water level observations are stored with the following fields:

       |  Field               | Type              | Description                                                       |
       |----------------------|-------------------|-------------------------------------------------------------------|
       | flag                 | numpy.ndarray     | Data flag (obs: observation, pre: prediction, weather: weather    |
       |                      |                   | effect, forecast: forecast)                                       |
       | time                 | TimeTable         | Observation time given as TimeTable object                        |
       | water_level          | numpy.ndarray     | Water level in [m]                                                |

            and following Dataset `meta` data:

       |  Entry               | Type              | Description                                                       |
       |----------------------|-------------------|-------------------------------------------------------------------|
       | __data_path__        | str               | File path                                                         |
       | __url__              | str               | URL of water level API                                            |

        """
        dset = dataset.Dataset(num_obs=len(self.data["time"]))

        time = [
            d.astimezone(tz=pytz.utc).replace(tzinfo=None)
            for d in self.data["time"]
        ]  # Convert time to UTC
        dset.add_time("time", val=time, scale="utc", fmt="datetime")
        dset.add_float("water_level",
                       val=np.array(self.data["value"]) *
                       Unit.centimeter2meter)
        dset.add_text("flag", val=self.data["flag"])

        dset.meta.update(self.meta)
        return dset
Example #7
def test_filter():
    _dset = dataset.Dataset(7)
    _dset.add_text("text_1", list("abcdefg"))
    _dset.add_text("text_2", list("gfedcba"))
    _dset.add_time("time", [datetime(2015, 1, 1, i) for i in range(0, 7)],
                   fmt="datetime",
                   scale="utc")
    _dset.add_text("group.text_1", list("hijklmn"))
    _dset.add_text("group.text_2", list("nmlkjih"))

    # Normal filter with and without group
    idx1 = _dset.filter(text_1="a")
    idx2 = _dset.filter(text_1="h", collection="group")
    idx3 = _dset.filter(time=datetime(2015, 1, 1, 0))
    idx = np.array([True, False, False, False, False, False, False],
                   dtype=bool)
    assert np.equal(idx1, idx).all()
    assert np.equal(idx2, idx).all()
    assert np.equal(idx3, idx).all()

    # Underscore filter with and without group
    idx1 = _dset.filter(text="a")
    idx2 = _dset.filter(text="h", collection="group")
    idx = np.array([True, False, False, False, False, False, True], dtype=bool)
    assert np.equal(idx1, idx).all()
    assert np.equal(idx2, idx).all()

    # Wrong field
    with pytest.raises(AttributeError):
        _dset.filter(tull="a")
Example #8
    def as_dataset(self) -> "Dataset":
        """Return the parsed data as a Dataset

        Returns:
            A dataset containing the data.
        """

        # Initialize dataset
        dset = dataset.Dataset()
        if not self.data:
            log.warn("No data in {self.file_path}.")
            return dset
        dset.num_obs = len(self.data["year"])

        # Add time
        epochs = list()
        for year, doy, seconds in zip(self.data["year"], self.data["doy"],
                                      self.data["seconds"]):
            epochs.append(
                datetime.strptime("{:.0f} {:.0f}".format(year, doy), "%Y %j") +
                timedelta(seconds=seconds))

        dset.add_time(name="time",
                      val=epochs,
                      scale="gps",
                      fmt="datetime",
                      write_level="operational")

        # Add system field
        if "system" in self.data.keys():
            systems = []
            for system in self.data["system"]:
                systems.append(enums.gnss_name_to_id[system.lower()].value)

            dset.add_text("system", val=systems)

        # Add satellite field
        if "satellite" in self.data.keys():
            satellites = []
            for system, satellite in zip(dset.system, self.data["satellite"]):
                satellites.append(system + str(satellite).zfill(2))

            dset.add_text("satellite", val=satellites)

        # Add text and float fields
        fields = set(self.data.keys()) - {
            "year", "doy", "seconds", "system", "satellite"
        }
        for field in fields:
            if self.data[field].dtype.kind in {"U", "S"}:  # Check if numpy type is string
                dset.add_text(field, val=self.data[field])
                continue

            dset.add_float(field, val=self.data[field])

        return dset
Example #9
def test_nested_collections():
    _dset = dataset.Dataset(2)
    _dset.add_float("group.group.tall", [5, 6])

    assert np.equal(_dset.group.group.tall, [5, 6]).all()

    idx = np.array([True, False], dtype=bool)
    _dset.subset(idx)

    assert np.equal(_dset.group.group.tall, [5]).all()

    _dset1 = dataset.Dataset(2)
    _dset1.add_float("group.group.tall", [5, 6])

    _dset2 = dataset.Dataset(3)
    _dset2.add_float("group.group.tall", [1, 2, 3])

    _dset1.extend(_dset2)
    assert np.equal(_dset1.group.group.tall, [5, 6, 1, 2, 3]).all()
Example #10
    def as_dataset(self) -> "Dataset":
        """Return the parsed data as a Dataset

        Returns:
            Midgard Dataset where troposphere observations are stored with the following fields:

       |  Field                   | Type           | Description                                                      |
       |--------------------------|----------------|------------------------------------------------------------------|
       | flag                     | numpy.ndarray  | Station flag (see section 24.7.1 of Bernese GNSS software        |
       |                          |                | version 5.2, November 2015)                                      |
       | gradients_ew             | numpy.ndarray  | Gradients in East/West in [m]                                    |
       | gradients_ns             | numpy.ndarray  | Gradients in North/South in [m]                                  |
       | sigma_gradients_ew       | numpy.ndarray  | Standard deviation of gradients in East/West in [m]              |
       | sigma_gradients_ns       | numpy.ndarray  | Standard deviation of gradients in North/South in [m]            |
       | sigma_zwd                | numpy.ndarray  | Standard deviation of ZWD in [m]                                 |
       | station                  | numpy.ndarray  | Station name                                                     |
       | time                     | TimeTable      | Observation time given as TimeTable object                       |
       | zhd                      | numpy.ndarray  | Zenith Hydrostatic Delay (ZHD) from a-priori model in [m]        |
       | ztd                      | numpy.ndarray  | Zenith Total Delay (ZTD) in [m]                                  |
       | zwd                      | numpy.ndarray  | Zenith Wet Delay (ZWD) in [m]                                    |
        """

        skip_fields = ["to_time"]
        text_fields = ["flag", "station"]

        # Generate dataset
        dset = dataset.Dataset(num_obs=len(self.data["time"]))
        dset.meta = self.meta

        for field in self.data.keys():

            if field in skip_fields:
                continue

            if field in text_fields:

                if field == "station":
                    dset.add_text(field,
                                  val=[v.lower() for v in self.data[field]])
                else:
                    dset.add_text(field, val=self.data[field])

            elif field == "time":
                dset.add_time(field,
                              val=self.data[field],
                              scale="gps",
                              fmt="datetime")

            else:
                dset.add_float(field, val=self.data[field], unit="meter")

        return dset
Example #11
def test_suffix():
    _dset = dataset.Dataset(2)
    _dset.add_float("numbers_1", [1, 1], multiplier=10)
    _dset.add_float("numbers_2", [2, 2], multiplier=-10)
    _dset.add_float("group.numbers_1", [3, 3], multiplier=10)
    _dset.add_float("group.numbers_2", [4, 4], multiplier=-10)

    answer = np.array([[10, 10], [-20, -20]])
    for i, multiplier in enumerate(_dset.for_each_suffix("numbers")):
        assert np.equal(answer[i], multiplier * _dset.numbers).all()

    answer2 = np.array([[30, 30], [-40, -40]])
    for i, multiplier in enumerate(_dset.for_each_suffix("group.numbers")):
        assert np.equal(answer2[i], multiplier * _dset.group.numbers).all()
Example #12
    def as_dataset(self) -> "Dataset":
        """Return the parsed data as a Dataset

        Returns:
            Midgard Dataset where station coordinates and associated information are stored with the following fields:

       |  Field                   | Type           | Description                                                      |
       |--------------------------|----------------|------------------------------------------------------------------|
       | domes                    | numpy.ndarray  | Domes number                                                     |
       | flag                     | numpy.ndarray  | Station flag (see section 24.7.1 of Bernese GNSS software        |
       |                          |                | version 5.2, November 2015)                                      |
       | station                  | numpy.ndarray  | Station name                                                     |
       | site_pos                 | PositionTable  | Station coordinates given as PositionTable object                |

            and following Dataset `meta` data:

       |  Entry              | Type  | Description                                                                    |
       |---------------------|-------|--------------------------------------------------------------------------------|
       | \__data_path__      | str   | File path                                                                      |
        """

        # Generate dataset
        dset = dataset.Dataset(num_obs=len(self.data["station"]))
        dset.meta = self.meta.copy()

        # Remove unnecessary fields in meta
        for key in ["__params__", "__parser_name__"]:
            del dset.meta[key]

        # Add fields to dataset
        for field in ["domes", "flag", "station"]:

            if field == "station":
                dset.add_text(field, val=[v.lower() for v in self.data[field]])
            else:
                dset.add_text(field, val=self.data[field])

        dset.add_position(
            "site_pos",
            time=Time([dset.meta["epoch"] for ii in range(0, dset.num_obs)],
                      scale="gps",
                      fmt="datetime"),
            system="trs",
            val=np.stack(
                (np.array(self.data["pos_x"]),
                 np.array(self.data["pos_y"]),
                 np.array(self.data["pos_z"])),
                axis=1),
        )

        return dset
Example #13
def test_difference_3():
    _dset1 = dataset.Dataset(3)
    _dset1.add_float("numbers", [1, 2, 3], unit="meter")

    _dset2 = dataset.Dataset(3)
    _dset2.add_float("numbers", [1, 2, 3], unit="cm")

    _dset3 = _dset1.difference(_dset2)
    assert np.equal(_dset3.numbers, [0.99, 1.98, 2.97]).all()
    assert _dset3.unit("numbers") == ("meter", )

    _dset4 = dataset.Dataset(3)
    _dset4.add_float("numbers", [1, 2, 3], unit="second")

    # Incompatible units
    with pytest.raises(ValueError):
        _dset1.difference(_dset4)

    _dset5 = dataset.Dataset(1)
    _dset5.add_float("numbers", [1], unit="meter")

    # Different length datasets (without specifying index_by)
    with pytest.raises(ValueError):
        _dset1.difference(_dset5)
Example #14
    def as_dataset(self, ref_pos: Union[np.ndarray, List[float]]) -> "Dataset":
        """Return the parsed data as a Dataset

        Args:
            ref_pos: Reference position given in terrestrial reference system and meters

        Returns:
            A dataset containing the data.
        """

        # Initialize dataset
        dset = dataset.Dataset()
        if not self.data:
            log.warn("No data in {self.file_path}.")
            return dset
        dset.num_obs = len(self.data["date"])

        # Add position
        ref_pos = position.Position(np.repeat(np.array([ref_pos]),
                                              dset.num_obs,
                                              axis=0),
                                    system="trs")
        dset.add_position_delta(
            name="pos",
            val=np.stack(
                (self.data["east"], self.data["north"], self.data["vertical"]),
                axis=1) * Unit.millimeter2meter,
            system="enu",
            ref_pos=ref_pos,
        )

        # Add position sigma
        sigma = np.stack((self.data["east_sigma"], self.data["north_sigma"],
                          self.data["vertical_sigma"]),
                         axis=1)
        dset.add_sigma(name="pos_sigma",
                       val=dset.pos.val,
                       sigma=sigma * Unit.millimeter2meter,
                       unit="meter")

        # Add time
        dset.add_time(name="time",
                      val=self.data["year"],
                      scale="utc",
                      fmt="decimalyear",
                      write_level="operational")

        return dset
Example #15
def test_read_write_3():
    """Test references after write and then read when attributes are not dataset fields"""
    _dset = dataset.Dataset(2)
    ref_pos = position.Position([[1, 2, 3], [1, 2, 3]], system="trs")
    ref_pos2 = position.PosVel([[1, 2, 3, 1, 1, 1], [1, 2, 3, 2, 2, 2]],
                               system="trs")
    other = position.Position([[7, 8, 9], [7, 8, 9]], system="trs")
    other2 = position.PosVel([[1, 2, 3, 1, 2, 3], [1, 2, 3, 4, 5, 6]],
                             system="trs")
    _dset.add_position("testpos", [[4, 5, 6], [4, 5, 6]],
                       system="trs",
                       other=other)
    _dset.add_position_delta("testposdelta",
                             [[0.1, 0.1, 0.1], [0.2, 0.2, 0.2]],
                             system="trs",
                             ref_pos=ref_pos)
    _dset.add_posvel("testposvel", [[1, 1, 1, 2, 2, 2], [3, 3, 3, 4, 4, 4]],
                     system="trs",
                     other=other2)
    _dset.add_posvel_delta("testposveldelta",
                           [[4, 4, 4, 1, 1, 1], [5, 5, 5, 2, 2, 2]],
                           system="trs",
                           ref_pos=ref_pos2)
    file_name = "test.hdf5"
    _dset.write(file_name)
    _dset_new = dataset.Dataset.read(file_name)

    def test_field(field, new_field):
        try:
            if field.data.dtype.type is np.str_:
                assert np.char.equal(field.data, new_field.data).all()
            else:
                assert np.equal(np.asarray(field.data),
                                np.asarray(new_field.data)).all()
        except AttributeError:
            # field is a collection
            for collection_field_name, collection_field in field.data._fields.items():
                new_collection_field = new_field.data._fields[collection_field_name]
                test_field(collection_field, new_collection_field)

    for field_name, field in _dset._fields.items():
        print(f"Testing {field_name}")
        new_field = _dset_new._fields[field_name]
        test_field(field, new_field)

    os.remove(file_name)
Example #16
def test_unique():
    _dset = dataset.Dataset(10)
    _dset.add_text("text", list("abcabcabca"))
    _dset.add_time("time", [58000] * 2 + [58001] * 6 + [58002] * 2,
                   fmt="mjd",
                   scale="utc")
    _dset.add_text("group.text", list("defdefdefd"))
    _dset.add_float("group.numbers", range(0, 10))

    assert np.char.equal(_dset.unique("text"), np.array(list("abc"))).all()
    assert np.equal(_dset.unique("time"), np.array([58000, 58001,
                                                    58002])).all()
    assert np.char.equal(_dset.unique("text", collection="group"),
                         np.array(list("def"))).all()
    assert np.equal(_dset.unique("group.numbers"), np.arange(0, 10)).all()
    assert np.equal(_dset.unique("numbers", collection="group"),
                    np.arange(0, 10)).all()
Example #17
    def as_dataset(self) -> "Dataset":
        """Return the parsed data as a Dataset

        GipsyX summary results are added to the Dataset 'meta' variable.

        Returns:
            A dataset containing the data.
        """
        dset = dataset.Dataset(num_obs=0)

        if not self.data:
            log.warn("No data in {self.file_path}.")
            return dset

        dset.meta["summary"] = self.data
        return dset
Example #18
def dset_no_collection():
    """Contains all available fieldstypes"""
    _dset = dataset.Dataset(5)
    _dset.add_bool("idx", val=[0, 1, 1, 0, 1])
    _dset.add_float("numbers", val=[1, 2, 3, 4, 5])
    _dset.add_position("sat_pos", val=np.ones((5, 3)), system="trs")
    _dset.add_position("site_pos",
                       val=np.ones((5, 3)) * 2,
                       system="trs",
                       other=_dset.sat_pos)
    _dset.add_position_delta("site_delta",
                             val=np.ones((5, 3)) * 0.5,
                             system="trs",
                             ref_pos=_dset.site_pos)
    _dset.add_posvel("sat_posvel", val=np.ones((5, 6)), system="trs")
    _dset.add_posvel("site_posvel",
                     val=np.ones((5, 6)) * 2,
                     system="trs",
                     other=_dset.sat_posvel)
    _dset.add_posvel_delta("site_posvel_delta",
                           val=np.ones((5, 6)) * 0.5,
                           system="trs",
                           ref_pos=_dset.site_posvel)
    _dset.add_sigma("numbers2",
                    val=[3, 3, 3, 3, 3],
                    sigma=[0.2, 0.2, 0.2, 0.2, 0.2])
    _dset.add_text("text", val=["aaa", "aaa", "aaa", "aaa", "aaa"])
    _dset.add_time("time",
                   val=[datetime(2015, 1, i) for i in range(5, 10)],
                   scale="utc",
                   fmt="datetime")
    _dset.add_time_delta("time_delta",
                         val=[timedelta(seconds=i) for i in range(20, 25)],
                         scale="utc",
                         fmt="timedelta")
    return _dset
Example #19
    def as_dataset(self) -> "Dataset":
        """Store GNSS data in a dataset

        Returns:
            Midgard Dataset where GNSS observations are stored with the following fields:

       |  Field               | Type              | Description                                                           |
       |----------------------|-------------------|-----------------------------------------------------------------------|
       | <observation type>   | numpy.ndarray     | GNSS observation type data (e.g. C1, P2, L1, L2, ...) given           |
       |                      |                   | in meters. Only observation types are kept, which are defined in      |
       |                      |                   | configuration file. Observation types are rejected, which include     |
       |                      |                   | only blank or 0.0 entries.                                            |
       | epoch_flag           | numpy.ndarray     | Epoch flag                                                            |
       | rcv_clk_offset       | numpy.ndarray     | Receiver clock offset in seconds given for each epoch                 |
       | satellite            | numpy.ndarray     | Satellite PRN number together with GNSS system identifier             |
       |                      |                   | (e.g. G07). Only satellites are kept, which are defined in            |
       |                      |                   | configuration file.                                                   |
       | satnum               | numpy.ndarray     | Satellite PRN number (e.g. 07). Only satellites are kept, which are   |
       |                      |                   | defined in configuration file.                                        |
       | site_pos             | PositionTable     | PositionTable object with given station coordinates (read from        |
       |                      |                   | RINEX header)                                                         |
       | station              | numpy.ndarray     | Station name list                                                     |
       | system               | numpy.ndarray     | GNSS system identifier. Only satellite system are kept, which are     |
       |                      |                   | defined in configuration file.                                        |
       | time                 | TimeTable         | Observation time given as TimeTable object                            |

            and following Dataset `meta` data:

       |  Entry              | Type  | Description                                                                        |
       |---------------------|-------|------------------------------------------------------------------------------------|
       | agency              | str   | Name of agency from observer                                                       |
       | antenna_east        | float | East component of vector between marker and antenna reference point in meters      |
       | antenna_height      | float | Height component of vector between marker and antenna reference point in meters    |
       | antenna_north       | float | North component of vector between marker and antenna reference point in meters     |
       | antenna_number      | str   | Antenna serial number                                                              |
       | antenna_type        | str   | Antenna type                                                                       |
       | comment             | list  | List with RINEX header comment lines                                               |
       | file_created        | str   | Date and time of file creation                                                     |
       | file_type           | str   | File type (e.g. 'O' for observation data)                                          |
       | interval            | float | Observation interval in seconds                                                    |
       | l1_wave_fact_default| str   | Default wavelength factors for L1 (GPS only)                                       |
       | l1_wave_fact_prn    | str   | Wavelength factors for L1 (GPS only) valid for a list of satellite PRNs (see       |
       |                     |       | wave_fact_prn)                                                                     |
       | l2_wave_fact_default| str   | Default wavelength factors for L2 (GPS only)                                       |
       | l2_wave_fact_prn    | str   | Wavelength factors for L2 (GPS only) valid for a list of satellite PRNs (see       |
       |                     |       | wave_fact_prn)                                                                     |
       | leap_seconds        | dict  | Dictionary with information related to leap seconds                                |
       | marker_name         | str   | Name of antenna marker                                                             |
       | marker_number       | str   | Number of antenna marker                                                           |
       | num_satellites      | int   | Number of satellites, for which observations are stored in the RINEX file          |
       | observer            | str   | Name of observer                                                                   |
       | obstypes            | dict  | Observation types given for each GNSS; observation types and GNSS that are        |
       |                     |       | empty (blank or zero entries) are rejected.                                        |
       | program             | str   | Name of program creating current file                                              |
       | rcv_clk_offset_flag | str   | Flag (1=yes, 0=no) indicating if realtime-derived receiver clock offset is         |
       |                     |       | applied for epoch, code, and phase                                                 |
       | receiver_number     | str   | Receiver serial number                                                             |
       | receiver_type       | str   | Receiver type                                                                      |
       | receiver_version    | str   | Receiver firmware version                                                          |
       | run_by              | str   | Name of agency creating current file                                               |
       | sat_sys             | str   | Satellite system given in observation file (G, R, E, S or M)                       |
       | time_first_obs      | str   | Time of first observation record                                                   |
       | time_last_obs       | str   | Time of last observation record                                                    |
       | time_sys            | str   | Time system used for GNSS observations (GPS, GLO or GAL)                           |
       | version             | str   | Format version                                                                     |
       | wave_fact_prn       | list  | List of satellite PRNs for which the wavelength factors l1_wave_fact_prn and       |
       |                     |       |  l2_wave_fact_prn are valid                                                        |
        """

        # Time
        # TODO: Handling of different time systems needed!!!
        dset = dataset.Dataset(num_obs=len(self.data["time"]))

        # TODO workaround: "isot" does not work for initialization of time field (only 5 decimals for seconds are
        #                  allowed). Therefore self.data["time"] is converted to datetime object.
        from datetime import datetime, timedelta

        date = []
        millisec = []
        for v in self.data["time"]:
            val, val2 = v.split(".")
            date.append(datetime.strptime(val, "%Y-%m-%dT%H:%M:%S"))
            millisec.append(timedelta(milliseconds=int(val2)))
        dset.add_time("time",
                      val=date,
                      val2=millisec,
                      scale=self.time_scale,
                      fmt="datetime")
        dset.add_float("epoch_flag", val=np.array(self.data["epoch_flag"]))
        dset.add_float("rcv_clk_offset",
                       val=np.array(self.data["rcv_clk_offset"]))

        dset.meta.update(self.meta)

        for field, value in self.data["text"].items():
            dset.add_text(field, val=value)

        # Observations
        for obs_type in self.data["obs"]:
            dset.add_float(f"obs.{obs_type}",
                           val=np.array(self.data["obs"][obs_type]),
                           unit="meter")
            dset.add_float(f"lli.{obs_type}",
                           val=np.array(self.data["cycle_slip"][obs_type]))
            dset.add_float(f"snr.{obs_type}",
                           val=np.array(
                               self.data["signal_strength"][obs_type]))

        # Positions
        dset.add_position("site_pos",
                          time=dset.time,
                          system="trs",
                          val=np.repeat(self.data["pos"][None, :],
                                        dset.num_obs,
                                        axis=0))

        return dset
Example #20
    def as_dataset(self) -> "Dataset":
        """Return the parsed data as a Dataset

        Returns:
            A dataset containing the data.
        """
        # Spring constellation definition
        system_def = {
            "0": "",  # Unknown
            "1": "G",  # GPS
            "2": "R",  # GLONASS
            "3": "S",  # SBAS
            "4": "E",  # Galileo
            "5": "C",  # BeiDou
            "6": "J",  # QZSS
        }

        # Initialize dataset
        dset = dataset.Dataset()
        if not self.data:
            log.warn("No data in {self.file_path}.")
            return dset
        dset.num_obs = len(self.data["GPSEpoch"])

        # Add time
        dset.add_time(
            name="time",
            val=[
                dateutil.parser.parse(v.replace("UTC", ""))
                for v in self.data["UTCDateTime"]
            ],
            scale="utc",
            fmt="datetime",
            write_level="operational",
        )

        # Add system field based on Constellation column
        if "Constellation" in self.data.keys():
            dset.add_text("system",
                          val=[
                              system_def[str(value)]
                              for value in self.data["Constellation"]
                          ])

        # Add satellite field based on PRN column
        if "PRN" in self.data.keys():
            prn_data = []
            for prn in self.data["PRN"]:
                if prn >= 71 and prn <= 140:  # Handling of Galileo satellites
                    prn_data.append("E" + str(prn - 70).zfill(2))
                else:
                    log.fatal(f"Spring PRN number '{prn}' is unknown.")

            dset.add_text("satellite", val=prn_data)

        # Define fields to save in dataset
        remove_time_fields = {
            "Constellation", "GPSEpoch", "GPSWeek", "GPSSecond", "PRN", "",
            "UTCDateTime"
        }
        fields = set(self.data.keys()) - remove_time_fields

        # Add text and float fields
        for field in fields:

            if self.data[field].dtype.kind in {"U", "S"}:  # Check if numpy type is string
                dset.add_text(field.lower(), val=self.data[field])
                continue

            dset.add_float(field.lower(), val=self.data[field])

        return dset
Example #21
    def as_dataset(self) -> "Dataset":
        """Store Gipsy time dependent parameter data in a dataset

        Returns:
            Midgard Dataset where time dependent parameter data are stored with following fields:


       | Field               | Type              | Description                                                        |
       |---------------------|-------------------|--------------------------------------------------------------------|
       | receiver_clock      | numpy.ndarray     | Receiver clock parameter                                           |
       | satellite           | numpy.ndarray     | Satellite PRN number together with GNSS identifier (e.g. G07)      |
       | satellite_clock     | numpy.ndarray     | Satellite clock parameter                                          |
       | satellite_ant_pco   | PositionTable     | Satellite antenna phase center offset                              |
       | site_posvel         | PosVel            | Station coordinates and velocities                                 |
       | source_id           | numpy.ndarray     | Source ID                                                          |
       | station             | numpy.ndarray     | Station name list                                                  |
       | system              | numpy.ndarray     | GNSS identifier (e.g. G or E)                                      |
       | time                | Time              | Parameter time given as TimeTable object                           |
       | troposphere_zhd     | numpy.ndarray     | Zenith hydrostatic troposphere delay parameter                     |
       | troposphere_zwd     | numpy.ndarray     | Zenith wet troposphere delay parameter                             |
       | troposphere_ge      | numpy.ndarray     | Horizontal delay gradient in the East direction                    |
       | troposphere_gn      | numpy.ndarray     | Horizontal delay gradient in the North direction                   |
       
       The fields above are given for 'apriori', 'value' and 'sigma' Dataset collections.
        
        """
        # TODO: Handling of unit. Should be added to dataset fields.

        field = {
            # "Clk Bias" can be either a receiver or a satellite clock bias
            "Clk Bias": DatasetField(None, None, "float"),
            "Antennas Antenna1 MapCenterOffset All Z": DatasetField("satellite_ant_pco", "Satellite", "position"),
            "State Pos Z": DatasetField("site_posvel", "Station", "posvel"),
            "Source": DatasetField("source_id", "Source", "float"),
            "Trop GradEast": DatasetField("troposphere_ge", "Station", "float"),
            "Trop GradNorth": DatasetField("troposphere_gn", "Station", "float"),
            "Trop DryZ": DatasetField("troposphere_zhd", "Station", "float"),
            "Trop WetZ": DatasetField("troposphere_zwd", "Station", "float"),
        }

        not_used_parameter = [
            "Antennas Antenna1 MapCenterOffset All X",
            "Antennas Antenna1 MapCenterOffset All Y",
            "State Pos X",
            "State Pos Y",
            "State Vel X",
            "State Vel Y",
            "State Vel Z",
        ]

        dset = dataset.Dataset(num_obs=len(self.data["time_past_j2000"]))
        dset.meta.update(self.meta)

        # Note: GipsyX uses continuous seconds past Jan. 1, 2000 11:59:47 UTC time format in TDP files. That means,
        #       GipsyX does not follow convention of J2000:
        #           1.01.2000 12:00:00     TT  (TT = GipsyX(t) + 13s)
        #           1.01.2000 11:59:27.816 TAI (TAI = TT - 32.184s)
        #           1.01.2000 11:58:55.816 UTC (UTC = TAI + leap_seconds = TAI - 32s)
        #           1.01.2000 11:59:08.816 GPS (GPS = TAI - 19s)
        #
        #       Therefore a Time object initialized with the TT time scale has to be corrected by 13 seconds.
        #
        # TODO: Introduce j2000 = 2451545.0 as constant or unit?
        dset.add_time(
            "time",
            val=Time((self.data["time_past_j2000"] + 13.0) * Unit.second2day +
                     2451545.0,
                     scale="tt",
                     fmt="jd").gps,
        )
        keep_idx = np.ones(dset.num_obs, dtype=bool)
        collections = ["apriori", "value", "sigma"]

        # Loop over all existing parameter names
        for name in set(self.data["name"]):
            category, identifier, parameter = name.replace(
                ".", " ").split(maxsplit=2)

            if parameter in not_used_parameter:
                continue

            # Add station and satellite fields to Dataset at their first occurrence
            if "Satellite" in category:
                if "satellite" not in dset.fields:
                    dset.add_text("satellite",
                                  val=np.repeat(None, dset.num_obs))
                    dset.add_text("system", val=np.repeat(None, dset.num_obs))

            if "Station" in category:
                if "station" not in dset.fields:
                    dset.add_text("station",
                                  val=np.repeat(identifier.lower(),
                                                dset.num_obs))

            if "Source" in category:
                idx = name == self.data["name"]

                for collection in collections:
                    field_name = f"{collection}.{field['Source'].name}"
                    dset.add_float(field_name,
                                   val=np.full(dset.num_obs, np.nan))
                    dset[field_name][idx] = self.data["value"][idx]
                continue

            # Add parameter solution to Dataset
            if parameter in field.keys():

                idx = name == self.data["name"]

                if category == "Satellite":
                    sys = enums.get_value("gnss_3digit_id_to_id",
                                          identifier[0:3])
                    dset.system[idx] = sys
                    dset.satellite[idx] = sys + identifier[3:5]

                # Loop over 'apriori', 'value' and 'sigma' solutions, which are saved in separated Dataset collections
                for collection in collections:
                    field_name = f"{collection}.{field[parameter].name}"
                    log.debug(
                        f"Add dataset field '{field_name}' for parameter '{parameter}' and identifier '{identifier}'."
                    )

                    # Add float fields to Dataset
                    if field[parameter].dtype == "float":

                        # Note: "Clk Bias" parameter exists for receiver and satellite, therefore it has to be
                        #       distinguished based on the length of the 'identifier' (e.g. USNO or GPS64).
                        if parameter == "Clk Bias":
                            field_name = (f"{collection}.satellite_clock"
                                          if len(identifier) == 5 else
                                          f"{collection}.receiver_clock")

                        if field_name not in dset.fields:
                            dset.add_float(field_name,
                                           val=np.full(dset.num_obs, np.nan))
                        # Fill the field for every parameter mapping to it
                        # ("Clk Bias" occurs once per receiver/satellite)
                        dset[field_name][idx] = self.data[collection][idx]

                    # Add position fields to Dataset
                    elif field[parameter].dtype == "position":

                        if field_name not in dset.fields:
                            dset.add_position(field_name,
                                              time=dset.time,
                                              system="trs",
                                              val=np.full((dset.num_obs, 3),
                                                          np.nan))

                        # Fill position field with data
                        tmp_sol = dict()

                        for item in [".X", ".Y", ".Z"]:
                            idx_item = name.replace(".Z",
                                                    item) == self.data["name"]
                            tmp_sol[item] = self.data["value"][idx_item]
                            # Note: Only .Z dataset indices are used for saving position field in Dataset. .X and .Y are
                            #       not necessary anymore and are removed from Dataset by using "keep_idx" variable.
                            if not item == ".Z":
                                keep_idx[idx_item] = False

                        dset[field_name][idx] = np.vstack(
                            (tmp_sol[".X"], tmp_sol[".Y"], tmp_sol[".Z"])).T

                    # Add posvel fields to Dataset
                    elif field[parameter].dtype == "posvel":

                        if field_name not in dset.fields:
                            dset.add_posvel(field_name,
                                            time=dset.time,
                                            system="trs",
                                            val=np.full((dset.num_obs, 6),
                                                        np.nan))

                        # Fill position field with data
                        tmp_sol = dict()
                        for item in [
                                "State.Pos.X",
                                "State.Pos.Y",
                                "State.Pos.Z",
                                "State.Vel.X",
                                "State.Vel.Y",
                                "State.Vel.Z",
                        ]:
                            idx_item = name.replace("State.Pos.Z",
                                                    item) == self.data["name"]
                            tmp_sol[item] = self.data["value"][idx_item]
                            if not item == "State.Pos.Z":
                                keep_idx[idx_item] = False

                        dset[field_name][idx] = np.vstack((
                            tmp_sol["State.Pos.X"],
                            tmp_sol["State.Pos.Y"],
                            tmp_sol["State.Pos.Z"],
                            tmp_sol["State.Vel.X"],
                            tmp_sol["State.Vel.Y"],
                            tmp_sol["State.Vel.Z"],
                        )).T

            else:
                log.fatal(f"Parameter {parameter} is not defined.")

        # Remove unnecessary entries (e.g. '.X' and '.Y')
        dset.subset(keep_idx)

        return dset
Example #22
# The package supports many operations, including **transformations**, **combination** of data, **masking**, etc.
#
# See [rasterio.readthedocs.io](https://rasterio.readthedocs.io/) for more information.
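
# %% slideshow={"slide_type": "fragment"}
# A minimal sketch (not part of the original slides) of the operations named
# above, using the public rasterio API; "example.tif" and the polygon below
# are hypothetical inputs.
import rasterio
import rasterio.mask

shape = {"type": "Polygon",
         "coordinates": [[(0, 0), (0, 100), (100, 100), (100, 0), (0, 0)]]}

with rasterio.open("example.tif") as src:
    band = src.read(1)  # read the first band as a numpy array
    masked, transform = rasterio.mask.mask(src, [shape], crop=True)  # mask by geometry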

# %% [markdown] slideshow={"slide_type": "slide"}
# # Midgard
#
# **Midgard** is a package developed by **Kartverket** (the Norwegian Mapping Authority). Midgard started as a support library for **Where**, Kartverket's software for computing reference frames based on VLBI, SLR, and GNSS.

# %% [markdown] slideshow={"slide_type": "fragment"}
# Midgard provides a data structure for combining **position**, **velocity**, and **time** with geodetic accuracy.

# %% slideshow={"slide_type": "-"}
from midgard.data import dataset

dset = dataset.Dataset(num_obs=4)
dset.add_text("navn", val=list(punkter.keys()))
dset.add_time("tidspunkt", val=["2020-11-25T11:30:00"] * 4, scale="utc", fmt="isot")
dset.add_position("posisjon", val=np.array([[pkt[0] for pkt in punkter.values()], [pkt[1] for pkt in punkter.values()], [10, 20, 40, 25]]).T, system="llh", time="tidspunkt")
print(dset)

# %% [markdown] slideshow={"slide_type": "subslide"}
# ## Midgard
#
# Positions and velocities support different coordinate systems (a conversion sketch follows in the code cell below):
#
# - **llh**: Latitude, Longitude, Height
# - **trs**: XYZ (terrestrial reference system)
# - **enu**: Local East, North, Up

# %% slideshow={"slide_type": "-"}
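# A minimal sketch (assuming the `dset` created above): a position field can be
# read out in any of its supported systems by attribute access, and midgard
# converts the coordinates on the fly.
print(dset.posisjon.llh)  # latitude, longitude, height (as stored)
print(dset.posisjon.trs)  # the same points converted to geocentric X, Y, Z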
Example #23
def dset_empty():
    _dset = dataset.Dataset(5)
    return _dset
Example #24
    def as_dataset(
            self,
            ref_pos: Union[np.ndarray, List[float], None] = None) -> "Dataset":
        """Return the parsed data as a Dataset

        Args:
            ref_pos: Reference position given in terrestrial reference system and meters

        Returns:
            Midgard Dataset where GALAT result data are stored with the following fields:

    
           | Field                    | Type              | Description                                               |
           |--------------------------|-------------------|-----------------------------------------------------------|
           | hpe                      | np.ndarray        | Horizontal Position Error of site position vs. reference  |
           |                          |                   | position                                                  |
           | num_satellite_available  | np.ndarray        | Number of available satellites                            |
           | num_satellite_used       | np.ndarray        | Number of used satellites                                 |
           | pdop                     | np.ndarray        | Position dilution of precision                            |
           | site_pos                 | Position          | Site position                                             |
           | site_pos_vs_ref          | PositionDelta     | Site position versus reference coordinate                 |
           | site_vel_3d              | np.ndarray        | 3D site velocity                                          |
           | time                     | Time              | Parameter time given as TimeTable object                  |
           | vpe                      | np.ndarray        | Vertical Position Error of site position vs. reference    |
           |                          |                   | position                                                  |
        """
        fields = {
            #"hpe": "meter", # Recalculated based on site position and given reference coordinate
            #"vpe": "meter", # Recalculated based on site position and given reference coordinate
            "site_vel_3d": "meter/second",
            "pdop": "",
            "num_satellite_available": "",
            "num_satellite_used": "",
        }

        # Initialize dataset
        dset = dataset.Dataset()
        if not self.data:
            log.warn("No data in {self.file_path}.")
            return dset
        dset.num_obs = len(self.data["time"])

        # Add time field
        dset.add_time(
            "time",
            val=self.data["time"],
            scale="gps",
            fmt="datetime",
        )

        # Add float fields
        for field, unit in fields.items():
            dset.add_float(name=field, val=self.data[field], unit=unit)

        # Add site position field
        dset.add_position(
            "site_pos",
            val=np.stack(
                (
                    self.data["latitude"] * Unit.deg2rad,
                    self.data["longitude"] * Unit.deg2rad,
                    self.data["height"],
                ),
                axis=1,
            ),
            system="llh",
        )

        # Use either reference position from RINEX header or given argument as reference position
        if ref_pos is None:
            ref_pos = position.Position(
                np.repeat(
                    np.array([[
                        self.meta["pos_x"], self.meta["pos_y"],
                        self.meta["pos_z"]
                    ]]),
                    dset.num_obs,
                    axis=0,
                ),
                system="trs",
            )
        else:
            ref_pos = position.Position(np.repeat(np.array([ref_pos]),
                                                  dset.num_obs,
                                                  axis=0),
                                        system="trs")

        # Add relative position
        dset.add_position_delta(
            name="site_pos_vs_ref",
            val=(dset.site_pos.trs - ref_pos.trs).val,
            system="trs",
            ref_pos=ref_pos,
        )

        # Add HPE and VPE to dataset
        dset.add_float(
            "hpe",
            val=np.sqrt(dset.site_pos_vs_ref.enu.east**2 +
                        dset.site_pos_vs_ref.enu.north**2),
            unit="meter",
        )
        dset.add_float("vpe",
                       val=np.absolute(dset.site_pos_vs_ref.enu.up),
                       unit="meter")

        return dset
Example #25
def dset_float():
    _dset = dataset.Dataset(5)
    _dset.add_float("numbers", val=[0.1, 0.2, 0.3, 0.4, 0.5], unit="seconds")
    return _dset
Example #26
    def as_dataset(self) -> "Dataset":
        """Return the parsed data as a Dataset

        Returns:
            Midgard Dataset where station coordinates and associated information are stored with the following fields:

       |  Field                  | Type          | Description                                                       |
       |-------------------------|---------------|-------------------------------------------------------------------|
       | coord_comp_east_day<x>  | numpy.ndarray | Station coordinate comparison results for East component in [m]   |
       |                         |               | for day X (X=[1|2|...|7])                                         |
       | coord_comp_north_day<x> | numpy.ndarray | Station coordinate comparison results for North component in [m]  |
       |                         |               | for day X (X=[1|2|...|7])                                         |
       | coord_comp_up_day<x>    | numpy.ndarray | Station coordinate comparison results for Up component in [m]     |
       |                         |               | for day X (X=[1|2|...|7])                                         |
       | coord_comp_rms_east     | numpy.ndarray | List with daily station coordinate comparison results for East    |
       |                         |               | component in [m]                                                  |
       | coord_comp_rms_north    | numpy.ndarray | List with daily station coordinate comparison results for North   |
       |                         |               | component in [m]                                                  |
       | coord_comp_rms_up       | numpy.ndarray | List with daily station coordinate comparison results for Up      |
       |                         |               | component in [m]                                                  |
       | num_of_days             | numpy.ndarray | Number of days used for analysis                                  |
       | pos_mean_x              | numpy.ndarray | X-coordinate of mean station coordinate position in [m]           |
       | pos_mean_x_rms1         | numpy.ndarray | RMS1 of X-coordinate of mean station coordinate position in [m]   |
       | pos_mean_x_rms2         | numpy.ndarray | RMS2 of X-coordinate of mean station coordinate position in [m]   |
       | pos_mean_y              | numpy.ndarray | Y-coordinate of mean station coordinate position in [m]           |
       | pos_mean_y_rms1         | numpy.ndarray | RMS1 of Y-coordinate of mean station coordinate position in [m]   |
       | pos_mean_y_rms2         | numpy.ndarray | RMS2 of Y-coordinate of mean station coordinate position in [m]   |
       | pos_mean_z              | numpy.ndarray | Z-coordinate of mean station coordinate position in [m]           |
       | pos_mean_z_rms1         | numpy.ndarray | RMS1 of Z-coordinate of mean station coordinate position in [m]   |
       | pos_mean_z_rms2         | numpy.ndarray | RMS2 of Z-coordinate of mean station coordinate position in [m]   |
       | repeatability_east      | numpy.ndarray | Station coordinate repeatability for East component in [m]        |
       | repeatability_north     | numpy.ndarray | Station coordinate repeatability for North component in [m]       |
       | repeatability_up        | numpy.ndarray | Station coordinate repeatability for Up component in [m]          |
       | residual_east           | numpy.ndarray | Station residuals for East component in [m]                       |
       | residual_north          | numpy.ndarray | Station residuals for North component in [m]                      |
       | residual_up             | numpy.ndarray | Station residuals for Up component in [m]                         |
       | station                 | numpy.ndarray | Station names                                                     |
       | time                    | TimeTable     | Date of analysis session                                          |

            and the following Dataset `meta` data:

       |  Entry           | Type | Description                                  |
       |------------------|------|----------------------------------------------|
       | num_coord_files  | int  | Number of coordinate files used for analysis |
       | \__data_path__   | str  | File path                                    |
        """
        data = dict()

        # Generate dataset
        dset = dataset.Dataset(num_obs=len(self.data.keys()))
        dset.meta = self.meta.copy()

        # Remove unnecessary fields in meta
        for key in ["__parser_name__"]:
            del dset.meta[key]

        # Prepare data for adding to dataset
        for sta in sorted(self.data.keys()):
            for field in self.fields:

                if field in [
                        "coord_comp_east", "coord_comp_north", "coord_comp_up"
                ]:
                    for idx in range(0, self.meta["num_coord_files"]):
                        if field in self.data[sta]:
                            data.setdefault(f"{field}_day{idx+1}",
                                            list()).append(
                                                self.data[sta][field][idx])
                        else:
                            data.setdefault(f"{field}_day{idx+1}",
                                            list()).append(float('nan'))
                    continue

                if field in self.data[sta]:
                    data.setdefault(field,
                                    list()).append(self.data[sta][field])
                else:
                    # Field does not exist for station 'sta', therefore it is initialized with NaN.
                    data.setdefault(field, list()).append(float('nan'))

        # Add fields to dataset
        dset.add_text("station", val=sorted(self.data.keys()))

        for field in data:
            unit = "" if field == "num_of_days" else "meter"
            dset.add_float(field, val=data[field], unit=unit)

        dset.add_time(
            "time",
            val=[dset.meta["time"] for ii in range(0, dset.num_obs)],
            scale="utc",
            fmt="datetime",
        )

        return dset
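# A standalone sketch of the pivot pattern used above: per-station lists of
# daily values are spread into one "<field>_day<x>" column per day, padded
# with NaN when a station lacks data (station names and values are made up):
data = {}
daily_east = {"abcd": [0.001, 0.002], "wxyz": None}
num_days = 2
for sta in sorted(daily_east):
    for idx in range(num_days):
        values = data.setdefault(f"coord_comp_east_day{idx + 1}", [])
        values.append(daily_east[sta][idx] if daily_east[sta] else float("nan"))
print(data)  # {'coord_comp_east_day1': [0.001, nan], 'coord_comp_east_day2': [0.002, nan]}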
Example #27
def dset_full():
    """Contains all available fieldstypes"""
    _dset = dataset.Dataset(5)
    _dset.add_bool("idx", val=[0, 1, 1, 0, 1])
    _dset.add_float("numbers", val=[1, 2, 3, 4, 5])
    _dset.add_float("numbers_1", val=[2, 2, 2, 2, 2])
    _dset.add_float("numbers_2", val=[3, 3, 3, 3, 3])
    _dset.add_position("sat_pos", val=np.ones((5, 3)), system="trs")
    _dset.add_position("site_pos",
                       val=np.ones((5, 3)) * 2,
                       system="trs",
                       other=_dset.sat_pos)
    _dset.add_position_delta("site_delta",
                             val=np.ones((5, 3)) * 0.5,
                             system="trs",
                             ref_pos=_dset.site_pos)
    _dset.add_posvel("sat_posvel", val=np.ones((5, 6)), system="trs")
    _dset.add_posvel("site_posvel",
                     val=np.ones((5, 6)) * 2,
                     system="trs",
                     other=_dset.sat_posvel)
    _dset.add_posvel_delta("site_posvel_delta",
                           val=np.ones((5, 6)) * 0.5,
                           system="trs",
                           ref_pos=_dset.site_posvel)
    _dset.add_sigma("numbers2",
                    val=[3, 3, 3, 3, 3],
                    sigma=[0.2, 0.2, 0.2, 0.2, 0.2])
    _dset.add_text("text", val=["aaa", "aaa", "aaa", "aaa", "aaa"])
    _dset.add_time("time",
                   val=[datetime(2015, 1, i) for i in range(5, 10)],
                   scale="utc",
                   fmt="datetime")
    _dset.add_time_delta("time_delta",
                         val=[timedelta(seconds=i) for i in range(20, 25)],
                         scale="utc",
                         fmt="timedelta")

    # Collections
    _dset.add_bool("group.idx", val=[0, 0, 0, 0, 0])
    _dset.add_float("group.numbers", val=[6, 7, 8, 9, 10])
    _dset.add_position("group.sat_pos", val=np.ones((5, 3)) * 7, system="trs")
    _dset.add_position("group.site_pos",
                       val=np.ones((5, 3)) * 8,
                       system="trs",
                       other=_dset.group.sat_pos)
    _dset.add_position_delta("group.site_delta",
                             val=np.ones((5, 3)) * 9.5,
                             system="trs",
                             ref_pos=_dset.group.site_pos)
    _dset.add_posvel("group.sat_posvel", val=np.ones((5, 6)) * 6, system="trs")
    _dset.add_posvel("group.site_posvel",
                     val=np.ones((5, 6)) * 5,
                     system="trs",
                     other=_dset.group.sat_posvel)
    _dset.add_posvel_delta("group.site_posvel_delta",
                           val=np.ones((5, 6)) * 1.5,
                           system="trs",
                           ref_pos=_dset.group.site_posvel)
    _dset.add_sigma("group.numbers2",
                    val=[1.2, 1.2, 1.2, 1.2, 1.2],
                    sigma=[3.2, 3.2, 3.2, 3.2, 3.2])
    _dset.add_text("group.text", val=["bbb", "bbb", "bbb", "bbb", "bbb"])
    _dset.add_time("group.time",
                   val=[datetime(2015, 1, i) for i in range(10, 15)],
                   scale="utc",
                   fmt="datetime")
    _dset.add_time_delta("group.time_delta",
                         val=[timedelta(seconds=i) for i in range(0, 5)],
                         scale="utc",
                         fmt="timedelta")

    # Nested collections
    _dset.add_bool("group.anothergroup.idx", val=[0, 0, 0, 0, 0])
    _dset.add_float("group.anothergroup.numbers", val=[6, 7, 8, 9, 10])
    _dset.add_position("group.anothergroup.sat_pos",
                       val=np.ones((5, 3)) * 7,
                       system="trs")
    _dset.add_position("group.anothergroup.site_pos",
                       val=np.ones((5, 3)) * 8,
                       system="trs",
                       other=_dset.group.anothergroup.sat_pos)
    _dset.add_position_delta(
        "group.anothergroup.site_delta",
        val=np.ones((5, 3)) * 9.5,
        system="trs",
        ref_pos=_dset.group.anothergroup.site_pos,
    )
    _dset.add_posvel("group.anothergroup.sat_posvel",
                     val=np.ones((5, 6)) * 6,
                     system="trs")
    _dset.add_posvel(
        "group.anothergroup.site_posvel",
        val=np.ones((5, 6)) * 5,
        system="trs",
        other=_dset.group.anothergroup.sat_posvel,
    )
    _dset.add_posvel_delta(
        "group.anothergroup.site_posvel_delta",
        val=np.ones((5, 6)) * 1.5,
        system="trs",
        ref_pos=_dset.group.anothergroup.site_posvel,
    )
    _dset.add_sigma("group.anothergroup.numbers2",
                    val=[1.2, 1.2, 1.2, 1.2, 1.2],
                    sigma=[3.2, 3.2, 3.2, 3.2, 3.2])
    _dset.add_text("group.anothergroup.text",
                   val=["bbb", "bbb", "bbb", "bbb", "bbb"])
    _dset.add_time("group.anothergroup.time",
                   val=[datetime(2015, 1, i) for i in range(10, 15)],
                   scale="utc",
                   fmt="datetime")
    _dset.add_time_delta("group.anothergroup.time_delta",
                         val=[timedelta(seconds=i) for i in range(0, 5)],
                         scale="utc",
                         fmt="timedelta")

    _dset.meta.add("dummy", "something")
    _dset.meta.add("testlist", [1, 2])
    _dset.meta.add("testdict", {"a": 2, "b": 3})
    _dset.meta.add("testtuple", ("c", "d"))
    _dset.meta.add("testset", {1, 2})
    _dset.meta.add("testlist2", list())
    _dset.meta.add("testdict2", dict())
    _dset.meta.add("testtuple2", tuple())
    _dset.meta.add("testset2", set())
    _dset.meta.add_event(_dset.time[0], "jump", "something happened")
    return _dset
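# Quick access sketch for the fixture above: dotted field names create nested
# collections reachable as attributes (assuming dset_full() as defined above):
_dset = dset_full()
assert _dset.num_obs == 5
assert _dset.group.numbers[0] == 6
assert _dset.group.anothergroup.text[0] == "bbb"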
Example #28
    def as_dataset(self) -> "Dataset":
        """Return the parsed data as a Dataset

        Returns:
            Midgard Dataset where timeseries data are stored with the following fields:

           | Field                 | Type              | Description                                                  |
           |-----------------------|-------------------|--------------------------------------------------------------|
           | amplitude             | numpy.ndarray     | Amplitude                                                    |
           | azimuth               | numpy.ndarray     | Azimuth in [rad]                                             |
           | frequency             | numpy.ndarray     | GNSS frequency identifier                                    |
           | peak2noise            | numpy.ndarray     | Peak-to-noise ratio                                          |
           | satellite             | numpy.ndarray     | Satellite number                                             |
           | reflection_height     | numpy.ndarray     | Reflection height in [m]                                     |
           | time                  | Time              | Time                                                         |
        """

        freq_def = {
            1: "L1",  # G
            2: "L2",  # G
            5: "L5",  # G
            20: "L2C",  # G
            101: "L1",  # R
            102: "L2",  # R
            201: "E1",  # E 
            205: "E5a",  # E
            206: "E6",  # E
            207: "E5b",  # E
            208: "E5",  # E
            302: "B1_2",  # C
            306: "B3",  # C
            307: "B2b",  # C
        }

        float_fields = {
            "amplitude": None,
            "azimuth": "radian",
            "peak2noise": None,
            "reflection_height": "meter",
        }

        # Initialize dataset
        dset = dataset.Dataset()
        if not self.data:
            log.warn(f"No data in {self.file_path}.")
            return dset
        dset.num_obs = len(self.data["time"])

        # Add text fields
        satellite = list()
        system = list()
        for sat in self.data["satellite"]:
            if sat >= 1 and sat < 100:  # GPS satellites
                system.append("G")
                satellite.append("G" + str(int(sat)).zfill(2))
            elif sat >= 101 and sat < 200:  # GLONASS satellites
                system.append("R")
                satellite.append("R" + str(int(sat))[1:3])
            elif sat >= 201 and sat < 300:  # Galileo satellites
                system.append("E")
                satellite.append("E" + str(int(sat))[1:3])
            elif sat >= 301 and sat < 400:  # BeiDou satellites
                system.append("C")
                satellite.append("C" + str(int(sat))[1:3])
            else:
                log.fatal(
                    f"GNSSREFL satellite number {sat} is not defined. Valid satellite numbers are in the range [1-399]."
                )

        dset.add_text(
            name="system",
            val=system,
            write_level="operational",
        )

        dset.add_text(
            name="satellite",
            val=satellite,
            write_level="operational",
        )

        dset.add_text(
            name="frequency",
            val=[freq_def[v] for v in self.data["frequency"]],
            write_level="operational",
        )

        # Add time field
        dset.add_time(
            name="time",
            val=self.data["time"],
            scale="utc",
            fmt="datetime",
            write_level="operational",
        )

        # Add float fields
        for field in float_fields.keys():
            if field not in self.data.keys():
                log.warn(
                    f"Field '{field}' does not exist in file {self.meta['__data_path__']}."
                )
                continue

            value = np.deg2rad(
                self.data[field]) if field == "azimuth" else self.data[field]
            unit = "" if float_fields[field] is None else float_fields[field]

            dset.add_float(name=field,
                           val=value,
                           unit=unit,
                           write_level="operational")

        return dset
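# A standalone sketch of the satellite-number convention handled above:
# 1-99 -> GPS (G), 101-199 -> GLONASS (R), 201-299 -> Galileo (E),
# 301-399 -> BeiDou (C); the last two digits become the satellite number.
def sat_id(sat: int) -> str:
    systems = {0: "G", 100: "R", 200: "E", 300: "C"}
    base = (sat // 100) * 100
    prn = sat - base
    if base in systems and 1 <= prn <= 99:
        return f"{systems[base]}{prn:02d}"
    raise ValueError(f"Satellite number {sat} is not defined.")

assert sat_id(7) == "G07"
assert sat_id(102) == "R02"
assert sat_id(307) == "C07"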
Example #29
    def as_dataset(
        self,
        ref_pos: Union[np.ndarray, List[float]] = [0.0, 0.0,
                                                   0.0]) -> "Dataset":
        """Return the parsed data as a Dataset

        Args:
            ref_pos: Reference position given in terrestrial reference system and meters

        Returns:
            Midgard Dataset where timeseries data are stored with the following fields:

           | Field                | Type           | Description                                                    |
           |----------------------|----------------|----------------------------------------------------------------|
           | obs.dpos             | PositionDelta  | Position delta object referred to a reference position         |
           | obs.dpos_sigma_east  | numpy.ndarray  | Standard deviation of east position                            |
           | obs.dpos_sigma_north | numpy.ndarray  | Standard deviation of north position                           |
           | obs.dpos_sigma_up    | numpy.ndarray  | Standard deviation of up position                              |
           | time                 | Time           | Parameter time given as TimeTable object                       |
        """

        # Initialize dataset
        dset = dataset.Dataset()
        if not self.data:
            log.warn(f"No data in {self.file_path}.")
            return dset
        dset.num_obs = len(self.data["decimalyear"])
        dset.meta.update(self.meta)

        # Add position
        ref_pos = position.Position(np.repeat(np.array([ref_pos]),
                                              dset.num_obs,
                                              axis=0),
                                    system="trs")
        dset.add_position_delta(
            name="obs.dpos",
            val=np.stack(
                (self.data["east"], self.data["north"], self.data["vertical"]),
                axis=1),
            system="enu",
            ref_pos=ref_pos,
        )

        # TODO: sigma functionality has to be improved: dpos_sigma.enu.east, dpos_sigma.trs.x
        ## Add position sigma
        # sigma = np.stack((self.data["east_sigma"], self.data["north_sigma"], self.data["vertical_sigma"]), axis=1)
        # dset.add_sigma(name="dpos_sigma", val=dset.dpos.val, sigma=sigma, unit="meter")
        dset.add_float(name="obs.dpos_sigma_east",
                       val=self.data["east_sigma"],
                       unit="meter")
        dset.add_float(name="obs.dpos_sigma_north",
                       val=self.data["north_sigma"],
                       unit="meter")
        dset.add_float(name="obs.dpos_sigma_up",
                       val=self.data["vertical_sigma"],
                       unit="meter")

        # Add time
        dset.add_time(name="time",
                      val=self.data["decimalyear"],
                      scale="utc",
                      fmt="decimalyear",
                      write_level="operational")

        return dset
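# Shape sketch for the position delta above: one reference position is repeated
# per observation and the three ENU series are stacked columnwise (values are
# hypothetical; position.Position is not needed to see the shapes):
import numpy as np

num_obs = 4
ref = np.repeat(np.array([[0.0, 0.0, 0.0]]), num_obs, axis=0)
east = north = vertical = np.zeros(num_obs)
dpos = np.stack((east, north, vertical), axis=1)
assert ref.shape == (4, 3) and dpos.shape == (4, 3)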
Example #30
    def as_dataset(self) -> "Dataset":
        """Store Gipsy residual data in a dataset

        Returns:
            Midgard Dataset where residual data are stored with the following fields:

       | Field               | Type              | Description                                                        |
       |---------------------|-------------------|--------------------------------------------------------------------|
       | azimuth             | numpy.ndarray     | Azimuth from receiver                                              |
       | azimuth_sat         | numpy.ndarray     | Azimuth from satellite                                             |
       | elevation           | numpy.ndarray     | Elevation from receiver                                            |
       | elevation_sat       | numpy.ndarray     | Elevation from satellite                                           |
       | data_type           | numpy.ndarray     | Data type (e.g. IonoFreeC_1P_2P, IonoFreeL_1P_2P)                  |
       | residual            | numpy.ndarray     | Post-fit residual                                                  |
       | satellite           | numpy.ndarray     | Satellite PRN number together with GNSS identifier (e.g. G07)      |
       | station             | numpy.ndarray     | Station name list                                                  |
       | system              | numpy.ndarray     | GNSS identifier (e.g. G or E)                                      |
       | time                | Time              | Parameter time given as TimeTable object                           |
        """
        # TODO: Handling of unit. Should be added to dataset fields.

        dset = dataset.Dataset(num_obs=len(self.data["time_past_j2000"]))
        dset.meta.update(self.meta)

        # Note: GipsyX uses continuous seconds past Jan. 1, 2000 11:59:47 UTC time format in TDP files. That means,
        #       GipsyX does not follow convention of J2000:
        #           1.01.2000 12:00:00     TT  (TT = GipsyX(t) + 13s)
        #           1.01.2000 11:59:27.816 TAI (TAI = TT - 32.184s)
        #           1.01.2000 11:58:55.816 UTC (UTC = TAI + leap_seconds = TAI - 32s)
        #           1.01.2000 11:59:08.816 GPS (GPS = TAI - 19s)
        #
        #       Therefore a Time object initialized with the TT time scale has to be corrected by about 13 seconds.
        #
        # TODO: Introduce j2000 = 2451545.0 as constant or unit?
        dset.add_time(
            "time",
            val=Time((np.array(self.data["time_past_j2000"]) + 13.0) *
                     Unit.second2day + 2451545.0,
                     scale="tt",
                     fmt="jd").gps,
        )

        # Loop over Dataset fields
        for field in self.data.keys():
            if field == "time_past_j2000":
                continue

            if field in ["data_type", "satellite", "station"]:

                if field == "satellite":
                    dset.add_text("satellite",
                                  val=np.repeat(None, dset.num_obs))
                    dset.add_text("system", val=np.repeat(None, dset.num_obs))
                    for sat in set(self.data["satellite"]):
                        idx = sat == np.array(self.data["satellite"])
                        sys = enums.get_value("gnss_3digit_id_to_id", sat[0:3])
                        dset.system[idx] = sys
                        dset.satellite[idx] = sys + sat[3:5]
                else:
                    dset.add_text(field, val=self.data[field])

            elif field == "deleted":
                dset.add_bool(field, val=self.data[field])

            else:
                dset.add_float(field, val=self.data[field])

        return dset
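# A worked instance of the time conversion above (numbers only, no Time object):
# GipsyX counts continuous seconds from 2000-01-01 11:59:47 UTC, so 13 seconds
# are added before mapping the count onto the J2000 epoch JD 2451545.0 in TT.
t_past_j2000 = 86400.0  # hypothetical TDP timestamp: one day
second2day = 1.0 / 86400.0
jd_tt = (t_past_j2000 + 13.0) * second2day + 2451545.0
print(jd_tt)  # ~ 2451546.00015, i.e. one day and 13 seconds past J2000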