Example #1
File: vascc.py  Project: uasau/where
    def _calculate_pos_trs(self, site):
        """Calculate positions for the given time epochs

        The positions are calculated as simple linear offsets based on the reference epoch.

        Args:
            site:   Key saying which site to calculate position for; the type might depend on the Trf.

        Returns:
            Array:  Positions, one 3-vector for each time epoch.
        """
        station_info = self.data[site]
        ref_epoch = Time(float(station_info["ref_epoch"]),
                         fmt="decimalyear",
                         scale="utc")
        pos = np.full((self.time.size, 3), fill_value=np.nan)

        ref_pos = np.array(station_info["pos"])
        ref_vel = np.array(station_info["vel"])
        interval_years = (self.time - ref_epoch).jd * Unit.day2julian_years
        if isinstance(interval_years, float):
            # A scalar time epoch gives a plain float; wrap it so the broadcasting below works
            interval_years = np.array([interval_years])
        pos[:, :] = ref_pos + interval_years[:, None] * ref_vel[None, :]

        ell = ellipsoid.get("GRS80")
        pos_trs = Position(pos, system="trs", ellipsoid=ell, time=self.time)

        return np.squeeze(pos_trs)
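A minimal, self-contained sketch of the linear model above, using hypothetical numbers and plain decimal years in place of the project's Time objects:

import numpy as np

# Hypothetical reference position [m], velocity [m/year] and reference epoch
ref_pos = np.array([2730173.0, 1562442.0, 5529969.0])
ref_vel = np.array([-0.0241, 0.0053, 0.0012])
ref_epoch = 2010.0  # decimal year

# Time epochs to evaluate, as decimal years
epochs = np.array([2015.0, 2016.5, 2018.0])
interval_years = epochs - ref_epoch

# Broadcasting (n, 1) * (1, 3) -> (n, 3): one 3-vector per time epoch
pos = ref_pos + interval_years[:, None] * ref_vel[None, :]
print(pos.shape)  # (3, 3)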
Example #2
File: slrf.py  Project: uasau/where
    def _calculate_pos_trs(self, site):
        """Calculate positions for the given time epochs

        The positions are calculated as simple linear offsets based on the reference epoch. Makes sure to pick out the
        correct time interval to use.

        Args:
            site (String):    Key saying which site to calculate position for.

        Returns:
            Array:  Positions, one 3-vector for each time epoch.
        """
        station_info = self.data[site]
        ref_epoch = Time(station_info["ref_epoch"], scale="utc", fmt="datetime")

        pos = np.zeros((self.time.size, 3))
        for pv in station_info["pos_vel"].values():
            idx = np.logical_and(self.time.utc.datetime >= pv["start"], self.time.utc.datetime < pv["end"])
            if idx.ndim == 0:
                # A scalar time gives a 0-dimensional boolean; wrap it so boolean indexing works
                idx = np.array([idx])
            if not any(idx):
                continue
            ref_pos = np.array([pv["STAX"], pv["STAY"], pv["STAZ"]])
            ref_vel = np.array([pv["VELX"], pv["VELY"], pv["VELZ"]])
            interval_years = (self.time - ref_epoch).jd * Unit.day2julian_years
            if isinstance(interval_years, float):
                interval_years = np.array([interval_years])
            pos[idx, :] = ref_pos + interval_years[idx, None] * ref_vel[None, :]

        ell = ellipsoid.get(config.tech.reference_ellipsoid.str.upper())
        pos_trs = Position(pos, system="trs", ellipsoid=ell, time=self.time)

        return np.squeeze(pos_trs)
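The interval selection above can be shown in isolation. A small sketch with made-up intervals and plain datetime objects standing in for the project's Time class:

from datetime import datetime
import numpy as np

epochs = np.array([datetime(2014, 6, 1), datetime(2016, 6, 1), datetime(2018, 6, 1)])
intervals = [
    dict(start=datetime(2010, 1, 1), end=datetime(2015, 1, 1)),
    dict(start=datetime(2015, 1, 1), end=datetime(2020, 1, 1)),
]
for pv in intervals:
    # Half-open interval [start, end), matching the comparisons in the code above
    idx = np.logical_and(epochs >= pv["start"], epochs < pv["end"])
    print(pv["start"].year, idx)
# Prints: 2010 [ True False False]  then  2015 [False  True  True]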
Example #3
    def _calculate_pos_trs(self, site):
        """Calculate positions for the given time epochs

        There are no velocities available, so same position is returned for all time epochs

        Args:
            site (String):  Key specifying which site to calculate position for, must be key in self.data.

        Returns:
            Array:  Positions, one 3-vector for each time epoch.
        """
        pos = self.data[site].pop("pos")[None, :].repeat(self.time.size,
                                                         axis=0)
        ell = ellipsoid.get(config.tech.reference_ellipsoid.str.upper())
        pos_trs = Position(pos, system="trs", ellipsoid=ell, time=self.time)

        return np.squeeze(pos_trs)
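The pos[None, :].repeat(...) idiom above simply tiles the single site position over all epochs. A tiny illustration with a hypothetical position:

import numpy as np

pos = np.array([1.0, 2.0, 3.0])         # single 3-vector for the site
tiled = pos[None, :].repeat(4, axis=0)  # shape (4, 3): same row repeated for 4 epochs
print(tiled.shape)  # (4, 3)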
Example #4
File: nma.py  Project: yxw027/where
    def _calculate_pos_trs(self, site):
        """Calculate positions for the given time epochs

        There are no velocities available, so same position is returned for all time epochs

        Args:
            site (String):  Key specifying which site to calculate position for, must be key in self.data.

        Returns:
            Array:  Positions, one 3-vector for each time epoch.
        """
        site_data = self.data[site]
        if self.time.isscalar:
            pos = np.full(3, np.nan)
            if "pos" not in site_data:
                return pos

            epoch = self.time.datetime
            try:
                ep_idx = np.where((site_data["time_start"] <= epoch)
                                  & (epoch < site_data["time_end"]))[0][0]
                pos[:] = site_data["pos"][ep_idx]
            except IndexError:
                pass

        else:
            pos = np.full((len(self.time), 3), np.nan)
            if "pos" not in site_data:
                return pos

            for idx, epoch in enumerate(self.time.datetime):
                try:
                    ep_idx = np.where((site_data["time_start"] <= epoch)
                                      & (epoch < site_data["time_end"]))[0][0]
                except IndexError:
                    continue
                pos[idx] = site_data["pos"][ep_idx]

        ell = ellipsoid.get(config.tech.reference_ellipsoid.str.upper())
        pos_trs = Position(pos, system="trs", ellipsoid=ell, time=self.time)

        return np.squeeze(pos_trs)
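The per-epoch lookup above inverts the masking logic of the earlier examples: instead of masking the epochs covered by each record, it searches for the record that covers each epoch. A sketch with hypothetical interval arrays:

from datetime import datetime
import numpy as np

time_start = np.array([datetime(2010, 1, 1), datetime(2015, 1, 1)])
time_end = np.array([datetime(2015, 1, 1), datetime(2020, 1, 1)])

epoch = datetime(2016, 6, 1)
# Index of the first record whose [start, end) interval contains the epoch;
# raises IndexError when no record matches, which the code above catches
ep_idx = np.where((time_start <= epoch) & (epoch < time_end))[0][0]
print(ep_idx)  # 1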
Example #5
File: vlbi.py  Project: mfkiwl/where
def _write_to_dataset(parser, dset, rundate, session):

    data = parser.as_dict()
    units = data.get("meta", {}).get("units", {})

    # Session meta
    dset.meta.add("tech", "vlbi")
    dset.meta.add("file", parser.file_path.stem, section="input")
    dset.meta.add("type", config.tech.obs_format.str.upper(), section="input")

    if "meta" not in data:
        # Only look up the session code in the master file if it is not available in data["meta"].
        # This avoids a dependency on the master file, which changes frequently.
        master = apriori.get("vlbi_master_schedule", rundate=rundate)
        master_data = master.get((rundate.timetuple().tm_yday, session), {})
        session_code = master_data.get("session_code", "")
    else:
        master = apriori.get("vlbi_master_schedule")
        session_code = data["meta"].get("session_code", "")

    dset.meta.add("session_code", session_code, section="input")
    dset.meta.add("session_type",
                  master.session_type(session_code),
                  section="input")

    log.info(f"Session code: {session_code}")

    # Convert source names to official IERS names
    source_names = apriori.get("vlbi_source_names")
    iers_source_names = [
        source_names[src]["iers_name"] if src in source_names else src
        for src in data["source"]
    ]
    data["source"] = iers_source_names

    # Replace spaces in station names with underscores to match official IVS name
    data["station_1"] = np.char.replace(data["station_1"], " ", "_")
    data["station_2"] = np.char.replace(data["station_2"], " ", "_")

    dset.num_obs = len(data["time"])
    dset.add_time("time",
                  val=data.pop("time"),
                  scale="utc",
                  fmt="isot",
                  write_level="operational")

    # Source directions
    crf = apriori.get("crf", time=dset.time)
    ra = np.array([
        crf[s].pos.right_ascension if s in crf else 0 for s in data["source"]
    ])
    dec = np.array(
        [crf[s].pos.declination if s in crf else 0 for s in data["source"]])
    dset.add_direction("src_dir",
                       ra=ra,
                       dec=dec,
                       time=dset.time,
                       write_level="operational")
    # Replace the character "." with the letters "dot" in source names because "." has a special meaning in where
    data["source"] = [s.replace(".", "dot") for s in iers_source_names]

    for field, values in data.items():
        values = np.array(values)
        if values.dtype.kind in {"U", "S"}:
            multiplier = -1 if field.endswith("_1") else 1
            dset.add_text(field,
                          val=values,
                          multiplier=multiplier,
                          write_level="operational")
        elif values.dtype.kind in {"f", "i"}:
            multiplier = -1 if field.endswith("_1") else 1
            unit = units.get(field, None)
            dset.add_float(field,
                           val=values,
                           multiplier=multiplier,
                           write_level="operational",
                           unit=unit)
        elif values.dtype.kind in {"O"}:
            continue
        else:
            log.warn(f"Unknown datatype {values.dtype} for field {field}")

    # Station information
    log.info(f"Found stations: {', '.join(dset.unique('station'))}")
    trf = apriori.get("trf", time=dset.time)
    station_codes = apriori.get("vlbi_station_codes")
    dset.add_text(
        "baseline",
        val=np.array([
            f"{s1}/{s2}"
            for s1, s2 in zip(data["station_1"], data["station_2"])
        ]),
        write_level="operational",
    )
    for site in dset.unique("station"):
        if site in station_codes:
            cdp = station_codes[site]["cdp"]
            trf_site = trf[cdp]
        else:
            named_site = trf.named_site(site)
            trf_site = trf.closest(named_site.pos)
            cdp = trf_site.key
            ignore_stations = config.tech.ignore_station.stations.list
            logger = log.info if site in ignore_stations else log.warn
            logger(
                f"Undefined station name {site}. Assuming station is {trf_site.name} to get a cdp number."
            )

        data["pos_" + site] = trf_site.pos.trs.val
        _site_pos = np.mean(data[f"pos_{site}"], axis=0)
        log.debug(
            f"Using position {_site_pos} for {site} from {trf_site.source}")

        ivsname = station_codes[cdp]["name"]
        data["sta_" + site] = dict(site_id=cdp, cdp=cdp, ivsname=ivsname)

    # Positions
    itrs_pos_1 = np.array(
        [data["pos_" + s][i, :] for i, s in enumerate(data["station_1"])])
    itrs_vel_1 = np.zeros((dset.num_obs, 3))
    dset.add_posvel(
        "site_pos_1",
        val=np.concatenate((itrs_pos_1, itrs_vel_1), axis=1),
        ellipsoid=ellipsoid.get(config.tech.reference_ellipsoid.str.upper()),
        system="trs",
        time=dset.time,
        # other=dset.src_dir,
        write_level="operational",
    )

    itrs_pos_2 = np.array(
        [data["pos_" + s][i, :] for i, s in enumerate(data["station_2"])])
    itrs_vel_2 = np.zeros((dset.num_obs, 3))
    dset.add_posvel(
        "site_pos_2",
        val=np.concatenate((itrs_pos_2, itrs_vel_2), axis=1),
        ellipsoid=ellipsoid.get(config.tech.reference_ellipsoid.str.upper()),
        system="trs",
        time=dset.time,
        # other=dset.src_dir,
        write_level="operational",
    )

    # Compute aberrated source directions
    def aberrated_src_dir(site_pos):
        """See IERS2010 Conventions, equation 11.15"""
        site_vel_gcrs = site_pos.gcrs.vel.val
        eph = apriori.get("ephemerides", time=dset.time)
        vel = eph.vel_bcrs("earth") + site_vel_gcrs
        return (
            dset.src_dir.unit_vector + vel / constant.c -
            dset.src_dir.unit_vector *
            (dset.src_dir.unit_vector[:, None, :] @ vel[:, :, None])[:, :, 0] /
            constant.c)

    k_1 = aberrated_src_dir(dset.site_pos_1)
    dset.add_direction("abr_src_dir_1", val=k_1, system="gcrs", time=dset.time)
    dset.site_pos_1.other = dset.abr_src_dir_1

    k_2 = aberrated_src_dir(dset.site_pos_2)
    dset.add_direction("abr_src_dir_2", val=k_2, system="gcrs", time=dset.time)
    dset.site_pos_2.other = dset.abr_src_dir_2

    # Station data
    sta_fields = set().union(
        *[v.keys() for k, v in data.items() if k.startswith("sta_")])
    for field in sta_fields:
        dset.add_text(field + "_1",
                      val=[data["sta_" + s][field] for s in data["station_1"]],
                      multiplier=-1)  # write_level='analysis')
        dset.add_text(field + "_2",
                      val=[data["sta_" + s][field] for s in data["station_2"]],
                      multiplier=1)  # write_level='analysis')

    # Station meta
    station_keys = sorted([k for k, v in data.items() if k.startswith("sta_")])
    pos_keys = sorted([k for k, v in data.items() if k.startswith("pos_")])

    for sta_key, pos_key in zip(station_keys, pos_keys):
        sta_name = sta_key.replace("sta_", "")
        cdp = data[sta_key]["cdp"]
        ivsname = station_codes[cdp]["name"]
        longitude, latitude, height, _ = sofa.iau_gc2gd(
            2, data[pos_key][0, :])  # TODO: Reference ellipsoid

        dset.meta.add("cdp", cdp, section=ivsname)
        dset.meta.add("site_id", cdp, section=ivsname)
        dset.meta.add("domes", station_codes[cdp]["domes"], section=ivsname)
        dset.meta.add("marker", station_codes[cdp]["marker"], section=ivsname)
        dset.meta.add("description",
                      station_codes[cdp]["description"],
                      section=ivsname)
        dset.meta.add("longitude", longitude, section=ivsname)
        dset.meta.add("latitude", latitude, section=ivsname)
        dset.meta.add("height", height, section=ivsname)
        if sta_name != ivsname:
            dset.meta.add("cdp", cdp, section=sta_name)
            dset.meta.add("site_id", cdp, section=sta_name)
            dset.meta.add("domes",
                          station_codes[cdp]["domes"],
                          section=sta_name)
            dset.meta.add("marker",
                          station_codes[cdp]["marker"],
                          section=sta_name)
            dset.meta.add("description",
                          station_codes[cdp]["description"],
                          section=sta_name)
            dset.meta.add("longitude", longitude, section=sta_name)
            dset.meta.add("latitude", latitude, section=sta_name)
            dset.meta.add("height", height, section=sta_name)

    # Final cleanup
    # If there are more than 300 sources in an NGS file the source names are gibberish
    bad_source_idx = ra == 0
    bad_sources = np.array(dset.source)[bad_source_idx]
    for s in np.unique(bad_sources):
        log.warn(
            f"Unknown source {s}. Observations with this source are discarded")
    dset.subset(np.logical_not(bad_source_idx))
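The aberrated_src_dir helper above implements equation (11.15) of the IERS 2010 Conventions to first order in v/c: k' = k + v/c - k (k.v)/c. The batched matmul (k[:, None, :] @ v[:, :, None])[:, :, 0] computes the row-wise dot products k.v. A standalone sketch with hypothetical unit vectors and velocities:

import numpy as np

c = 299792458.0  # speed of light [m/s]

# Hypothetical source unit vectors and observer velocities [m/s], one row per observation
k = np.array([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0]])
v = np.array([[0.0, 2.9e4, 0.0], [1.0e4, 0.0, 2.0e4]])

# Row-wise dot products k.v via batched matrix multiplication: (n,1,3) @ (n,3,1) -> (n,1,1)
k_dot_v = (k[:, None, :] @ v[:, :, None])[:, :, 0]

# Aberrated direction, IERS 2010 Conventions eq. (11.15), first order in v/c
k_aberrated = k + v / c - k * k_dot_v / c
print(k_aberrated)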
Example #6
File: itrf.py  Project: mfkiwl/where
    def _calculate_pos_trs(self, site):
        """Calculate positions for the given time epochs

        The positions are calculated as simple linear offsets based on the reference epoch. If there is a post-seismic
        deformations model for a station the motion due to that model is added to the linear velocity model. Makes sure
        to pick out the correct time interval to use.

        Args:
            site (String):    Key saying which site to calculate position for.

        Returns:
            Array:  Positions, one 3-vector for each time epoch.
        """
        station_info = self.data[site]
        ref_epoch = Time(station_info["ref_epoch"],
                         scale="utc",
                         fmt="datetime")

        pos = np.zeros((self.time.size, 3))
        for pv in station_info["pos_vel"].values():
            idx = np.logical_and(self.time.utc.datetime >= pv["start"],
                                 self.time.utc.datetime < pv["end"])
            if idx.ndim == 0:
                idx = np.array([idx])
            if not any(idx):
                continue
            ref_pos = np.array([pv["STAX"], pv["STAY"], pv["STAZ"]])
            ref_vel = np.array([pv["VELX"], pv["VELY"], pv["VELZ"]])
            interval_years = (self.time - ref_epoch).jd * Unit.day2julian_years
            if isinstance(interval_years, float):
                interval_years = np.array([interval_years])
            pos[idx, :] = ref_pos + interval_years[idx,
                                                   None] * ref_vel[None, :]

        ell = ellipsoid.get(config.tech.reference_ellipsoid.str.upper())
        pos_trs = Position(np.squeeze(pos),
                           system="trs",
                           ellipsoid=ell,
                           time=self.time)

        # Post-seismic deformations, see Appendix C in :cite:`itrf2014`
        if "psd" in station_info:
            psd = station_info["psd"]
            denu = dict(H=np.zeros(self.time.size),
                        E=np.zeros(self.time.size),
                        N=np.zeros(self.time.size))
            for param in psd.values():
                t_0 = Time(param["epoch"], fmt="datetime", scale="utc")
                delta_t = (self.time - t_0).jd * Unit.day2julian_years
                if isinstance(delta_t, float):
                    delta_t = np.array([delta_t])
                idx = delta_t > 0
                for L in "ENH":
                    aexp = np.array(param.get("AEXP_" + L, list()))
                    texp = np.array(param.get("TEXP_" + L, list()))
                    for a, t in zip(aexp, texp):
                        denu[L][idx] += a * (1 - np.exp(-delta_t[idx] / t))
                    alog = np.array(param.get("ALOG_" + L, list()))
                    tlog = np.array(param.get("TLOG_" + L, list()))
                    for a, t in zip(alog, tlog):
                        denu[L][idx] += a * np.log(1 + delta_t[idx] / t)

            denu = np.vstack((denu["E"], denu["N"], denu["H"])).T

            pos_delta = PositionDelta(np.squeeze(denu),
                                      system="enu",
                                      ellipsoid=ell,
                                      ref_pos=pos_trs,
                                      time=self.time)
            pos_trs += pos_delta.trs

        return np.squeeze(pos_trs)
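The post-seismic deformation model applied above (Appendix C of the ITRF2014 paper) sums exponential and logarithmic relaxation terms per local component. A minimal sketch for a single east component, with hypothetical amplitudes [m] and relaxation times [years]:

import numpy as np

delta_t = np.array([-1.0, 0.5, 2.0, 10.0])  # years since the event epoch
idx = delta_t > 0                           # only epochs after the event contribute

aexp, texp = [0.005], [1.2]  # exponential amplitudes and relaxation times (hypothetical)
alog, tlog = [0.003], [0.8]  # logarithmic amplitudes and relaxation times (hypothetical)

d_east = np.zeros(delta_t.size)
for a, t in zip(aexp, texp):
    d_east[idx] += a * (1 - np.exp(-delta_t[idx] / t))
for a, t in zip(alog, tlog):
    d_east[idx] += a * np.log(1 + delta_t[idx] / t)
print(d_east)  # first entry stays 0: that epoch precedes the event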