Example #1
0
 def file_reference(self):
     """Mandatory block
     """
     self.fid.write("+FILE/REFERENCE\n")
     if "inst_name" not in self._analyst_info:
         log.error(
             "Add information about user and institute in configuration. Left blank in Sinex-file"
         )
     self.fid.write(
         f" {'DESCRIPTION':<18} {self._analyst_info.get('inst_name', ''):<60}\n"
     )
     self.fid.write(
         f" {'OUTPUT':<18} {'Daily VLBI solution, Normal equations':<60}\n")
     self.fid.write(
         f" {'ANALYST':<18} {self._analyst_info.get('email', ''):<60}\n")
     contacts = config.tech.get("contacts",
                                section="sinex",
                                default=where.__contact__).list
     for contact in contacts:
         self.fid.write(f" {'CONTACT':<18} {contact:<60}\n")
     self.fid.write(
         f" {'SOFTWARE':<18} {f'Where v{where.__version__}':<60}\n")
     # self.fid.write(' {'HARDWARE':<18} {'---':<60}\n')
     input_string = f"{self.dset.meta['input']['type']} {self.dset.meta['input']['file']}"
     self.fid.write(f" {'INPUT':<18} {input_string:<60}\n")
     self.fid.write("-FILE/REFERENCE\n")
Example #2
0
 def _select_gnss_signal(self, system: Union[str, None] = None) -> Dict[str, str]:
     """Select GNSS signal depending on given data in Dataset

     Args:
         system:  System identifier

     Returns:
         Dictionary with selected GNSS signals as keys and the corresponding navigation message type as values
     """
     nav_type_def = {
             "FNAV_E5a": {"e5a": "F/NAV"},
             "INAV_E1": {"e1": "I/NAV"},
             "INAV_E5b": {"e5b": "I/NAV"},
             "INAV_E1E5b": {"e1": "I/NAV", "e5b": "I/NAV"},
             "LNAV": {None: "L/NAV"},
             "D1/D2": {None: "D1/D2"},
             "NAV": {None: "NAV"},
     }
     
     signals = dict()
     
     if system:
         idx = self.dset.filter(system=system)
         nav_types = set(self.dset.nav_type[idx])
     else:
         nav_types = self.dset.unique("nav_type")
         
     for nav_type in nav_types:
         try:
             signals.update(nav_type_def[nav_type])
         except KeyError:
             log.error(f"GNSS navigation message {nav_type} is not defined.")
             
     return signals
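For reference, a minimal standalone run of the lookup pattern above (the nav types fed into the loop are made-up inputs and no Dataset is involved) shows how the returned signals dictionary is accumulated and how unknown navigation message types are reported:

nav_type_def = {"FNAV_E5a": {"e5a": "F/NAV"}, "INAV_E1E5b": {"e1": "I/NAV", "e5b": "I/NAV"}}
signals = dict()
for nav_type in ("FNAV_E5a", "INAV_E1E5b", "CNAV"):
    try:
        signals.update(nav_type_def[nav_type])
    except KeyError:
        print(f"GNSS navigation message {nav_type} is not defined.")
print(signals)  # {'e5a': 'F/NAV', 'e1': 'I/NAV', 'e5b': 'I/NAV'}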
Example #3
0
def _markdown_to_pdf(dset):
    """Convert markdown SISRE report file to pdf format

    Args:
       dset (Dataset):           A dataset containing the data.
    """

    if config.where.sisre_report.get("markdown_to_pdf", default=False).bool:
        md_path = str(
            files.path("output_sisre_comparison_report", file_vars=dset.vars))
        pdf_path = md_path.replace(".md", ".pdf")
        program = "pandoc"

        # Convert markdown to pdf with pandoc
        pandoc_args = [
            "-f markdown", "-V classoption:twoside", "-N", "-o " + pdf_path,
            md_path
        ]

        log.info(f"Start: {program} {' '.join(pandoc_args)}")
        status = os.system(f"{program} {' '.join(pandoc_args)}")
        if status != 0:
            log.error(
                f"{program} failed with error code {status} ({' '.join([program] + pandoc_args)})"
            )
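A note on the pandoc call: because the command is assembled into a single shell string for os.system, multi-word entries such as "-f markdown" and "-o " + pdf_path work as written, but an argument-list based call avoids the shell entirely. A small sketch using only the standard library (the file names here are hypothetical, not taken from the report configuration):

import subprocess

# Each option and its value is a separate list element, so no shell quoting is needed
result = subprocess.run(
    ["pandoc", "-f", "markdown", "-V", "classoption:twoside", "-N", "-o", "report.pdf", "report.md"]
)
if result.returncode != 0:
    print(f"pandoc failed with error code {result.returncode}")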
Example #4
0
def eccentricity_vector_station(ecc, dset):
    """Calculate the eccentricity vector for a station.

    Corrections are returned in meters in the Geocentric
    Celestial Reference System for each observation.

    Args:
        ecc:         Dictionary with eccentricity vector information per site_id.
        dset:        A Dataset containing model data.

    Returns:
        Numpy array: GCRS corrections in meters.
    """

    denu = np.full((dset.num_obs, 3), np.nan)
    for site_id in dset.unique("site_id"):
        if ecc[site_id]["type"] == "NEU":
            # Convert from NEU to ENU
            denu[dset.filter(site_id=site_id)] = ecc[site_id]["vector"][[1, 0, 2]]

    dxyz = dset.site_pos.convert_enu_to_itrs(denu)
    for site_id in dset.unique("site_id"):
        if ecc[site_id]["type"] == "XYZ":
            dxyz[dset.filter(site_id=site_id)] = ecc[site_id]["vector"]

    missing_index = np.any(np.isnan(dxyz), axis=1)
    for site_id in np.unique(dset.site_id[missing_index]):
        log.error(
            "Missing eccentricity vector for site_id '{}'. Correction set to zero.",
            site_id)
    dxyz[np.isnan(dxyz)] = 0

    return dset.site_pos.convert_itrs_to_gcrs(dxyz)
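A quick standalone check (synthetic vector, not Where data) of the index trick used above: indexing with [1, 0, 2] swaps the first two components, turning a (north, east, up) vector into (east, north, up).

import numpy as np

neu = np.array([1.0, 2.0, 3.0])   # north, east, up
enu = neu[[1, 0, 2]]              # east, north, up
assert np.allclose(enu, [2.0, 1.0, 3.0])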
Example #5
0
def ocean_tides(dset):
    """Calculate ocean tide corrections at both stations

    Ocean tide corrections are returned in meters in the Geocentric Celestial Reference System for each observation. A
    Numpy array with 6 columns is returned, the first three columns are \f$ x, y, z \f$ for station 1, while the last
    three columns are \f$ x, y, z \f$ for station 2.

    Args:
        dset:     A Dataset containing model data.

    Returns:
        Numpy array with ocean tide corrections in meters.
    """
    # Ocean Tides Coefficients
    otc = apriori.get("ocean_tides")

    amplitudes = {
        s: otc[s]["amplitudes"]
        for s in dset.unique("site_id") if s in otc
    }
    phases = {s: otc[s]["phases"] for s in dset.unique("site_id") if s in otc}
    # Warn about missing Ocean Tides Coefficients
    for site_id in set(dset.unique("site_id")) - set(amplitudes.keys()):
        log.error(
            f"Missing ocean loading coefficients for site id {site_id!r}. Correction set to zero."
        )

    data_out = list()
    for _ in dset.for_each("station"):
        data_out.append(ocean_tides_station(dset, amplitudes, phases))

    return np.hstack(data_out)
Example #6
0
    def write_block(self, block_name, *args):
        """Call the given block so it writes itself"""
        try:
            block_func = getattr(self, block_name)
        except AttributeError:
            log.error(f"Sinex block '{block_name}' is unknown")
            return

        block_func(*args)
Example #7
0
def publish_files(publish=None):
    """Publish files to specified directories

    The publish string should list file_keys specified in files.conf. Each file_key needs to have a field named publish
    specifying a directory the file should be copied to.

    Args:
        publish (String):   List of file_keys that will be published.
    """
    if not config.where.files.publish.bool:
        return

    publish_list = config.tech.get("files_to_publish", value=publish).list
    for file_key in publish_list:
        try:
            source = config.files.path(file_key)
        except KeyError:
            log.error(
                f"File key '{file_key}' in publish configuration is unknown. Ignored"
            )
            continue
        if not source.exists():
            try:
                log.error(
                    f"File '{source}' (file key='{file_key}') does not exist, and can not be published"
                )
            except KeyError:
                log.error(
                    f"File key='{file_key}' has incomplete filename information and can not be published"
                )
            continue

        try:
            destinations = config.files[file_key].publish.replaced.as_list(
                convert=pathlib.Path)
        except AttributeError:
            log.error(
                f"File key '{file_key}' does not specify 'publish' directory in file configuration. Ignored"
            )
            continue

        # Copy file to destinations
        for destination in destinations:
            destination_path = destination / source.name
            try:
                if destination_path.exists():
                    os.remove(destination_path)
                destination.mkdir(parents=True, exist_ok=True)
                shutil.copy(source, destination)
            except (OSError, PermissionError, FileNotFoundError) as err:
                log.error(
                    f"Unable to publish {file_key}-file {source} to {destination} because of {err}"
                )
            else:
                log.info(
                    f"Published {file_key}-file {source} to {destination}")
Example #8
0
def download_file(file_key, file_vars=None, create_dirs=True):
    """Download a file from the web and save it to disk

    TODO: Remove when files.path() is removed

    Use pycurl (libcurl) to do the actual downloading. Requests might be nicer for this, but it turned out to be much
    slower (and in practice unusable for bigger files) and also does not really support ftp downloads.

    Args:
        file_key (String):   File key that should be downloaded.
        file_vars (Dict):    File variables used to find path from file_key.
        create_dirs (Bool):  Create directories as necessary before downloading file.
    """
    if (not config.where.files.download_missing.bool
            or "url" not in config.files[file_key]
            or not config.files[file_key].url.str):
        return None

    file_path = path(file_key, file_vars=file_vars, download_missing=False)
    if file_path.exists():
        return None
    if create_dirs:
        file_path.parent.mkdir(parents=True, exist_ok=True)

    file_url = url(file_key, file_vars=file_vars)
    file_path = file_path.with_name(file_url.name)
    log.info(f"Download {file_key} from '{file_url}' to '{file_path}'")
    with builtins.open(file_path, mode="wb") as fid:
        c = pycurl.Curl()
        c.setopt(c.URL, file_url)
        c.setopt(c.WRITEDATA, fid)
        try:
            c.perform()
            if not (200 <= c.getinfo(c.HTTP_CODE) <= 299):
                raise pycurl.error()
        except pycurl.error:
            log.error(
                f"Problem downloading file: {c.getinfo(c.EFFECTIVE_URL)} ({c.getinfo(c.HTTP_CODE)})"
            )
            if file_path.exists():  # Print first 10 lines to console
                head_of_file = f"Contents of '{file_path}':\n" + "\n".join(
                    file_path.read_text().split("\n")[:10])
                print(console.indent(head_of_file, num_spaces=8))
                file_path.unlink()
            log.warn(
                f"Try to download '{file_url}' manually and save it at '{file_path}'"
            )
        else:
            log.info(f"Done downloading {file_key}")
        finally:
            c.close()
    return file_path
Example #9
0
def empty_file(file_path):
    """Check if a file is empty

    Args:
        file_path (Path):  Path to a file.

    Returns:
        Bool:  Whether path is empty or not.
    """
    if not path_exists(file_path):
        log.error(f"File '{file_path}' does not exist.")

    return file_path.stat().st_size == 0
Example #10
0
    def __getitem__(self, key):
        """Read field data from table

        Look up location and shape of field data in self._fields. Then return data after they are reshaped.

        Args:
            key:   String with name of field.

        Returns:
            Numpy-array with field data.
        """
        if key == self.name:
            return self._data

        # We should not end up here ...
        from where.lib import log

        log.error("Somehow key is {} but my name is {}", key, self.name)
Example #11
0
    def solution_estimate(self):
        """Mandatory
        """
        self.fid.write("+SOLUTION/ESTIMATE\n")
        self.fid.write(
            "*Index Type__ CODE PT SOLN Ref_epoch___ Unit S Total__value________ _Std_dev___\n"
        )
        sol_id = 1
        for i, param in enumerate(self.state_vector, start=1):
            point_code = "A" if param["type"] == "site_pos" else "--"
            try:
                param_type = _PARAMS[param["type"]][param["partial"]]
            except KeyError:
                continue

            param_unit = self.dset.meta["normal equation"]["unit"][i - 1]
            value = self.dset.meta["normal equation"]["solution"][i - 1] + self._get_apriori_value(param, param_unit)
            covariance = self.dset.meta["normal equation"]["covariance"][i - 1][i - 1]
            if covariance < 0:
                log.error("Negative covariance ({}) for {} {}".format(
                    covariance,
                    param_type,
                    self.dset.meta["normal equation"]["names"][i - 1],
                ))
            value_sigma = np.sqrt(covariance)
            self.fid.write(
                " {:>5} {:6} {:4} {:2} {:>4} {:12} {:4} {:1} {: 20.14e} {:11.5e}\n"
                "".format(
                    i,
                    param_type,
                    self.ids[param["id"]],
                    point_code,
                    sol_id,
                    self.dset.time.mean.yydddsssss,
                    param_unit,
                    2,
                    value,
                    value_sigma,
                ))
        self.fid.write("-SOLUTION/ESTIMATE\n")
Example #12
0
def publish_files(publish=None):
    """Publish files to specified directories

    The publish string should list file_keys specified in files.conf. Each file_key needs to have a field named publish
    specifying a directory the file should be copied to.

    Args:
        publish (String):   List of file_keys that will be published.
    """
    if not config.where.files.publish.bool:
        return

    publish_list = config.tech.get("files_to_publish", value=publish).list
    for file_key in publish_list:
        try:
            source = path(file_key)
        except KeyError:
            log.error(f"File key '{file_key}' in publish configuration is unknown. Ignored")
            continue
        if not source.exists():
            log.error(f"File '{source}' (file key='{file_key}') does not exist, and can not be published")
            continue

        try:
            destinations = config.files[file_key].publish.replaced.as_list(convert=pathlib.Path)
        except AttributeError:
            log.error(f"File key '{file_key}' does not specify 'publish' directory in file configuration. Ignored")
            continue

        # Copy file to destinations
        for destination in destinations:
            log.info("Publishing {}-file {} to {}", file_key, source, destination)
            destination.mkdir(parents=True, exist_ok=True)
            shutil.copy(source, destination)
Example #13
0
    def __init__(
        self,
        H: np.ndarray,
        z: Union[None, np.ndarray] = None,
        x0: Union[None, np.ndarray] = None,
        W: Union[None, np.ndarray] = None,
        param_names: Union[None, List[str]] = None,
    ) -> None:
        """Initialize the Kalman filter

        Args:
            H:            Design matrix with partial derivatives  (num_obs x num_unknowns)
            z:            Observed residual                       (num_obs)
            x0:           Apriori values of estimated parameters  (num_unknowns x 1)
            W:            Observation weight matrix               (num_obs x num_obs)
            param_names:  Parameter names                         (num_unknowns)
            
        """
        self.H = np.squeeze(H.T, axis=0).T
        self.num_obs, self.num_unknowns = self.H.shape
        self.degree_of_freedom = self.num_obs - self.num_unknowns

        if self.degree_of_freedom < 0:
            log.error(f"Degree of freedom is {self.degree_of_freedom} < 0. Estimate fewer parameters.")

        self.z = np.zeros((self.num_obs)) if z is None else z
        self.x0 = np.zeros((self.num_unknowns)) if x0 is None else x0
        self.W = np.eye(self.num_obs) if W is None else W  # Initialize as identity matrix, if not given
        self.param_names = param_names if param_names else []

        self.dx = np.zeros((self.num_unknowns))
        self.x_hat = np.zeros((self.num_unknowns))
        self.N = np.zeros((self.num_unknowns, self.num_unknowns))
        self.Cx = np.zeros((self.num_unknowns, self.num_unknowns))
        self.Qx = np.zeros((self.num_unknowns, self.num_unknowns))
        self.v = np.zeros((self.num_obs))
        self.sigma0 = None
        self.sigmax = np.zeros((self.num_unknowns))
Пример #14
0
    def markdown_to_pdf(self) -> None:
        """Convert markdown file to pdf format
        """
        # Close file object
        self.fid.close()

        if self.path.stat().st_size == 0:
            log.warn(f"Markdown file {self.path} is empty.")
            return

        pdf_path = str(self.path).replace(".md", ".pdf")
        program = "pandoc"

        # Convert markdown to pdf with pandoc
        pandoc_args = [
            "-f markdown", "-V classoption:twoside", "-N", "-o " + pdf_path,
            str(self.path)
        ]
        log.info(f"Start: {program} {' '.join(pandoc_args)}")
        status = os.system(f"{program} {' '.join(pandoc_args)}")
        if status != 0:
            log.error(
                f"{program} failed with error code {status} ({' '.join([program] + pandoc_args)})"
            )
Example #15
0
    def _organize_data(self):
        """ Copy content from self.raw to self.data and convert all data to arrays with num_obs length
        """
        meta = self.data.setdefault("meta", {})
        meta["session_code"] = self.raw["Session"].get("Session")
        units = meta.setdefault("units", {})

        # Epoch info
        self.data["time"] = self.raw["Observables"]["TimeUTC"]["time"]

        num_obs = len(self.data["time"])
        self.data["station_1"] = self.raw["Observables"]["Baseline"][
            "Baseline"].reshape(num_obs, -1)[:, 0]
        self.data["station_2"] = self.raw["Observables"]["Baseline"][
            "Baseline"].reshape(num_obs, -1)[:, 1]
        self.data["source"] = self.raw["Observables"]["Source"]["Source"]

        # Obs info
        try:
            self.data["observed_delay_ferr"] = self.raw["Observables"][
                "GroupDelay"]["X"]["GroupDelaySig"] * constant.c
        except KeyError:
            self.data["observed_delay_ferr"] = np.zeros(num_obs)
            log.error("Missing group delay formal error information")
        units["observed_delay_ferr"] = ("meter", )

        try:
            self.data["data_quality"] = self.raw["ObsEdit"]["Edit"][
                "DelayFlag"]
        except KeyError:
            self.data["data_quality"] = np.full(num_obs, np.nan)
            log.warn("Missing data quality information")

        try:
            self.data["observed_delay"] = self.raw["ObsEdit"][
                "GroupDelayFull"]["X"]["GroupDelayFull"] * constant.c
        except KeyError:
            self.data["observed_delay"] = np.full(num_obs, np.nan)
            log.error("Missing full group delay information")
        units["observed_delay"] = ("meter", )

        try:
            self.data["iono_delay"] = (
                self.raw["ObsDerived"]["Cal-SlantPathIonoGroup"]["X"]
                ["Cal-SlantPathIonoGroup"].reshape(num_obs, -1)[:, 0] *
                constant.c)
        except KeyError:
            try:
                self.data["dtec"] = self.raw["Observables"]["DiffTec"][
                    "diffTec"]
                units["dtec"] = ("TECU", )
                self.data["ref_freq"] = self.raw["Observables"]["RefFreq"][
                    "X"]["RefFreq"] * Unit.MHz2Hz
                units["ref_freq"] = ("Hz", )
            except KeyError:
                log.warn("Missing ionosphere delay information")

            self.data["iono_delay"] = np.full(num_obs, np.nan)

        units["iono_delay"] = ("meter", )

        try:
            self.data["iono_delay_ferr"] = (
                self.raw["ObsDerived"]["Cal-SlantPathIonoGroup"]["X"]
                ["Cal-SlantPathIonoGroupSigma"].reshape(num_obs, -1)[:, 0] *
                constant.c)
        except KeyError:
            try:
                self.data["dtec_ferr"] = self.raw["Observables"]["DiffTec"][
                    "diffTecStdDev"]  # Unit: TECU
                units["dtec_ferr"] = ("TECU", )
            except KeyError:
                if not np.isnan(self.data["iono_delay"]).all():
                    log.warn(
                        "Missing ionosphere delay formal error information")

            self.data["iono_delay_ferr"] = np.full(num_obs, np.nan)

        units["iono_delay_ferr"] = ("meter", )

        try:
            self.data["iono_quality"] = self.raw["ObsDerived"][
                "Cal-SlantPathIonoGroup"]["X"][
                    "Cal-SlantPathIonoGroupDataFlag"]
        except KeyError:
            log.warn("Missing ionosphere quality information")
            self.data["iono_quality"] = np.full(num_obs, np.nan)

        # Station dependent info
        for field, params in self._STATION_FIELDS.items():
            self.data[field + "_1"] = np.zeros(len(self.data["time"]))
            self.data[field + "_2"] = np.zeros(len(self.data["time"]))
            for station in self.raw["Head"]["StationList"]:
                sta_idx_1 = self.data["station_1"] == station
                sta_idx_2 = self.data["station_2"] == station
                sta_key = station.replace(" ", "_")
                sta_time = self.raw[sta_key]["TimeUTC"]["sec_since_ref"]
                try:
                    sta_data = self.raw[sta_key][params["filestub"]][
                        params["variable"]]
                    missing_idx = np.isclose(sta_data, params["nan_value"])
                    sta_data[missing_idx] = np.nan
                    if missing_idx.any():
                        log.warn(f"Missing {field} data for {station}")
                except KeyError:
                    sta_data = np.full(len(sta_time), np.nan)
                    log.warn(f"Missing all {field} data for {station}")

                if len(sta_data) == 1:
                    # Use constant function if there is only one data point
                    func = lambda _: sta_data[0]
                else:
                    func = interpolate.interp1d(
                        sta_time,
                        sta_data,
                        bounds_error=False,
                        fill_value=(sta_data[0], sta_data[-1]),
                        assume_sorted=True,
                    )
                epochs_1 = self.raw["Observables"]["TimeUTC"]["sec_since_ref"][
                    sta_idx_1]
                epochs_2 = self.raw["Observables"]["TimeUTC"]["sec_since_ref"][
                    sta_idx_2]
                self.data[field +
                          "_1"][sta_idx_1] = func(epochs_1) * params["factor"]
                self.data[field +
                          "_2"][sta_idx_2] = func(epochs_2) * params["factor"]
                units[field + "_1"] = params["unit"]
                units[field + "_2"] = params["unit"]
Example #16
0
    def update_dataset(self, dset, param_names, normal_idx, num_unknowns):
        """Update the given dataset with results from the filtering

        Args:
            dset (Dataset):       The dataset.
            param_names (List):   Strings with names of parameters. Used to form field names.
            normal_idx (Slice):   Slice denoting which parameters should be used for the normal equations.
            num_unknowns (Int):   Number of unknowns.
        """
        # Update dataset with state and estimation fields and calculate new residuals
        self._add_fields(dset, param_names)
        dset.residual[:] = dset.est - (dset.obs - dset.calc)
        num_unknowns += dset.meta.get("num_clock_coeff", 0)

        # Calculate normal equations, and add statistics about estimation to dataset
        N, b = self._normal_equations(normal_idx, dset.num_obs - 1)
        g = self.x_hat[dset.num_obs - 1, normal_idx, :]
        deg_freedom = dset.num_obs - num_unknowns
        v = dset.residual[:, None]
        P = np.diag(1 / self.r[:dset.num_obs])
        sq_sum_residuals = np.asscalar(v.T @ P @ v)
        sq_sum_omc_terms = np.asscalar(2 * b.T @ g - g.T @ N @ g)
        variance_factor = sq_sum_residuals / deg_freedom if deg_freedom != 0 else np.inf
        log.info(
            f"Variance factor = {variance_factor:.4f}, degrees of freedom = {deg_freedom:d}"
        )

        # Report and set analysis status if there are too few degrees of freedom
        if deg_freedom < 1:
            log.error(
                f"Degrees of freedom is {deg_freedom} < 1. Estimate fewer parameters"
            )
            if dset.meta.get("analysis_status") == "unchecked":
                dset.meta["analysis_status"] = "too few degrees of freedom"

        else:
            if dset.meta.get(
                    "analysis_status") == "too few degrees of freedom":
                dset.meta["analysis_status"] = "unchecked"

        # Report and set analysis status if there are too few stations
        # TODO: if vlbi_site_pos in state_vector and num_stations < 3
        estimate_site_pos = np.char.startswith(
            np.array(param_names, dtype=str), "vlbi_site_pos").any()
        if len(dset.unique("station")) < 3 and estimate_site_pos:
            log.warn(
                f"Too few stations {len(dset.unique('station'))} < 3. Do not estimate station positions."
            )
            # if dset.meta.get("analysis_status") == "unchecked":
            # dset.meta["analysis_status"] = "needs custom state vector"
        elif len(dset.unique("station")) < 3 and estimate_site_pos:
            if dset.meta.get("analysis_status") == "needs custom state vector":
                dset.meta["analysis_status"] = "unchecked"
        # Update config
        cfg_vars = dset.vars.copy()
        cfg_vars.pop("rundate")
        with config.update_tech_config(dset.analysis["rundate"],
                                       cfg_vars.pop("pipeline"),
                                       **cfg_vars) as cfg:
            cfg.update("analysis_status",
                       "status",
                       dset.meta.get("analysis_status", ""),
                       source=__file__)

        # Add information to dset.meta
        dset.meta.add("number of observations",
                      dset.num_obs,
                      section="statistics")
        dset.meta.add("number of unknowns", num_unknowns, section="statistics")
        dset.meta.add("square sum of residuals",
                      sq_sum_residuals,
                      section="statistics")
        dset.meta.add("degrees of freedom", deg_freedom, section="statistics")
        dset.meta.add("variance factor", variance_factor, section="statistics")
        dset.meta.add("weighted square sum of o-c",
                      sq_sum_residuals + sq_sum_omc_terms,
                      section="statistics")
        dset.meta.add("matrix", N.tolist(), section="normal equation")
        dset.meta.add("vector", b[:, 0].tolist(), section="normal equation")
        dset.meta.add("names",
                      param_names[normal_idx],
                      section="normal equation")
        dset.meta.add("unit", [
            config.tech[f.split("-")[0]].unit.str
            for f in param_names[normal_idx]
        ],
                      section="normal equation")
Example #17
0
    def update_dataset(self, dset, param_names, normal_idx, num_unknowns):
        """Update the given dataset with results from the filtering

        Args:
            dset (Dataset):       The dataset.
            param_names (List):   Strings with names of parameters. Used to form field names.
            normal_idx (Slice):   Slice denoting which parameters should be used for the normal equations.
            num_unknowns (Int):   Number of unknowns.
        """
        # Update dataset with state and estimation fields and calculate new residuals
        self._add_fields(dset, param_names)
        dset.residual[:] = dset.estimate - (dset.obs - dset.calc)
        num_unknowns += dset.meta.get("num_clock_coeff", 0)

        # Calculate normal equations, and add statistics about estimation to dataset
        N, b = self._normal_equations(normal_idx, dset.num_obs - 1)
        g = self.x_hat[dset.num_obs - 1, normal_idx, :]
        deg_freedom = dset.num_obs - num_unknowns
        v = dset.residual[:, None]
        P = np.diag(1 / self.r[:dset.num_obs])
        sq_sum_residuals = np.asscalar(v.T @ P @ v)
        sq_sum_omc_terms = np.asscalar(2 * b.T @ g - g.T @ N @ g)
        variance_factor = sq_sum_residuals / deg_freedom if deg_freedom != 0 else np.inf
        log.info("Variance factor = {:.4f}, degrees of freedom = {:d}",
                 variance_factor, deg_freedom)

        # Report and set analysis status if there are too few degrees of freedom
        if deg_freedom < 1:
            log.error(
                f"Degrees of freedom is {deg_freedom} < 1. Estimate fewer parameters"
            )
            if dset.meta.get("analysis_status") == "unchecked":
                dset.meta["analysis_status"] = "too few degrees of freedom"

                # Update config
                # with config.update_tech_config(dset.rundate, dset.vars["tech"], dset.vars["session"]) as cfg:
                # cfg.update("analysis_status", "status", dset.meta["analysis_status"], source=__file__)
        else:
            if dset.meta.get(
                    "analysis_status") == "too few degrees of freedom":
                dset.meta["analysis_status"] = "unchecked"

                # Update config
                # with config.update_tech_config(dset.rundate, dset.vars["tech"], dset.vars["session"]) as cfg:
                # cfg.update("analysis_status", "status", dset.meta["analysis_status"], source=__file__)

        # Report and set analysis status if there are too few stations
        # TODO: if vlbi_site_pos in state_vector and num_stations < 3
        estimate_site_pos = np.char.startswith(np.array(param_names),
                                               "vlbi_site_pos").any()
        if len(dset.unique("station")) < 3 and estimate_site_pos:
            log.error(
                f"Too few stations {len(dset.unique('station'))} < 3. Do not estimate station positions."
            )
            if dset.meta.get("analysis_status") == "unchecked":
                dset.meta["analysis_status"] = "needs custom state vector"
        elif len(dset.unique("station")) < 3 and estimate_site_pos:
            if dset.meta.get("analysis_status") == "needs custom state vector":
                dset.meta["analysis_status"] = "unchecked"
        # Update config
        with config.update_tech_config(dset.rundate, dset.vars["tech"],
                                       dset.vars["session"]) as cfg:
            cfg.update("analysis_status",
                       "status",
                       dset.meta.get("analysis_status", ""),
                       source=__file__)

        # Add information to dset.meta
        dset.add_to_meta("statistics", "number of observations", dset.num_obs)
        dset.add_to_meta("statistics", "number of unknowns", num_unknowns)
        dset.add_to_meta("statistics", "square sum of residuals",
                         sq_sum_residuals)
        dset.add_to_meta("statistics", "degrees of freedom", deg_freedom)
        dset.add_to_meta("statistics", "variance factor", variance_factor)
        dset.add_to_meta("statistics", "weighted square sum of o-c",
                         sq_sum_residuals + sq_sum_omc_terms)
        dset.add_to_meta("normal equation", "matrix", N.tolist())
        dset.add_to_meta("normal equation", "vector", b[:, 0].tolist())
        dset.add_to_meta("normal equation", "names", param_names[normal_idx])
        dset.add_to_meta("normal equation", "unit", [
            config.tech[f.split("-")[0]].unit.str
            for f in param_names[normal_idx]
        ])

        # TODO should this be here?
        log.info("Solving normal equations")
        names = dset.meta["normal equation"]["names"]
        n = len(names)
        d = np.zeros((n, 6))
        stations = set()
        reference_frame = config.tech.reference_frames.list[0]

        from where import apriori

        trf = apriori.get("trf",
                          time=dset.time.utc.mean,
                          reference_frames=reference_frame)

        # thaller2008: eq 2.51 (skipping scale factor)
        for idx, column in enumerate(names):
            if "_site_pos-" not in column:
                continue
            station = column.split("-", maxsplit=1)[-1].rsplit("_",
                                                               maxsplit=1)[0]
            site_id = dset.meta[station]["site_id"]
            if site_id in trf:
                x0, y0, z0 = trf[
                    site_id].pos.itrs  # TODO: Take units into account
                if column.endswith("_x"):
                    d[idx, :] = np.array([1, 0, 0, 0, z0, -y0])
                if column.endswith("_y"):
                    d[idx, :] = np.array([0, 1, 0, -z0, 0, x0])
                if column.endswith("_z"):
                    d[idx, :] = np.array([0, 0, 1, y0, -x0, 0])
                stations.add(station)

        log.info("Applying NNT/NNR with {} from {}", ", ".join(stations),
                 reference_frame.upper())
        # thaller2008: eq 2.57
        try:
            H = np.linalg.inv(d.T @ d) @ d.T
        except np.linalg.LinAlgError:
            H = np.zeros((6, n))

        sigmas = [0.0001] * 3 + [1.5e-11] * 3

        # NNR to CRF
        if "celestial_reference_frames" in config.tech.master_section:
            celestial_reference_frame = config.tech.celestial_reference_frames.list[
                0]
            crf = apriori.get(
                "crf",
                celestial_reference_frames=celestial_reference_frame,
                session=dset.dataset_name)
            H2 = np.zeros((3, n))
            for idx, column in enumerate(names):
                if "_src_dir-" not in column:
                    continue
                source = column.split("-", maxsplit=1)[-1].split("_")[0]
                if source in crf:
                    ra = crf[source].pos.crs[0]
                    dec = crf[source].pos.crs[1]
                    if column.endswith("_ra"):
                        H2[0, idx] = -np.cos(ra) * np.sin(dec) * np.cos(dec)
                        H2[1, idx] = -np.sin(ra) * np.sin(dec) * np.cos(dec)
                        H2[2, idx] = np.cos(dec)**2
                    if column.endswith("_dec"):
                        H2[0, idx] = np.sin(ra)
                        H2[1, idx] = -np.cos(ra)

            if H2.any():
                log.info("Applying NNR constraint to {}",
                         celestial_reference_frame.upper())
                # add NNR to CRF constraints
                H = np.concatenate((H, H2))
                sigmas = sigmas + [1e-6] * 3

        # thaller2008: eq 2.45
        P_h = np.diag(1 / np.array(sigmas)**2)

        # thaller2008: eq 2.58
        N_h = N + H.T @ P_h @ H

        # solve neq
        N_h_inv = np.linalg.inv(N_h)
        x = N_h_inv @ b

        # Covariance: thaller2008: eq 2.16
        Q_xx = variance_factor**2 * N_h_inv

        dset.add_to_meta("normal equation", "solution", x[:, 0].tolist())
        dset.add_to_meta("normal equation", "covariance", Q_xx.tolist())
Example #18
0
 def no_traceback_hook(_not_used_1, value, _not_used_2):
     """Only prints the error message, no traceback."""
     log.error(str(value))
Example #19
0
def ocean_tides_station(dset, amplitudes, phases, correction_cache):
    """Calculate the ocean tide corrections for a station

    Ocean tide corrections are returned in meters in the Geocentric Celestial Reference System for each observation.

    Args:
        dset:               A Dataset containing model data.
        amplitudes:         Dictionary with ocean loading amplitudes per site_id.
        phases:             Dictionary with ocean loading phases per site_id.
        correction_cache:   Dictionary used to cache computed corrections per station and epoch.

    Returns:
        Numpy array with ocean tide corrections in meters.
    """
    denu = np.zeros((dset.num_obs, 3))
    use_cmc = config.tech.ocean_tides_cmc.bool

    # Calculate correction
    for obs, site_id in enumerate(dset.site_id):
        if site_id not in amplitudes:
            # Warn about missing Ocean Tides Coefficients
            if site_id in _WARNED_MISSING:
                continue
            station = dset.unique("station", site_id=site_id)[0]
            log.error(
                f"Missing ocean loading coefficients for site id {site_id!r} ({station}). Correction set to zero."
            )
            _WARNED_MISSING.add(site_id)
            continue

        cache_key = (dset.station[obs], dset.time.utc.datetime[obs])
        if cache_key in correction_cache:
            denu[obs] = correction_cache[cache_key]
        else:
            epoch = [float(t) for t in dset.time.utc[obs].yday.split(":")]
            dup, dsouth, dwest = iers.hardisp(epoch, amplitudes[site_id],
                                              phases[site_id], 1, 1.0)

            # Correction in topocentric (east, north, up) coordinates
            denu[obs] = np.array([-dwest[0], -dsouth[0], dup[0]])
            correction_cache[cache_key] = denu[obs]

    if position.is_position(dset.site_pos):
        pos_correction = position.PositionDelta(denu,
                                                system="enu",
                                                ref_pos=dset.site_pos,
                                                time=dset.time)
    elif position.is_posvel(dset.site_pos):
        # set velocity to zero
        denu = np.concatenate((denu, np.zeros(denu.shape)), axis=1)
        pos_correction = position.PosVelDelta(denu,
                                              system="enu",
                                              ref_pos=dset.site_pos,
                                              time=dset.time)
    else:
        log.fatal(
            f"dset.site_pos{dset.default_field_suffix} is not a PositionArray or PosVelArray."
        )

    # Center of mass corrections
    if use_cmc:
        coeff_cmc = apriori.get("ocean_tides_cmc")
        in_phase = coeff_cmc["in_phase"]
        cross_phase = coeff_cmc["cross_phase"]
        cmc = np.zeros((dset.num_obs, 3))
        for obs, time in enumerate(dset.time.utc):
            year, doy = time.datetime.year, float(
                time.datetime.strftime("%j")) + time.mjd_frac
            angle = iers.arg2(year, doy)[:, None]
            cmc[obs] += np.sum(in_phase * np.cos(angle) +
                               cross_phase * np.sin(angle),
                               axis=0)

        if position.is_position(dset.site_pos):
            cmc_correction = position.PositionDelta(cmc,
                                                    system="trs",
                                                    ref_pos=dset.site_pos,
                                                    time=dset.time)
        elif position.is_posvel(dset.site_pos):
            # set velocity to zero
            cmc = np.concatenate((cmc, np.zeros(cmc.shape)), axis=1)
            cmc_correction = position.PosVelDelta(cmc,
                                                  system="trs",
                                                  ref_pos=dset.site_pos,
                                                  time=dset.time)
        pos_correction = pos_correction.trs + cmc_correction

    return pos_correction.gcrs
Example #20
0
    def write_to_dataset(self, dset):
        """Store SLR data in a dataset

        Args:
           dset: The Dataset where data are stored.
        """
        dset.num_obs = len(self.meta["time"])
        dset.add_time("time",
                      val=Time(val=self.rundate.isoformat()).mjd,
                      val2=self.meta.pop("time"),
                      scale="utc",
                      format="mjd")
        for field, value in self.meta.items():
            dset.add_text(field, val=value)

        # Positions
        trf = apriori.get("trf", time=dset.time)
        for station in dset.unique("station"):
            trf_site = trf[station]
            station_pos = trf_site.pos.itrs
            log.debug(
                "Station position for {} ({}) according to ITRF is (x,y,z) = {}",
                station,
                trf_site.name,
                station_pos.mean(axis=0),
            )
            domes = trf_site.meta["domes"]

            if False:  # TODO: Add these missing stations to trf-file
                domes = "00000"
                log.warn("No information about station {} on ITRF file",
                         station)
                if station == "7407":
                    station_pos = np.repeat(
                        [[4119502.13, -4553595.23, -1722855.13]],
                        dset.num_obs,
                        axis=0)
                elif station == "1889":
                    station_pos = np.repeat(
                        [[3451136.221, 3060335.064, 4391970.241]],
                        dset.num_obs,
                        axis=0)
                elif station == "1888":
                    station_pos = np.repeat(
                        [[2730139.097, 1562328.629, 5529998.585]],
                        dset.num_obs,
                        axis=0)
                elif station == "1891":
                    station_pos = np.repeat(
                        [[-968340.32, 3794415.10, 5018178.10]],
                        dset.num_obs,
                        axis=0)
                elif station == "1887":
                    station_pos = np.repeat(
                        [[2001873.3346, 3987633.3547, 4542477.6716]],
                        dset.num_obs,
                        axis=0)
                elif station == "1886":
                    station_pos = np.repeat(
                        [[3466773.394, 3059757.864, 4381456.782]],
                        dset.num_obs,
                        axis=0)
                elif station == "1874":
                    station_pos = np.repeat(
                        [[2844591.641, 2161111.997, 5266356.839]],
                        dset.num_obs,
                        axis=0)
                elif station == "1890":
                    station_pos = np.repeat(
                        [[-838299.699, 3865738.865, 4987640.921]],
                        dset.num_obs,
                        axis=0)
                else:
                    log.error("Unknown station {}", station)
                    station_pos = np.zeros((dset.num_obs, 3))
                log.warn("Using coordinates {} for {}",
                         np.mean(station_pos, axis=0), station)

            self.data["pos_" + station] = station_pos
            self.data["station-other_" + station] = dict(domes=domes,
                                                         cdp=station,
                                                         site_id=station)

        dset.add_position("site_pos",
                          time="time",
                          itrs=np.array([
                              self.data["pos_" + s][idx]
                              for idx, s in enumerate(dset.station)
                          ]))

        # Station data
        sta_fields = set().union(*[
            v.keys() for k, v in self.data.items() if k.startswith("station_")
        ])
        for field in sta_fields:
            dset.add_float(field,
                           val=np.array([
                               float(self.data["station_" + s][field])
                               for s in dset.station
                           ]))
        sta_fields = set().union(*[
            v.keys() for k, v in self.data.items()
            if k.startswith("station-other_")
        ])
        for field in sta_fields:
            dset.add_text(field,
                          val=[
                              self.data["station-other_" + s][field]
                              for s in dset.station
                          ])

        # Satellite data
        sat_fields = set().union(*[
            v.keys() for k, v in self.data.items()
            if k.startswith("satellite_")
        ])
        for field in sat_fields:
            dset.add_float(field,
                           val=np.array([
                               float(self.data["satellite_" + s][field])
                               for s in dset.satellite
                           ]))

        # Observations
        for field, values in self.data["obs"].items():
            dset.add_float(field, val=np.array(values))

        for field, values in self.data["obs_str"].items():
            dset.add_text(field, val=values)

        # Meteorological data
        met_fields = set().union(
            *[v.keys() for k, v in self.data.items() if k.startswith("met_")])
        for field in met_fields:
            dset.add_float(field,
                           val=np.diag([
                               self.data["met_" + s][field]
                               for s in dset.station
                           ]))
Example #21
0
    def update_dataset(self, dset, param_names, normal_idx, num_unknowns):
        """Update the given dataset with results from the filtering

        Args:
            dset (Dataset):       The dataset.
            param_names (List):   Strings with names of parameters. Used to form field names.
            normal_idx (Slice):   Slice denoting which parameters should be used for the normal equations.
            num_unknowns (Int):   Number of unknowns.
        """
        # Update dataset with state and estimation fields and calculate new residuals
        self._add_fields(dset, param_names)
        dset.residual[:] = dset.est - (dset.obs - dset.calc)
        num_unknowns += dset.meta.get("num_clock_coeff", 0)

        # Calculate normal equations, and add statistics about estimation to dataset
        N, b = self._normal_equations(normal_idx, dset.num_obs - 1)
        g = self.x_hat[dset.num_obs - 1, normal_idx, :]
        deg_freedom = dset.num_obs - num_unknowns
        v = dset.residual[:, None]
        P = np.diag(1 / self.r[:dset.num_obs])
        sq_sum_residuals = np.asscalar(v.T @ P @ v)
        sq_sum_omc_terms = np.asscalar(2 * b.T @ g - g.T @ N @ g)
        variance_factor = sq_sum_residuals / deg_freedom if deg_freedom != 0 else np.inf
        log.info(
            f"Variance factor = {variance_factor:.4f}, degrees of freedom = {deg_freedom:d}"
        )

        # Report and set analysis status if there are too few degrees of freedom
        if deg_freedom < 1:
            log.error(
                f"Degrees of freedom is {deg_freedom} < 1. Estimate fewer parameters"
            )
            if dset.meta.get("analysis_status") == "unchecked":
                dset.meta["analysis_status"] = "too few degrees of freedom"

        else:
            if dset.meta.get(
                    "analysis_status") == "too few degrees of freedom":
                dset.meta["analysis_status"] = "unchecked"

        # Report and set analysis status if there are too few stations
        # TODO: if vlbi_site_pos in state_vector and num_stations < 3
        estimate_site_pos = np.char.startswith(
            np.array(param_names, dtype=str), "vlbi_site_pos").any()
        if len(dset.unique("station")) < 3 and estimate_site_pos:
            log.warn(
                f"Too few stations {len(dset.unique('station'))} < 3. Do not estimate station positions."
            )
            # if dset.meta.get("analysis_status") == "unchecked":
            # dset.meta["analysis_status"] = "needs custom state vector"
        elif len(dset.unique("station")) < 3 and estimate_site_pos:
            if dset.meta.get("analysis_status") == "needs custom state vector":
                dset.meta["analysis_status"] = "unchecked"
        # Update config
        cfg_vars = dset.vars.copy()
        cfg_vars.pop("rundate")
        with config.update_tech_config(dset.analysis["rundate"],
                                       cfg_vars.pop("pipeline"),
                                       **cfg_vars) as cfg:
            cfg.update("analysis_status",
                       "status",
                       dset.meta.get("analysis_status", ""),
                       source=__file__)

        # Add information to dset.meta
        dset.meta.add("number of observations",
                      dset.num_obs,
                      section="statistics")
        dset.meta.add("number of unknowns", num_unknowns, section="statistics")
        dset.meta.add("square sum of residuals",
                      sq_sum_residuals,
                      section="statistics")
        dset.meta.add("degrees of freedom", deg_freedom, section="statistics")
        dset.meta.add("variance factor", variance_factor, section="statistics")
        dset.meta.add("weighted square sum of o-c",
                      sq_sum_residuals + sq_sum_omc_terms,
                      section="statistics")
        dset.meta.add("matrix", N.tolist(), section="normal equation")
        dset.meta.add("vector", b[:, 0].tolist(), section="normal equation")
        dset.meta.add("names",
                      param_names[normal_idx],
                      section="normal equation")
        dset.meta.add("unit", [
            config.tech[f.split("-")[0]].unit.str
            for f in param_names[normal_idx]
        ],
                      section="normal equation")

        # TODO should this be here?
        log.info("Solving normal equations")
        names = dset.meta["normal equation"]["names"]
        n = len(names)
        d = np.zeros((n, 6))
        fix_param_weight = np.zeros(n)
        H = np.zeros((6, n))
        stations = set()
        from where import apriori

        reference_frame = config.tech.reference_frames.list[0]
        trf = apriori.get("trf",
                          time=dset.time.utc.mean,
                          reference_frames=reference_frame)
        # thaller2008: eq 2.51 (skipping scale factor)
        for idx, column in enumerate(names):
            if "_site_pos-" not in column:
                continue
            station = column.split("-", maxsplit=1)[-1].rsplit("_",
                                                               maxsplit=1)[0]
            site_id = dset.meta[station]["site_id"]
            if site_id in trf:
                x0, y0, z0 = trf[site_id].pos.trs
                if column.endswith("_x"):
                    d[idx, :] = np.array([1, 0, 0, 0, z0, -y0])
                if column.endswith("_y"):
                    d[idx, :] = np.array([0, 1, 0, -z0, 0, x0])
                if column.endswith("_z"):
                    d[idx, :] = np.array([0, 0, 1, y0, -x0, 0])
                stations.add(station)

        if len(stations) >= 3:
            try:
                # thaller2008: eq 2.57
                H = np.linalg.inv(d.T @ d) @ d.T
                log.info(
                    f"Applying NNT/NNR with {', '.join(stations)} from {reference_frame.upper()}"
                )
            except np.linalg.LinAlgError:
                log.warn(f"Unable to invert matrix for NNR/NNT constraints")
        else:
            log.info(
                f"Too few stations to use NNR/NNT contraints from {reference_frame.upper()}. Using absolute constraints for station positions."
            )
            # Too few stations to use NNT/NNR?
            for idx, column in enumerate(names):
                if "_site_pos-" not in column:
                    continue
                station = column.split("-",
                                       maxsplit=1)[-1].rsplit("_",
                                                              maxsplit=1)[0]
                fix_param_weight[idx] = 1 / (1e-6)**2  # 1/meters**2

        sigmas = [0.0001] * 3 + [1.5e-11] * 3

        # NNR to CRF
        if "celestial_reference_frames" in config.tech.master_section:
            celestial_reference_frame = config.tech.celestial_reference_frames.list[
                0]
            crf = apriori.get(
                "crf",
                time=dset.time,
                celestial_reference_frames=celestial_reference_frame)
            H2 = np.zeros((3, n))
            for idx, column in enumerate(names):
                if "_src_dir-" not in column:
                    continue
                source = column.split("-", maxsplit=1)[-1].split("_")[0]
                if source in crf:
                    ra = crf[source].pos.right_ascension
                    dec = crf[source].pos.declination
                    if dset.num(source=source) < 5:
                        fix_param_weight[idx] = 1 / (1e-12)**2  # 1/radians**2
                        if column.endswith("_ra"):
                            log.info(
                                f"Too few observations for source {source}. Using absolute constraints for source positions."
                            )
                        continue

                    if column.endswith("_ra"):
                        H2[0, idx] = -np.cos(ra) * np.sin(dec) * np.cos(dec)
                        H2[1, idx] = -np.sin(ra) * np.sin(dec) * np.cos(dec)
                        H2[2, idx] = np.cos(dec)**2
                    if column.endswith("_dec"):
                        H2[0, idx] = np.sin(ra)
                        H2[1, idx] = -np.cos(ra)

            if H2.any():
                log.info(
                    f"Applying NNR constraint to {celestial_reference_frame.upper()}"
                )
                # add NNR to CRF constraints
                H = np.concatenate((H, H2))
                sigmas = sigmas + [1e-6] * 3

        # thaller2008: eq 2.45
        P_h = np.diag(1 / np.array(sigmas)**2)

        # Free network constraints: thaller2008: eq 2.58
        N_h = N + H.T @ P_h @ H

        # Baselines with too few obs?
        for idx, column in enumerate(names):
            if "_baseline-" not in column:
                continue
            baseline = column.split("-", maxsplit=1)[-1].rsplit("_",
                                                                maxsplit=1)[0]
            if dset.num(baseline=baseline) < 5:
                fix_param_weight[idx] = 1 / (1e-6)**2  # 1/meters**2
                log.info(
                    f"Too few observations for baseline {baseline}. Constrained to a priori value"
                )
                continue

        # Absolute constraints (on sources with too few observations): thaller2008: eq.2.49
        N_h += np.diag(fix_param_weight)

        # solve neq
        N_h_inv = np.linalg.inv(N_h)
        x = N_h_inv @ b

        # Covariance: thaller2008: eq 2.16
        Q_xx = variance_factor**2 * N_h_inv

        dset.meta.add("solution", x[:, 0].tolist(), section="normal equation")
        dset.meta.add("covariance", Q_xx.tolist(), section="normal equation")
Example #22
0
def clock_correction(dset):
    """Estimate clock polynomial
    """
    # Take previous clock corrections into account
    try:
        output = dset.vlbi_clock
    except AttributeError:
        output = np.zeros(dset.num_obs)

    # Read order of clock polynomial from config file
    terms = 1 + config.tech.get("order_of_polynomial", section=MODEL, default=2).int

    # Read clock breaks from session config, only split on commas (and commas followed by whitespace)
    clock_breaks = config.tech.get("clock_breaks", section=MODEL, default="").as_list(split_re=", *")
    stations, time_intervals = parse_clock_breaks(dset, clock_breaks)

    # Read reference clock from edit file and store in dataset
    ref_clock_str = config.tech.get("reference_clock", section=MODEL, default="").str
    ref_clock = parse_reference_clock(stations, ref_clock_str)
    dset.meta["ref_clock"] = ref_clock

    # Remove reference clock from list of clocks to be estimated
    idx = stations.index(ref_clock)
    del stations[idx]
    del time_intervals[idx]

    # Number of clock polynomial coefficients
    num_coefficients = len(stations) * terms
    param_names = [
        sta + " clk_a" + str(t) + " " + time_intervals[i][0].utc.iso + " " + time_intervals[i][1].utc.iso
        for i, sta in enumerate(stations)
        for t in range(terms)
    ]
    dset.meta["num_clock_coeff"] = num_coefficients

    # Set up matrices for estimation
    A = np.zeros((dset.num_obs, num_coefficients, 1))

    # Time coefficients, used when setting up A
    t = dset.time.utc.mjd - dset.time.utc[0].mjd
    poly = np.array([t ** n for n in range(terms)]).T

    # Set up the A matrix with time coefficients
    for idx, (station, (t_start, t_end)) in enumerate(zip(stations, time_intervals)):
        filter_time = np.logical_and(t_start.utc.mjd <= dset.time.utc.mjd, dset.time.utc.mjd < t_end.utc.mjd)
        filter_1 = np.logical_and(dset.filter(station_1=station), filter_time)
        A[filter_1, idx * terms : (idx + 1) * terms, 0] = poly[filter_1]
        filter_2 = np.logical_and(dset.filter(station_2=station), filter_time)
        A[filter_2, idx * terms : (idx + 1) * terms, 0] = -poly[filter_2]

    # Calculate normal matrix N and the moment vector U
    U = np.sum(A @ dset.residual[:, None, None], axis=0)
    N = np.sum(A @ A.transpose(0, 2, 1), axis=0)

    # Invert the normal matrix to find corrections, only the non-zero part of the matrix is inverted
    idx = np.logical_not(U == 0)[:, 0]
    X = np.zeros((num_coefficients, 1))
    det = np.linalg.det(N[idx, :][:, idx])
    threshold = 1e-12
    if np.abs(det) < threshold:
        # TODO: what is a good threshold value?
        rank = np.linalg.matrix_rank(N[idx, :][:, idx])
        log.warn(f"Determinant of normal matrix in clock correction is close to zero ({det})")
        log.info(f"Normal matrix shape = {N.shape}, normal matrix rank = {rank}")
        _, R = np.linalg.qr(N[idx, :][:, idx])
        for i, row in enumerate(R):
            if np.max(np.abs(row)) < threshold * 10 ** 3:
                log.error(f"{param_names[i]} linearly dependent (max_row = {np.max(np.abs(row))})")
    try:
        X[idx] = np.linalg.inv(N[idx, :][:, idx]) @ U[idx]
    except np.linalg.LinAlgError:
        log.fatal(f"Singular matrix in {MODEL}")

    # Calculate final corrections
    output += (A.transpose(0, 2, 1) @ X)[:, 0, 0]
    return output
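For context, the N and U sums above are the per-observation form of the usual least-squares normal equations: with the design matrix stored as (num_obs, num_coefficients, 1), summing the outer products A_i A_i^T and the residual terms A_i r_i reproduces A^T A and A^T r for the stacked 2-D matrix. A short self-check with random synthetic data (not VLBI observations):

import numpy as np

num_obs, num_coeff = 5, 3
A = np.random.randn(num_obs, num_coeff, 1)
r = np.random.randn(num_obs)
N = np.sum(A @ A.transpose(0, 2, 1), axis=0)   # same accumulation as in clock_correction
U = np.sum(A @ r[:, None, None], axis=0)
A_2d = A[:, :, 0]
assert np.allclose(N, A_2d.T @ A_2d)
assert np.allclose(U[:, 0], A_2d.T @ r)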