Exemplo n.º 1
0
def parse_dataset_id(rundate, tech, stage, dataset_name, dataset_id, **kwargs):
    """Allow for some advanced handling of dataset_id

    In addition to using regular numbers as dataset_id, some text keywords can be used:

    + 'last': Use the last dataset_id written to file, default 0 if no file is previously written.
    + 'all':  Return a list of all dataset_ids in the file.
    """
    # Numeric ids need no file lookup
    if isinstance(dataset_id, (float, int)):
        return dataset_id

    # Consult the JSON-file for information about existing dataset ids
    program_vars = config.program_vars(rundate, tech, session=dataset_name, stage=stage, **kwargs)
    file_vars = dict(program_vars, **config.date_vars(rundate))
    try:
        with files.open("dataset_json", file_vars=file_vars) as fid:
            json_data = json.load(fid)
    except FileNotFoundError:
        json_data = dict()

    if dataset_id == "last":
        # Fall back on dataset_id=0 when no _last_dataset_id has been recorded
        return json_data.get(dataset_name, dict()).get("_last_dataset_id", 0)

    if dataset_id == "all":
        # Dataset keys are of the form '<name>/<id>'
        prefix = "{}/".format(dataset_name)
        return [int(key.rsplit("/", 1)[-1]) for key in json_data if key.startswith(prefix)]
Exemplo n.º 2
0
def list_datasets(rundate, tech, session, stage, **kwargs):
    """List datasets in a given dataset file

    Args:
        rundate:  Datetime, the model run date.
        tech:     String, the technique.
        session:  String, the session name.
        stage:    String, the stage.
        kwargs:   Other arguments are passed to files.open.

    Returns:
        List of strings describing the datasets.
    """
    file_vars = dict(
        config.program_vars(rundate,
                            tech,
                            session=session,
                            stage=stage,
                            **kwargs), **config.date_vars(rundate))

    try:
        with files.open("dataset_json", file_vars=file_vars) as fid:
            json_data = json.load(fid)
    except FileNotFoundError:
        # No dataset file exists for this configuration: report no datasets.
        # (A log.fatal call used to sit after this return - it was unreachable
        # dead code and has been removed.)
        return list()

    # Dataset keys look like '<name>/<id>'; keys starting with '_' are metadata
    return sorted(k for k in json_data.keys()
                  if not k.startswith("_") and "/" in k)
Exemplo n.º 3
0
def get_rinex_file_version(file_key, file_vars):
    """ Get RINEX file version for a given file key

    Args:
        file_key:       File key defined in files.conf file (e.g. given for RINEX navigation or observation file)
        file_vars:      Variables needed to identify RINEX file based on definition in files.conf file.

    Returns:
        tuple:         with following elements

    ===============  ==================================================================================
     Elements          Description
    ===============  ==================================================================================
     version          RINEX file version
     filepath         RINEX file path
    ===============  ==================================================================================
    """
    file_path = files.path(file_key, file_vars=file_vars)
    with files.open(file_key, file_vars=file_vars, mode="rt") as infile:
        # The RINEX version number is the first whitespace-separated field of
        # the first header line
        try:
            version = infile.readline().split()[0]
        except IndexError:
            # An empty first line leaves `version` unbound; presumably
            # log.fatal aborts the program so the return below is never
            # reached in that case - TODO confirm
            log.fatal(f"Could not find Rinex version in file {file_path}")

    return version, file_path
Exemplo n.º 4
0
def read_reports_from_file():
    """Load stored reports from the pickle file into the module-level _REPORTS.

    Sections named 'DEFAULT' or starting with an underscore are treated as
    metadata and skipped. A missing pickle file is silently ignored.
    """
    try:
        with files.open("report_pickle", mode="rb") as fid:
            stored_reports = pickle.load(fid)
            for section, values in stored_reports.items():
                if section != "DEFAULT" and not section.startswith("_"):
                    _REPORTS[section] = values
    except FileNotFoundError:
        pass
Exemplo n.º 5
0
def write_session_report(rundate, tech):
    """Collect remover data per dataset and write the GNSS session report."""
    datasets = dict()
    for _, dataset, report_data in report.reports("remover_data"):
        # Attach the keep-index from the remover report to each dataset
        dataset.add_float("keep_idx", val=report_data["keep_idx"])
        datasets[dataset.dataset_name] = dataset

    with files.open("output_gnss_session_report", mode="wt") as fid:
        header(fid)
        rejected_satellites_per_station(fid, datasets)
        rejected_satellites(fid, datasets)
        rejected_stations(fid, datasets)
Exemplo n.º 6
0
def system_test_output(dset):
    """Write simple output based on dataset

    Args:
        dset:   Dataset, information about model run.
    """
    # Which dataset fields to dump is configured in the 'system_test' section
    fields = config.tech.get("fields", section="system_test").tuple

    with files.open("output_system_test", file_vars=dset.vars,
                    mode="wt") as fid:
        for line_no, values in enumerate(dset.values(*fields), start=1):
            columns = " ".join(str(value) for value in values)
            fid.write(f"{line_no:6d} {columns}\n")
Exemplo n.º 7
0
def vascc_calc(dset):
    """Write a list of calculated observations in the VASCC format.

    Args:
        dset:  Dataset, data for a model run.
    """
    file_vars = dict(session=dset.dataset_name, **dset.vars)
    line_fmt = "{:6d} {:>22s} {:<8s} {:<8s} {:<8s} {:+16.14E}\n"
    rows = dset.values("time", "source", "station_1", "station_2", "calc")

    with files.open("output_vascc_calc", file_vars=file_vars, mode="wt") as fid:
        for obs_num, (epoch, source, sta_1, sta_2, calc) in enumerate(rows, start=1):
            # VASCC expects UTC timestamps truncated to two decimals of a second
            epoch_str = epoch.utc.datetime.strftime("%Y/%m/%d %H:%M:%S.%f")[:22]
            # Delay is written in seconds (calc is in meters, divided by c)
            fid.write(line_fmt.format(obs_num, epoch_str, source, sta_1, sta_2,
                                      calc / constant.c))
Exemplo n.º 8
0
def sisre_report(dset):
    """Write SISRE report

    Writes a Markdown report with title, configuration, satellite status and
    SISRE analysis plots, then converts the Markdown file to PDF.

    Args:
        dset (Dataset):       A dataset containing the data.
    """
    # NOTE(review): `write_level` is currently unused; it is only referenced
    # by the commented-out "Analysis of input files" section below.
    write_level = config.tech.get("write_level",
                                  default="operational").as_enum("write_level")

    # TODO: Better solution?
    if "sampling_rate" not in dset.vars:  # necessary if called for example by where_concatenate.py
        dset.vars["sampling_rate"] = ""

    with files.open(file_key=f"output_sisre_report_{dset.dataset_id}",
                    file_vars=dset.vars,
                    create_dirs=True,
                    mode="wt") as fid:
        _write_title(fid, dset.rundate)
        _write_information(fid)
        _write_config(fid)
        fid.write("\n# Satellite status\n\n")
        # _unhealthy_satellites(fid, dset)
        # _eclipse_satellites(fid, dset)

        # Generate figure directory to save figures generated for SISRE report
        fid.write("\n# SISRE analysis results\n\n")
        figure_dir = files.path("output_sisre_report_figure",
                                file_vars=dset.vars)
        figure_dir.mkdir(parents=True, exist_ok=True)

        _plot_scatter_orbit_and_clock_differences(fid, figure_dir, dset)
        _plot_scatter_sisre(fid, figure_dir, dset)
        _plot_scatter_field(fid, figure_dir, dset, "sisre")
        # _plot_scatter_field(fid, figure_dir, dset, 'sisre', label=False, legend=False)
        _plot_histogram_sisre(fid, figure_dir, dset)
        _plot_scatter_field(fid, figure_dir, dset, "age_of_ephemeris")
        _satellite_statistics_and_plot(fid, figure_dir, dset)

        # if write_level <= enums.get_value("write_level", "detail"):
        #    fid.write("\n# Analysis of input files\n\n")
        #    # _plot_scatter_satellite_bias(fid, figure_dir, dset)
        #    _plot_scatter_field(fid, figure_dir, dset, "bias_brdc")
        #    _plot_scatter_field(fid, figure_dir, dset, "bias_precise")

    # Generate PDF from Markdown file
    _markdown_to_pdf(dset)
Exemplo n.º 9
0
def copy_log_from_where(rundate, pipeline, session):
    """Replay the Where log of a given analysis into the current log.

    Only lines at or above the configured runner log level are re-logged.
    A missing log file triggers a warning instead of an error.
    """
    file_vars = dict(**config.program_vars(rundate, pipeline, session),
                     **config.date_vars(rundate))
    log_level = config.where.runner.log_level.str
    current_level = "none"
    try:
        with files.open("log", file_vars=file_vars) as fid:
            for line in fid:
                level_field, _, text = line.partition(" ")
                level_field = level_field.strip().lower()
                # Continuation lines carry no level and inherit the previous one
                current_level = level_field or current_level
                text = text.strip()
                if getattr(LogLevel, current_level) >= getattr(
                        LogLevel, log_level) and text:
                    log.log(text, current_level)
    except FileNotFoundError as err:
        log.warn(f"'{err}'")
Exemplo n.º 10
0
def sisre_comparison_report(dset):
    """Compare SISRE datasets

    Merges the SISRE fields of all given datasets into one dataframe and
    writes a Markdown comparison report with percentile/RMS bar plots, which
    is then converted to PDF.

    Args:
        dset (dict):       Dictionary with SISRE datasets (name -> Dataset). The datasets contain the data.
    """
    dsets = dset
    df_merged = pd.DataFrame()

    for name, dset in dsets.items():

        if dset.num_obs == 0:
            log.warn(f"Dataset '{name}' is empty.")
            continue

        user_type_name = _get_user_type_name(name)
        df = dset.as_dataframe(fields=["satellite", "system", "sisre", "time.gps"])  # , index="time.gps")
        df = df.rename(columns={"sisre": user_type_name})

        if df_merged.empty:
            df_merged = df
            continue
        # Outer join keeps epochs/satellites present in only one of the datasets
        df_merged = df_merged.merge(df, on=["satellite", "system", "time.gps"], how="outer")

    if df_merged.empty:
        log.fatal(f"All given datasets are empty [{', '.join(dsets.keys())}].")

    # Use the file variables of the first dataset for all output paths
    file_vars = dsets[next(iter(dsets))].vars

    with files.open(
        file_key="output_sisre_comparison_report", file_vars=file_vars, mode="wt"
    ) as fid:
        _header(fid)
        fid.write("#Comparison of SISRE analyses\n")

        # Generate figure directory to save figures generated for SISRE report.
        # (Previously this used the leaked loop variable `dset`, which could
        # point at an empty dataset that was skipped above.)
        figure_dir = files.path("output_sisre_comparison_report_figure", file_vars=file_vars)
        figure_dir.mkdir(parents=True, exist_ok=True)

        _plot_bar_sisre_satellite_percentile(df_merged, fid, figure_dir, threshold=False)
        _plot_bar_sisre_satellite_percentile(df_merged, fid, figure_dir, threshold=True)
        _plot_bar_sisre_signal_combination_percentile(df_merged, fid, figure_dir, threshold=False)
        _plot_bar_sisre_signal_combination_percentile(df_merged, fid, figure_dir, threshold=True)
        _plot_bar_sisre_signal_combination_rms(df_merged, fid, figure_dir)

    # Generate PDF from Markdown file
    # NOTE(review): `dset` here is the last dataset iterated above - confirm
    # that _markdown_to_pdf only needs its file variables
    _markdown_to_pdf(dset)
Exemplo n.º 11
0
 def read_data(self):
     """Read the data from three monthly datafiles

     Reads every daily file from one week before the rundate up to
     ``arc_length + 7`` days after it, skipping file paths already read, and
     registers each file as a dependency before parsing it.
     """
     files_read = []
     # Presumably the 7-day margin on each side covers edge effects of the
     # arc - TODO confirm
     date_to_read = self.rundate - timedelta(days=7)
     while date_to_read < self.rundate + timedelta(days=self.arc_length +
                                                   8):
         self.vars.update(config.date_vars(date_to_read))
         file_path = files.path(self.file_key, file_vars=self.vars)
         # Several days may resolve to the same (e.g. monthly) file; only
         # parse each distinct path once
         if file_path not in files_read:
             files_read.append(file_path)
             self.dependencies.append(file_path)
             with files.open(self.file_key,
                             file_vars=self.vars,
                             mode="rt",
                             encoding="latin_1") as fid:
                 self.parse_file(fid)
         date_to_read += timedelta(days=1)
Exemplo n.º 12
0
def write_sinex(dset):
    """Write normal equations of session solution in SINEX format.

    Args:
        dset:  Dataset, data for a model run.
    """
    # Add dependency to sinex_blocks-module
    dependencies.add(sinex_blocks.__file__)

    # Sessions flagged as bad produce no SINEX output
    if config.tech.analysis_status.status.str == "bad":
        log.info("Bad session. Not producing SINEX.")
        return

    with files.open("output_sinex", file_vars=dset.vars, mode="wt") as fid:
        sinex = sinex_blocks.SinexBlocks(dset, fid)
        sinex.header_line()
        # Each configured block may carry extra arguments after a ':'
        for block_def in config.tech[WRITER].blocks.list:
            name, *block_args = block_def.split(":")
            sinex.write_block(name, *block_args)
        sinex.end_line()
Exemplo n.º 13
0
def write_requirements():
    """Write requirements (python modules) to file for reproducibility.

    Note that this only stores the modules that have been imported, and that have a `__version__`-attribute (see PEP
    396 - https://www.python.org/dev/peps/pep-0396/)
    """
    # Copy sys.modules with list() since reading __version__ may trigger imports
    versions = {
        name: getattr(module, "__version__", None)
        for name, module in list(sys.modules.items())
    }
    versions["python"] = platform.python_version()

    # Modules without a string __version__ are dropped
    lines = sorted("{}=={}".format(name, version.strip())
                   for name, version in versions.items()
                   if isinstance(version, str))

    # Write to requirements-file
    with files.open("requirements", mode="w") as fid:
        fid.write("\n".join(lines) + "\n")
Exemplo n.º 14
0
def uere_writer(dset):
    """Write UERE analysis results

    Writes one fixed-width line per observation with epoch, MJD, satellite and
    the SISRE, UEE and UERE values (all in meters).

    Args:
        dset:       Dataset, a dataset containing the data.
    """

    with files.open("output_uere", file_vars=dset.vars, mode="wt") as fid:

        # Write header
        fid.write(
            "#{:>19s}{:>14s}{:>5s}{:>16s}{:>16s}{:>16s}\n"
            "".format("YYYY:MM:DD:hh:mm:ss", "MJD", "SAT", "SISRE", "UEE", "UERE")
        )
        fid.write("#{:>54s}{:>16s}{:>16s}\n" "".format("[m]", "[m]", "[m]"))
        fid.write("#{}\n".format("_" * 88))

        # Loop over all observations
        for idx in range(0, dset.num_obs):

            #
            # Write SISRE analysis results
            #

            # #YYYY:MM:DD:hh:mm:ss           MJD  SAT           SISRE             UEE            UERE
            # #                                                   [m]             [m]             [m]
            # #______________________________________________________________________________________
            #  2017:10:28:00:00:00  58054.000000  G01         0.86957        -1.26053        -0.51241
            #  2017:10:28:00:00:00  58054.000000  G02        -0.27013         0.25391        -0.39711
            # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8----+--
            fid.write(
                " {:>19s}{:14.6f}{:>5s}{:16.5f}{:16.5f}{:16.5f}\n"
                "".format(
                    dset.time.gps.datetime[idx].strftime("%Y:%m:%d:%H:%M:%S"),
                    dset.time.gps.mjd[idx],
                    dset.satellite[idx],
                    dset.sisre[idx],
                    dset.uee[idx],
                    dset.uere[idx],
                )
            )
Exemplo n.º 15
0
def sisre_output_buffer(dset):
    """Write SISRE buffer file by appending SISRE output file path

    The buffer file is opened in append mode and protected with an exclusive
    advisory lock so that concurrent analyses do not interleave their writes.

    Args:
        dset:       Dataset, a dataset containing the data.
    """
    with files.open("output_sisre_buffer", file_vars=dset.vars, mode="at") as fid:

        # Allow only one process to hold an exclusive lock for a given file at a given time
        try:
            fcntl.flock(fid, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except OSError:  # IOError has been an alias of OSError since Python 3.3
            log.fatal("flock() failed to hold an exclusive lock.")

        # Append SISRE output file path to SISRE buffer file
        file_path = files.path("output_sisre_2", file_vars=dset.vars)
        fid.write(f"{file_path}\n")

        # Unlock file (narrow except: a bare except would also swallow
        # KeyboardInterrupt/SystemExit)
        try:
            fcntl.flock(fid, fcntl.LOCK_UN)
        except OSError:
            log.fatal("flock() failed to unlock file.")
Exemplo n.º 16
0
def write_session_report(rundate, tech):
    """Collect remover and orbit report data and write the GNSS session report."""
    dsets = dict()

    # One dataset per station; each remover contributes its own keep-index field
    for _, station_dset, data in report.reports("remover_data"):
        removers = dsets.setdefault("removers", dict())
        target = removers.setdefault(station_dset.dataset_name, station_dset)
        target.add_float(
            "keep_idx_{}".format(data["remover_name"]),
            val=data["keep_idx"])  # TODO: What is keep_idx?

    for _, orbit_dset, data in report.reports("orbit_data"):
        dsets.setdefault("orbit", dict())[data["station"]] = orbit_dset

    with files.open("output_gnss_session_report", mode="wt") as fid:
        header(fid)
        write_config(fid)
        unhealthy_satellites(fid, dsets)
        eclipse_satellites(fid, dsets)
        # rejected_satellites_per_station(fid, dsets)
        # rejected_satellite_observations(fid, dsets)
        # rejected_station_observations(fid, dsets)
        statistics(fid, dsets)
Exemplo n.º 17
0
def baseline_stats(dset):
    """Write statistics about baselines to file.

    Args:
        dset:   Dataset, information about model run.
    """
    stats_str = ["Statistics about stations and baselines"]

    # Summary line over all observations
    stats_str.append(_write_line("ALL", "", dset, np.ones(dset.num_obs, dtype=bool)))

    # One line per station
    stations = dset.unique("station")
    for station in stations:
        stats_str.append(_write_line(station, "", dset, dset.filter(station=station)))

    # Ordered pairs, so each baseline appears once in each direction
    for sta_1, sta_2 in itertools.permutations(stations, 2):
        both_idx = np.logical_and(dset.filter(station=sta_1),
                                  dset.filter(station=sta_2))
        stats_str.append(_write_line(sta_1, sta_2, dset, both_idx))

    with files.open("output_baseline_stats", file_vars=dset.vars,
                    mode="wt") as fid:
        fid.write("\n".join(stats_str))
    log.out("\n  ".join(stats_str))
Exemplo n.º 18
0
"""Where library module for handling of SI-unit conversions

Description:
------------

See midgard.math.unit for full documentation

Note that `pint` has a system for defining new units and constants if necessary,
`http://pint.readthedocs.io/en/latest/defining.html`. To use this system, add units to the `units.conf` file in the
`config`-directory.
"""

# Midgard imports
from midgard.math.unit import Unit

# Where imports
from where.lib import files

# Read extra units defined specially for Where.
# NOTE: this runs at import time, so importing this module has the side effect
# of loading the extra unit definitions into the shared pint registry.
with files.open("units") as fid:
    Unit._ureg.load_definitions(fid)
Exemplo n.º 19
0
def write_one_day(dset, date):
    """Write RINEX navigation file for given date

    Writes a RINEX 2 navigation file (header and data records) based on the
    broadcast ephemeris apriori orbit for the given date.

    Args:
        dset:       Dataset, a dataset containing the data.
        date:       Current date
    """
    brdc = apriori.get(
        "orbit",
        rundate=dset.rundate,
        time=dset.time,
        satellite=tuple(dset.satellite),
        system=tuple(dset.system),
        station=dset.vars["station"],
        apriori_orbit="broadcast",
    )

    meta = brdc.dset_edit.meta[date.strftime("%Y-%m-%d")]
    data = brdc.dset_edit  # TODO: Another possibility: brdc.dset_raw

    # Copy the file variables so that overriding 'doy' below does not mutate
    # dset.vars as a side effect for the caller
    file_vars = dict(dset.vars)
    file_vars["doy"] = config.date_vars(
        date
    )["doy"]  # TODO: workaround, so that all files are written in the same working directory -> does not work if year is changed.

    with files.open("output_rinex2_nav", file_vars=file_vars,
                    mode="wt") as fid:

        #
        # Write RINEX navigation header
        #
        if meta["file_type"] == "N":
            file_type = "NAVIGATION DATA"
        else:
            # Fail with a clear message instead of an UnboundLocalError below
            log.fatal(f"Unknown RINEX navigation file type '{meta['file_type']}'")

        fid.write("{:>9s}{:11s}{:40s}RINEX VERSION / TYPE\n".format(
            meta["version"], "", file_type))
        fid.write("{:20s}{:20s}{:20s}PGM / RUN BY / DATE\n".format(
            meta["program"], meta["run_by"], meta["file_created"]))

        for line in meta["comment"]:
            fid.write("{:60s}COMMENT\n".format(line))
        fid.write("{:>14.4e}{:>12.4e}{:>12.4e}{:>12.4e}{:10s}ION ALPHA\n"
                  "".format(
                      meta["iono_para"]["GPSA"]["para"][0],
                      meta["iono_para"]["GPSA"]["para"][1],
                      meta["iono_para"]["GPSA"]["para"][2],
                      meta["iono_para"]["GPSA"]["para"][3],
                      "",
                  ))
        fid.write("{:>14.4e}{:>12.4e}{:>12.4e}{:>12.4e}{:10s}ION BETA\n"
                  "".format(
                      meta["iono_para"]["GPSB"]["para"][0],
                      meta["iono_para"]["GPSB"]["para"][1],
                      meta["iono_para"]["GPSB"]["para"][2],
                      meta["iono_para"]["GPSB"]["para"][3],
                      "",
                  ))
        # TODO fid.write('{:>22.12e}{:>19.12e}{:>9d}{:>9d}{:1s}DELTA-UTC: A0,A1,T,W\n'
        #          ''.format(meta['a0'], meta['a1'], int(meta['t']), int(meta['w']), ''))
        fid.write("{:>6d}{:54s}LEAP SECONDS\n".format(
            int(meta["leap_seconds"]["leap_seconds"]), ""))
        fid.write("{:60s}END OF HEADER\n".format(""))

        #
        # Write RINEX navigation data
        #
        # TODO:
        #        for drow in data.get_rows():
        #            fid.write('{d.sat:2d} {d.time:%Y%m%d %H%M%S} {d.inc0:13.4f}'.format(d=drow))
        #            fid.write('  {d.hurra:14.10f} ...'.format(d=drow))

        for idx in range(0, data.num_obs):
            # RINEX 2 uses the PRN number only (e.g. 'G01' -> 1)
            sat = int(data.satellite[idx][1:3])
            d = data.time.gps.datetime[idx]
            fid.write(
                "{:2d}{:>3s}{:>3d}{:>3d}{:>3d}{:>3d}{:>5.1f}{:>19.12e}{:>19.12e}{:>19.12e}\n"
                "".format(
                    sat,
                    str(d.year)[2:4],
                    d.month,
                    d.day,
                    d.hour,
                    d.minute,
                    d.second,
                    data.sat_clock_bias[idx],
                    data.sat_clock_drift[idx],
                    data.sat_clock_drift_rate[idx],
                ))
            fid.write("{:22.12e}{:>19.12e}{:>19.12e}{:>19.12e}\n"
                      "".format(data.iode[idx], data.crs[idx],
                                data.delta_n[idx], data.m0[idx]))
            fid.write("{:22.12e}{:>19.12e}{:>19.12e}{:>19.12e}\n"
                      "".format(data.cuc[idx], data.e[idx], data.cus[idx],
                                data.sqrt_a[idx]))
            # TODO: toe depends on GNSS system time -> for BeiDou it has to be changed
            fid.write("{:22.12e}{:>19.12e}{:>19.12e}{:>19.12e}\n"
                      "".format(data.toe.gps.gpssec[idx], data.cic[idx],
                                data.Omega[idx], data.cis[idx]))
            fid.write("{:22.12e}{:>19.12e}{:>19.12e}{:>19.12e}\n"
                      "".format(data.i0[idx], data.crc[idx], data.omega[idx],
                                data.Omega_dot[idx]))
            # TODO: gnss_week depends on GNSS -> for BeiDou it has to be changed
            # TODO: codes_l2 only valid for GPS and QZSS -> Galileo data_source; rest None
            # TODO: 'G': 'l2p_flag', 'J': 'l2p_flag'
            fid.write("{:22.12e}{:>19.12e}{:>19.12e}{:>19.12e}\n"
                      "".format(data.idot[idx], data.codes_l2[idx],
                                data.gnss_week[idx], data.l2p_flag[idx]))
            # TODO: 'G': 'iodc', 'J': 'iodc', 'E': 'bgd_e1_e5b', 'C': 'tgd_b2_b3'
            # TODO: 'G': 'tgd', 'J': 'tgd', 'E': 'bgd_e1_e5a', 'C': 'tgd_b1_b3', 'I': 'tgd'
            fid.write("{:22.12e}{:>19.12e}{:>19.12e}{:>19.12e}\n"
                      "".format(data.sv_accuracy[idx], data.sv_health[idx],
                                data.tgd[idx], data.iodc[idx]))
            # TODO: transmission_time depends on GNSS system time -> for BeiDou it has to be changed
            # TODO: fit_interval only valid for GPS and QZSS -> for BeiDou age_of_clock_corr; rest None
            fid.write("{:22.12e}{:>19.12e}{:>19.12e}{:>19.12e}\n"
                      "".format(data.transmission_time.gps.gpssec[idx],
                                data.fit_interval[idx], 0.0, 0.0))
Exemplo n.º 20
0
 def write_to_file():
     """Pickle the collected reports for this analysis to the report file."""
     log.debug(f"Store reports for {tech.upper()} {rundate}")
     with files.open("report_pickle", file_vars=file_vars,
                     mode="wb") as fid:
         pickle.dump(_REPORTS, fid)
Exemplo n.º 21
0
def rinex3_nav(dset):
    """Write RINEX navigation file

    Writes a RINEX 3.03 navigation file (header plus one broadcast ephemeris
    record per observation) for all GNSS except GLONASS and SBAS.

    Args:
        dset:       Dataset, a dataset containing the data.
    """

    # Overwrite Dataset. This is necessary if the writer is called from a analysis (e.g. SISRE) with does not include
    # broadcast ephemeris information.
    # TODO: Is that the best solution?
    if "rinex_nav/edit" not in dset.description and "rinex_nav/raw" not in dset.description:
        brdc = apriori.get(
            "orbit",
            rundate=dset.rundate,
            time=dset.time,
            satellite=tuple(dset.satellite),
            system=tuple(dset.system),
            station=dset.vars["station"],
            apriori_orbit="broadcast",
        )
        meta = brdc.dset_edit.meta[dset.rundate.strftime("%Y-%m-%d")]
        data = brdc.dset_edit  # TODO: Another possibility: brdc.dset_raw

    else:
        # The dataset itself already stems from a rinex_nav analysis
        meta = dset.meta[dset.rundate.strftime("%Y-%m-%d")]
        data = dset  # TODO: Another possibility: brdc.dset_raw

    # Mapping from system letter to the satellite system label in the header
    sat_sys_definition = dict(
        G="GPS", R="GLONASS", E="Galileo", J="QZSS", C="BDS", I="IRNSS", S="SBAS Payload", M="Mixed"
    )
    rinex_version = "3.03"

    with files.open("output_rinex3_nav", file_vars=dset.vars, mode="wt") as fid:

        #
        # Write RINEX navigation header
        #

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        #      3.03           N: GNSS NAV DATA    E: GALILEO          RINEX VERSION / TYPE
        file_type = "N: GNSS NAV DATA"
        # Single-constellation files use that system's letter, otherwise 'M' (mixed)
        sat_sys = set(dset.system).pop() if len(set(dset.system)) == 1 else "M"
        fid.write(
            "{:>9s}{:11s}{:20s}{:20s}RINEX VERSION / TYPE\n"
            "".format(rinex_version, "", file_type, sat_sys + ": " + sat_sys_definition[sat_sys])
        )

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        # CCRINEXN V1.6.0 UX  CDDIS               19990903 152236 UTC     PGM / RUN BY / DATE
        pgm = "where " + where.__version__
        run_by = util.get_user_info()["inst_abbreviation"] if "inst_abbreviation" in util.get_user_info() else ""
        file_created = datetime.utcnow().strftime("%Y%m%d %H%M%S") + " UTC"
        fid.write("{:20s}{:20s}{:20s}PGM / RUN BY / DATE\n".format(pgm, run_by, file_created))

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        # IGS BROADCAST EPHEMERIS FILE                                COMMENT
        # TODO fid.write('{:60s}COMMENT\n'.format(line))

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        # BDSA    .1397E-07   .0000E+00  -.5960E-07   .5960E-07       IONOSPHERIC CORR
        # BDSB    .1106E+06  -.3277E+05  -.2621E+06   .1966E+06       IONOSPHERIC CORR
        if "iono_para" in meta:
            for type_, val in sorted(meta["iono_para"].items()):
                fid.write(
                    "{:4s} {:>12.4e}{:>12.4e}{:>12.4e}{:>12.4e} {:1s} {:2s}  IONOSPHERIC CORR\n"
                    "".format(
                        type_,
                        val["para"][0],
                        val["para"][1],
                        val["para"][2],
                        val["para"][3],
                        val["time_mark"],
                        val["sv_id"],
                    )
                )

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        # BDUT -5.5879354477e-09-0.000000000e+00     14 1886          TIME SYSTEM CORR
        # GAUT  0.0000000000e+00 0.000000000e+00 172800 1886          TIME SYSTEM CORR
        if "time_sys_corr" in meta:
            for type_, val in sorted(meta["time_sys_corr"].items()):
                fid.write(
                    "{:4s} {:>17.10e}{:>16.9e}{:>7d}{:>5d}{:10s}TIME SYSTEM CORR\n"
                    "".format(type_, val["a0"], val["a1"], val["t"], val["w"], "")
                )

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        #     16    17  1851     3                                    LEAP SECONDS
        if "leap_seconds" in meta:
            fid.write(
                "{:>6s}{:>6s}{:>6s}{:>6s}{:3s}{:33s}LEAP SECONDS\n"
                "".format(
                    meta["leap_seconds"]["leap_seconds"],
                    meta["leap_seconds"]["future_past_leap_seconds"],
                    meta["leap_seconds"]["week"],
                    meta["leap_seconds"]["week_day"],
                    meta["leap_seconds"]["time_sys"],
                    "",
                )
            )

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        #                                                             END OF HEADER
        fid.write("{:60s}END OF HEADER\n".format(""))

        #
        # Write RINEX navigation data
        #

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        # E11 2016 02 28 22 00 00  .654120231047E-04  .109707798401E-10  .000000000000E+00
        #       .400000000000E+01  .129375000000E+02  .319691887879E-08 -.292934515480E+01
        #       .460073351860E-06  .329698785208E-03  .683590769768E-05  .544061308098E+04
        #       .792000000000E+05  .391155481339E-07 -.125937621871E+01  .316649675369E-07
        #       .967916388734E+00  .197406250000E+03 -.653087089047E+00 -.571166648526E-08
        #       .276797244002E-09  .257000000000E+03  .188600000000E+04  .000000000000E+00
        #      -.100000000000E+01  .000000000000E+00 -.249128788710E-07 -.225845724344E-07
        #       .798850000000E+05  .000000000000E+00  .000000000000E+00  .000000000000E+00

        for idx in range(0, data.num_obs):

            # Remove observation epochs, which does not fit in the given time period
            # TODO: Is the time handling ok. Especially for BeiDou from day to day or week to week?
            rundate = datetime(data.rundate.year, data.rundate.month, data.rundate.day)
            if data.time.gps.datetime[idx] < rundate or data.time.gps.datetime[idx] >= (rundate + timedelta(days=1)):
                continue

            # TODO:
            #        for drow in data.get_rows():
            #            fid.write('{d.sat:2d} {d.time:%Y%m%d %H%M%S} {d.inc0:13.4f}'.format(d=drow))
            #            fid.write('  {d.hurra:14.10f} ...'.format(d=drow))

            if data.system[idx] in ["R", "S"]:
                # NOTE(review): other writers in this project use log.warn -
                # confirm that log.warning is a valid alias
                log.warning("Writing of RINEX navigation message is not implemented for GLONASS and SBAS satellites.")
                continue

            time = _time_system_correction(data, idx)
            gnss_data = _get_fields_based_on_system(data, idx)

            # BROADCAST ORBIT - 1
            fid.write(
                "{:3s} {:>4d} {:>2s} {:>2s} {:>2s} {:>2s} {:>2s}{:>19.12e}{:>19.12e}{:>19.12e}\n"
                "".format(
                    data.satellite[idx],
                    time["toc"].year,
                    str(time["toc"].month).zfill(2),
                    str(time["toc"].day).zfill(2),
                    str(time["toc"].hour).zfill(2),
                    str(time["toc"].minute).zfill(2),
                    str(time["toc"].second).zfill(2),
                    data.sat_clock_bias[idx],
                    data.sat_clock_drift[idx],
                    data.sat_clock_drift_rate[idx],
                )
            )

            # BROADCAST ORBIT - 2
            fid.write(
                "    {:19.12e}{:>19.12e}{:>19.12e}{:>19.12e}\n"
                "".format(data.iode[idx], data.crs[idx], data.delta_n[idx], data.m0[idx])
            )

            # BROADCAST ORBIT - 3
            fid.write(
                "    {:19.12e}{:>19.12e}{:>19.12e}{:>19.12e}\n"
                "".format(data.cuc[idx], data.e[idx], data.cus[idx], data.sqrt_a[idx])
            )

            # BROADCAST ORBIT - 4
            fid.write(
                "    {:19.12e}{:>19.12e}{:>19.12e}{:>19.12e}\n"
                "".format(time["toe"], data.cic[idx], data.Omega[idx], data.cis[idx])
            )

            # BROADCAST ORBIT - 5
            fid.write(
                "    {:19.12e}{:>19.12e}{:>19.12e}{:>19.12e}\n"
                "".format(data.i0[idx], data.crc[idx], data.omega[idx], data.Omega_dot[idx])
            )

            # BROADCAST ORBIT - 6
            fid.write(
                "    {:19.12e}{:>19.12e}{:>19.12e}{:>19.12e}\n"
                "".format(data.idot[idx], gnss_data["data_info"], time["week"], gnss_data["l2p_flag"])
            )

            # BROADCAST ORBIT - 7
            fid.write(
                "    {:19.12e}{:>19.12e}{:>19.12e}{:>19.12e}\n"
                "".format(
                    data.sv_accuracy[idx], data.sv_health[idx], gnss_data["tgd_bgd"], gnss_data["iodc_groupdelay"]
                )
            )

            # BROADCAST ORBIT - 8
            fid.write(
                "    {:19.12e}{:>19.12e}{:>19.12e}{:>19.12e}\n"
                "".format(time["transmission_time"], gnss_data["interval"], 0.0, 0.0)
            )
Exemplo n.º 22
0
def sisre_comparison_report(dset):
    """Compare SISRE datasets

    Merges the SISRE results of all given datasets into a single dataframe
    (one column per signal type) and writes a Markdown comparison report,
    which is converted to PDF at the end.

    Args:
        dset (dict):  Mapping of dataset name to SISRE dataset. The datasets contain the data.
    """
    dsets = dset
    merged = pd.DataFrame()

    # NOTE: the loop deliberately rebinds `dset`; after the loop it refers to the
    # last processed dataset and is used for `figure_dir` and `_markdown_to_pdf`.
    for name, dset in dsets.items():

        if dset.num_obs == 0:
            log.warn(f"Dataset '{name}' is empty.")
            continue

        # One SISRE column per signal type, merged on satellite/system/epoch
        signal = _get_signal_type(dset.meta)
        frame = dset.as_dataframe(fields=["satellite", "system", "sisre", "time.gps"])  # , index="time.gps")
        frame = frame.rename(columns={"sisre": signal})

        if merged.empty:
            merged = frame
        else:
            merged = merged.merge(frame, on=["satellite", "system", "time.gps"], how="outer")

    if merged.empty:
        log.fatal(f"All given datasets are empty [{', '.join(dsets.keys())}].")

    with files.open(
        file_key="output_sisre_comparison_report",
        file_vars=dsets[next(iter(dsets))].vars,
        create_dirs=True,
        mode="wt",
    ) as fid:
        _header(fid)
        fid.write("#Comparison of SISE analyses\n")
        fid.write("In the following SISE analyses results are compared for:\n\n")
        fid.write("* Monthly 95th percentile SISE for satellites\n")
        fid.write("* Monthly 95th percentile and RMS SISE for signal combinations (users)\n")
        fid.write("\\newpage\n")

        # Generate figure directory to save figures generated for SISRE report
        figure_dir = files.path("output_sisre_comparison_report_figure", file_vars=dset.vars)
        figure_dir.mkdir(parents=True, exist_ok=True)

        fid.write(f"\n\n##Monthly 95th percentile SISE for satellites\n")
        # Produce plot with same yrange than for _plot_bar_sisre_signal_combination_percentile threshold plot
        _plot_bar_sisre_satellite_percentile(merged, fid, figure_dir, threshold=False, write_table=True, yrange=[0, 2])
        _plot_bar_sisre_satellite_percentile(merged, fid, figure_dir, threshold=True, write_table=False)

        fid.write(f"\n\n##Monthly 95th percentile and RMS SISE for signal combinations (users)\n")
        _plot_bar_sisre_signal_combination_percentile(merged, fid, figure_dir, threshold=False, write_table=True)
        _plot_bar_sisre_signal_combination_percentile(merged, fid, figure_dir, threshold=True, write_table=False)
        _plot_bar_sisre_signal_combination_rms(merged, fid, figure_dir, write_table=True)

    # Generate PDF from Markdown file
    _markdown_to_pdf(dset)
Exemplo n.º 23
0
class _convert_units(type):
    """A meta-class that does the parsing of units

    The meta-class is used for convenience. It allows us to use the `unit`-class without instantiating it. That is, we
    can write `unit.km2m` instead of `unit().km2m`.
    """

    ureg = pint.UnitRegistry()

    # Extend the default pint registry with the project's own unit definitions
    with files.open("units") as fid:
        ureg.load_definitions(fid)

    @cache.function
    def __call__(cls, from_unit, to_unit=None):
        """Calculate the conversion scale between from_unit and to_unit

        If `to_unit` is not given, then `from_unit` is interpreted as a constant which is converted to base units
        (meters, seconds, etc) and returned.

        Args:
            from_unit (String):   The unit to convert from
            to_unit (String):     The unit to convert to

        Returns:
            Float:  Scale to multiply by to convert from from_unit to to_unit
        """
        if to_unit is None:
            return cls.ureg(from_unit)
        else:
            return cls.ureg(from_unit).to(to_unit).magnitude

    def __getattr__(cls, key):
        """Simplify notation for converting between units

        This makes it possible to type `unit.km2m` instead of `unit('km', 'm')`. We split on the character `2`
        (pronounced "to"), and pass the result on to :func:`__call__` to do the conversion. If a `2` is not found, we
        check if we can split on '_to_' instead, if so it is interpreted as a conversion function and is handed of to
        :func:`convert`. Finally, if no split is done, the attribute is interpreted as a simple unit.

        Note that if you need a unit whose name contains a '2' (or '_to_') you need to use the notation
        `unit('foot_H2O', 'pascal'). Similarly, more complex units need the same notation, e.g. `unit('meters per
        second ** 2')`.

        Args:
            key (String):   The key (name) of the attribute to the class. Interpreted as units

        Returns:
            Float:  Scale to multiply by to perform the unit conversion
        """
        if "2" in key:
            from_unit, to_unit = key.split("2", maxsplit=1)
            return cls(from_unit, to_unit)
        elif "_to_" in key:
            from_unit, to_unit = key.split("_to_", maxsplit=1)
            return cls.function(from_unit, to_unit)
        else:
            return cls(key)

    def function(cls, from_unit, to_unit):
        """Create a conversion function

        This is necessary for unit conversions that are not simple multiplications. The usual example is temperature
        conversions for instance from Celsius to Fahrenheit.

        Args:
            from_unit (String):   The unit to convert from
            to_unit (String):     The unit to convert to

        Returns:
            Function:  Conversion function that converts from from_unit to to_unit
        """
        return lambda value: cls.ureg.Quantity(value, cls.ureg(from_unit)).to(
            cls.ureg(to_unit)).magnitude

    def register(cls, unit):
        """Register unit of a function/method/property

        This method should be used as a decorator on the function/method/property, and specify the unit of the value
        returned by that function/method/property. For instance

            @property
            @unit.register('meter')
            def calculate_delay(...):
                return delay_in_meters

        Units registered with this decorator can be used by the functions returned by the `unit_func_factory`,
        `convert_func_factory` and `factor_func_factory`.

        Args:
            unit (String):  Name of unit.

        Returns:
            Function:  Decorator that registers the unit.
        """
        def register_decorator(func):
            """Register unit of func in _UNITS-dictionary"""
            module_name = func.__module__
            func_name = func.__name__
            _UNITS.setdefault(module_name, dict())[func_name] = unit

            return func

        return register_decorator

    @staticmethod
    def _get_unit(module_name, func_name):
        """Get registered unit of function/method/property

        Outside code should use the `unit_factory` to get registered units.

        Args:
            module_name (String):   Name of module containing function/method/property.
            func_name (String):     Name of function/method/property with registered unit.

        Returns:
            String:  Name of unit.

        Raises:
            UnitError:  If no unit is registered for the given function/method/property.
        """
        units = _UNITS.get(module_name, dict())
        try:
            return units[func_name]
        except KeyError:
            raise UnitError("No unit is registered for '{}' in {}".format(
                func_name, module_name)) from None

    def unit_factory(cls, module_name):
        """Provide a function that can get registered units of functions/methods/properties

        The function checks for units registered with the unit.register-decorator. It can for instance be added to a
        class as follows:

            unit = staticmethod(unit.unit_func_factory(__name__))

        Args:
            module_name (String):   Name of module as returned by `__name__`.

        Returns:
            Function:  Function that gets unit of values returned by functions.
        """
        def unit(func_name):
            """Unit of value returned by function/method/property

            Args:
                func_name (String):  Name of function/method/property.

            Returns:
                String:  Name of unit.
            """
            return cls._get_unit(module_name, func_name)

        return unit

    def convert_factory(cls, module_name):
        """Provide a function that can convert values of properties to a given unit

        The function checks for units registered with the unit.register-decorator. It can for instance be added to a
        class as follows:

            convert_to = unit.convert_property_factory(__name__)

        Note that unlike the other factories, this one only works for properties.

        Args:
            module_name (String):   Name of module as returned by `__name__`.

        Returns:
            Function:  Function that converts values of properties.
        """
        def convert(self, property_name, to_unit):
            """Convert value of property to another unit

            Args:
                property_name (String):  Name of property.
                to_unit (String):        Name of other unit

            Returns:
                Numeric scalar or array:  Values of property converted to other unit.
            """
            from_unit = cls._get_unit(module_name, property_name)
            factor = cls(from_unit, to_unit)
            return getattr(self, property_name) * factor

        return convert

    def factor_factory(cls, module_name):
        """Provide a function that calculates conversion factor to another unit

        The function finds conversion factors for units registered with the unit.register-decorator. It can for
        instance be added to a class as follows:

            unit_factor = staticmethod(unit.factor_factory(__name__))

        Args:
            module_name (String):   Name of module as returned by `__name__`.

        Returns:
            Function:  Function that calculates conversion factor to another unit.
        """
        def factor(func_name, to_unit):
            """Conversion factor between unit of function/method/property and another unit

            Args:
                func_name (String):  Name of function/method/property.
                to_unit (String):    Name of other unit.

            Returns:
                Float:  Conversion factor.
            """
            from_unit = cls._get_unit(module_name, func_name)
            return cls(from_unit, to_unit)

        return factor

    def units_dict(cls, module_name):
        """Dictionary of units registered on a module

        Add a sub-dictionary if the module name is unknown, to set up a reference in case units are registered later.

        Returns:
            Dictionary:  Units registered on a module.
        """
        _UNITS.setdefault(module_name, dict())
        return _UNITS[module_name]

    @property
    def names(cls):
        """List available units and constants

        The list of available units contains aliases (for instance s, sec, second), but not plural forms (secs,
        seconds) or possible prefixes (milliseconds, usec, ms).

        Returns:
            List of strings: Available units and constants
        """
        return dir(cls.ureg)

    #
    # Conversion routines not defined by pint
    #
    def rad_to_dms(cls, radians):
        """Converts radians to degrees, minutes and seconds

        Args:
            radians (Float):  angle(s) in radians

        Returns:
            Tuple of Floats:  degrees, minutes, seconds

        Examples:
            >>> unit.rad_to_dms(1.04570587646256)
            (59.0, 54.0, 52.3200000000179)
            >>> unit.rad_to_dms(-0.2196050301753194)
            (-12.0, 34.0, 56.78900000000468)
            >>> unit.rad_to_dms(-0.005817642339636369)
            (-0.0, 19.0, 59.974869999999925)
        """
        sign = np.sign(radians)
        degrees = abs(radians) * cls.radians2degrees
        # hour2minutes/minute2seconds are used only as the factor 60 for the
        # sexagesimal subdivisions (pint has no arcminute alias registered here)
        minutes = (degrees % 1) * cls.hour2minutes
        seconds = (minutes % 1) * cls.minute2seconds

        return sign * np.floor(degrees), np.floor(minutes), seconds

    def dms_to_rad(cls, degrees, minutes, seconds):
        """Convert degrees, minutes and seconds to radians

        The sign of degrees will be used. In this case, be careful that the sign
        of +0 or -0 is correctly passed on. That is, degrees must be specified as a float, not an
        int.

        Args:
            degrees:   Degrees as float (including sign) or array of floats
            minutes:   Minutes as int/float or array of ints/floats
            seconds:   Seconds as float or array of floats

        Returns:
            Float/Array: Given degrees, minutes and seconds as radians.

        Examples:
            >>> unit.dms_to_rad(59, 54, 52.32)
            1.04570587646256
            >>> unit.dms_to_rad(-12.0, 34, 56.789)
            -0.21960503017531938
            >>> unit.dms_to_rad(-0.0, 19, 59.974870)
            -0.005817642339636369
        """
        sign = np.copysign(1, degrees)
        return (sign * (np.abs(degrees) + minutes * cls.minutes2hours +
                        seconds * cls.seconds2hours) * cls.degrees2radians)

    def hms_to_rad(cls, hours, minutes, seconds):
        """Convert hours, minutes and seconds to radians

        Hours must be non-negative, otherwise a ValueError is raised. Arguments
        may be given as numbers, strings parseable as floats, or arrays thereof.

        Args:
            hours:     Hours as int or array of ints, non-negative
            minutes:   Minutes as int or or array of ints
            seconds:   Seconds as float or or array of floats

        Returns:
            Float: Given hours, minutes and seconds as radians.

        Examples:
            >>> unit.hms_to_rad(17, 7, 17.753427)
            4.482423920139868
            >>> unit.hms_to_rad('12', '0', '0.00')
            3.1415926535897936
            >>> unit.hms_to_rad(-12, 34, 56.789)
            Traceback (most recent call last):
            ValueError: hours must be non-negative
        """
        # Coerce to float so that string input works, and enforce the documented
        # non-negativity contract (the doctests above rely on both behaviors)
        hours = np.asarray(hours, dtype=float)
        if np.any(hours < 0):
            raise ValueError("hours must be non-negative")
        minutes = np.asarray(minutes, dtype=float)
        seconds = np.asarray(seconds, dtype=float)

        # One hour of right ascension corresponds to 15 degrees
        return 15 * cls.dms_to_rad(hours, minutes, seconds)
Exemplo n.º 24
0
 def write_to_file():
     """Pickle the collected reports (_REPORTS) to the 'report_pickle' file.

     Relies on names from the enclosing scope: `tech`, `rundate`, `file_vars`
     and the `_REPORTS` container — presumably module-level state filled in by
     earlier report calls (TODO confirm against the surrounding module).
     """
     log.debug("Store reports for {} {}".format(tech.upper(), rundate))
     with files.open("report_pickle", mode="wb",
                     file_vars=file_vars) as fid:
         pickle.dump(_REPORTS, fid)
Exemplo n.º 25
0
def rinex3_obs(dset):
    """Write RINEX observations in Rinex format 3.03

    Writes a RINEX observation header built from `dset.meta`, followed by the
    observation records epoch by epoch. As a side effect,
    `dset.vars["sampling_rate"]` is set (used to determine the output file
    name via the 'output_rinex3_obs' file key).

    Args:
        dset:       Dataset, a dataset containing the data.
    """

    # Initialize variables
    meta = dset.meta
    version = "3.03"
    program = "Where v{}".format(where.__version__)
    run_by = "NMA"
    date = datetime.utcnow()
    time_sys = "GPS"  # TODO: So far only GPS time system can be handled by Where.
    file_created = "{:15s} {:3s}".format(date.strftime("%Y%m%d %H%M%S"), "UTC")
    pos_x = dset.site_pos.itrs[0][0]
    pos_y = dset.site_pos.itrs[0][1]
    pos_z = dset.site_pos.itrs[0][2]

    cfg_sampling_rate = config.tech.sampling_rate.float
    num_satellites = len(dset.unique("satellite"))

    # NOTE(review): `file_type` is only assigned for observation files ("O");
    # any other meta["file_type"] would raise NameError at the header write
    # below — confirm only "O" files ever reach this writer.
    if meta["file_type"] == "O":
        file_type = "OBSERVATION DATA"

    # Use the configured sampling rate unless the data interval is coarser
    if meta["interval"] <= float(cfg_sampling_rate):
        sampling_rate = cfg_sampling_rate
    else:
        sampling_rate = meta["interval"]
    dset.vars["sampling_rate"] = str(
        int(sampling_rate
            ))  # Used as placeholder for determination of output file name

    with files.open("output_rinex3_obs", file_vars=dset.vars,
                    mode="wt") as fid:

        # ================================
        #  Write RINEX observation header
        # ================================
        #
        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        #      3.02           OBSERVATION DATA    M (MIXED)           RINEX VERSION / TYPE
        fid.write("{:>9s}{:11s}{:20s}{:20s}RINEX VERSION / TYPE\n".format(
            version, "", file_type, meta["sat_sys"]))

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        # MAKERINEX 2.0.20023 BKG/GOWETTZELL      2016-03-02 00:20    PGM / RUN BY / DATE
        fid.write("{:20s}{:20s}{:20s}PGM / RUN BY / DATE\n".format(
            program, run_by, file_created))

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        # G = GPS R = GLONASS E = GALILEO S = GEO M = MIXED           COMMENT
        if "comment" in meta:
            for line in meta["comment"]:
                fid.write("{:60s}COMMENT\n".format(line))

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        # stas                                                        MARKER NAME
        fid.write("{:60s}MARKER NAME\n".format(meta["marker_name"]))

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        # 66008M005                                                   MARKER NUMBER
        if "marker_number" in meta:
            fid.write("{:60s}MARKER NUMBER\n".format(meta["marker_number"]))

        if "marker_type" in meta:
            fid.write("{:60s}MARKER TYPE\n".format(meta["marker_type"]))

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        # SATREF              Norwegian Mapping Authority             OBSERVER / AGENCY
        fid.write("{:20s}{:40s}OBSERVER / AGENCY\n".format(
            meta["observer"], meta["agency"]))

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        # 3008040             SEPT POLARX4        2.9.0               REC # / TYPE / VERS
        fid.write("{:20s}{:20s}{:20s}REC # / TYPE / VERS\n"
                  "".format(meta["receiver_number"], meta["receiver_type"],
                            meta["receiver_version"]))

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        # CR620012101         ASH701945C_M    SCIS                    ANT # / TYPE
        fid.write("{:20s}{:40s}ANT # / TYPE\n".format(meta["antenna_number"],
                                                      meta["antenna_type"]))

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        #   3275756.7623   321111.1395  5445046.6477                  APPROX POSITION XYZ
        fid.write(
            "{:>14.4f}{:>14.4f}{:>14.4f}{:18s}APPROX POSITION XYZ\n".format(
                pos_x, pos_y, pos_z, ""))

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        #         0.0000        0.0000        0.0000                  ANTENNA: DELTA H/E/N
        fid.write("{:>14.4f}{:>14.4f}{:>14.4f}{:18s}ANTENNA: DELTA H/E/N\n"
                  "".format(meta["antenna_height"], meta["antenna_east"],
                            meta["antenna_north"], ""))

        # Vehicle-mounted antennas carry an XYZ offset instead of H/E/N only
        if "ant_vehicle_x" in meta:
            fid.write("{:>14.4f}{:>14.4f}{:>14.4f}{:18s}ANTENNA: DELTA X/Y/Z\n"
                      "".format(meta["ant_vehicle_x"], meta["ant_vehicle_y"],
                                meta["ant_vehicle_z"], ""))

        # TODO: ANTENNA:PHASECENTER
        # TODO: ANTENNA:B.SIGHT XYZ
        # TODO: ANTENNA:ZERODIR AZI
        # TODO: ANTENNA:ZERODIR XYZ
        # TODO: CENTER OF MASS: XYZ

        # Observation types per GNSS, written 13 per line with continuation lines
        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        # G   26 C1C C1P L1C L1P D1C D1P S1C S1P C2P C2W C2S C2L C2X  SYS / # / OBS TYPES
        #        L2P L2W L2S L2L L2X D2P D2W D2S D2L D2X S2P S2W S2S  SYS / # / OBS TYPES
        # R   16 C1C C1P L1C L1P D1C D1P S1C S1P C2C C2P L2C L2P D2C  SYS / # / OBS TYPES
        #        D2P S2C S2P                                          SYS / # / OBS TYPES
        for sys in sorted(meta["obstypes"]):
            obstypes = meta["obstypes"][sys].copy()  # copy: the list is consumed below
            num_lines = int(len(obstypes) / 13) + 1
            for line in range(0, num_lines):
                num_obstypes = len(obstypes)
                # The count is only printed on the first line of each system
                num_obstypes_str = str(num_obstypes) if line == 0 else ""
                spaces = "  " if meta["version"].startswith("2") else " "
                if num_obstypes <= 13:
                    fid.write("{:1s}{:>5s} {:53s}SYS / # / OBS TYPES\n".format(
                        sys, num_obstypes_str, spaces.join(obstypes)))
                    if num_obstypes == 13:
                        break
                else:
                    fid.write("{:1s}{:>5s} {:53s}SYS / # / OBS TYPES\n".format(
                        sys, num_obstypes_str, spaces.join(obstypes[0:13])))
                    del obstypes[0:13]

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        # DBHZ                                                        SIGNAL STRENGTH UNIT
        if "signal_strength_unit" in meta:
            fid.write("{:60s}SIGNAL STRENGTH UNIT\n".format(
                meta["signal_strength_unit"]))

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        #      1.000                                                  INTERVAL
        if "interval" in meta:
            fid.write("{:>10.3f}{:50s}INTERVAL\n".format(sampling_rate, ""))

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        #   2016    03    01    00    00   00.0000000     GPS         TIME OF FIRST OBS
        if not meta["time_sys"] == "GPS":
            log.fatal("Time system '{}' is not implemented so far in Where.",
                      meta["time_sys"])
        d = dset.time.gps.datetime[0]
        fid.write(
            "{:>6d}{:>6d}{:>6d}{:>6d}{:>6d}{:>13.7f}{:>8s}{:9s}TIME OF FIRST OBS\n"
            "".format(d.year, d.month, d.day, d.hour, d.minute, d.second,
                      time_sys, ""))

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        #   2016    03    01    23    59   59.0000000     GPS         TIME OF LAST OBS
        if "time_last_obs" in meta:
            d = dset.time.gps.datetime[-1]
            fid.write(
                "{:>6d}{:>6d}{:>6d}{:>6d}{:>6d}{:>13.7f}{:>8s}{:9s}TIME OF LAST OBS\n"
                "".format(d.year, d.month, d.day, d.hour, d.minute, d.second,
                          time_sys, ""))

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        #      0                                                      RCV CLOCK OFFS APPL
        if "rcv_clk_offset_flag" in meta:
            fid.write("{:>6s}{:54s}RCV CLOCK OFFS APPL\n".format(
                meta["rcv_clk_offset_flag"], ""))

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        # G APPL_DCB          xyz.uvw.abc//pub/dcb_gps.dat            SYS / DCBS APPLIED
        if "dcbs_applied" in meta:
            for sys in sorted(meta["dcbs_applied"]):
                if sys in meta["obstypes"]:
                    fid.write("{:1s} {:17s} {:40s}SYS / DCBS APPLIED\n"
                              "".format(sys, meta["dcbs_applied"][sys]["prg"],
                                        meta["dcbs_applied"][sys]["url"]))

        if "pcvs_applied" in meta:
            for sys in sorted(meta["pcvs_applied"]):
                if sys in meta["obstypes"]:
                    fid.write("{:1s} {:17s} {:40s}SYS / PCVS APPLIED\n"
                              "".format(sys, meta["pcvs_applied"][sys]["prg"],
                                        meta["pcvs_applied"][sys]["url"]))
        # TODO: SYS / SCALE FACTOR

        # Phase-shift corrections, with satellite lists wrapped 10 per line
        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        # G L1C  0.00000  12 G01 G02 G03 G04 G05 G06 G07 G08 G09 G10  SYS / PHASE SHIFT
        #                    G11 G12                                  SYS / PHASE SHIFT
        # G L1W  0.00000                                              SYS / PHASE SHIFT
        if "phase_shift" in meta:
            num_sat_limit = 10
            for sys, obstypes in sorted(meta["phase_shift"].items()):

                if sys not in meta["obstypes"]:
                    continue

                if not obstypes:
                    # Note: Phase corrections are unknown.
                    fid.write("{:1s}{:59s}SYS / PHASE SHIFT\n".format(sys, ""))
                    continue

                for type_ in obstypes:
                    if type_ in meta["obstypes"][sys]:
                        # TODO: Remove unused satellites
                        sats = meta["phase_shift"][sys][type_]["sat"].copy()
                        num_lines = int(len(sats) / num_sat_limit) + 1
                        for line in range(0, num_lines):
                            num_sats = len(sats)
                            if line == 0:
                                # Header part (sys, type, correction, count) only on first line
                                num_sats_str = str(
                                    num_sats) if num_sats > 0 else ""
                                phase_shift_str = "{:1s} {:>3s} {:>8.5f}{:>4s}" "".format(
                                    sys, type_,
                                    float(meta["phase_shift"][sys][type_]
                                          ["corr"]), num_sats_str)
                            else:
                                phase_shift_str = ""

                            if num_sats <= num_sat_limit:
                                fid.write(
                                    "{:18s} {:41s}SYS / PHASE SHIFT\n".format(
                                        phase_shift_str, " ".join(sats)))
                            else:
                                fid.write(
                                    "{:18s} {:41s}SYS / PHASE SHIFT\n".format(
                                        phase_shift_str,
                                        " ".join(sats[0:num_sat_limit])))
                                del sats[0:num_sat_limit]

        # TODO: WAVELENGTH FACT L1/2  -> given only for RINEX 2.11, but could be of interest in RINEX file

        # GLONASS-specific header records (slot/frequency numbers and code-phase biases)
        if "R" in meta["obstypes"]:
            # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
            #  22 R01  1 R02 -4 R03  5 R04  6 R05  1 R06 -4 R07  5 R08  6 GLONASS SLOT / FRQ #
            #     R09 -6 R10 -7 R11  0 R13 -2 R14 -7 R15  0 R17  4 R18 -3 GLONASS SLOT / FRQ #
            #     R19  3 R20  2 R21  4 R22 -3 R23  3 R24  2               GLONASS SLOT / FRQ #
            # TODO: Remove unused satellites from 'GLONASS SLOT / FRQ #'
            if "glonass_slot" in meta:
                num_sat = len(meta["glonass_slot"])
                glonass_slots = dict(meta["glonass_slot"])  # copy: entries are deleted as written
                num_lines = int(num_sat / 8) + 1
                for idx in range(0, num_lines):
                    line = "{:>3d}".format(num_sat) if idx == 0 else "   "
                    for num, (slot, bias) in enumerate(
                            sorted(glonass_slots.items())):
                        if num == 8:
                            break
                        line = line + " {:3s} {:>2d}".format(slot, bias)
                        del glonass_slots[slot]
                    fid.write(line.ljust(60) + "GLONASS SLOT / FRQ #\n")

            # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
            #  C1C  -10.000 C1P  -10.123 C2C  -10.432 C2P  -10.634        GLONASS COD/PHS/BIS
            line = ""
            if "glonass_bias" in meta:
                for type_, bias in sorted(meta["glonass_bias"].items()):
                    if type_ in meta["obstypes"]["R"]:
                        line = line + " {:3s} {:8.3f}".format(
                            type_, float(bias))
            fid.write(line.ljust(60) + "GLONASS COD/PHS/BIS\n")

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        #     16    17  1851     3                                    LEAP SECONDS
        #
        # NOTE: Entries 'future_past_leap_seconds', 'week', 'week_day' and 'time_sys' are not given in RINEX version
        #       2.11.
        if "leap_seconds" in meta:
            if meta["version"].startswith("2"):
                fid.write("{:>6d}{:54s}LEAP SECONDS\n".format(
                    int(meta["leap_seconds"]["leap_seconds"]), ""))
            else:
                fid.write("{:>6d}{:>6s}{:>6s}{:>6s}{:3s}{:33s}LEAP SECONDS\n"
                          "".format(
                              int(meta["leap_seconds"]["leap_seconds"]),
                              meta["leap_seconds"]["future_past_leap_seconds"],
                              meta["leap_seconds"]["week"],
                              meta["leap_seconds"]["week_day"],
                              meta["leap_seconds"]["time_sys"],
                              "",
                          ))

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        #     71                                                      # OF SATELLITES
        fid.write("{:>6d}{:54s}# OF SATELLITES\n".format(num_satellites, ""))
        # TODO: PRN / # OF OBS

        # ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
        #                                                             END OF HEADER
        fid.write("{:60s}END OF HEADER\n".format(""))

        # ================================
        #  Write RINEX observation data
        # ================================
        #
        # Observations are buffered per epoch in obs_epoch_cache and flushed via
        # _write_epoch whenever the epoch changes (and once more after the loop).
        #
        # NOTE(review): if dset.num_obs == 0, `idx` and `idx_epoch_start` are
        # never bound and the final _write_epoch call raises NameError —
        # presumably empty datasets never reach this writer; confirm.
        epoch_prev = dset.time.gps.datetime[0]
        first_obs_in_epoch = True
        obs_epoch_cache = dict()

        # Loop over all observations
        for idx in range(0, dset.num_obs):

            # Write epoch (reading of observations from epoch 'd_prev' in 'obs_epoch_cache' is finished)
            epoch = dset.time.gps.datetime[idx]
            if epoch_prev != epoch:
                num_sat = idx - idx_epoch_start  # TODO: idx_epoch_start is not defined
                _write_epoch(dset, fid, obs_epoch_cache, idx, num_sat,
                             epoch_prev)
                first_obs_in_epoch = True

            if first_obs_in_epoch is True:
                obs_epoch_cache = dict()
                idx_epoch_start = idx
                first_obs_in_epoch = False

            # Save observations for a given epoch in obs_epoch_cache
            #
            # NOTE: The caching is mainly necessary to determine the number of satellites for an epoch and to be
            #       flexible in what kind of order the observation types should be written. The order of the
            #       observation types for a given GNSS is defined via dset.meta['obstypes'] variable.
            if dset.satellite[idx] in obs_epoch_cache:
                log.fatal("Satellite {} occurs twice in epoch {}.",
                          dset.satellite[idx], dset.time.gps.datetime[idx])

            for type_ in dset.meta["obstypes"][dset.system[idx]]:
                # Loss-of-lock indicator and signal strength: blank when zero
                lli = " " if dset[type_ + "_lli"][idx] == 0.0 else str(
                    int(dset[type_ + "_lli"][idx]))
                snr = " " if dset[type_ + "_snr"][idx] == 0.0 else str(
                    int(dset[type_ + "_snr"][idx]))
                obs_epoch_cache.setdefault(dset.satellite[idx],
                                           list()).append({
                                               "obs":
                                               dset[type_][idx],
                                               "lli":
                                               lli,
                                               "snr":
                                               snr
                                           })
            epoch_prev = epoch

        # Write last epoch
        num_sat = (idx + 1) - idx_epoch_start
        _write_epoch(dset, fid, obs_epoch_cache, idx, num_sat, epoch_prev)