Example #1
0
def gnss_comparison(dset: "Dataset") -> None:
    """Compare GNSS datasets and write daily/monthly comparison files

    Args:
        dset:  Dictionary with station name as keys and the belonging Dataset as value
    """
    # Use the first station's Dataset as template for file variables
    dset_first = next(iter(dset.values()))
    dset_first.vars["solution"] = config.tech.gnss_comparison_report.solution.str.lower()

    # Get dataframes for writing
    _, df_day, df_month, _, _ = _generate_dataframes(dset)

    # Prepare dataframes for writing
    df_day.index = df_day.index.strftime('%Y-%m-%d')
    df_day.index.name = "date"
    df_day = df_day.reset_index()
    df_month = df_month.reset_index()

    # One output file per solution type (daily/monthly, full and summary)
    output_defs = {
        "day": df_day,
        "month": df_month,
        "day_summary": _generate_dataframe_summary(df_day, index="station"),
        "month_summary": _generate_dataframe_summary(df_month, index="station"),
    }

    for type_, output_array in output_defs.items():
        is_summary = "summary" in type_

        # Derive per-type file variables and make sure the target directory exists
        file_vars = dset_first.vars.copy()
        file_vars["solution"] = f"{file_vars['solution']}_{type_}"
        file_path = config.files.path("output_gnss_vel_comparison", file_vars=file_vars)
        file_path.parent.mkdir(parents=True, exist_ok=True)

        log.info(f"Write '{type_}' comparison file {file_path}.")

        if is_summary:
            fields = FIELDS_SUM
            summary = "Summary of GNSS site velocity comparison results"
        else:
            fields = FIELDS
            summary = "GNSS site velocity comparison results"

        # Get header
        header = get_header(
            fields,
            pgm_version=f"where {where.__version__}",
            run_by=util.get_user_info().get("inst_abbreviation", ""),
            summary=summary,
        )

        # Write to disk
        np.savetxt(
            file_path,
            output_array[[f.name for f in fields]].to_numpy(),
            fmt=tuple(f.format for f in fields),
            header=header,
            delimiter="",
            encoding="utf8",
        )
Example #2
0
def gnss_residual(dset: "Dataset") -> None:
    """Write GNSS post-fit residual results

    Args:
        dset:  A dataset containing the data.
    """
    file_path = config.files.path("output_residual", file_vars=dset.vars)

    # The writer needs a human readable date column
    if "date" not in dset.fields:
        date_strings = [d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime]
        dset.add_text("date", val=date_strings)

    # Collect field values column by column and assemble the structured output array
    columns = [get_field(dset, f.field, f.attrs, f.unit) for f in FIELDS]
    output_array = np.array(list(zip(*columns)), dtype=[(f.name, f.dtype) for f in FIELDS])

    # Write to disk
    header = get_header(
        FIELDS,
        pgm_version=f"where {where.__version__}",
        run_by=util.get_user_info().get("inst_abbreviation", ""),
        summary="GNSS post-fit residual results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in FIELDS),
        header="\n".join(header),
        delimiter="",
        encoding="utf8",
    )
Example #3
0
def calculate_solution(dset: "Dataset") -> None:
    """Write calculate solution results


    Args:
        dset:  A dataset containing the data.
    """
    file_path = config.files.path(
        "output_calculate_solution", file_vars={**dset.vars, **dset.analysis}
    )

    # The pre-fit residual format and unit depend on the pipeline in use
    is_velocity = dset.vars["pipeline"] == "gnss_vel"
    fields_def = list(FIELDS)
    fields_def.append(
        WriterField(
            "residual_prefit",
            "residual_prefit",
            (),
            float,
            "%15.4f" if is_velocity else "%15.3f",
            15,
            "RESIDUAL",
            "m/s" if is_velocity else "meter",
            "Pre-fit residual",
        )
    )

    # The writer needs a human readable date column
    if "date" not in dset.fields:
        dset.add_text(
            "date",
            val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime],
            write_level="detail",
        )

    # Keep only the fields that actually exist in the given Dataset
    fields = get_existing_fields(dset, fields_def)

    # Collect field values column by column and assemble the structured output array
    columns = [get_field(dset, f.field, f.attrs, f.unit) for f in fields]
    output_array = np.array(list(zip(*columns)), dtype=[(f.name, f.dtype) for f in fields])

    # Write to disk
    header = get_header(
        fields,
        pgm_version=f"where {where.__version__}",
        run_by=util.get_user_info().get("inst_abbreviation", ""),
        summary="Calculate solutions results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in fields),
        header=header,
        delimiter="",
        encoding="utf8",
    )
Example #4
0
def rinex_nav_writer(dset: "Dataset") -> None:
    """Write selected RINEX navigation file observations

    Adds additional text/float fields used by the writer (date, GPS-week
    representations of time, transmission time and time of ephemeris, and the
    transmission-time-minus-TOE difference) and writes one line per navigation
    record, ordered by satellite.

    Args:
        dset:   A dataset containing the data.
    """
    # Note: needless f-string prefix removed (literal has no placeholders)
    file_path = config.files.path("output_rinex_nav", file_vars={**dset.vars, **dset.analysis})

    def _gpsweek(epochs) -> list:
        # Format epochs as '<week><day>:<seconds-of-day>' GPS week strings
        return [
            f"{t.gps.gps_ws.week:04.0f}{t.gps.gps_ws.day:1.0f}:{t.gps.gps_ws.seconds:06.0f}"
            for t in epochs
        ]

    # Add additional fields used by the writer
    dset.add_text("date", val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime])
    dset.add_text("time_gpsweek", val=_gpsweek(dset.time), write_level="detail")
    dset.add_text("trans_time_gpsweek", val=_gpsweek(dset.transmission_time), write_level="detail")
    dset.add_text("toe_gpsweek", val=_gpsweek(dset.toe), write_level="detail")

    # Difference between transmission time and time of ephemeris in seconds
    dset.add_float(
        "diff_trans_toe",
        val=(dset.transmission_time.mjd - dset.toe.mjd) * Unit.day2second,
        write_level="detail",
    )

    # Select fields available in Dataset (e.g. DVS and SHS fields are only given for Galileo)
    fields = get_existing_fields(dset, FIELDS)

    # List epochs ordered by satellites
    idx = np.concatenate([np.where(dset.filter(satellite=s))[0] for s in dset.unique("satellite")])

    # Put together fields in an array as specified by the fields-tuple
    output_list = list(zip(*(get_field(dset, f.field, f.attrs, f.unit) for f in fields)))
    output_array = np.array(output_list, dtype=[(f.name, f.dtype) for f in fields])[idx]

    # Write to disk
    header = get_header(
        fields,
        pgm_version=f"where {where.__version__}",
        run_by=util.get_user_info().get("inst_abbreviation", ""),
        summary="RINEX navigation file analysis results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in fields),
        header=header,
        delimiter="",
        encoding="utf8",
    )
def gnss_position(dset: "Dataset") -> None:
    """Write GNSS position results


    Args:
        dset:  A dataset containing the data.
    """
    file_path = config.files.path("output_dilution_of_precision", file_vars=dset.vars)

    # Ensure the fields needed by the writer exist on the Dataset
    if "date" not in dset.fields:
        dset.add_text(
            "date",
            val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime],
            unit="YYYY/MM/DD hh:mm:ss",
        )

    if "pdop" not in dset.fields:
        gnss_dop(dset)

    if "num_satellite_used" not in dset.fields:
        dset.add_float(
            "num_satellite_used",
            val=gnss.get_number_of_satellites(dset.system, dset.satellite, dset.time),
        )

    # One output row per epoch: take the first observation of each epoch
    output_list = []
    for epoch in dset.unique("time"):
        idx = dset.filter(time=epoch)
        row = tuple(get_field(dset, f.field, f.attrs, f.unit)[idx][0] for f in FIELDS)
        output_list.append(row)

    output_array = np.array(output_list, dtype=[(f.name, f.dtype) for f in FIELDS])

    # Write to disk
    header = get_header(
        FIELDS,
        pgm_version=f"where {where.__version__}",
        run_by=util.get_user_info().get("inst_abbreviation", ""),
        summary="GNSS dilution of precision results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in FIELDS),
        header="\n".join(header),
        delimiter="",
        encoding="utf8",
    )
Example #6
0
def gnss_residual(dset: "Dataset") -> None:
    """Write GNSS post-fit residual results

    Args:
        dset:  A dataset containing the data.
    """
    file_path = config.files.path("output_residual", file_vars=dset.vars)

    # The residual format and unit depend on the pipeline in use
    is_velocity = dset.vars["pipeline"] == "gnss_vel"
    fields_def = list(FIELDS)
    fields_def.append(
        WriterField(
            "residual",
            "residual",
            (),
            float,
            "%15.4f" if is_velocity else "%15.3f",
            15,
            "RESIDUAL",
            "meter/second" if is_velocity else "meter",
            "Post-fit residual",
        )
    )

    # The writer needs a human readable date column
    if "date" not in dset.fields:
        dset.add_text("date", val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime])

    # Collect field values column by column and assemble the structured output array
    columns = [get_field(dset, f.field, f.attrs, f.unit) for f in fields_def]
    output_array = np.array(list(zip(*columns)), dtype=[(f.name, f.dtype) for f in fields_def])

    # Write to disk
    header = get_header(
        fields_def,
        pgm_version=f"where {where.__version__}",
        run_by=util.get_user_info().get("inst_abbreviation", ""),
        summary="GNSS post-fit residual results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in fields_def),
        header=header,
        delimiter="",
        encoding="utf8",
    )
Example #7
0
def estimate_solution(dset: "Dataset") -> None:
    """Write estimate solution results

    Args:
        dset:  A dataset containing the data.
    """
    file_path = config.files.path("output_estimate_solution", file_vars=dset.vars)

    # The writer needs a human readable date column
    if "date" not in dset.fields:
        dset.add_text("date", val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime])

    # Epochwise estimation or over whole time period
    if config.tech.estimate_epochwise.bool:
        # Stack one sub-array of estimated parameters per epoch
        epoch_arrays = [
            _get_epoch(dset, dset.time.gps.mjd == epoch)
            for epoch in sorted(set(dset.time.gps.mjd))
        ]
        output_array = np.concatenate(epoch_arrays, axis=0) if epoch_arrays else np.array([])
    else:
        # Get solution for first observation
        idx = np.squeeze(np.array(np.nonzero(dset.time.gps.mjd)) == 0)  # first observation -> TODO: Better solution?
        output_array = _get_epoch(dset, idx)

    # Write to disk
    header = get_header(
        FIELDS,
        pgm_version=f"where {where.__version__}",
        run_by=util.get_user_info().get("inst_abbreviation", ""),
        summary="Estimate solutions results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in FIELDS),
        header="\n".join(header),
        delimiter="",
        encoding="utf8",
    )
def gnss_receiver_velocity_summary(dset: "Dataset") -> None:
    """Write Doppler estimated GNSS receiver velocity results to a text file


    Args:
        dset:  A dataset containing the data.
    """
    file_path = config.files.path("output_receiver_velocity_summary", file_vars=dset.vars)

    # The writer needs a human readable date column
    if "date" not in dset.fields:
        dset.add_text("date", val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime])

    # Epochwise estimation or over whole time period
    if config.tech.estimate_epochwise.bool:
        output_list = []
        for epoch in dset.unique("time"):
            idx = dset.filter(time=epoch)
            # Keep the first observation of the current epoch
            output_list.append(tuple(get_field(dset, f.field, f.attrs, f.unit)[idx][0] for f in FIELDS))
    else:
        # Velocity solution for first observation only
        idx = np.squeeze(np.array(np.nonzero(dset.time.gps.mjd)) == 0)  # first observation -> TODO: Better solution?
        output_list = [tuple(get_field(dset, f.field, f.attrs, f.unit)[idx][0] for f in FIELDS)]

    # Assemble the structured output array
    output_array = np.array(output_list, dtype=[(f.name, f.dtype) for f in FIELDS])

    # Write to disk
    header = get_header(
        FIELDS,
        pgm_version=f"where {where.__version__}",
        run_by=util.get_user_info().get("inst_abbreviation", ""),
        summary="GNSS station velocity results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in FIELDS),
        header=header,
        delimiter="",
        encoding="utf8",
    )
Example #9
0
def _get_header(dset: "Dataset") -> str:
    """Get header

    Args:
        dset:   A dataset containing the data.

    Returns:
        Header lines
    """
    # SISRE configuration
    parts = ["\nSISRE ANALYSIS CONFIGURATION\n\n"]
    parts.append(str(config.tech.as_str(key_width=25, width=70, only_used=True)) + "\n\n\n")
    parts.append(_get_paths())

    # Information about used biases and phase center offsets (PCOs)
    parts.append(f"{'SAT':^4s}{'BIAS_BRDC':>10s}{'BIAS_PREC':>10s}{'PCO_BRDC':^26s}{'PCO_PREC':^26s}\n")
    parts.append(f"{'':^4s}{'[m]':^10s}{'[m]':^10s}{'[m]':^26s}{'[m]':^26s}\n")
    row_format = "{:>3s}{:>10.4f}{:>10.4f}{:>10.4f}{:>8.4f}{:>8.4f}{:>10.4f}{:>8.4f}{:>8.4f}\n"
    for sat in dset.unique("satellite"):
        pco_brdc = dset.meta["pco_sat_brdc"][sat]
        pco_precise = dset.meta["pco_sat_precise"][sat]
        parts.append(
            row_format.format(
                sat,
                dset.meta["bias_brdc"][sat],
                dset.meta["bias_precise"][sat],
                pco_brdc[0],
                pco_brdc[1],
                pco_brdc[2],
                pco_precise[0],
                pco_precise[1],
                pco_precise[2],
            )
        )

    return get_header(
        FIELDS,
        pgm_version=f"where {where.__version__}",
        run_by=util.get_user_info().get("inst_abbreviation", ""),
        summary="SISRE analysis results",
        add_description="".join(parts) + "\n\n",
    )
Example #10
0
def gnss_velocity(dset: "Dataset") -> None:
    """Write GNSS velocity results

    Derives topocentric (ENU) velocities, scalar horizontal/vertical/3D
    velocities and velocity standard deviations, adds them to the dataset and
    writes one line per epoch (or a single line when the estimation is done
    over the whole time period) to a text file.

    Args:
        dset:  A dataset containing the data.
    """
    file_path = config.files.path("output_velocity", file_vars=dset.vars)

    # Add date field to dataset
    if "date" not in dset.fields:
        dset.add_text("date", val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime])

    # Add velocity in topocentric coordinates
    lat, lon, height = dset.site_pos.pos.llh.T
    vel_enu = np.squeeze(rotation.trs2enu(lat, lon) @ dset.site_vel[:, :, None])
    dset.add_float("site_vel_east", val=vel_enu[:, 0], unit="meter/second")
    dset.add_float("site_vel_north", val=vel_enu[:, 1], unit="meter/second")
    dset.add_float("site_vel_up", val=vel_enu[:, 2], unit="meter/second")

    # Add horizontal velocity (HV), vertical velocity (VV) and 3D velocity
    dset.add_float("site_vel_h", val=np.sqrt(vel_enu[:, 0]**2 + vel_enu[:, 1]**2), unit="meter/second")
    dset.add_float("site_vel_v", val=np.absolute(vel_enu[:, 2]), unit="meter/second")
    dset.add_float(
        "site_vel_3d",
        val=np.sqrt(dset.site_vel[:, 0]**2 + dset.site_vel[:, 1]**2 + dset.site_vel[:, 2]**2),
        unit="meter/second",
    )

    # Add site velocity and standard deviation of site velocity coordinates
    dset.add_float("site_vel_x", val=dset.site_vel[:, 0], unit="meter/second")
    dset.add_float("site_vel_y", val=dset.site_vel[:, 1], unit="meter/second")
    dset.add_float("site_vel_z", val=dset.site_vel[:, 2], unit="meter/second")
    dset.add_float("site_vel_sigma_x", val=np.sqrt(dset.estimate_cov_site_vel_xx), unit="meter/second")
    dset.add_float("site_vel_sigma_y", val=np.sqrt(dset.estimate_cov_site_vel_yy), unit="meter/second")
    dset.add_float("site_vel_sigma_z", val=np.sqrt(dset.estimate_cov_site_vel_zz), unit="meter/second")

    # Put together fields in an array as specified by the 'dtype' tuple list
    if config.tech.estimate_epochwise.bool:  # Epochwise estimation or over whole time period

        output_list = []
        for epoch in dset.unique("time"):
            idx = dset.filter(time=epoch)

            # Append current epoch velocity solution to final output solution
            output_list.append(tuple(get_field(dset, f.field, f.attrs, f.unit)[idx][0] for f in FIELDS))

    else:
        # Get velocity solution for first observation
        idx = np.squeeze(np.array(np.nonzero(dset.time.gps.mjd)) == 0)  # first observation -> TODO: Better solution?
        # BUGFIX: 'idx' was erroneously passed as the second positional argument
        # to get_field(); it only selects rows of the returned field values.
        output_list = [tuple(get_field(dset, f.field, f.attrs, f.unit)[idx][0] for f in FIELDS)]

    output_array = np.array(output_list, dtype=[(f.name, f.dtype) for f in FIELDS])

    # Write to disk
    header = get_header(
        FIELDS,
        pgm_version=f"where {where.__version__}",
        run_by=util.get_user_info().get("inst_abbreviation", ""),
        summary="GNSS velocity results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in FIELDS),
        header=header,
        delimiter="",
        encoding="utf8",
    )
Example #11
0
def estimate_solution(dset: "Dataset") -> None:
    """Write estimate solution results

    Extends the field definition with a pipeline dependent parameter-name
    column ('gnss' or 'gnss_vel') and writes one line per estimated parameter
    and epoch.

    Args:
        dset:  A dataset containing the data.
    """
    file_path = config.files.path("output_estimate_solution",
                                  file_vars={
                                      **dset.vars,
                                      **dset.analysis
                                  })

    # Add date field to dataset
    if "date" not in dset.fields:
        dset.add_text(
            "date",
            val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime],
            write_level="detail")

    # Add states to WriterField depending on used pipeline
    fields_def = list(FIELDS)

    if dset.vars["pipeline"] == "gnss":

        fields_def.append(
            WriterField(
                "param_name",
                "param_name",
                (),
                object,
                "%-20s",
                20,
                "PARAM_NAME",
                "",
                "Parameter name: \n"
                f"""
{'': >38}gnss_rcv_clock   - GNSS receiver clock
{'': >38}gnss_site_pos-x  - X-coordinate of site position
{'': >38}gnss_site_pos-y  - Y-coordinate of site position
{'': >38}gnss_site_pos-z  - Z-coordinate of site position
""",
            ))

    elif dset.vars["pipeline"] == "gnss_vel":

        fields_def.append(
            WriterField(
                "param_name",
                "param_name",
                (),
                object,
                "%-20s",
                20,
                "PARAM_NAME",
                "",
                "Parameter name: \n"
                f"""
{'': >38}gnss_rcv_clock_drift   - GNSS receiver clock drift
{'': >38}gnss_site_vel-x        - X-coordinate of site velocity
{'': >38}gnss_site_vel-y        - Y-coordinate of site velocity
{'': >38}gnss_site_vel-z        - Z-coordinate of site velocity
""",
            ))

    else:
        log.fatal(
            "Estimate solution writer is implemented only for 'gnss' and 'gnss_vel' pipeline."
        )

    # Epochwise estimation or over whole time period
    if config.tech.estimate_epochwise.bool:

        output_array = np.array([])
        for epoch in sorted(set(dset.time.gps.mjd)):
            idx = dset.time.gps.mjd == epoch

            # Append current epoch solution to final output solution for each estimated parameter
            epoch_array = _get_epoch(dset, idx, fields_def)
            output_array = np.concatenate(
                (output_array,
                 epoch_array), axis=0) if output_array.size else epoch_array
    else:
        # Get solution for first observation
        idx = np.squeeze(np.array(np.nonzero(dset.time.gps.mjd)) ==
                         0)  # first observation -> TODO: Better solution?
        output_array = _get_epoch(dset, idx, fields_def)

    # Write to disk
    # BUGFIX: the header has to be generated from the extended 'fields_def'
    # (which includes the PARAM_NAME column) so that it matches the columns
    # written via 'fmt' below.
    header = get_header(
        fields_def,
        pgm_version=f"where {where.__version__}",
        run_by=util.get_user_info().get("inst_abbreviation", ""),
        summary="Estimate solutions results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in fields_def),
        header=header,
        delimiter="",
        encoding="utf8",
    )
Example #12
0
def gnss_position(dset: "Dataset") -> None:
    """Write GNSS position results

    Derives ENU offsets against the reference position from the dataset meta
    information, horizontal/vertical position errors (HPE/VPE) and position
    standard deviations, adds them to the dataset and writes one line per
    epoch (or a single line when estimation is done over the whole period).

    Args:
        dset:  A dataset containing the data.
    """
    file_path = config.files.path("output_position", file_vars=dset.vars)

    # Add date field to dataset
    if "date" not in dset.fields:
        dset.add_text("date", val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime])

    # Add ENU position to dataset
    ref_pos = position.Position(
        val=np.array([dset.meta["pos_x"], dset.meta["pos_y"], dset.meta["pos_z"]]), system="trs"
    )
    enu = (dset.site_pos.trs.pos - ref_pos).enu
    dset.add_float("site_pos_vs_ref_east", val=enu.east, unit="meter")
    dset.add_float("site_pos_vs_ref_north", val=enu.north, unit="meter")
    dset.add_float("site_pos_vs_ref_up", val=enu.up, unit="meter")

    # Add HPE and VPE to dataset
    dset.add_float("hpe", val=np.sqrt(enu.east ** 2 + enu.north ** 2), unit="meter")
    dset.add_float("vpe", val=np.absolute(enu.up), unit="meter")

    # Add standard deviation of site position coordinates
    dset.add_float("site_pos_sigma_x", val=np.sqrt(dset.estimate_cov_site_pos_xx), unit="meter")
    dset.add_float("site_pos_sigma_y", val=np.sqrt(dset.estimate_cov_site_pos_yy), unit="meter")
    dset.add_float("site_pos_sigma_z", val=np.sqrt(dset.estimate_cov_site_pos_zz), unit="meter")

    # Put together fields in an array as specified by the 'dtype' tuple list
    if config.tech.estimate_epochwise.bool:  # Epochwise estimation or over whole time period

        output_list = []
        for epoch in dset.unique("time"):
            idx = dset.filter(time=epoch)

            # Append current epoch position solution to final output solution
            output_list.append(tuple(get_field(dset, f.field, f.attrs, f.unit)[idx][0] for f in FIELDS))

    else:
        # Get position solution for first observation
        idx = np.squeeze(np.array(np.nonzero(dset.time.gps.mjd)) == 0)  # first observation -> TODO: Better solution?
        # BUGFIX: 'idx' was erroneously passed as the second positional argument
        # to get_field(); it only selects rows of the returned field values.
        output_list = [tuple(get_field(dset, f.field, f.attrs, f.unit)[idx][0] for f in FIELDS)]

    output_array = np.array(output_list, dtype=[(f.name, f.dtype) for f in FIELDS])

    # Write to disk
    header = get_header(
        FIELDS,
        pgm_version=f"where {where.__version__}",
        run_by=util.get_user_info().get("inst_abbreviation", ""),
        summary="GNSS position results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in FIELDS),
        header="\n".join(header),
        delimiter="",
        encoding="utf8",
    )
Example #13
0
def gnss_satellite_position(dset: "Dataset") -> None:
    """Write GNSS satellite position results


    Args:
        dset:  A dataset containing the data.
    """
    file_path = config.files.path(
        "output_satellite_position", file_vars={**dset.vars, **dset.analysis}
    )

    # The writer needs a human readable date column
    if "date" not in dset.fields:
        dset.add_text("date", val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime])

    # Additional fields only available when broadcast ephemeris are used
    if config.tech.apriori_orbit.str == "broadcast":
        dset.add_text(
            "trans_time_gpsweek",
            val=[
                f"{t.gps_ws.week:04.0f}{t.gps_ws.day:1.0f}:{t.gps_ws.seconds:06.0f}"
                for t in dset.used_transmission_time
            ],
        )
        dset.add_text(
            "toe_gpsweek",
            val=[
                f"{t.gps_ws.week:04.0f}{t.gps_ws.day:1.0f}:{t.gps_ws.seconds:06.0f}"
                for t in dset.used_toe
            ],
        )
        # Difference between transmission time and time of ephemeris in seconds
        dset.add_float(
            "diff_trans_toe",
            val=(dset.used_transmission_time.gps.mjd - dset.used_toe.gps.mjd) * Unit.day2second,
            unit="second",
        )
        # Age of the used ephemeris relative to the observation epoch in seconds
        dset.add_float(
            "age_of_ephemeris",
            val=(dset.time.gps.mjd - dset.used_toe.gps.mjd) * Unit.day2second,
            unit="second",
        )

    # Keep only the fields that actually exist in the given Dataset
    fields = get_existing_fields(dset, FIELDS)

    # Collect field values column by column and assemble the structured output array
    columns = [get_field(dset, f.field, f.attrs, f.unit) for f in fields]
    output_array = np.array(list(zip(*columns)), dtype=[(f.name, f.dtype) for f in fields])

    # Write to disk
    header = get_header(
        fields,
        pgm_version=f"where {where.__version__}",
        run_by=util.get_user_info().get("inst_abbreviation", ""),
        summary="GNSS satellite position results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in fields),
        header=header,
        delimiter="",
        encoding="utf8",
    )