def gnss_receiver_velocity_summary(dset: "Dataset") -> None:
    """Write Doppler estimated GNSS receiver velocity results to a text file

    Args:
        dset:  A dataset containing the data.
    """
    # File name generation
    file_path = config.files.path("output_receiver_velocity_summary", file_vars=dset.vars)

    # Ensure a 'date' text field exists before writing
    if "date" not in dset.fields:
        dset.add_text("date", val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime])

    # Collect one solution row per epoch (epochwise estimation) or a single
    # row for the first observation (estimation over the whole time period)
    if config.tech.estimate_epochwise.bool:
        rows = []
        for epoch in dset.unique("time"):
            epoch_idx = dset.filter(time=epoch)
            rows.append(tuple(get_field(dset, f.field, f.attrs, f.unit)[epoch_idx][0] for f in FIELDS))
    else:
        # Select the first observation -> TODO: Better solution?
        first_idx = np.squeeze(np.array(np.nonzero(dset.time.gps.mjd)) == 0)
        rows = [tuple(get_field(dset, f.field, f.attrs, f.unit)[first_idx][0] for f in FIELDS)]

    # Structured array with columns as specified by the 'dtype' tuple list
    output_array = np.array(rows, dtype=[(f.name, f.dtype) for f in FIELDS])

    # Write to disk
    user_info = util.get_user_info()
    header = get_header(
        FIELDS,
        pgm_version=f"where {where.__version__}",
        run_by=user_info["inst_abbreviation"] if "inst_abbreviation" in user_info else "",
        summary="GNSS station velocity results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in FIELDS),
        header=header,
        delimiter="",
        encoding="utf8",
    )
示例#2
0
def gnss_residual(dset: "Dataset") -> None:
    """Write GNSS post-fit residual results

    Args:
        dset:  A dataset containing the data.
    """
    file_path = config.files.path("output_residual", file_vars=dset.vars)

    # Add date field to dataset
    if "date" not in dset.fields:
        dset.add_text("date", val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime])

    # Put together fields in an array as specified by the 'dtype' tuple list
    output_list = list(zip(*(get_field(dset, f.field, f.attrs, f.unit) for f in FIELDS)))
    output_array = np.array(output_list, dtype=[(f.name, f.dtype) for f in FIELDS])

    # Write to disk
    header = get_header(
        FIELDS,
        pgm_version=f"where {where.__version__}",
        run_by=util.get_user_info()["inst_abbreviation"] if "inst_abbreviation" in util.get_user_info() else "",
        summary="GNSS post-fit residual results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in FIELDS),
        # BUGFIX: get_header() returns a single string (passed directly as
        # header= by the other writers in this module); "\n".join(header)
        # would insert a newline between every single character of it.
        header=header,
        delimiter="",
        encoding="utf8",
    )
示例#3
0
def calculate_solution(dset: "Dataset") -> None:
    """Write calculate solution results

    Args:
        dset:  A dataset containing the data.
    """
    file_path = config.files.path(
        "output_calculate_solution", file_vars={**dset.vars, **dset.analysis}
    )

    # Extend the field definition with the pre-fit residual, whose format and
    # unit depend on the pipeline in use (velocity vs. position solutions)
    is_vel = dset.vars["pipeline"] == "gnss_vel"
    fields_def = list(FIELDS)
    fields_def.append(
        WriterField(
            "residual_prefit",
            "residual_prefit",
            (),
            float,
            "%15.4f" if is_vel else "%15.3f",
            15,
            "RESIDUAL",
            "m/s" if is_vel else "meter",
            "Pre-fit residual",
        )
    )

    # Ensure a 'date' text field exists before writing
    if "date" not in dset.fields:
        dset.add_text(
            "date",
            val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime],
            write_level="detail",
        )

    # Keep only the fields actually present in the dataset
    fields = get_existing_fields(dset, fields_def)

    # Assemble the structured output array column by column
    columns = [get_field(dset, f.field, f.attrs, f.unit) for f in fields]
    output_array = np.array(list(zip(*columns)), dtype=[(f.name, f.dtype) for f in fields])

    # Write to disk
    user_info = util.get_user_info()
    header = get_header(
        fields,
        pgm_version=f"where {where.__version__}",
        run_by=user_info.get("inst_abbreviation", ""),
        summary="Calculate solutions results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in fields),
        header=header,
        delimiter="",
        encoding="utf8",
    )
示例#4
0
def rinex_nav_writer(dset: "Dataset") -> None:
    """Write selected RINEX navigation file observations

    Args:
        dset:   A dataset containing the data.
    """
    # FIX: plain string instead of an f-string without placeholders (F541)
    file_path = config.files.path("output_rinex_nav", file_vars={**dset.vars, **dset.analysis})

    # Add additional fields used by the writer.
    # GPS week strings are formatted as WWWWD:SSSSSS (week, day of week, seconds of week).
    dset.add_text("date", val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime])
    dset.add_text(
        "time_gpsweek",
        val=[f"{t.gps.gps_ws.week:04.0f}{t.gps.gps_ws.day:1.0f}:{t.gps.gps_ws.seconds:06.0f}" for t in dset.time],
        write_level="detail",
    )
    dset.add_text(
        "trans_time_gpsweek",
        val=[
            f"{t.gps.gps_ws.week:04.0f}{t.gps.gps_ws.day:1.0f}:{t.gps.gps_ws.seconds:06.0f}"
            for t in dset.transmission_time
        ],
        write_level="detail",
    )
    dset.add_text(
        "toe_gpsweek",
        val=[f"{t.gps.gps_ws.week:04.0f}{t.gps.gps_ws.day:1.0f}:{t.gps.gps_ws.seconds:06.0f}" for t in dset.toe],
        write_level="detail",
    )

    # Difference between transmission time and time of ephemeris in seconds.
    # FIX: added unit="second" for consistency with the identical field in the
    # gnss_satellite_position writer.
    dset.add_float(
        "diff_trans_toe",
        val=(dset.transmission_time.mjd - dset.toe.mjd) * Unit.day2second,
        unit="second",
        write_level="detail",
    )

    # Select fields available in Dataset (e.g. DVS and SHS fields are only given for Galileo)
    fields = get_existing_fields(dset, FIELDS)

    # List epochs ordered by satellites
    idx = np.concatenate([np.where(dset.filter(satellite=s))[0] for s in dset.unique("satellite")])

    # Put together fields in an array as specified by the fields-tuple
    output_list = list(zip(*(get_field(dset, f.field, f.attrs, f.unit) for f in fields)))
    output_array = np.array(output_list, dtype=[(f.name, f.dtype) for f in fields])[idx]

    # Write to disk
    header = get_header(
        fields,
        pgm_version=f"where {where.__version__}",
        run_by=util.get_user_info()["inst_abbreviation"] if "inst_abbreviation" in util.get_user_info() else "",
        summary="RINEX navigation file analysis results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in fields),
        header=header,
        delimiter="",
        encoding="utf8",
    )
def gnss_position(dset: "Dataset") -> None:
    """Write GNSS dilution of precision results

    NOTE(review): the function name says "position", but the writer generates
    the dilution-of-precision output file ('output_dilution_of_precision');
    the docstring has been corrected to match the actual behavior.

    Args:
        dset:  A dataset containing the data.
    """
    file_path = config.files.path("output_dilution_of_precision", file_vars=dset.vars)

    # Add date and DOPs fields to dataset
    if "date" not in dset.fields:
        dset.add_text(
            "date",
            val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime],
            unit="YYYY/MM/DD hh:mm:ss",
        )

    if "pdop" not in dset.fields:
        gnss_dop(dset)

    if "num_satellite_used" not in dset.fields:
        dset.add_float(
            "num_satellite_used",
            val=gnss.get_number_of_satellites(dset.system, dset.satellite, dset.time),
        )

    # Put together fields in an array as specified by the 'dtype' tuple list:
    # one output row per unique observation epoch
    output_list = list()
    for epoch in dset.unique("time"):
        idx = dset.filter(time=epoch)

        # Append current epoch solution to final output solution
        output_list.append(tuple([get_field(dset, f.field, f.attrs, f.unit)[idx][0] for f in FIELDS]))

    output_array = np.array(output_list, dtype=[(f.name, f.dtype) for f in FIELDS])

    # Write to disk
    header = get_header(
        FIELDS,
        pgm_version=f"where {where.__version__}",
        run_by=util.get_user_info()["inst_abbreviation"] if "inst_abbreviation" in util.get_user_info() else "",
        summary="GNSS dilution of precision results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in FIELDS),
        # BUGFIX: get_header() returns a single string (passed directly as
        # header= by the other writers in this module); "\n".join(header)
        # would insert a newline between every single character of it.
        header=header,
        delimiter="",
        encoding="utf8",
    )
示例#6
0
def gnss_residual(dset: "Dataset") -> None:
    """Write GNSS post-fit residual results

    Args:
        dset:  A dataset containing the data.
    """
    file_path = config.files.path("output_residual", file_vars=dset.vars)

    # The post-fit residual column's format and unit depend on the pipeline:
    # velocity pipelines report m/s, position pipelines report meter
    is_vel = dset.vars["pipeline"] == "gnss_vel"
    fields_def = list(FIELDS)
    fields_def.append(
        WriterField(
            "residual",
            "residual",
            (),
            float,
            "%15.4f" if is_vel else "%15.3f",
            15,
            "RESIDUAL",
            "meter/second" if is_vel else "meter",
            "Post-fit residual",
        )
    )

    # Ensure a 'date' text field exists before writing
    if "date" not in dset.fields:
        dset.add_text("date", val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime])

    # Assemble the structured output array column by column
    columns = (get_field(dset, f.field, f.attrs, f.unit) for f in fields_def)
    output_array = np.array(list(zip(*columns)), dtype=[(f.name, f.dtype) for f in fields_def])

    # Write to disk
    user_info = util.get_user_info()
    header = get_header(
        fields_def,
        pgm_version=f"where {where.__version__}",
        run_by=user_info.get("inst_abbreviation", ""),
        summary="GNSS post-fit residual results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in fields_def),
        header=header,
        delimiter="",
        encoding="utf8",
    )
示例#7
0
def sisre_writer(dset: "Dataset") -> None:
    """Write SISRE analysis results

    Adds the text and float fields needed by the writer to the dataset,
    orders the observation epochs by satellite and writes the resulting
    structured array to disk with np.savetxt.

    Args:
        dset:   A dataset containing the data.
    """
    # Add additional fields used by the writer.
    # GPS week strings are formatted as WWWWD:SSSSSS (week, day of week,
    # seconds of week).
    dset.add_text(
        "date",
        val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime])
    dset.add_text(
        "time_gpsweek",
        val=[
            f"{t.gps_ws.week:04.0f}{t.gps_ws.day:1.0f}:{t.gps_ws.seconds:06.0f}"
            for t in dset.time
        ])
    dset.add_text(
        "trans_time_gpsweek",
        val=[
            f"{t.gps_ws.week:04.0f}{t.gps_ws.day:1.0f}:{t.gps_ws.seconds:06.0f}"
            for t in dset.used_transmission_time
        ],
    )
    dset.add_text(
        "toe_gpsweek",
        val=[
            f"{t.gps_ws.week:04.0f}{t.gps_ws.day:1.0f}:{t.gps_ws.seconds:06.0f}"
            for t in dset.used_toe
        ],
    )
    # dset.add_float("diff_time_trans", val=(dset.time.mjd - dset.used_transmission_time.mjd) * Unit.day2second, Unit="second")
    # Orbit differences in along-track/cross-track/radial (ACR) frame
    dset.add_float("dalong_track",
                   val=dset.orb_diff.acr.along,
                   unit=dset.unit("orb_diff.acr.along"))
    dset.add_float("dcross_track",
                   val=dset.orb_diff.acr.cross,
                   unit=dset.unit("orb_diff.acr.cross"))
    dset.add_float("dradial",
                   val=dset.orb_diff.acr.radial,
                   unit=dset.unit("orb_diff.acr.radial"))

    ## Add 'detail' fields used by the writer
    # write_level = config.tech.get("write_level", default="operational").as_enum("write_level")
    # if write_level <= enums.get_value("write_level", "detail"):
    #    FIELDS += (
    #        WriterField("clk_brdc_com", "clk_brdc_com", (), float, "%16.4f", 16, "CLK_BRDC", ""),
    #        WriterField("clk_precise_com", "clk_precise_com", (), float, "%16.4f", 16, "CLK_PRECISE", ""),
    #        WriterField("bias_brdc", "bias_brdc", (), float, "%10.4f", 10, "B_BRDC", ""),
    #        WriterField("bias_precise", "bias_precise", (), float, "%10.4f", 10, "B_PREC", ""),
    #        WriterField("dt_mean", "dt_mean", (), float, "%10.4f", 10, "dt_MEAN", ""),
    #    )
    #
    #    dset.add_float("dt_mean", val=dset.clk_diff - dset.clk_diff_with_dt_mean, unit="meter")

    # List epochs ordered by satellites
    idx = np.concatenate([
        np.where(dset.filter(satellite=s))[0] for s in dset.unique("satellite")
    ])

    # Put together fields in an array as specified by the fields-tuple
    output_list = list(
        zip(*(get_field(dset, f.field, f.attrs, f.unit) for f in FIELDS)))
    output_array = np.array(output_list,
                            dtype=[(f.name, f.dtype) for f in FIELDS])[idx]

    # Write to disk
    # NOTE: np.savetxt is used instead of having a loop over all observation epochs, because the performance is better.
    file_path = config.files.path(f"output_sisre_{dset.vars['label']}",
                                  file_vars={
                                      **dset.vars,
                                      **dset.analysis
                                  })
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in FIELDS),
        header="\n".join(_get_header(dset)),
        delimiter="",
        encoding="utf8",
    )

    # Append SISRE output path to SISRE output buffer file
    if config.tech.sisre_writer.write_buffer_file.bool:
        sisre_output_buffer.sisre_output_buffer(dset)
示例#8
0
def gnss_velocity(dset: "Dataset") -> None:
    """Write GNSS velocity results

    Args:
        dset:  A dataset containing the data.
    """
    file_path = config.files.path("output_velocity", file_vars=dset.vars)

    # Add date field to dataset
    if "date" not in dset.fields:
        dset.add_text("date", val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime])

    # Add velocity in topocentric (ENU) coordinates
    lat, lon, height = dset.site_pos.pos.llh.T
    vel_enu = np.squeeze(rotation.trs2enu(lat, lon) @ dset.site_vel[:, :, None])
    dset.add_float("site_vel_east", val=vel_enu[:, 0], unit="meter/second")
    dset.add_float("site_vel_north", val=vel_enu[:, 1], unit="meter/second")
    dset.add_float("site_vel_up", val=vel_enu[:, 2], unit="meter/second")

    # Add horizontal velocity (HV), vertical velocity (VV) and 3D velocity
    dset.add_float("site_vel_h", val=np.sqrt(vel_enu[:, 0] ** 2 + vel_enu[:, 1] ** 2), unit="meter/second")
    dset.add_float("site_vel_v", val=np.absolute(vel_enu[:, 2]), unit="meter/second")
    dset.add_float(
        "site_vel_3d",
        val=np.sqrt(dset.site_vel[:, 0] ** 2 + dset.site_vel[:, 1] ** 2 + dset.site_vel[:, 2] ** 2),
        unit="meter/second",
    )

    # Add site velocity and standard deviation of site velocity coordinates
    dset.add_float("site_vel_x", val=dset.site_vel[:, 0], unit="meter/second")
    dset.add_float("site_vel_y", val=dset.site_vel[:, 1], unit="meter/second")
    dset.add_float("site_vel_z", val=dset.site_vel[:, 2], unit="meter/second")
    dset.add_float("site_vel_sigma_x", val=np.sqrt(dset.estimate_cov_site_vel_xx), unit="meter/second")
    dset.add_float("site_vel_sigma_y", val=np.sqrt(dset.estimate_cov_site_vel_yy), unit="meter/second")
    dset.add_float("site_vel_sigma_z", val=np.sqrt(dset.estimate_cov_site_vel_zz), unit="meter/second")

    # Put together fields in an array as specified by the 'dtype' tuple list
    if config.tech.estimate_epochwise.bool:  # Epochwise estimation or over whole time period

        output_list = list()
        for epoch in dset.unique("time"):
            idx = dset.filter(time=epoch)

            # Append current epoch velocity solution to final output solution
            output_list.append(tuple([get_field(dset, f.field, f.attrs, f.unit)[idx][0] for f in FIELDS]))

    else:
        # Get velocity solution for first observation
        idx = np.squeeze(np.array(np.nonzero(dset.time.gps.mjd)) == 0)  # first observation -> TODO: Better solution?
        # BUGFIX: 'idx' was erroneously passed as the second positional
        # argument to get_field(); the signature used everywhere else in this
        # module is get_field(dset, field, attrs, unit).
        output_list = [tuple([get_field(dset, f.field, f.attrs, f.unit)[idx][0] for f in FIELDS])]

    output_array = np.array(output_list, dtype=[(f.name, f.dtype) for f in FIELDS])

    # Write to disk
    header = get_header(
        FIELDS,
        pgm_version=f"where {where.__version__}",
        run_by=util.get_user_info()["inst_abbreviation"] if "inst_abbreviation" in util.get_user_info() else "",
        summary="GNSS velocity results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in FIELDS),
        header=header,
        delimiter="",
        encoding="utf8",
    )
示例#9
0
def gnss_position(dset: "Dataset") -> None:
    """Write GNSS position results

    Args:
        dset:  A dataset containing the data.
    """
    file_path = config.files.path("output_position", file_vars=dset.vars)

    # Add date field to dataset
    if "date" not in dset.fields:
        dset.add_text("date", val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime])

    # Add ENU position relative to the reference position to dataset
    ref_pos = position.Position(
        val=np.array([dset.meta["pos_x"], dset.meta["pos_y"], dset.meta["pos_z"]]), system="trs"
    )
    enu = (dset.site_pos.trs.pos - ref_pos).enu
    dset.add_float("site_pos_vs_ref_east", val=enu.east, unit="meter")
    dset.add_float("site_pos_vs_ref_north", val=enu.north, unit="meter")
    dset.add_float("site_pos_vs_ref_up", val=enu.up, unit="meter")

    # Add horizontal (HPE) and vertical (VPE) position error to dataset
    dset.add_float("hpe", val=np.sqrt(enu.east ** 2 + enu.north ** 2), unit="meter")
    dset.add_float("vpe", val=np.absolute(enu.up), unit="meter")

    # Add standard deviation of site position coordinates
    dset.add_float("site_pos_sigma_x", val=np.sqrt(dset.estimate_cov_site_pos_xx), unit="meter")
    dset.add_float("site_pos_sigma_y", val=np.sqrt(dset.estimate_cov_site_pos_yy), unit="meter")
    dset.add_float("site_pos_sigma_z", val=np.sqrt(dset.estimate_cov_site_pos_zz), unit="meter")

    # Put together fields in an array as specified by the 'dtype' tuple list
    if config.tech.estimate_epochwise.bool:  # Epochwise estimation or over whole time period

        output_list = list()
        for epoch in dset.unique("time"):
            idx = dset.filter(time=epoch)

            # Append current epoch position solution to final output solution
            output_list.append(tuple([get_field(dset, f.field, f.attrs, f.unit)[idx][0] for f in FIELDS]))

    else:
        # Get position solution for first observation
        idx = np.squeeze(np.array(np.nonzero(dset.time.gps.mjd)) == 0)  # first observation -> TODO: Better solution?
        # BUGFIX: 'idx' was erroneously passed as the second positional
        # argument to get_field(); the signature used everywhere else in this
        # module is get_field(dset, field, attrs, unit).
        output_list = [tuple([get_field(dset, f.field, f.attrs, f.unit)[idx][0] for f in FIELDS])]

    output_array = np.array(output_list, dtype=[(f.name, f.dtype) for f in FIELDS])

    # Write to disk
    header = get_header(
        FIELDS,
        pgm_version=f"where {where.__version__}",
        run_by=util.get_user_info()["inst_abbreviation"] if "inst_abbreviation" in util.get_user_info() else "",
        summary="GNSS position results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in FIELDS),
        # BUGFIX: get_header() returns a single string (passed directly as
        # header= by the other writers in this module); "\n".join(header)
        # would insert a newline between every single character of it.
        header=header,
        delimiter="",
        encoding="utf8",
    )
示例#10
0
def gnss_satellite_position(dset: "Dataset") -> None:
    """Write GNSS satellite position results

    Args:
        dset:  A dataset containing the data.
    """
    file_path = config.files.path(
        "output_satellite_position", file_vars={**dset.vars, **dset.analysis}
    )

    # Ensure a 'date' text field exists before writing
    if "date" not in dset.fields:
        dset.add_text("date", val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime])

    # Broadcast ephemeris carries additional timing information
    if config.tech.apriori_orbit.str == "broadcast":

        def _gpsweek(t) -> str:
            # GPS week string formatted as WWWWD:SSSSSS (week, day of week, seconds of week)
            return f"{t.gps_ws.week:04.0f}{t.gps_ws.day:1.0f}:{t.gps_ws.seconds:06.0f}"

        dset.add_text("trans_time_gpsweek", val=[_gpsweek(t) for t in dset.used_transmission_time])
        dset.add_text("toe_gpsweek", val=[_gpsweek(t) for t in dset.used_toe])
        dset.add_float(
            "diff_trans_toe",
            val=(dset.used_transmission_time.gps.mjd - dset.used_toe.gps.mjd) * Unit.day2second,
            unit="second",
        )
        dset.add_float(
            "age_of_ephemeris",
            val=(dset.time.gps.mjd - dset.used_toe.gps.mjd) * Unit.day2second,
            unit="second",
        )

    # Keep only the fields actually present in the dataset
    fields = get_existing_fields(dset, FIELDS)

    # Assemble the structured output array column by column
    columns = (get_field(dset, f.field, f.attrs, f.unit) for f in fields)
    output_array = np.array(list(zip(*columns)), dtype=[(f.name, f.dtype) for f in fields])

    # Write to disk
    user_info = util.get_user_info()
    header = get_header(
        fields,
        pgm_version=f"where {where.__version__}",
        run_by=user_info.get("inst_abbreviation", ""),
        summary="GNSS satellite position results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in fields),
        header=header,
        delimiter="",
        encoding="utf8",
    )