Example #1: calculate_solution writer
def calculate_solution(dset: "Dataset") -> None:
    """Write calculate solution results


    Args:
        dset:  A dataset containing the data.
    """
    file_path = config.files.path("output_calculate_solution",
                                  file_vars={
                                      **dset.vars,
                                      **dset.analysis
                                  })

    # Extend the writer fields depending on the pipeline in use
    fields_def = list(FIELDS)
    fields_def.append(
        WriterField(
            "residual_prefit",
            "residual_prefit",
            (),
            float,
            "%15.4f" if dset.vars["pipeline"] == "gnss_vel" else "%15.3f",
            15,
            "RESIDUAL",
            "m/s" if dset.vars["pipeline"] == "gnss_vel" else "meter",
            "Pre-fit residual",
        ))

    # Add date field to dataset
    if "date" not in dset.fields:
        dset.add_text(
            "date",
            val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime],
            write_level="detail")

    # Select fields available in Dataset
    fields = get_existing_fields(dset, fields_def)

    # Put together fields in an array as specified by the 'dtype' tuple list
    output_list = list(
        zip(*(get_field(dset, f.field, f.attrs, f.unit) for f in fields)))
    output_array = np.array(output_list,
                            dtype=[(f.name, f.dtype) for f in fields])

    # Write to disk
    header = get_header(
        fields,
        pgm_version=f"where {where.__version__}",
        run_by=util.get_user_info().get("inst_abbreviation", ""),
        summary="Calculate solution results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in fields),
        header=header,
        delimiter="",
        encoding="utf8",
    )
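
These writers rely on helpers defined elsewhere in the module: WriterField, FIELDS, get_existing_fields, get_field, and get_header. For orientation only, here is a minimal sketch of what the code above appears to assume; the WriterField attribute names beyond those actually referenced (name, field, attrs, dtype, format, unit), the illustrative FIELDS entry, and both helper bodies are assumptions, not the real implementation.

from collections import namedtuple

# Hypothetical sketch of the helpers used by the writer examples.  Attribute
# names not referenced in the examples above are guesses.
WriterField = namedtuple(
    "WriterField",
    ["name", "field", "attrs", "dtype", "format", "width", "header", "unit", "description"],
)

# FIELDS would be a tuple of WriterField entries describing the writable
# columns (single illustrative entry only).
FIELDS = (
    WriterField("date", "date", (), object, "%21s", 21, "DATE", "", "Date in format yyyy/mm/dd hh:mm:ss"),
)


def get_existing_fields(dset, fields):
    """Keep only the WriterField entries whose field exists in the Dataset (sketch)."""
    return [f for f in fields if f.field in dset.fields]


def get_field(dset, field, attrs, unit):
    """Return the values of one Dataset field as an output column (sketch).

    Nested attributes such as ("gps", "mjd") are resolved step by step; unit
    conversion is left out for brevity.
    """
    values = getattr(dset, field)
    for attr in attrs:
        values = getattr(values, attr)
    return values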
Example #2: rinex_nav_writer
def rinex_nav_writer(dset: "Dataset") -> None:
    """Write selected RINEX navigation file observations

    Args:
        dset:   A dataset containing the data.
    """
    file_path = config.files.path("output_rinex_nav", file_vars={**dset.vars, **dset.analysis})

    # Add additional fields used by the writer
    dset.add_text("date", val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime])
    dset.add_text(
        "time_gpsweek",
        val=[f"{t.gps.gps_ws.week:04.0f}{t.gps.gps_ws.day:1.0f}:{t.gps.gps_ws.seconds:06.0f}" for t in dset.time],
        write_level="detail",
    )
    dset.add_text(
        "trans_time_gpsweek",
        val=[
            f"{t.gps.gps_ws.week:04.0f}{t.gps.gps_ws.day:1.0f}:{t.gps.gps_ws.seconds:06.0f}"
            for t in dset.transmission_time
        ],
        write_level="detail",
    )
    dset.add_text(
        "toe_gpsweek",
        val=[f"{t.gps.gps_ws.week:04.0f}{t.gps.gps_ws.day:1.0f}:{t.gps.gps_ws.seconds:06.0f}" for t in dset.toe],
        write_level="detail",
    )

    dset.add_float(
        "diff_trans_toe",
        val=(dset.transmission_time.mjd - dset.toe.mjd) * Unit.day2second,
        unit="second",
        write_level="detail",
    )

    # Select fields available in Dataset (e.g. DVS and SHS fields are only given for Galileo)
    fields = get_existing_fields(dset, FIELDS)

    # List epochs ordered by satellites
    idx = np.concatenate([np.where(dset.filter(satellite=s))[0] for s in dset.unique("satellite")])

    # Put together fields in an array as specified by the fields-tuple
    output_list = list(zip(*(get_field(dset, f.field, f.attrs, f.unit) for f in fields)))
    output_array = np.array(output_list, dtype=[(f.name, f.dtype) for f in fields])[idx]

    # Write to disk
    header = get_header(
        fields,
        pgm_version=f"where {where.__version__}",
        run_by=util.get_user_info().get("inst_abbreviation", ""),
        summary="RINEX navigation file analysis results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in fields),
        header=header,
        delimiter="",
        encoding="utf8",
    )
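
All of these writers share the same output mechanics: each selected field becomes one column, the columns are zipped into row tuples, packed into a NumPy structured array, and written with np.savetxt using one format specifier per column and an empty delimiter, which yields fixed-width text. A self-contained toy version of that pattern, with made-up column data and a hypothetical output file name, looks like this:

import numpy as np

# Made-up columns standing in for the get_field(...) results.
columns = {
    "date": ["2024/01/01 00:00:00", "2024/01/01 00:05:00"],
    "satellite": ["G01", "G02"],
    "residual": [0.0123, -0.0456],
}
formats = {"date": "%21s", "satellite": "%5s", "residual": "%15.4f"}
dtypes = {"date": "U21", "satellite": "U5", "residual": float}

# Zip the columns into row tuples, mirroring
# list(zip(*(get_field(dset, f.field, f.attrs, f.unit) for f in fields))).
output_list = list(zip(*columns.values()))
output_array = np.array(output_list, dtype=[(name, dtypes[name]) for name in columns])

# One format specifier per column plus delimiter="" gives fixed-width columns.
np.savetxt(
    "toy_writer_output.txt",
    output_array,
    fmt=tuple(formats[name] for name in columns),
    header="DATE                  SAT        RESIDUAL",
    delimiter="",
    encoding="utf8",
)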
Example #3: calculate_solution writer (variant)
def calculate_solution(dset: "Dataset") -> None:
    """Write calculate solution results


    Args:
        dset:  A dataset containing the data.
    """
    file_path = config.files.path("output_calculate_solution",
                                  file_vars=dset.vars)

    # Add date field to dataset
    if "date" not in dset.fields:
        dset.add_text(
            "date",
            val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime])

    # Select fields available in Dataset
    fields = get_existing_fields(dset, FIELDS)

    # Put together fields in an array as specified by the 'dtype' tuple list
    output_list = list(
        zip(*(get_field(dset, f.field, f.attrs, f.unit) for f in fields)))
    output_array = np.array(output_list,
                            dtype=[(f.name, f.dtype) for f in fields])

    # Write to disk
    header = get_header(
        fields,
        pgm_version=f"where {where.__version__}",
        run_by=util.get_user_info().get("inst_abbreviation", ""),
        summary="Calculate solution results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in fields),
        header=header,
        delimiter="",
        encoding="utf8",
    )
Example #4: gnss_satellite_position writer
def gnss_satellite_position(dset: "Dataset") -> None:
    """Write GNSS satellite position results


    Args:
        dset:  A dataset containing the data.
    """
    file_path = config.files.path("output_satellite_position",
                                  file_vars={
                                      **dset.vars,
                                      **dset.analysis
                                  })

    # Add date field to dataset
    if "date" not in dset.fields:
        dset.add_text(
            "date",
            val=[d.strftime("%Y/%m/%d %H:%M:%S") for d in dset.time.datetime])

    # Add fields in case of broadcast ephemeris
    if config.tech.apriori_orbit.str == "broadcast":
        dset.add_text(
            "trans_time_gpsweek",
            val=[
                f"{t.gps_ws.week:04.0f}{t.gps_ws.day:1.0f}:{t.gps_ws.seconds:06.0f}"
                for t in dset.used_transmission_time
            ],
        )
        dset.add_text(
            "toe_gpsweek",
            val=[
                f"{t.gps_ws.week:04.0f}{t.gps_ws.day:1.0f}:{t.gps_ws.seconds:06.0f}"
                for t in dset.used_toe
            ],
        )
        dset.add_float(
            "diff_trans_toe",
            val=(dset.used_transmission_time.gps.mjd - dset.used_toe.gps.mjd) *
            Unit.day2second,
            unit="second",
        )
        dset.add_float(
            "age_of_ephemeris",
            val=(dset.time.gps.mjd - dset.used_toe.gps.mjd) * Unit.day2second,
            unit="second",
        )

    # Select fields available in Dataset
    fields = get_existing_fields(dset, FIELDS)

    # Put together fields in an array as specified by the 'dtype' tuple list
    output_list = list(
        zip(*(get_field(dset, f.field, f.attrs, f.unit) for f in fields)))
    output_array = np.array(output_list,
                            dtype=[(f.name, f.dtype) for f in fields])

    # Write to disk
    header = get_header(
        fields,
        pgm_version=f"where {where.__version__}",
        run_by=util.get_user_info().get("inst_abbreviation", ""),
        summary="GNSS satellite position results",
    )
    np.savetxt(
        file_path,
        output_array,
        fmt=tuple(f.format for f in fields),
        header=header,
        delimiter="",
        encoding="utf8",
    )
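
get_header is called but never defined in these examples: it receives the selected WriterField entries together with pgm_version, run_by, and summary keyword arguments and returns the string that np.savetxt writes out as comment lines. A purely illustrative sketch, reusing the name, unit, and description attributes from the WriterField sketch above and not the actual Where implementation, could look like this:

from datetime import datetime, timezone


def get_header(fields, pgm_version="", run_by="", summary=""):
    """Assemble a header string for np.savetxt (illustrative sketch only)."""
    lines = [
        f"PGM: {pgm_version}  RUN_BY: {run_by}  "
        f"DATE: {datetime.now(timezone.utc):%Y%m%d %H%M%S} UTC",
        f"SUMMARY: {summary}",
        "",
        "FIELD                UNIT            DESCRIPTION",
    ]
    for field in fields:
        unit = f"[{field.unit}]" if field.unit else ""
        lines.append(f"{field.name:<20s} {unit:<15s} {field.description}")
    return "\n".join(lines)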