def test_update_kwargs_for_svp(self):
    empty_fq = fqpr_generation.Fqpr()
    sets = update_kwargs_for_svp(self.expected_output, empty_fq, [self.testsv])

    assert sets['args'] == [empty_fq, [self.testsv]]
    assert sets['tooltip_text'] == self.testsv
    assert sets['input_files'] == [self.testsv]
    empty_fq.close()

def test_build_svp_action(self):
    empty_fq = fqpr_generation.Fqpr()
    act = build_svp_action(self.expected_output, empty_fq, [self.testsv])
    assert act.args == [empty_fq, [self.testsv]]
    assert act.kwargs is None
    assert act.input_files == [self.testsv]
    assert act.action_type == 'svp'
    assert act.priority == 3
    assert act.is_running is False
    empty_fq.close()
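The two methods above reference self.expected_output and self.testsv without defining them, so they are meant to live on a test class whose setup supplies those attributes. A minimal hosting sketch, assuming a unittest.TestCase; the import locations follow the usual kluster package layout and the paths are made up purely for illustration:

import os
import unittest

from HSTB.kluster import fqpr_generation
from HSTB.kluster.fqpr_actions import build_svp_action, update_kwargs_for_svp


class TestSvpActions(unittest.TestCase):
    """Hypothetical container class; the two test methods above would be added to it."""

    def setUp(self):
        # made-up example paths, purely for illustration
        self.testsv = os.path.join('test_data', 'cast_file.svp')       # sound velocity cast fed to the action
        self.expected_output = os.path.join('test_data', 'converted')  # converted data folder the action points at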
Example 3
def build_georef_correct_comparison(dset='realdual',
                                    vert_ref='waterline',
                                    datum='NAD83'):
    """
   Generate mine/kongsberg xyz88 data set from the test dataset.

   Will run using the 'realdualhead' dataset included in this file or a small synthetic test dataset with meaningless
   numbers that I've just come up with.

   Parameters
   ----------
   dset: str, specify which dataset you want to use
   vert_ref: str, vertical reference, one of ['waterline', 'vessel', 'ellipse']
   datum: str, datum identifier, anything recognized by pyproj CRS

   """

    if dset == 'real':
        synth_dat = RealFqpr()
        synth = load_dataset(synth_dat)
    elif dset == 'realdual':
        synth_dat = RealDualheadFqpr()
        synth = load_dataset(synth_dat)
    else:
        raise NotImplementedError('mode not recognized')

    fq = fqpr_generation.Fqpr(synth)
    fq.logger = logging.getLogger()
    fq.logger.setLevel(logging.INFO)
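    # run the chain in memory through georeferencing: orientation vectors -> beam pointing vectors -> sv correction -> CRS construction -> georeferencing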
    fq.read_from_source()
    fq.get_orientation_vectors(dump_data=False, initial_interp=False)
    fq.get_beam_pointing_vectors(dump_data=False)
    fq.sv_correct(dump_data=False)
    fq.construct_crs(datum=datum, projected=True, vert_ref=vert_ref)
    fq.georef_xyz(dump_data=False)

    # grab the sector identifiers and the single timestamp key that the in-memory intermediate results are stored under
    secs = fq.return_sector_ids()
    tstmp = list(fq.intermediate_dat[secs[0]]['xyz'].keys())[0]

    loaded_xyz_data = [
        fq.intermediate_dat[s]['xyz'][tstmp][0][0].result() for s in secs
    ]
    loaded_sv_data = [
        fq.intermediate_dat[s]['sv_corr'][tstmp][0][0].result() for s in secs
    ]
    loaded_ang_data = [
        np.rad2deg(fq.intermediate_dat[s]['bpv'][tstmp][0][0].result()[1]) for s in secs
    ]

    fq.intermediate_dat = {}

    if dset == 'realdual':
        loaded_data = [[
            loaded_sv_data[i][0].values[0], loaded_sv_data[i][1].values[0],
            loaded_xyz_data[i][2].values[0], loaded_ang_data[i].values[0]
        ] for i in range(len(loaded_xyz_data))]

        # apply the waterline, z lever arm and z phase center offsets to get the actual waterline-relative value
        depth_wline_addtl = [
            -float(fq.multibeam.xyzrph['waterline'][tstmp]) +
            float(fq.multibeam.xyzrph['tx_port_z'][tstmp]) +
            float(fq.multibeam.xyzrph['tx_port_z_1'][tstmp]),
            -float(fq.multibeam.xyzrph['waterline'][tstmp]) +
            float(fq.multibeam.xyzrph['tx_port_z'][tstmp]) +
            float(fq.multibeam.xyzrph['tx_port_z_1'][tstmp]),
            -float(fq.multibeam.xyzrph['waterline'][tstmp]) +
            float(fq.multibeam.xyzrph['tx_stbd_z'][tstmp]) +
            float(fq.multibeam.xyzrph['tx_stbd_z_1'][tstmp]),
            -float(fq.multibeam.xyzrph['waterline'][tstmp]) +
            float(fq.multibeam.xyzrph['tx_stbd_z'][tstmp]) +
            float(fq.multibeam.xyzrph['tx_stbd_z_1'][tstmp])
        ]

        # kongsberg angles are relative to horizontal, here is what I came up with to get vertical-relative angles (to match kluster)
        xyz_88_corrangle = [
            90 - np.array(synth_dat.xyz88_corrangle[0]),
            90 - np.array(synth_dat.xyz88_corrangle[1]),
            np.array(synth_dat.xyz88_corrangle[2]) - 90,
            np.array(synth_dat.xyz88_corrangle[3]) - 90
        ]
        xyz88_data = [[
            np.array(synth_dat.xyz88_alongtrack[i]),
            np.array(synth_dat.xyz88_acrosstrack[i]),
            np.array(synth_dat.xyz88_depth[i]) + depth_wline_addtl[i],
            xyz_88_corrangle[i]
        ] for i in range(len(synth_dat.xyz88_depth))]

    elif dset == 'real':
        loaded_data = []
        for tme in [0, 1]:
            for sec_group in [[0, 2, 4], [1, 3, 5]]:
                # drop the NaN beams from each sector and stack the three sectors for this head/time into single arrays
                dpth = np.concatenate([
                    loaded_xyz_data[sec][2].values[tme][~np.isnan(loaded_xyz_data[sec][2].values[tme])]
                    for sec in sec_group
                ])
                along = np.concatenate([
                    loaded_sv_data[sec][0].values[tme][~np.isnan(loaded_sv_data[sec][0].values[tme])]
                    for sec in sec_group
                ])
                across = np.concatenate([
                    loaded_sv_data[sec][1].values[tme][~np.isnan(loaded_sv_data[sec][1].values[tme])]
                    for sec in sec_group
                ])
                angle = np.concatenate([
                    loaded_ang_data[sec].values[tme][~np.isnan(loaded_ang_data[sec].values[tme])]
                    for sec in sec_group
                ])
                loaded_data.append([along, across, dpth, angle])

        # in the future, use the sector index to also include the additional phase center offsets here
        depth_wline_addtl = (-float(fq.multibeam.xyzrph['waterline'][tstmp]) +
                             float(fq.multibeam.xyzrph['tx_z'][tstmp]))

        # kongsberg angles are relative to horizontal, here is what I came up with to get vertical-relative angles (to match kluster)
        xyz_88_corrangle = []
        for ang in synth_dat.xyz88_corrangle:
            ang = 90 - np.array(ang)
            ang[np.argmin(ang):] = ang[np.argmin(ang):] * -1
            xyz_88_corrangle.append(ang)

        xyz88_data = [[
            np.array(synth_dat.xyz88_alongtrack[i]),
            np.array(synth_dat.xyz88_acrosstrack[i]),
            np.array(synth_dat.xyz88_depth[i]) + depth_wline_addtl,
            xyz_88_corrangle[i]
        ] for i in range(len(synth_dat.xyz88_depth))]

    else:
        raise NotImplementedError(
            'only real and realdual are currently implemented')

    fq.close()
    return loaded_data, xyz88_data
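A minimal driver sketch for the comparison builder above; the arguments mirror the function defaults, and the inspection loop is only an illustration since the shapes of the returned arrays depend on the dataset chosen:

# hypothetical driver; only inspects the returned structures
my_data, xyz88_data = build_georef_correct_comparison(dset='realdual', vert_ref='waterline', datum='NAD83')
for idx, (mine, kongsberg) in enumerate(zip(my_data, xyz88_data)):
    # each entry is [alongtrack, acrosstrack, depth, beam angle]
    print('sector {}: {} of my depth values vs {} xyz88 depth values'.format(idx, np.size(mine[2]), np.size(kongsberg[2])))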
Example 4
def georef_xyz(dset='realdualhead'):
    """
    Automated test of fqpr_generation georef_xyz

    Will run using the 'real' dataset or 'realdualhead' included in the test_datasets file.

    No current support for the synthetic dataset, need to look at adding that in.  I've yet to find a reason to do so
    though, now that I have the real pings.

    Parameters
    ----------
    dset: str, specify which dataset you want to use, one of 'real' and 'realdualhead'
    """

    vert_ref = 'waterline'
    datum = 'NAD83'

    if dset == 'real':
        synth = load_dataset(RealFqpr())
        expected_x = [
            np.array([539017.745, 539018.535, 539019.322], dtype=np.float64)
        ]
        expected_y = [
            np.array([5292788.295, 5292789.346, 5292790.396], dtype=np.float64)
        ]
        expected_z = [np.array([91.789, 91.699, 91.605], dtype=np.float32)]
    elif dset == 'realdualhead':
        synth = load_dataset(RealDualheadFqpr())
        expected_x = [
            np.array([492984.906, 492984.867, 492984.787], dtype=np.float64),
            np.array([492943.083, 492942.971, 492942.859], dtype=np.float64)
        ]
        expected_y = [
            np.array([3365068.225, 3365068.25, 3365068.305], dtype=np.float64),
            np.array([3365096.742, 3365096.82, 3365096.898], dtype=np.float64)
        ]
        expected_z = [
            np.array([22.087, 22.124, 22.141], dtype=np.float32),
            np.array([22.692, 22.704, 22.714], dtype=np.float32)
        ]
    else:
        raise NotImplementedError('mode not recognized')

    fq = fqpr_generation.Fqpr(synth)
    fq.logger = logging.getLogger()
    fq.logger.setLevel(logging.INFO)
    fq.read_from_source()
    fq.get_orientation_vectors(dump_data=False, initial_interp=False)
    fq.get_beam_pointing_vectors(dump_data=False)
    fq.sv_correct(dump_data=False)
    fq.construct_crs(datum=datum, projected=True, vert_ref=vert_ref)
    fq.georef_xyz(dump_data=False)

    # arrays of computed vectors
    sysid = [rp.system_identifier for rp in fq.multibeam.raw_ping]
    tstmp = list(fq.intermediate_dat[sysid[0]]['georef'].keys())[0]
    # since we kept data in memory (dump_data=False), we can pull the georeferencing results straight from intermediate_dat
    loaded_data = [
        fq.intermediate_dat[s]['georef'][tstmp][0][0] for s in sysid
    ]

    x_data = [ld[0].isel(time=0).values[0:3] for ld in loaded_data]
    y_data = [ld[1].isel(time=0).values[0:3] for ld in loaded_data]
    z_data = [ld[2].isel(time=0).values[0:3] for ld in loaded_data]

    print('GEOREF {}'.format(dset))
    print([x for y in x_data for x in y.flatten()])
    print([x for y in y_data for x in y.flatten()])
    print([x for y in z_data for x in y.flatten()])

    # easting
    assert np.array_equal(x_data, expected_x)

    # northing
    assert np.array_equal(y_data, expected_y)

    # depth
    assert np.array_equal(z_data, expected_z)

    fq.close()
    print('Passed: georef_xyz')
Example 5
def sv_correct(dset='realdualhead'):
    """
    Automated test of fqpr_generation sv_correct

    Will run using the 'real' dataset or 'realdualhead' included in the test_datasets file.

    No current support for the synthetic dataset, need to look at adding that in.  I've yet to find a reason to do so
    though, now that I have the real pings.

    Parameters
    ----------
    dset: str, specify which dataset you want to use, one of 'real' and 'realdualhead'
    """

    if dset == 'real':
        synth = load_dataset(RealFqpr())
        expected_x = [np.array([-3.419, -3.406, -3.392])]
        expected_y = [np.array([-232.877, -231.562, -230.249])]
        expected_z = [np.array([91.139, 91.049, 90.955])]
    elif dset == 'realdualhead':
        synth = load_dataset(RealDualheadFqpr())
        expected_x = [
            np.array([0.692, 0.693, 0.693]),
            np.array([0.567, 0.565, 0.564])
        ]
        expected_y = [
            np.array([-59.992, -59.945, -59.848]),
            np.array([-9.351, -9.215, -9.078])
        ]
        expected_z = [
            np.array([18.305, 18.342, 18.359]),
            np.array([18.861, 18.873, 18.883])
        ]
    else:
        raise NotImplementedError('mode not recognized')

    fq = fqpr_generation.Fqpr(synth)
    fq.logger = logging.getLogger()
    fq.logger.setLevel(logging.INFO)
    fq.read_from_source()
    fq.get_orientation_vectors(dump_data=False, initial_interp=False)
    fq.get_beam_pointing_vectors(dump_data=False)
    fq.sv_correct(dump_data=False)

    # arrays of computed vectors
    sysid = [rp.system_identifier for rp in fq.multibeam.raw_ping]
    tstmp = list(fq.intermediate_dat[sysid[0]]['sv_corr'].keys())[0]
    # since we kept data in memory (dump_data=False), we can pull the sv corrected results straight from intermediate_dat
    loaded_data = [
        fq.intermediate_dat[s]['sv_corr'][tstmp][0][0] for s in sysid
    ]

    x_data = [ld[0].isel(time=0).values[0:3] for ld in loaded_data]
    y_data = [ld[1].isel(time=0).values[0:3] for ld in loaded_data]
    z_data = [ld[2].isel(time=0).values[0:3] for ld in loaded_data]

    print('SVCORR {}'.format(dset))
    print([x for y in x_data for x in y.flatten()])
    print([x for y in y_data for x in y.flatten()])
    print([x for y in z_data for x in y.flatten()])

    # forward offset check
    assert np.array_equal(x_data, expected_x)

    # acrosstrack offset check
    assert np.array_equal(y_data, expected_y)

    # depth offset check
    assert np.array_equal(z_data, expected_z)

    fq.close()
    print('Passed: sv_correct')
Example 6
def build_beam_pointing_vector(dset='realdualhead'):
    """
    Automated test of fqpr_generation build_beam_pointing_vector

    Will run using the 'real' dataset or 'realdualhead' included in the test_datasets file.

    No current support for the synthetic dataset, need to look at adding that in.  I've yet to find a reason to do so
    though, now that I have the real pings.

    Parameters
    ----------
    dset: str, specify which dataset you want to use, one of 'real' and 'realdualhead'
    """

    if dset == 'real':
        synth = load_dataset(RealFqpr())
        expected_ba = [
            np.array([4.697702878191307, 4.697679369354361, 4.697655798111743])
        ]
        expected_bda = [
            np.array(
                [1.209080677036444, 1.2074367547912856, 1.2057926824074374])
        ]
    elif dset == 'realdualhead':
        synth = load_dataset(RealDualheadFqpr())
        expected_ba = [
            np.array(
                [4.7144694193229295, 4.714486234983295, 4.714503034301336]),
            np.array([4.72527541256665, 4.725306685935214, 4.725337688174256])
        ]
        expected_bda = [
            np.array(
                [1.2049043892451596, 1.20385629874863, 1.2028083855561609]),
            np.array(
                [0.5239366688735714, 0.5181768253459791, 0.5124169874635531])
        ]
    else:
        raise NotImplementedError('mode not recognized')

    fq = fqpr_generation.Fqpr(synth)
    fq.logger = logging.getLogger()
    fq.logger.setLevel(logging.INFO)
    fq.read_from_source()
    fq.get_orientation_vectors(dump_data=False, initial_interp=False)
    fq.get_beam_pointing_vectors(dump_data=False)

    # arrays of computed vectors
    sysid = [rp.system_identifier for rp in fq.multibeam.raw_ping]
    tstmp = list(fq.intermediate_dat[sysid[0]]['bpv'].keys())[0]
    # since we kept data in memory (dump_data=False), we can pull the beam pointing vector results straight from intermediate_dat
    loaded_data = [fq.intermediate_dat[s]['bpv'][tstmp][0][0] for s in sysid]

    ba_data = [ld[0].isel(time=0).values[0:3] for ld in loaded_data]
    bda_data = [ld[1].isel(time=0).values[0:3] for ld in loaded_data]

    print('BEAMPOINTING {}'.format(dset))
    print([x for y in ba_data for x in y.flatten()])
    print([x for y in bda_data for x in y.flatten()])

    # beam azimuth check
    assert np.array_equal(ba_data, expected_ba)

    # beam depression angle check
    assert np.array_equal(bda_data, expected_bda)

    fq.close()
    print('Passed: build_beam_pointing_vector')
Example 7
def get_orientation_vectors(dset='realdualhead'):
    """
    Automated test of fqpr_generation get_orientation_vectors

    Will run using the 'real' dataset or 'realdualhead' included in the test_datasets file.

    No current support for the synthetic dataset, need to look at adding that in.  I've yet to find a reason to do so
    though, now that I have the real pings.

    Parameters
    ----------
    dset: str, specify which dataset you want to use, one of 'real' and 'realdualhead'
    """

    if dset == 'real':
        synth = load_dataset(RealFqpr())
        expected_tx = [
            np.array([
                0.6136555921172974, -0.7895255928982701, 0.008726535498373935
            ])
        ]
        expected_rx = [
            np.array(
                [0.7834063072490661, 0.6195440454987808, -0.04939365798750035])
        ]
    elif dset == 'realdualhead':
        synth = load_dataset(RealDualheadFqpr())
        expected_tx = [
            np.array([
                -0.8173967230596009, -0.5756459946918305, -0.022232663846213512
            ]),
            np.array([
                -0.818098137098556, -0.5749317404941526, -0.013000579640495315
            ])
        ]
        expected_rx = [
            np.array(
                [0.5707251056249292, -0.8178104883650188,
                 0.07388380848347877]),
            np.array([
                0.5752302545527056, -0.8157217016726686, -0.060896177270015645
            ])
        ]
    else:
        raise NotImplementedError('mode not recognized')

    fq = fqpr_generation.Fqpr(synth)
    fq.logger = logging.getLogger()
    fq.logger.setLevel(logging.INFO)
    fq.read_from_source()
    # dump_data/delete_futs set the workflow to either keeping everything in memory after completion (False) or writing
    #     data to disk (both are True).  Could probably condense these arguments to one argument in the future.
    fq.get_orientation_vectors(dump_data=False, initial_interp=False)

    # arrays of computed vectors
    sysid = [rp.system_identifier for rp in fq.multibeam.raw_ping]
    tstmp = list(fq.intermediate_dat[sysid[0]]['orientation'].keys())[0]
    # since we kept data in memory (dump_data=False), we can pull the orientation vector results straight from intermediate_dat
    loaded_data = [
        fq.intermediate_dat[s]['orientation'][tstmp][0][0] for s in sysid
    ]

    # we examine the tx vector for each sector (not beam based) and the rx vector for each sector's first beam (rx
    #     vectors change per beam, as attitude changes based on beam traveltime)
    txvecdata = [ld[0].values[0][0] for ld in loaded_data]
    rxvecdata = [ld[1].values[0][0] for ld in loaded_data]

    print('ORIENTATION {}'.format(dset))
    print([x for y in txvecdata for x in y.flatten()])
    print([x for y in rxvecdata for x in y.flatten()])

    # check for the expected tx orientation vectors
    assert np.array_equal(expected_tx, txvecdata)

    # check for the expected rx orientation vectors
    assert np.array_equal(expected_rx, rxvecdata)

    fq.close()
    print('Passed: get_orientation_vectors')
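The checks above are standalone functions that assert internally and print a 'Passed' line, so a small runner sketch (assuming they all live in the same module, as they do here) can exercise both datasets:

# hypothetical runner for the four processing checks above
if __name__ == '__main__':
    for dataset in ('real', 'realdualhead'):
        get_orientation_vectors(dset=dataset)
        build_beam_pointing_vector(dset=dataset)
        sv_correct(dset=dataset)
        georef_xyz(dset=dataset)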