# Imports assumed throughout these examples:
import numpy as np
from pymwalib.correlator_context import CorrelatorContext

def test_read_by_baseline(mwax_corr_context: CorrelatorContext):
    ts = 0
    chan = 0
    data_by_bl = mwax_corr_context.read_by_baseline(ts, chan)
    data_by_f = mwax_corr_context.read_by_frequency(ts, chan)

    # Test length
    assert len(data_by_bl) == mwax_corr_context.num_timestep_coarse_chan_floats
    assert len(data_by_f) == mwax_corr_context.num_timestep_coarse_chan_floats

    # Sum them and compare
    sum_bl = np.sum(data_by_bl)
    sum_f = np.sum(data_by_f)
    assert sum_bl == sum_f
    print(f"\nCorrelator Sum by baseline  == {sum_bl}")
    print(f"Correlator Sum by frequency == {sum_f}")
Example #2
def mwax_context():
    return CorrelatorContext(
        prefix_test_data("1297526432_mwax/1297526432.metafits"),
        map(prefix_test_data, [
            "1297526432_mwax/1297526432_20210216160014_ch117_000.fits",
            "1297526432_mwax/1297526432_20210216160014_ch117_001.fits",
            "1297526432_mwax/1297526432_20210216160014_ch118_000.fits",
            "1297526432_mwax/1297526432_20210216160014_ch118_001.fits"
        ]))
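Assuming mwax_context is registered with pytest's @pytest.fixture decorator in its test module (the decorator is not shown above), a test can request it by name. A minimal hypothetical consumer:

def test_mwax_context_opens(mwax_context: CorrelatorContext):
    # Hypothetical test: reaching this point means the constructor has already
    # validated the metafits against the four gpubox files listed in the fixture.
    assert mwax_context.num_timestep_coarse_chan_floats > 0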
def test_corr_get_fine_chan_freqs_hz_array(
        mwax_corr_context: CorrelatorContext):
    # Get the fine channel frequencies for the first coarse channel
    freq_list = mwax_corr_context.get_fine_chan_freqs_hz_array([
        0,
    ])
    assert len(freq_list) == 2
    assert freq_list[0] == 149120000.0
    assert freq_list[1] == 149760000.0
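The two returned values are 640 kHz apart, matching two fine channels per 1.28 MHz MWA coarse channel for this MWAX observation. A small sketch (not part of the original test) checking that spacing:

import numpy as np

def check_fine_chan_spacing(mwax_corr_context: CorrelatorContext):
    # Sketch only: fine-channel frequencies within a coarse channel should be
    # evenly spaced (640 kHz here, i.e. 1.28 MHz / 2 fine channels).
    freqs = np.array(mwax_corr_context.get_fine_chan_freqs_hz_array([0]))
    assert np.all(np.diff(freqs) == 640_000.0)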
Example #4
def dump_mwalib(ant1, ant2, timestep_index, coarse_chan_index, fine_chan_index, fine_chan_count, gpuboxfiles, metafits, out_filename):
    print("pymwalib:")
    print("======================================")
    with CorrelatorContext(metafits, gpuboxfiles) as cc:
        # Get data
        data = cc.read_by_baseline(timestep_index, coarse_chan_index)

        if out_filename is None:
            baseline_index = get_baseline_from_antennas(ant1, ant2, 128)

            # print details
            print(
                f"Timestep[{timestep_index}]          = Unix time {cc.timesteps[timestep_index].unix_time_ms / 1000.0} GPS: {cc.timesteps[timestep_index].gps_time_ms / 1000.0}")
            print(
                f"Coarse Channel[{coarse_chan_index}]    = Receiver Chan {cc.coarse_channels[coarse_chan_index].rec_chan_number}, GPUBOX number {cc.coarse_channels[coarse_chan_index].gpubox_number}")
            print(f"Fine channels[{fine_chan_index}:{fine_chan_index + fine_chan_count}]")
            print(
                f"Baseline[{baseline_index}]           = Antenna[{ant1}] {cc.metafits_context.antennas[ant1].tile_id}, {cc.metafits_context.antennas[ant1].tile_name} vs Antenna[{ant2}] {cc.metafits_context.antennas[ant2].tile_id}, {cc.metafits_context.antennas[ant2].tile_name}")

            data_bl_index = baseline_index * (
                        cc.metafits_context.num_corr_fine_chans_per_coarse * cc.metafits_context.num_visibility_pols * 2)

            for chan in range(fine_chan_index, fine_chan_index + fine_chan_count):
                data_fine_index = data_bl_index + (chan * cc.metafits_context.num_visibility_pols * 2)
                print(f"chan {chan} "
                      f"XX: {data[data_fine_index]:.2f} {data[data_fine_index + 1]:.2f},\t"
                      f"XY: {data[data_fine_index + 2]:.2f} {data[data_fine_index + 3]:.2f},\t"
                      f"YX: {data[data_fine_index + 4]:.2f} {data[data_fine_index + 5]:.2f},\t"
                      f"YY: {data[data_fine_index + 6]:.2f} {data[data_fine_index + 7]:.2f}")
        else:
            with open(out_filename, "w") as out_file:
                for baseline_index in range(0, int(128 * 129 / 2)):  # 128 antennas -> 8256 baselines
                    data_bl_index = baseline_index * (
                        cc.metafits_context.num_corr_fine_chans_per_coarse * cc.metafits_context.num_visibility_pols * 2)

                    for chan in range(fine_chan_index, fine_chan_index + fine_chan_count):
                        data_fine_index = data_bl_index + (chan * cc.metafits_context.num_visibility_pols * 2)

                        out_file.write(f"{data[data_fine_index]},{data[data_fine_index + 1]},"
                                       f"{data[data_fine_index + 2]},{data[data_fine_index + 3]},"
                                       f"{data[data_fine_index + 4]},{data[data_fine_index + 5]},"
                                       f"{data[data_fine_index + 6]},{data[data_fine_index + 7]}\n")
            print(f"Wrote {out_filename}")
Example #5
def sum_by_baseline_task(metafits_filename: str, gpubox_filenames: list,
                         coarse_chan_index: int) -> float:
    chan_sum = 0.

    with CorrelatorContext(metafits_filename, gpubox_filenames) as context:
        if coarse_chan_index < context.correlator_metadata.num_coarse_chans:
            print(
                f"sum_by_baseline_task: Summing {context.correlator_metadata.num_timesteps} timesteps "
                f"and coarse channel index {coarse_chan_index}...")

            for t in range(0, context.correlator_metadata.num_timesteps):
                try:
                    data = context.read_by_frequency(t, coarse_chan_index)
                except Exception as e:
                    print(f"Error: {e}")
                    exit(-1)

                data_sum = np.sum(data, dtype=np.float64)
                chan_sum += data_sum

    return chan_sum
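The per-coarse-channel signature suggests these sums are meant to run as independent tasks. A hedged sketch of one possible driver using a process pool (this parallelisation strategy is an assumption, not taken from the original example):

import concurrent.futures

def sum_all_coarse_chans(metafits_filename: str, gpubox_filenames: list,
                         num_coarse_chans: int) -> float:
    # Sketch: run one sum_by_baseline_task per coarse channel in a separate
    # process and accumulate the per-channel sums.
    with concurrent.futures.ProcessPoolExecutor() as pool:
        futures = [pool.submit(sum_by_baseline_task, metafits_filename,
                               gpubox_filenames, c)
                   for c in range(num_coarse_chans)]
        return sum(f.result() for f in concurrent.futures.as_completed(futures))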
Example #6
                        "--metafits",
                        required=True,
                        help="Path to the metafits file.")
    parser.add_argument("gpuboxes",
                        nargs='*',
                        help="Paths to the gpubox files.")
    args = parser.parse_args()

    #
    # Create a context and use it for all the examples below
    #
    # The context object will validate the input args and
    # metadata of the observation matches the contents of
    # the gpubox files passed in.
    #
    with CorrelatorContext(args.metafits, args.gpuboxes) as context:
        # Test printing via repr()
        print("\n\n\nTesting Context metafits metadata:")
        print(f"{repr(context.metafits_metadata)}")

        print("\n\n\nTesting Context correlator metadata:")
        print(f"{repr(context.correlator_metadata)}")

        # Test antennas
        print("\n\n\nTesting Antennas:")
        for a in context.antennas:
            print(repr(a))

        # Test baselines
        print("\n\n\nTesting first 5 baselines:")
        for c in range(0, 5):
            print(repr(context.baselines[c]))  # assumed loop body, mirroring the antenna loop above
Example #7
    # Check what we have for the data files
    if len(args.datafiles) == 0:
        # Only a metafits file was supplied, so create a metafits-only context
        print(
            "Only metafits file provided, assuming Legacy Correlator interpretation of metafits."
        )
        context = MetafitsContext(args.metafits, MWAVersion.CorrLegacy)
    else:
        corr_suffixes = [x for x in args.datafiles if x[-5:] == ".fits"]
        dat_suffixes = [x for x in args.datafiles if x[-4:] == ".dat"]
        sub_suffixes = [x for x in args.datafiles if x[-4:] == ".sub"]

        if len(corr_suffixes) + len(dat_suffixes) + len(sub_suffixes) == 0:
            print("Error- no .fits, .dat or .sub files provided")
            exit(-2)
        elif len(corr_suffixes) > 0 and len(dat_suffixes) + len(sub_suffixes) == 0:
            print(f"{len(corr_suffixes)} correlator/gpubox files detected")
            context = CorrelatorContext(args.metafits, args.datafiles)
        elif (len(dat_suffixes) > 0 and len(corr_suffixes) + len(sub_suffixes) == 0) or \
                (len(sub_suffixes) > 0 and len(corr_suffixes) + len(dat_suffixes) == 0):
            print(f"{len(dat_suffixes)} voltage data files detected")
            context = VoltageContext(args.metafits, args.datafiles)
        else:
            print("Error: Combination of different data files supplied.")
            exit(-3)

    # Test the debug "display" method
    print("\nTesting Display method:")
    context.display()