def main(args):

    h5fi = h5py.File(args.input_file, 'r')
    h5fo = h5py.File(args.output_file,'w')

    # Copy over important attributes
    for key in h5fi.attrs.keys():
        h5fo.attrs[key] = h5fi.attrs[key]

    # Copy over important datasets, prefixing their names with 'vis_'
    for key in h5fi.keys():
        h5fo.create_dataset('vis_{}'.format(key), data=h5fi[key])

    h5fo.attrs['grid_size'] = args.size
    h5fo.attrs['grid_res'] = args.res
    h5fo.attrs['grid_wres'] = args.wres

    # Result datasets, one entry per input integration
    n_int = len(h5fo['vis_l_est'])
    h5fo.create_dataset('l_est', (n_int,))
    h5fo.create_dataset('m_est', (n_int,))
    h5fo.create_dataset('extent', (n_int, 4))
    h5fo.create_dataset('elevation', (n_int,))
    h5fo.create_dataset('azimuth', (n_int,))
    h5fo.create_dataset('height', (n_int,))

    
    h5fi.close() # done with input data now

    ## Begin doing stuff
    antennas = station.antennas
    valid_ants, n_baselines = select_antennas(antennas, h5fo.attrs['use_pol'], exclude=[256]) # to exclude outrigger

    tx_coords = h5fo.attrs['tx_coordinates']
    rx_coords = [station.lat * 180/np.pi, station.lon * 180/np.pi]

    ## Build freqs (same for every 'integration')
    #! Need to think of an intelligent way of doing this.
    #! target_bin probably won't matter since all vis are the same
    freqs5 = [5284999.9897182, 5291249.9897182, 5297499.9897182, 5303749.9897182,
              5309999.9897182, 5316249.9897182, 5322499.9897182, 5328749.9897182,
              5334999.9897182, 5341249.9897182, 5347499.9897182, 5353749.9897182,
              5359999.9897182, 5366249.9897182, 5372499.9897182, 5378749.9897182]
    freqs = np.array(freqs5[:h5fo.attrs['fft_len']], dtype=np.float64)

    ## Build bl (same for every 'integration')
    pol_string = 'xx' if h5fo.attrs['use_pol'] == 0 else 'yy'
    pol1, pol2 = pol_to_pols(pol_string)
    antennas1 = [a for a in valid_ants if a.pol == pol1]
    antennas2 = [a for a in valid_ants if a.pol == pol2]

    nStands = len(antennas1)
    baselines = uvutils.get_baselines(antennas1, antennas2=antennas2, include_auto=False, indicies=True)

    bl = [(antennas1[i], antennas2[j]) for i, j in baselines]

    uvw_m = np.array([np.array([b[0].stand.x - b[1].stand.x, b[0].stand.y - b[1].stand.y, b[0].stand.z - b[1].stand.z]) for b in bl])
    uvw = np.empty((len(bl), 3, len(freqs)))
    for i, f in enumerate(freqs):
        # TODO: generalize. Scaling by the transmitter wavelength (rather than
        # each channel's wavelength) is only correct for the target frequency bin.
        wavelength = 3e8/h5fo.attrs['tx_freq']
        uvw[:,:,i] = uvw_m/wavelength


    # Build antenna array (gets used in the VisibilityDataSet)
    # jd can't matter, right?
    jd = 2458847.2362531545
    antenna_array = simVis.build_sim_array(station, valid_ants, freqs/1e9, jd=jd, force_flat=True)
    # we only want the bin nearest to our frequency
    target_bin = np.argmin([abs(h5fo.attrs['tx_freq'] - f) for f in freqs])


    # Needed for PolarizationDataSet
    if h5fo.attrs['use_pol'] == 0:
        pol_string = 'XX'
        p = 0  # index into the pol enumeration used by lsl.imaging.utils.CorrelatedIDI().get_data_set() (relevant when a single dataset holds multiple pols)
    else:
        raise RuntimeError("Only pol. XX supported right now.")


    if args.all_sky:
        fig, ax = plt.subplots()

    for k in np.arange(len(h5fo['vis_l_est'])):
        l_in = h5fo['vis_l_est'][k]
        m_in = h5fo['vis_m_est'][k]

        ## Build vis
        vismodel = point_source_visibility_model_uv(uvw[:,0,0],uvw[:,1,0],l_in,m_in)
        vis = np.empty((len(vismodel), len(freqs)), dtype=np.complex64)
        for i in np.arange(vis.shape[1]):
            vis[:,i] = vismodel

        if args.export_npy:
            print(args.export_npy)
            print("Exporting modelled u, v, w, and visibility")
            np.save('model-uvw{}.npy'.format(k), uvw)
            np.save('model-vis{}.npy'.format(k), vis)

        ## Start to build up the data structure for VisibilityDataSet

        dataSet = VisibilityDataSet(jd=jd, freq=freqs, baselines=bl, uvw=uvw, antennarray=antenna_array)
        polDataSet = PolarizationDataSet(pol_string, data=vis)
        dataSet.append(polDataSet)


        print('| Gridding and imaging with size={}, res={}, wres={}'.format(args.size, args.res, args.wres))

        gridded_image = build_gridded_image(dataSet, pol=pol_string,
            chan=target_bin, size=args.size,
            res=args.res, wres=args.wres)
        
        if args.export_npy:
            print("Exporting gridded u, v, and visibility")
            u,v = gridded_image.get_uv()
            np.save('gridded-u{}.npy'.format(k), u)
            np.save('gridded-v{}.npy'.format(k), v)
            np.save('gridded-vis{}.npy'.format(k), gridded_image.uv)


        l, m, img, extent = get_gimg_max(gridded_image, return_img=True)

        # Compute other values of interest
        elev, az = lm_to_ea(l, m)
        height = flatmirror_height(tx_coords, rx_coords, elev)
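
        # Note (assumption about these helpers, not taken from their
        # definitions): with the usual direction-cosine convention
        # l = cos(el)*sin(az), m = cos(el)*cos(az), lm_to_ea reduces to
        # az = arctan2(l, m), el = arccos(sqrt(l**2 + m**2));
        # flatmirror_height then treats the reflector as a flat mirror
        # over the midpoint of the tx-rx path.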

        h5fo['l_est'][k] = l
        h5fo['m_est'][k] = m

        h5fo['extent'][k] = extent

        h5fo['elevation'][k] = elev
        h5fo['azimuth'][k] = az
        h5fo['height'][k] = height

        if args.all_sky:
            ax.imshow(img, extent=extent, origin='lower', interpolation='nearest')
            ax.set_title('size={}, res={}, wres={}, iteration={}'.format(args.size,args.res,args.wres,k))
            ax.set_xlabel('l')
            ax.set_ylabel('m')
            ax.plot(l,m,marker='o', color='k', label='Image Max.')
            ax.plot(l_in,m_in,marker='x', color='r', label='Model (input)')
            plt.legend(loc='lower right')
            plt.savefig('allsky{}.png'.format(k))
            plt.cla()

        save_pkl_gridded = args.pkl_gridded and k in args.pkl_gridded
        if save_pkl_gridded:
            quickDict={'image':img, 'extent':extent}
            with open('gridded{}.pkl'.format(k), 'wb') as f:
                pickle.dump(quickDict, f, protocol=pickle.HIGHEST_PROTOCOL)

    h5fo.close()
Example #2
def grid_visibilities(bl,
                      freqs,
                      vis,
                      tx_freq,
                      station,
                      valid_ants=None,
                      size=80,
                      res=0.5,
                      wres=0.10,
                      use_pol=0,
                      jd=None):
    '''
    Resamples the baseline-sampled visibilities onto a regular grid.

    arguments:
    bl = pairs of antenna objects representing baselines (list)
    freqs = frequency channels for which we have correlations (list)
    vis = visibility samples corresponding to the baselines (numpy array)
    tx_freq = the frequency of the signal we want to locate
    station = lsl station object - usually stations.lwasv
    valid_ants = which antennas we actually want to use (list)
    use_pol = which polarization to use (only 0 is supported right now)
    according to the LSL docstring:
        size = number of wavelengths which the UV matrix spans (this
        determines the image resolution).
        res = resolution of the UV matrix (determines image field of view).
        wres = the gridding resolution of sqrt(w) when projecting to w=0.

    returns:
    gridded_image
    '''
    # In order to do the gridding, we need to build a VisibilityDataSet using
    # lsl.imaging.data.VisibilityDataSet. We have to build a bunch of stuff to
    # pass to its constructor.

    if valid_ants is None:
        valid_ants, n_baselines = select_antennas(station.antennas, use_pol)

    # we only want the bin nearest to our frequency
    target_bin = np.argmin([abs(tx_freq - f) for f in freqs])

    # Build antenna array
    freqs = np.array(freqs)
    antenna_array = simVis.build_sim_array(station,
                                           valid_ants,
                                           freqs / 1e9,
                                           jd=jd,
                                           force_flat=True)

    uvw = np.empty((len(bl), 3, len(freqs)))

    for i, f in enumerate(freqs):
        # TODO: generalize. Scaling by the transmitter wavelength (rather than
        # each channel's wavelength) is only correct for the target frequency bin.
        wavelength = 3e8 / tx_freq
        uvw[:, :, i] = uvw_from_antenna_pairs(bl, wavelength=wavelength)

    dataSet = VisibilityDataSet(jd=jd,
                                freq=freqs,
                                baselines=bl,
                                uvw=uvw,
                                antennarray=antenna_array)
    if use_pol == 0:
        pol_string = 'XX'
    else:
        raise RuntimeError("Only pol. XX supported right now.")
    polDataSet = PolarizationDataSet(pol_string, data=vis)
    dataSet.append(polDataSet)

    # Use lsl.imaging.utils.build_gridded_image (takes a VisibilityDataSet)
    gridded_image = build_gridded_image(dataSet,
                                        pol=pol_string,
                                        chan=target_bin,
                                        size=size,
                                        res=res,
                                        wres=wres)

    return gridded_image
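
# A minimal usage sketch (not part of the original source). It assumes `bl`,
# `freqs`, and `vis` come from a correlator such as lsl.correlator.fx.FXMaster,
# and that the station object is lsl.common.stations.lwasv:
#
#     gridded_image = grid_visibilities(bl, freqs, vis,
#                                       tx_freq=5.292e6,
#                                       station=stations.lwasv)
#     l, m, img, extent = get_gimg_max(gridded_image, return_img=True)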
Example #3
def main(args):

    ## Check we should bother doing anything

    if not args.export_npy and not args.export_h5 and not args.all_sky and not args.pkl_gridded:
        raise RuntimeError(
            "You have not selected a data output of any type. Read the docstring and pick something for me to do."
        )

    # Normalize all inputs to the same length
    sizes = [int(item) for item in args.size.split(',')]
    reses = [float(item) for item in args.res.split(',')]
    wreses = [float(item) for item in args.wres.split(',')]
    maxinputlen = max(len(sizes), len(reses), len(wreses))
    if any(len(p) not in (1, maxinputlen) for p in (sizes, reses, wreses)):
        raise RuntimeError(
            "For size, res and wres you must pass either the same number of values as the max or a single value.\n"
            "For example:\n"
            "ALLOWED     -> sizes=175,180,190, res=0.5, wres=0.5\n"
            "            -> sizes=175,180,190, res=0.5, wres=0.5,0.6,0.7\n"
            "NOT ALLOWED -> sizes=175,180,190, res=0.5, wres=0.5,0.6")
    # Broadcast any single-element list up to the max length
    if len(sizes) != maxinputlen:
        sizes = sizes * maxinputlen
    if len(reses) != maxinputlen:
        reses = reses * maxinputlen
    if len(wreses) != maxinputlen:
        wreses = wreses * maxinputlen
    all_grid_params = []
    while len(sizes) > 0:
        all_grid_params.append({
            'size': sizes.pop(),
            'res': reses.pop(),
            'wres': wreses.pop()
        })
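    # Note: pop() consumes from the end, so all_grid_params ends up in reverse
    # order relative to the command-line lists; zip(sizes, reses, wreses)
    # would preserve the input order if that matters.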

    ## Begin doing stuff
    tx_coords = known_transmitters.parse_args(args)
    if not tx_coords:
        print("Please specify a transmitter location")
        return
    rx_coords = [station.lat * 180 / np.pi, station.lon * 180 / np.pi]

    antennas = station.antennas

    valid_ants, n_baselines = select_antennas(antennas, args.use_pol)

    if args.export_h5:
        h5fname = "simulation-results.h5"
        print("Output will be written to {}".format(h5fname))
        h5f = h5py.File(h5fname, 'w')

        ats = h5f.attrs
        ats['transmitter'] = args.transmitter
        ats['tx_freq'] = args.tx_freq
        ats['valid_ants'] = [a.id for a in valid_ants]
        ats['n_baselines'] = n_baselines
        ats['fft_len'] = args.fft_len
        ats['use_pol'] = args.use_pol
        ats['int_length'] = args.integration_length
        ats['l_model'] = args.l_model
        ats['m_model'] = args.m_model

        h5f.create_dataset('l_est', (len(all_grid_params), ))
        h5f.create_dataset('m_est', (len(all_grid_params), ))
        h5f.create_dataset('wres', (len(all_grid_params), ))
        h5f.create_dataset('res', (len(all_grid_params), ))
        h5f.create_dataset('size', (len(all_grid_params), ))
        h5f.create_dataset('extent', (len(all_grid_params), 4))
        h5f.create_dataset('elevation', (len(all_grid_params), ))
        h5f.create_dataset('azimuth', (len(all_grid_params), ))
        h5f.create_dataset('height', (len(all_grid_params), ))

    ## Build freqs
    #! Need to think of an intelligent way of doing this.
    #! target_bin probably won't matter since all vis are the same
    freqs5 = [
        5284999.9897182, 5291249.9897182, 5297499.9897182, 5303749.9897182,
        5309999.9897182, 5316249.9897182, 5322499.9897182, 5328749.9897182,
        5334999.9897182, 5341249.9897182, 5347499.9897182, 5353749.9897182,
        5359999.9897182, 5366249.9897182, 5372499.9897182, 5378749.9897182
    ]
    freqs = np.array(freqs5[:args.fft_len], dtype=np.float64)

    ## Build bl
    pol_string = 'xx' if args.use_pol == 0 else 'yy'
    pol1, pol2 = pol_to_pols(pol_string)
    antennas1 = [a for a in valid_ants if a.pol == pol1]
    antennas2 = [a for a in valid_ants if a.pol == pol2]

    nStands = len(antennas1)
    baselines = uvutils.get_baselines(antennas1,
                                      antennas2=antennas2,
                                      include_auto=False,
                                      indicies=True)

    bl = [(antennas1[i], antennas2[j]) for i, j in baselines]

    uvw_m = np.array([
        np.array([
            b[0].stand.x - b[1].stand.x, b[0].stand.y - b[1].stand.y,
            b[0].stand.z - b[1].stand.z
        ]) for b in bl
    ])
    uvw = np.empty((len(bl), 3, len(freqs)))
    for i, f in enumerate(freqs):
        # TODO: generalize. Scaling by the transmitter wavelength (rather than
        # each channel's wavelength) is only correct for the target frequency bin.
        wavelength = 3e8 / args.tx_freq
        uvw[:, :, i] = uvw_m / wavelength

    ## Build vis
    vismodel = point_source_visibility_model_uv(uvw[:, 0, 0], uvw[:, 1, 0],
                                                args.l_model, args.m_model)
    vis = np.empty((len(vismodel), len(freqs)), dtype=np.complex64)
    for i in np.arange(vis.shape[1]):
        vis[:, i] = vismodel

    if args.export_npy:
        print(args.export_npy)
        print("Exporting modelled u, v, w, and visibility")
        np.save('model-uvw.npy', uvw)
        np.save('model-vis.npy', vis)

    ## Start to build up the data structure for VisibilityDataSet
    # we only want the bin nearest to our frequency
    target_bin = np.argmin([abs(args.tx_freq - f) for f in freqs])
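    # (equivalent vectorized form: target_bin = np.argmin(np.abs(freqs - args.tx_freq)))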

    # This can't matter, right?
    # jd = tbnf.get_info('start_time').jd
    jd = 2458847.2362531545

    # Build antenna array
    antenna_array = simVis.build_sim_array(station,
                                           antennas,
                                           freqs / 1e9,
                                           jd=jd,
                                           force_flat=True)

    dataSet = VisibilityDataSet(jd=jd,
                                freq=freqs,
                                baselines=bl,
                                uvw=uvw,
                                antennarray=antenna_array)
    if args.use_pol == 0:
        pol_string = 'XX'
        p = 0  # index into the pol enumeration used by lsl.imaging.utils.CorrelatedIDI().get_data_set() (relevant when a single dataset holds multiple pols)
    else:
        raise RuntimeError("Only pol. XX supported right now.")
    polDataSet = PolarizationDataSet(pol_string, data=vis)
    dataSet.append(polDataSet)

    if args.all_sky:
        fig, ax = plt.subplots()

    # Iterate over size/res/wres and generate multiple grids/images
    k = 0
    for grid_params in all_grid_params:
        print('| Gridding and imaging with size={}, res={}, wres={}'.format(
            grid_params['size'], grid_params['res'], grid_params['wres']))

        gridded_image = build_gridded_image(dataSet,
                                            pol=pol_string,
                                            chan=target_bin,
                                            size=grid_params['size'],
                                            res=grid_params['res'],
                                            wres=grid_params['wres'])

        if args.export_npy:
            print("Exporting gridded u, v, and visibility")
            u, v = gridded_image.get_uv()
            np.save(
                'gridded-u-size-{}-res-{}-wres-{}.npy'.format(
                    grid_params['size'], grid_params['res'],
                    grid_params['wres']), u)
            np.save(
                'gridded-v-size-{}-res-{}-wres-{}.npy'.format(
                    grid_params['size'], grid_params['res'],
                    grid_params['wres']), v)
            np.save(
                'gridded-vis-size-{}-res-{}-wres-{}.npy'.format(
                    grid_params['size'], grid_params['res'],
                    grid_params['wres']), gridded_image.uv)

        l, m, img, extent = get_gimg_max(gridded_image, return_img=True)

        # Compute other values of interest
        elev, az = lm_to_ea(l, m)
        height = flatmirror_height(tx_coords, rx_coords, elev)

        if args.export_h5:
            h5f['l_est'][k] = l
            h5f['m_est'][k] = m
            h5f['wres'][k] = grid_params['wres']
            h5f['res'][k] = grid_params['res']
            h5f['size'][k] = grid_params['size']

            h5f['extent'][k] = extent

            h5f['elevation'][k] = elev
            h5f['azimuth'][k] = az
            h5f['height'][k] = height

        if args.all_sky:
            ax.imshow(img,
                      extent=extent,
                      origin='lower',
                      interpolation='nearest')
            ax.set_title('size={}, res={}, wres={}'.format(
                grid_params['size'], grid_params['res'], grid_params['wres']))
            ax.set_xlabel('l')
            ax.set_ylabel('m')
            ax.plot(l, m, marker='o', color='k', label='Image Max.')
            ax.plot(args.l_model,
                    args.m_model,
                    marker='x',
                    color='r',
                    label='Model (input)')
            plt.legend(loc='lower right')
            plt.savefig('allsky_size_{}_res_{}_wres_{}.png'.format(
                grid_params['size'], grid_params['res'], grid_params['wres']))
            plt.cla()

        save_pkl_gridded = args.pkl_gridded and k in args.pkl_gridded
        if save_pkl_gridded:
            quickDict = {'image': img, 'extent': extent}
            with open(
                    'gridded_size_{}_res_{}_wres_{}.pkl'.format(
                        grid_params['size'], grid_params['res'],
                        grid_params['wres']), 'wb') as f:
                pickle.dump(quickDict, f, protocol=pickle.HIGHEST_PROTOCOL)
        k += 1

    if args.export_h5:
        h5f.close()
Example #4
    # NOTE: this snippet is truncated; names not defined here (l_range, m_c,
    # m_width, N, cost, u, v, vis, tbn_filename, target_freq,
    # transmitter_coords, station) come from elided code.
    m_range = np.linspace(m_c - m_width/2.0, m_c + m_width/2.0, N)
    print(m_range)
    for x, l in enumerate(l_range):
        for y, m in enumerate(m_range):
            cost[x,y] = ls_cost([l, m], u, v, vis)


    # contourf expects Z indexed as [y, x], so transpose cost
    plt.contourf(l_range, m_range, cost.T)
    plt.colorbar()
    plt.xlabel("l")
    plt.ylabel("m")
    plt.show()

if __name__ == "__main__":
    plt.close('all')
    ants, n_baselines = select_antennas(station.antennas, use_pol=0)
    dfile = LWASVDataFile(tbn_filename)
    baselines, visibilities = compute_visibilities(dfile, ants, target_freq)
    dfile.close()

    azimuth = station.get_pointing_and_distance(transmitter_coords + [0])[0]

    bl1d = project_baselines(baselines, azimuth)
    phases = np.angle(visibilities[0])
    

    vis = visibilities[0]
    bl2d = np.array([np.array([b[0].stand.x - b[1].stand.x, b[0].stand.y - b[1].stand.y]) for b in baselines])

    u = bl2d[:, 0]
    v = bl2d[:, 1]
Example #5
def main(args):

    # saz and sel are used later
    img = aipy.img.ImgW(size=50, res=0.5)
    top = img.get_top(center=(50, 50))
    saz, sel = aipy.coord.top2azalt(top)

    station = stations.lwasv

    tx_coords = known_transmitters.parse_args(args)

    print("Opening TBN file ({})".format(args.tbn_filename))
    with LWASVDataFile(args.tbn_filename, ignore_timetag_errors=True) as tbnf:

        antennas = station.antennas

        valid_ants, n_baselines = select_antennas(antennas, args.use_pol)

        if not args.hdf5_file:
            raise RuntimeError('Please provide an output filename')
        else:
            with build_output_file(
                    h5_fname=args.hdf5_file,
                    tbnf=tbnf,
                    valid_ants=valid_ants,
                    n_baselines=n_baselines,
                    # use_pfb=args.use_pfb, use_pol=args.use_pol,
                    integration_length=args.integration_length,
                    transmitter_coords=tx_coords) as h5f:

                del h5f['l_est']
                del h5f['m_est']

                freq = tbnf.get_info('freq1')
                idx = [ant.digitizer - 1 for ant in valid_ants]
                xyz = np.array([[ant.stand.x, ant.stand.y, ant.stand.z]
                                for ant in valid_ants])
                delays = np.array(
                    [ant.cable.delay(freq) for ant in valid_ants])
                delays -= delays.min()

                n_samples = tbnf.get_info('nframe') / tbnf.get_info('nantenna')
                samples_per_integration = int(args.integration_length *
                                              tbnf.get_info('sample_rate') /
                                              512)
                n_integrations = int(
                    np.floor(n_samples / samples_per_integration))

                for int_num in range(n_integrations):
                    print(
                        f"Starting iteration {int_num + 1} of {n_integrations}"
                    )
                    # Load in the data and select what we need
                    tInt, t0, data = tbnf.read(args.integration_length)

                    data = data[idx, :]

                    # Apply a phase rotation to deal with the cable delays
                    for i in range(data.shape[0]):
                        data[i, :] *= np.exp(2j * np.pi * freq * delays[i])
                    data /= (np.abs(data)).max()

                    # Calculate Rx - the time-averaged autocorrelation matrix,
                    # re-initialized for each integration so samples don't
                    # accumulate across integrations
                    nSamp = data.shape[1]
                    xOutput = []
                    print("Computing time-averaged autocorrelation matrix")
                    Rx = np.zeros((data.shape[0], data.shape[0]), dtype=np.complex128)
                    for i in range(nSamp):
                        x = np.matrix(data[:, i]).T
                        xOutput.append(x)
                        Rx = Rx + x * x.H
                    Rx /= nSamp
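                    # Equivalent vectorized form of the loop above:
                    #     Rx = np.matrix(data) * np.matrix(data).H / nSamp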

                    # Find the eigenvectors/values for Rx and order them by significance
                    print("Computing eigenvectors/values of the ACM")
                    w, v = np.linalg.eig(Rx)
                    order = np.argsort(np.abs(w))[::-1]
                    w = w[order]
                    v = v[:, order]

                    # Break the eigenvalues into a signal sub-space, Us, and a noise sub-
                    # space, Un.  This is currently done based on the number of sources
                    # we have rather than inferred from the eigenvalues.
                    ##Us = np.where( np.abs(w) > sigma )[0] # TODO: a sigma threshold should also help find the frequency, but it wasn't working (see Jayce's tbnMusic.py)
                    ##Un = np.where( np.abs(w) <= sigma )[0]
                    # Us = range(3) # what Jayce had, presumably for multiple sources
                    # Un = range(3, w.size)
                    Us = range(1)
                    Un = range(1, w.size)
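
                    # MUSIC pseudospectrum: for steering vector a(az, el) and
                    # noise-subspace eigenvectors Un, P = 1 / (a^H Un Un^H a),
                    # which peaks where a is nearly orthogonal to the noise
                    # subspace (see the o = a.H * v2 * v2.H * a line below).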

                    print("Evaluating MUSIC spectrum")
                    P = np.zeros_like(saz)
                    E = np.zeros_like(saz)
                    for i in range(saz.shape[0]):
                        print(
                            f"Starting row {i+1} / {saz.shape[0]} for integration {int_num}"
                        )
                        for j in range(saz.shape[1]):
                            ta = saz[i, j]
                            te = sel[i, j]
                            if not np.isfinite(ta) or not np.isfinite(te):
                                continue

                            pv = np.array([
                                np.cos(te) * np.sin(ta),
                                np.cos(te) * np.cos(ta),
                                np.sin(te)
                            ])

                            a = np.zeros((len(valid_ants), 1),
                                         dtype=np.complex128)
                            for k in range(len(valid_ants)):
                                a[k, 0] = np.exp(
                                    2j * np.pi * freq *
                                    np.dot(xyz[k, :] - xyz[0, :], pv) /
                                    speedOfLight)
                            a = np.matrix(a)

                            v2 = np.matrix(v[:, Un])
                            o = a.H * v2 * v2.H * a
                            P[i, j] = 1.0 / max([1e-9, o[0, 0].real])

                    spectrum_max_idx = np.where(P == P.max())
                    el_max = sel[spectrum_max_idx][0]
                    az_max = saz[spectrum_max_idx][0]
                    h5f['elevation'][int_num] = el_max
                    h5f['azimuth'][int_num] = az_max
                    print(
                        f"Integration complete - az = {az_max:.2f} el = {el_max:.2f}"
                    )
Example #6
def main(args):
    # this first part of the code is run by all processes

    # set up MPI environment
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()
    size = comm.Get_size()

    if size < 2:
        raise RuntimeError(
            "This program requires at least two MPI processes to function. Please rerun with more resources."
        )

    # designate the last process as the supervisor/file reader
    supervisor = size - 1

    # open the TBN file for reading
    tbnf = LWASVDataFile(args.tbn_filename, ignore_timetag_errors=True)

    # figure out the details of the run we want to do
    tx_coords = known_transmitters.parse_args(args)
    antennas = station.antennas
    valid_ants, n_baselines = select_antennas(antennas, args.use_pol)
    n_ants = len(valid_ants)

    sample_rate = tbnf.get_info('sample_rate')
    # some of our TBNs claim to have frame size 1024 but they are lying
    frame_size = 512
    tbn_center_freq = tbnf.get_info('freq1')

    total_integrations, _ = compute_integration_numbers(
        tbnf, args.integration_length)

    # open the output HDF5 file and create datasets
    # because of the way parallelism in h5py works all processes (even ones
    # that don't write to the file) must do this
    h5f = build_output_file(args.hdf5_file,
                            tbnf,
                            valid_ants,
                            n_baselines,
                            args.integration_length,
                            tx_freq=args.tx_freq,
                            fft_len=args.fft_len,
                            use_pfb=args.use_pfb,
                            use_pol=args.use_pol,
                            opt_method=opt_method,
                            vis_model='gaussian',
                            transmitter_coords=tx_coords,
                            mpi_comm=comm)

    if rank == supervisor:
        # the supervisor process runs this code
        print("supervisor: started")

        # state info
        reached_end = False
        workers_alive = [True for _ in range(size - 1)]
        int_no = 0

        while True:
            if not reached_end:
                # grab data for the next available worker
                try:
                    duration, start_time, data = tbnf.read(
                        args.integration_length)
                    # only use data from valid antennas
                    data = data[[a.digitizer - 1 for a in valid_ants], :]
                except EOFError:
                    reached_end = True
                    print(f"supervisor: reached EOF")

                if int_no >= total_integrations:
                    print(f"supervisor: this is the last integration")
                    reached_end = True

            # get the next "ready" message from the workers
            st = MPI.Status()
            msg = comm.recv(status=st)
            if msg == "ready":
                print(
                    f"supervisor: received 'ready' message from worker {st.source}"
                )

                # if we're done, send an exit message and mark that we've killed this worker
                # an empty array indicates that the worker should exit
                if reached_end:
                    print(
                        f"supervisor: sending exit message to worker {st.source}"
                    )
                    comm.Send(np.array([]), dest=st.source, tag=int_no)
                    workers_alive[st.source] = False

                    if not any(workers_alive):
                        print(f"supervisor: all workers told to exit, goodbye")
                        break
                # otherwise, send the data to the worker for processing
                else:
                    print(
                        f"supervisor: sending data for integration {int_no}/{total_integrations} to worker {st.source}"
                    )
                    # Send with a capital S is optimized to send numpy arrays
                    comm.Send(data, dest=st.source, tag=int_no)
                    int_no += 1
            else:
                raise ValueError(
                    f"Supervisor received unrecognized message '{msg}' from worker {st.source}"
                )

        tbnf.close()

    else:
        # the worker processes run this code
        print(f"worker {rank} started")

        # workers don't need access to the TBN file
        tbnf.close()

        # figure out the size of the incoming data buffer
        samples_per_integration = int(
            round(args.integration_length * sample_rate /
                  frame_size)) * frame_size
        buffer_shape = (n_ants, samples_per_integration)
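        # note: comm.Recv fills this preallocated buffer in place, so its
        # shape and dtype must be compatible with what the supervisor Sends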

        while True:
            # send with a lowercase s can send any pickle-able python object
            # this is a synchronous send - it will block until the message is read by the supervisor
            # the other sends (e.g. comm.Send) only block until the message is safely taken by MPI, which might happen before the receiver actually reads it
            comm.ssend("ready", dest=supervisor)

            # build a buffer to be filled with data
            data = np.empty(buffer_shape, np.complex64)

            # receive the data from the supervisor
            st = MPI.Status()
            comm.Recv(data, source=supervisor, status=st)

            int_no = st.tag

            # if the buffer is empty, we're done
            if st.count == 0:
                print(f"worker {rank}: received exit message, exiting")
                break

            # otherwise process the data we've received
            print(
                f"worker {rank}: received data for integration {int_no}, starting processing"
            )

            # run the correlator
            bl, freqs, vis = fxc.FXMaster(
                data,
                valid_ants,
                LFFT=args.fft_len,
                pfb=args.use_pfb,
                sample_rate=sample_rate,
                central_freq=tbn_center_freq,
                Pol='xx' if args.use_pol == 0 else 'yy',
                return_baselines=True,
                gain_correct=True)

            # extract the frequency bin we want
            target_bin = np.argmin([abs(args.tx_freq - f) for f in freqs])
            vis_tbin = vis[:, target_bin]

            # baselines in wavelengths
            uvw = uvw_from_antenna_pairs(bl, wavelength=3e8 / args.tx_freq)

            # model fitting
            l_out, m_out, opt_result = fit_model_to_vis(uvw,
                                                        vis_tbin,
                                                        residual_function,
                                                        l_init,
                                                        m_init,
                                                        verbose=False)

            # convert direction cosines to sky coords
            src_elev, src_az = lm_to_ea(l_out, m_out)

            # write data to h5 file
            h5f['l_start'][int_no] = l_init
            h5f['m_start'][int_no] = m_init
            h5f['l_est'][int_no] = l_out
            h5f['m_est'][int_no] = m_out
            h5f['elevation'][int_no] = src_elev
            h5f['azimuth'][int_no] = src_az
            h5f['cost'][int_no] = opt_result['cost']
            h5f['nfev'][int_no] = opt_result['nfev']

            # compute the bin power and save it to the file
            # arbitrarily picking the tenth antenna in this list
            power_calc_data = data[10, :]
            h5f['snr_est'][int_no] = estimate_snr(power_calc_data,
                                                  args.fft_len, args.tx_freq,
                                                  sample_rate, tbn_center_freq)

            print(f"worker {rank}: done processing integration {int_no}")

    # back to common code for both supervisor and workers

    h5f.attrs['total_integrations'] = int_no
    h5f.close()
Example #7
def main(args):
    # this first part of the code is run by all processes

    # set up MPI environment
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()
    size = comm.Get_size()

    if size < 2:
        raise RuntimeError(
            "This program requires at least two MPI processes to function. Please rerun with more resources."
        )

    # designate the last process as the supervisor/file reader
    supervisor = size - 1

    # open the TBN file for reading
    tbnf = LWASVDataFile(args.tbn_filename, ignore_timetag_errors=True)

    # figure out the details of the run we want to do
    tx_coords = known_transmitters.parse_args(args)
    antennas = station.antennas
    valid_ants, n_baselines = select_antennas(antennas, args.use_pol)
    n_ants = len(valid_ants)
    total_integrations, _ = compute_integration_numbers(
        tbnf, args.integration_length)

    sample_rate = tbnf.get_info('sample_rate')
    # some of our TBNs claim to have frame size 1024 but they are lying
    frame_size = 512
    tbn_center_freq = tbnf.get_info('freq1')

    # open the output HDF5 file and create datasets
    # because of the way parallelism in h5py works all processes (even ones
    # that don't write to the file) must do this
    h5f = build_output_file(args.hdf5_file,
                            tbnf,
                            valid_ants,
                            n_baselines,
                            args.integration_length,
                            tx_freq=args.tx_freq,
                            fft_len=args.fft_len,
                            use_pfb=args.use_pfb,
                            use_pol=args.use_pol,
                            transmitter_coords=tx_coords,
                            mpi_comm=comm)

    if args.point_finding_alg not in ('all', 'peak', 'CoM'):
        raise NotImplementedError(
            f"Unrecognized point finding algorithm: {args.point_finding_alg}")
    if args.point_finding_alg == 'all' or args.point_finding_alg == 'peak':
        h5f.create_dataset_like('l_peak', h5f['l_est'])
        h5f.create_dataset_like('m_peak', h5f['m_est'])
        h5f.create_dataset_like('elevation_peak', h5f['elevation'])
        h5f.create_dataset_like('azimuth_peak', h5f['azimuth'])
    if args.point_finding_alg == 'all' or args.point_finding_alg == 'CoM':
        h5f.create_dataset_like('l_CoM', h5f['l_est'])
        h5f.create_dataset_like('m_CoM', h5f['m_est'])
        h5f.create_dataset_like('elevation_CoM', h5f['elevation'])
        h5f.create_dataset_like('azimuth_CoM', h5f['azimuth'])
    del h5f['l_est']
    del h5f['m_est']
    del h5f['elevation']
    del h5f['azimuth']

    if rank == supervisor:
        # the supervisor process runs this code
        print("supervisor: started")

        # state info
        reached_end = False
        workers_alive = [True for _ in range(size - 1)]
        int_no = 0

        while True:
            if not reached_end:
                # grab data for the next available worker
                try:
                    duration, start_time, data = tbnf.read(
                        args.integration_length)
                    # only use data from valid antennas
                    data = data[[a.digitizer - 1 for a in valid_ants], :]
                except EOFError:
                    reached_end = True
                    print(f"supervisor: reached EOF")
                if int_no >= total_integrations:
                    print(f"supervisor: this is the last integration")
                    reached_end = True

            # get the next "ready" message from the workers
            st = MPI.Status()
            msg = comm.recv(status=st)
            if msg == "ready":
                print(
                    f"supervisor: received 'ready' message from worker {st.source}"
                )

                # if we're done, send an exit message and mark that we've killed this worker
                # an empty array indicates that the worker should exit
                if reached_end:
                    print(
                        f"supervisor: sending exit message to worker {st.source}"
                    )
                    comm.Send(np.array([]), dest=st.source, tag=int_no)
                    workers_alive[st.source] = False

                    if not any(workers_alive):
                        print(f"supervisor: all workers told to exit, goodbye")
                        break
                # otherwise, send the data to the worker for processing
                else:
                    print(
                        f"supervisor: sending data for integration {int_no}/{total_integrations} to worker {st.source}"
                    )
                    # Send with a capital S is optimized to send numpy arrays
                    comm.Send(data, dest=st.source, tag=int_no)
                    int_no += 1
            else:
                raise ValueError(
                    f"Supervisor received unrecognized message '{msg}' from worker {st.source}"
                )

        tbnf.close()

    else:
        # the worker processes run this code
        print(f"worker {rank} started")

        # workers don't need access to the TBN file
        tbnf.close()

        # figure out the size of the incoming data buffer
        samples_per_integration = int(
            round(args.integration_length * sample_rate /
                  frame_size)) * frame_size
        buffer_shape = (n_ants, samples_per_integration)

        while True:
            # send with a lowercase s can send any pickle-able python object
            # this is a synchronous send - it will block until the message is read by the supervisor
            # the other sends (e.g. comm.Send) only block until the message is safely taken by MPI, which might happen before the receiver actually reads it
            comm.ssend("ready", dest=supervisor)

            # build a buffer to be filled with data
            data = np.empty(buffer_shape, np.complex64)

            # receive the data from the supervisor
            st = MPI.Status()
            comm.Recv(data, source=supervisor, status=st)

            int_no = st.tag

            # if the buffer is empty, we're done
            if st.count == 0:
                print(f"worker {rank}: received exit message, exiting")
                break

            # otherwise process the data we've received
            print(
                f"worker {rank}: received data for integration {int_no}, starting processing"
            )

            # run the correlator
            bl, freqs, vis = fxc.FXMaster(
                data,
                valid_ants,
                LFFT=args.fft_len,
                pfb=args.use_pfb,
                sample_rate=sample_rate,
                central_freq=tbn_center_freq,
                Pol='xx' if args.use_pol == 0 else 'yy',
                return_baselines=True,
                gain_correct=True)

            gridded_image = grid_visibilities(bl, freqs, vis, args.tx_freq,
                                              station)

            save_all_sky = (args.all_sky and int_no in args.all_sky) or (
                args.all_sky_every and int_no % args.all_sky_every == 0)

            if args.point_finding_alg == 'all' or args.point_finding_alg == 'peak':
                result = get_gimg_max(gridded_image, return_img=save_all_sky)
                l = result[0]
                m = result[1]
                src_elev, src_az = lm_to_ea(l, m)
                h5f['l_peak'][int_no] = l
                h5f['m_peak'][int_no] = m
                h5f['elevation_peak'][int_no] = src_elev
                h5f['azimuth_peak'][int_no] = src_az

            if args.point_finding_alg == 'all' or args.point_finding_alg == 'CoM':
                result = get_gimg_center_of_mass(gridded_image,
                                                 return_img=save_all_sky)
                l = result[0]
                m = result[1]
                src_elev, src_az = lm_to_ea(l, m)
                h5f['l_CoM'][int_no] = l
                h5f['m_CoM'][int_no] = m
                h5f['elevation_CoM'][int_no] = src_elev
                h5f['azimuth_CoM'][int_no] = src_az

            if save_all_sky:
                img = result[2]
                extent = result[3]
                fig, ax = plt.subplots()
                ax.imshow(img,
                          extent=extent,
                          origin='lower',
                          interpolation='nearest')
                plt.savefig('allsky_int_{}.png'.format(int_no))
                plt.close(fig)  # avoid accumulating one figure per integration

            # compute the bin power and save it to the file
            # arbitrarily picking the tenth antenna in this list
            power_calc_data = data[10, :]
            h5f['snr_est'][int_no] = estimate_snr(power_calc_data,
                                                  args.fft_len, args.tx_freq,
                                                  sample_rate, tbn_center_freq)

            print(f"worker {rank}: done processing integration {int_no}")

    # back to common code for both supervisor and workers
    h5f.attrs['total_integrations'] = int_no
    h5f.close()
Example #8
def main(args):
    station = stations.lwasv

    tx_coords = known_transmitters.parse_args(args)

    print("Opening TBN file ({})".format(args.tbn_filename))
    with LWASVDataFile(args.tbn_filename, ignore_timetag_errors=True) as tbnf:

        antennas = station.antennas

        valid_ants, n_baselines = select_antennas(antennas, args.use_pol)

        if not args.hdf5_file:
            raise RuntimeError('Please provide an output filename')
        else:
            with build_output_file(h5_fname=args.hdf5_file,
                                   tbnf=tbnf,
                                   valid_ants=valid_ants,
                                   n_baselines=n_baselines,
                                   tx_freq=args.tx_freq,
                                   fft_len=args.fft_len,
                                   use_pfb=args.use_pfb,
                                   use_pol=args.use_pol,
                                   integration_length=args.integration_length,
                                   transmitter_coords=tx_coords) as h5f:

                if args.point_finding_alg not in ('all', 'peak', 'CoM'):
                    raise NotImplementedError(
                        f"Unrecognized point finding algorithm: {args.point_finding_alg}"
                    )
                if args.point_finding_alg == 'all' or args.point_finding_alg == 'peak':
                    h5f.create_dataset_like('l_peak', h5f['l_est'])
                    h5f.create_dataset_like('m_peak', h5f['m_est'])
                    h5f.create_dataset_like('elevation_peak', h5f['elevation'])
                    h5f.create_dataset_like('azimuth_peak', h5f['azimuth'])
                if args.point_finding_alg == 'all' or args.point_finding_alg == 'CoM':
                    h5f.create_dataset_like('l_CoM', h5f['l_est'])
                    h5f.create_dataset_like('m_CoM', h5f['m_est'])
                    h5f.create_dataset_like('elevation_CoM', h5f['elevation'])
                    h5f.create_dataset_like('azimuth_CoM', h5f['azimuth'])
                del h5f['l_est']
                del h5f['m_est']
                del h5f['elevation']
                del h5f['azimuth']

                k = 0

                save_all_sky = (args.all_sky and k in args.all_sky) or (
                    args.all_sky_every and k % args.all_sky_every == 0
                )  # or (args.scatter_bad_fits and skip)

                if save_all_sky:
                    fig, ax = plt.subplots()

                for bl, freqs, vis in compute_visibilities_gen(
                        tbnf,
                        valid_ants,
                        integration_length=args.integration_length,
                        fft_length=args.fft_len,
                        use_pol=args.use_pol,
                        use_pfb=args.use_pfb):

                    gridded_image = grid_visibilities(bl, freqs, vis,
                                                      args.tx_freq, station)

                    save_all_sky = (args.all_sky and k in args.all_sky) or (
                        args.all_sky_every and k % args.all_sky_every == 0)

                    if args.point_finding_alg == 'all' or args.point_finding_alg == 'peak':
                        result = get_gimg_max(gridded_image,
                                              return_img=save_all_sky)
                        l = result[0]
                        m = result[1]
                        src_elev, src_az = lm_to_ea(l, m)
                        h5f['l_peak'][k] = l
                        h5f['m_peak'][k] = m
                        h5f['elevation_peak'][k] = src_elev
                        h5f['azimuth_peak'][k] = src_az

                    if args.point_finding_alg == 'all' or args.point_finding_alg == 'CoM':
                        result = get_gimg_center_of_mass(
                            gridded_image, return_img=save_all_sky)
                        l = result[0]
                        m = result[1]
                        src_elev, src_az = lm_to_ea(l, m)
                        h5f['l_CoM'][k] = l
                        h5f['m_CoM'][k] = m
                        h5f['elevation_CoM'][k] = src_elev
                        h5f['azimuth_CoM'][k] = src_az

                    if save_all_sky:
                        img = result[2]
                        extent = result[3]
                        ax.imshow(img,
                                  extent=extent,
                                  origin='lower',
                                  interpolation='nearest')
                        plt.savefig('allsky_int_{}.png'.format(k))

                    k += 1
                    print("\n\n")
                    if args.stop_after >= 0 and k >= args.stop_after:
                        break
Example #9
def main(args):

    print("Opening TBN file ({})".format(args.tbn_filename))
    with LWASVDataFile(args.tbn_filename, ignore_timetag_errors=True) as tbnf:
    
        antennas = station.antennas

        valid_ants, n_baselines = select_antennas(antennas, args.use_pol)

        tx_coords = known_transmitters.parse_args(args)

        if args.visibility_model == 'point':
            residual_function = point_residual_abs
            residual_function_chain = None
        elif args.visibility_model == 'gaussian':
            residual_function = bind_gaussian_residual(1)
            residual_function_chain = None
        elif args.visibility_model == 'chained':
            residual_function = bind_gaussian_residual(0.5)
            residual_function_chain = point_residual_abs
        else:
            raise RuntimeError("Unknown visibility model option: {args.visibility_model}")

        if not args.hdf5_file:
            raise RuntimeError('Please provide an output filename')
        else:
            with build_output_file(args.hdf5_file, tbnf, valid_ants,
                    n_baselines, args.integration_length, tx_freq=args.tx_freq,
                    fft_len=args.fft_len, use_pfb=args.use_pfb,
                    use_pol=args.use_pol, opt_method=opt_method,
                    vis_model=args.visibility_model,
                    transmitter_coords=tx_coords) as h5f:

                # arrays for estimated parameters from each integration
                l_est = np.array([args.l_guess])
                m_est = np.array([args.m_guess])

                k = 0
                for bl, freqs, vis in compute_visibilities_gen(tbnf, valid_ants, integration_length=args.integration_length, fft_length=args.fft_len, use_pol=args.use_pol, use_pfb=args.use_pfb):

                    # start the optimization at the mean point of the 10 most recent fits
                    if args.visibility_model == 'point':
                        l_init = l_est[-param_guess_av_length:].mean()
                        m_init = m_est[-param_guess_av_length:].mean()
                    else:
                        l_init = 0
                        m_init = 0

                    target_bin = np.argmin([abs(args.tx_freq - f) for f in freqs])
                    
                    # TODO: is this correct? should it be the bin center?
                    uvw = uvw_from_antenna_pairs(bl, wavelength=3e8/args.tx_freq)
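                    # If the bin center turns out to be the right choice, this
                    # would instead be (hypothetical alternative):
                    #     uvw = uvw_from_antenna_pairs(bl, wavelength=3e8/freqs[target_bin])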

                    vis_tbin = vis[:, target_bin]

                    # do the model fitting to get parameter estimates
                    l_out, m_out, opt_result = fit_model_to_vis(uvw, vis_tbin, residual_function, 
                            l_init, m_init, export_npy=args.export_npy)

                    nfev = opt_result['nfev']

                    if residual_function_chain:
                        l_out, m_out, opt_result_chain = fit_model_to_vis(uvw, vis_tbin, residual_function_chain,
                                l_out, m_out, export_npy=args.export_npy)

                        nfev += opt_result_chain['nfev']

                    cost = opt_result['cost']

                    # see if we should skip including this in future starting parameter estimates
                    skip = False
                    if args.exclude and k in args.exclude:
                        print("Not including in parameter estimates by request")
                        skip = True

                    if not skip:
                        l_est = np.append(l_est, l_out)
                        m_est = np.append(m_est, m_out)
                        #costs = np.append(costs, cost)

                    # compute source sky location from parameter values
                    src_elev, src_az = lm_to_ea(l_out, m_out)

                    # write data to h5 file
                    h5f['l_start'][k] = l_init
                    h5f['m_start'][k] = m_init
                    h5f['l_est'][k] = l_out
                    h5f['m_est'][k] = m_out
                    h5f['elevation'][k] = src_elev
                    h5f['azimuth'][k] = src_az
                    h5f['cost'][k] = cost
                    h5f['skipped'][k] = skip
                    h5f['nfev'][k] = nfev

                    save_scatter = (args.scatter and k in args.scatter) or (args.scatter_every and k % args.scatter_every == 0)
                    if save_scatter:
                        print("Plotting model and data scatter")
                        vis_phase_scatter_3d(uvw[:,0], uvw[:,1], vis_tbin, show=False,
                                html_savename=f"scatter_{k}.html", l=l_out, m=m_out)

                    k += 1
                    print("\n\n")