Example #1
def make_psf(vis_mxds, img_xds, grid_parms, vis_sel_parms, img_sel_parms):
    """
    Creates a cube or continuum point spread function (psf) image from the user specified uvw and imaging weight data. Only the prolate spheroidal convolutional gridding function is supported (this will change in a future release).
    
    Parameters
    ----------
    vis_mxds : xarray.core.dataset.Dataset
        Input multi-xarray Dataset with global data.
    img_xds : xarray.core.dataset.Dataset
        Input image dataset.
    grid_parms : dictionary
    grid_parms['image_size'] : list of int, length = 2
        The image size (no padding).
    grid_parms['cell_size']  : list of number, length = 2, units = arcseconds
        The image cell size.
    grid_parms['chan_mode'] : {'continuum'/'cube'}, default = 'continuum'
        Create a continuum or cube image.
    grid_parms['fft_padding'] : number, acceptable range [1,100], default = 1.2
        The factor that determines how much the gridded visibilities are padded before the fft is done.
    vis_sel_parms : dictionary
    vis_sel_parms['xds'] : str
        The xds within the mxds to use when calculating the PSF.
    vis_sel_parms['data_group_in_id'] : int, default = first id in xds.data_groups
        The data group in the xds to use.
    img_sel_parms : dictionary
    img_sel_parms['data_group_in_id'] : int, default = first id in xds.data_groups
        The data group in the image xds to use.
    img_sel_parms['psf'] : str, default ='PSF'
        The created image name.
    img_sel_parms['psf_sum_weight'] : str, default ='PSF_SUM_WEIGHT'
        The created sum of weights name.
    img_sel_parms['psf_fit'] : str, default ='PSF_FIT'
        The created PSF beam fit parameters name.
    Returns
    -------
    img_xds : xarray.core.dataset.Dataset
        The image dataset now contains the created PSF, the sum of weights, and the fitted PSF beam parameters.
    """
    print('######################### Start make_psf #########################')
    import numpy as np
    from numba import jit
    import time
    import math
    import dask.array.fft as dafft
    import xarray as xr
    import dask.array as da
    import matplotlib.pylab as plt
    import dask
    import copy, os
    from numcodecs import Blosc
    from itertools import cycle
    
    from cngi._utils._check_parms import _check_sel_parms, _check_existence_sel_parms
    from ._imaging_utils._check_imaging_parms import _check_grid_parms
    from ._imaging_utils._gridding_convolutional_kernels import _create_prolate_spheroidal_kernel, _create_prolate_spheroidal_kernel_1D
    from ._imaging_utils._standard_grid import _graph_standard_grid
    from ._imaging_utils._remove_padding import _remove_padding
    from ._imaging_utils._aperture_grid import _graph_aperture_grid
    from cngi.image import make_empty_sky_image
    from cngi.image import fit_gaussian
    
    #print('****',sel_parms,'****')
    _mxds = vis_mxds.copy(deep=True)
    _img_xds = img_xds.copy(deep=True)
    _vis_sel_parms = copy.deepcopy(vis_sel_parms)
    _img_sel_parms = copy.deepcopy(img_sel_parms)
    _grid_parms = copy.deepcopy(grid_parms)

    ##############Parameter Checking and Set Defaults##############
    assert(_check_grid_parms(_grid_parms)), "######### ERROR: grid_parms checking failed"
    assert('xds' in _vis_sel_parms), "######### ERROR: xds must be specified in sel_parms" #Can't have a default since xds names are not fixed.
    _vis_xds = _mxds.attrs[_vis_sel_parms['xds']]
    
    #Check vis data_group
    _check_sel_parms(_vis_xds,_vis_sel_parms)
    
    #Check img data_group
    _check_sel_parms(_img_xds,_img_sel_parms,new_or_modified_data_variables={'sum_weight':'PSF_SUM_WEIGHT','psf':'PSF','psf_fit':'PSF_FIT'},append_to_in_id=True)

    ##################################################################################
    
    # Creating gridding kernel
    _grid_parms['oversampling'] = 100
    _grid_parms['support'] = 7
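    # The prolate spheroidal kernel is the anti-aliasing gridding function; the
    # returned correcting_cgk_image is its image-plane counterpart, used further
    # down to undo the taper the kernel imposes on the image.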
    
    cgk, correcting_cgk_image = _create_prolate_spheroidal_kernel(_grid_parms['oversampling'], _grid_parms['support'], _grid_parms['image_size_padded'])
    cgk_1D = _create_prolate_spheroidal_kernel_1D(_grid_parms['oversampling'], _grid_parms['support'])
    
    _grid_parms['complex_grid'] = False
    _grid_parms['do_psf'] = True
    _grid_parms['do_imaging_weight'] = False
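    # With do_psf=True the gridder grids the imaging weights in place of the
    # visibilities, so the inverse FFT of the resulting grid is the
    # (uncorrected) point spread function.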
    grids_and_sum_weights = _graph_standard_grid(_vis_xds, cgk_1D, _grid_parms, _vis_sel_parms)
    uncorrected_dirty_image = dafft.fftshift(dafft.ifft2(dafft.ifftshift(grids_and_sum_weights[0], axes=(0, 1)), axes=(0, 1)), axes=(0, 1))
    
    #Remove Padding
    correcting_cgk_image = _remove_padding(correcting_cgk_image,_grid_parms['image_size'])
    uncorrected_dirty_image = _remove_padding(uncorrected_dirty_image,_grid_parms['image_size']).real * (_grid_parms['image_size_padded'][0] * _grid_parms['image_size_padded'][1])
    
    #############Normalize#############
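    # The PSF is normalized by the per-(chan,pol) sum of gridding weights (so
    # its peak is ~1) and by the prolate spheroidal correction image (to undo
    # the gridding kernel taper). Zero sums of weights are replaced by 1 to
    # avoid division by zero.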
    def correct_image(uncorrected_dirty_image, sum_weights, correcting_cgk):
        sum_weights_copy = copy.deepcopy(sum_weights) ##Don't mutate inputs, therefore do deep copy (https://docs.dask.org/en/latest/delayed-best-practices.html).
        sum_weights_copy[sum_weights_copy == 0] = 1
        # corrected_image = (uncorrected_dirty_image/sum_weights[:,:,None,None])/correcting_cgk[None,None,:,:]
        corrected_image = (uncorrected_dirty_image / sum_weights_copy) / correcting_cgk
        return corrected_image

    corrected_dirty_image = da.map_blocks(correct_image, uncorrected_dirty_image, grids_and_sum_weights[1][None, None, :, :],correcting_cgk_image[:, :, None, None])
    ####################################################

    if _grid_parms['chan_mode'] == 'continuum':
        freq_coords = [da.mean(_vis_xds.coords['chan'].values)]
        chan_width = da.from_array([da.mean(_vis_xds['chan_width'].data)],chunks=(1,))
        imag_chan_chunk_size = 1
    elif _grid_parms['chan_mode'] == 'cube':
        freq_coords = _vis_xds.coords['chan'].values
        chan_width = _vis_xds['chan_width'].data
        imag_chan_chunk_size = _vis_xds.DATA.chunks[2][0]
    
    phase_center = _grid_parms['phase_center']
    image_size = _grid_parms['image_size']
    cell_size = _grid_parms['cell_size']

    pol_coords = _vis_xds.pol.data
    time_coords = [_vis_xds.time.mean().data]
    
    _img_xds = make_empty_sky_image(_img_xds,phase_center,image_size,cell_size,freq_coords,chan_width,pol_coords,time_coords)
    
    
    
    _img_xds[_img_sel_parms['data_group_out']['sum_weight']] = xr.DataArray(grids_and_sum_weights[1][None,:,:], dims=['time','chan','pol'])
    _img_xds[_img_sel_parms['data_group_out']['psf']] = xr.DataArray(corrected_dirty_image[:,:,None,:,:], dims=['l', 'm', 'time', 'chan', 'pol'])
    _img_xds.attrs['data_groups'][0] = {**_img_xds.attrs['data_groups'][0],**{_img_sel_parms['data_group_out']['id']:_img_sel_parms['data_group_out']}}
    
    _img_xds = fit_gaussian(_img_xds,dv=_img_sel_parms['data_group_out']['psf'],beam_set_name=_img_sel_parms['data_group_out']['psf_fit'])

    
    print('######################### Created graph for make_psf #########################')
    return _img_xds
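
# ---------------------------------------------------------------------------
# A minimal usage sketch for make_psf. The mxds/img_xds objects and every value
# below are hypothetical (none are defined in this module); note that the body
# also requires grid_parms['phase_center'], which the docstring does not list.
#
# grid_parms = {'image_size': [200, 200],        # pixels
#               'cell_size': [0.08, 0.08],       # arcseconds
#               'chan_mode': 'cube',
#               'fft_padding': 1.2,
#               'phase_center': [2.0, -0.5]}     # radians, hypothetical
# vis_sel_parms = {'xds': 'xds0'}                # name of an xds in the mxds
# img_sel_parms = {}                             # default names: PSF, PSF_SUM_WEIGHT
# img_xds = make_psf(mxds, img_xds, grid_parms, vis_sel_parms, img_sel_parms)
# ---------------------------------------------------------------------------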
    '''
def synthesis_imaging_cube(vis_mxds, img_xds, grid_parms,
                           imaging_weights_parms, pb_parms, vis_sel_parms,
                           img_sel_parms):
    print('v3')

    print(
        '######################### Start Synthesis Imaging Cube #########################'
    )
    import numpy as np
    from numba import jit
    import time
    import math
    import dask.array.fft as dafft
    import xarray as xr
    import dask.array as da
    import matplotlib.pylab as plt
    import dask
    import copy, os
    from numcodecs import Blosc
    from itertools import cycle
    import itertools
    from cngi._utils._check_parms import _check_sel_parms, _check_existence_sel_parms
    from ._imaging_utils._check_imaging_parms import _check_imaging_weights_parms, _check_grid_parms, _check_pb_parms
    from ._imaging_utils._make_pb_symmetric import _airy_disk, _casa_airy_disk
    from cngi.image import make_empty_sky_image

    _mxds = vis_mxds.copy(deep=True)
    _vis_sel_parms = copy.deepcopy(vis_sel_parms)
    _img_sel_parms = copy.deepcopy(img_sel_parms)
    _grid_parms = copy.deepcopy(grid_parms)
    _imaging_weights_parms = copy.deepcopy(imaging_weights_parms)
    _img_xds = copy.deepcopy(img_xds)
    _pb_parms = copy.deepcopy(pb_parms)

    assert (
        'xds' in _vis_sel_parms
    ), "######### ERROR: xds must be specified in sel_parms"  #Can't have a default since xds names are not fixed.
    _vis_xds = _mxds.attrs[_vis_sel_parms['xds']]
    assert _vis_xds.dims['pol'] <= 2, "Full polarization is not supported."
    assert (_check_imaging_weights_parms(_imaging_weights_parms)
            ), "######### ERROR: imaging_weights_parms checking failed"
    assert (_check_grid_parms(_grid_parms)
            ), "######### ERROR: grid_parms checking failed"
    assert (_check_pb_parms(_img_xds, _pb_parms)
            ), "######### ERROR: user_imaging_weights_parms checking failed"

    #Check vis data_group
    _check_sel_parms(_vis_xds, _vis_sel_parms)

    #Check img data_group
    _check_sel_parms(_img_xds,
                     _img_sel_parms,
                     new_or_modified_data_variables={
                         'image_sum_weight': 'IMAGE_SUM_WEIGHT',
                         'image': 'IMAGE',
                         'psf_sum_weight': 'PSF_SUM_WEIGHT',
                         'psf': 'PSF',
                         'pb': 'PB',
                         'restore_parms': 'RESTORE_PARMS'
                     },
                     append_to_in_id=True)

    parms = {
        'grid_parms': _grid_parms,
        'imaging_weights_parms': _imaging_weights_parms,
        'pb_parms': _pb_parms,
        'vis_sel_parms': _vis_sel_parms,
        'img_sel_parms': _img_sel_parms
    }

    chunk_sizes = list(
        _vis_xds[_vis_sel_parms["data_group_in"]["data"]].chunks)
    chunk_sizes[0] = (np.sum(chunk_sizes[2]), )
    chunk_sizes[1] = (np.sum(chunk_sizes[1]), )
    chunk_sizes[3] = (np.sum(chunk_sizes[3]), )
    n_pol = _vis_xds.dims['pol']

    #assert n_chunks_in_each_dim[3] == 1, "Chunking is not allowed on pol dim."
    n_chunks_in_each_dim = list(
        _vis_xds[_vis_sel_parms["data_group_in"]["data"]].data.numblocks)
    n_chunks_in_each_dim[0] = 1  #time
    n_chunks_in_each_dim[1] = 1  #baseline
    n_chunks_in_each_dim[3] = 1  #pol

    #Iter over time,baseline,chan
    iter_chunks_indx = itertools.product(np.arange(n_chunks_in_each_dim[0]),
                                         np.arange(n_chunks_in_each_dim[1]),
                                         np.arange(n_chunks_in_each_dim[2]),
                                         np.arange(n_chunks_in_each_dim[3]))

    image_list = _ndim_list(n_chunks_in_each_dim)
    image_sum_weight_list = _ndim_list(n_chunks_in_each_dim[2:])
    psf_list = _ndim_list(n_chunks_in_each_dim)
    psf_sum_weight_list = _ndim_list(n_chunks_in_each_dim[2:])

    pb_list = _ndim_list(tuple(n_chunks_in_each_dim) + (1, ))
    ellipse_parms_list = _ndim_list(tuple(n_chunks_in_each_dim[2:]) + (1, ))
    n_dish_type = len(_pb_parms['list_dish_diameters'])
    n_elps = 3

    freq_chan = da.from_array(
        _vis_xds.coords['chan'].values,
        chunks=(_vis_xds[_vis_sel_parms["data_group_in"]["data"]].chunks[2]))

    # Build graph
    for c_time, c_baseline, c_chan, c_pol in iter_chunks_indx:
        #c_time, c_baseline, c_chan, c_pol
        #print(_vis_xds[_vis_sel_parms["data_group_in"]["data"]].data.partitions[:, :, c_chan, :].shape)
        synthesis_chunk = dask.delayed(_synthesis_imaging_cube_std_chunk)(
            _vis_xds[_vis_sel_parms["data_group_in"]
                     ["data"]].data.partitions[:, :, c_chan, :],
            _vis_xds[_vis_sel_parms["data_group_in"]
                     ["uvw"]].data.partitions[:, :, :],
            _vis_xds[_vis_sel_parms["data_group_in"]
                     ["weight"]].data.partitions[:, :, c_chan, :],
            _vis_xds[_vis_sel_parms["data_group_in"]
                     ["flag"]].data.partitions[:, :, c_chan, :],
            freq_chan.partitions[c_chan], dask.delayed(parms))

        image_list[c_time][c_baseline][c_chan][c_pol] = da.from_delayed(
            synthesis_chunk[0],
            (_grid_parms['image_size'][0], _grid_parms['image_size'][1],
             chunk_sizes[2][c_chan], chunk_sizes[3][c_pol]),
            dtype=np.double)
        image_sum_weight_list[c_chan][c_pol] = da.from_delayed(
            synthesis_chunk[1],
            (chunk_sizes[2][c_chan], chunk_sizes[3][c_pol]),
            dtype=np.double)

        psf_list[c_time][c_baseline][c_chan][c_pol] = da.from_delayed(
            synthesis_chunk[2],
            (_grid_parms['image_size'][0], _grid_parms['image_size'][1],
             chunk_sizes[2][c_chan], chunk_sizes[3][c_pol]),
            dtype=np.double)
        psf_sum_weight_list[c_chan][c_pol] = da.from_delayed(
            synthesis_chunk[3],
            (chunk_sizes[2][c_chan], chunk_sizes[3][c_pol]),
            dtype=np.double)

        pb_list[c_time][c_baseline][c_chan][c_pol][0] = da.from_delayed(
            synthesis_chunk[4],
            (_grid_parms['image_size'][0], _grid_parms['image_size'][1],
             chunk_sizes[2][c_chan], chunk_sizes[3][c_pol], n_dish_type),
            dtype=np.double)

        ellipse_parms_list[c_chan][c_pol][0] = da.from_delayed(
            synthesis_chunk[5],
            (chunk_sizes[2][c_chan], chunk_sizes[3][c_pol], n_elps),
            dtype=np.double)

        #return image, image_sum_weight, psf, psf_sum_weight, pb

    if _grid_parms['chan_mode'] == 'continuum':
        freq_coords = [da.mean(_vis_xds.coords['chan'].values)]
        chan_width = da.from_array([da.mean(_vis_xds['chan_width'].data)],
                                   chunks=(1, ))
        imag_chan_chunk_size = 1
    elif _grid_parms['chan_mode'] == 'cube':
        freq_coords = _vis_xds.coords['chan'].values
        chan_width = _vis_xds['chan_width'].data
        imag_chan_chunk_size = _vis_xds.DATA.chunks[2][0]

    phase_center = _grid_parms['phase_center']
    image_size = _grid_parms['image_size']
    cell_size = _grid_parms['cell_size']

    pol_coords = _vis_xds.pol.data
    time_coords = [_vis_xds.time.mean().data]

    _img_xds = make_empty_sky_image(_img_xds, phase_center, image_size,
                                    cell_size, freq_coords, chan_width,
                                    pol_coords, time_coords)

    #print(da.block(image_list))
    #print(da.block(psf_list))
    #print(pb_list)
    #print(da.block(pb_list))

    _img_xds[_img_sel_parms['data_group_out']['image']] = xr.DataArray(
        da.block(image_list)[:, :, None, :, :],
        dims=['l', 'm', 'time', 'chan', 'pol'])
    _img_xds[_img_sel_parms['data_group_out']
             ['image_sum_weight']] = xr.DataArray(
                 da.block(image_sum_weight_list)[None, :, :],
                 dims=['time', 'chan', 'pol'])

    print(da.block(ellipse_parms_list))

    _img_xds[_img_sel_parms['data_group_out']['restore_parms']] = xr.DataArray(
        da.block(ellipse_parms_list)[None, :, :, :],
        dims=['time', 'chan', 'pol', 'elps_index'])

    _img_xds[_img_sel_parms['data_group_out']['psf']] = xr.DataArray(
        da.block(psf_list)[:, :, None, :, :],
        dims=['l', 'm', 'time', 'chan', 'pol'])
    _img_xds[_img_sel_parms['data_group_out']
             ['psf_sum_weight']] = xr.DataArray(
                 da.block(psf_sum_weight_list)[None, :, :],
                 dims=['time', 'chan', 'pol'])

    _img_xds[_img_sel_parms['data_group_out']['pb']] = xr.DataArray(
        da.block(pb_list)[:, :, None, :, :, :],
        dims=['l', 'm', 'time', 'chan', 'pol', 'dish_type'])
    _img_xds = _img_xds.assign_coords(
        {'dish_type': np.arange(len(_pb_parms['list_dish_diameters']))})
    _img_xds.attrs['data_groups'][0] = {
        **_img_xds.attrs['data_groups'][0],
        **{
            _img_sel_parms['data_group_out']['id']:
            _img_sel_parms['data_group_out']
        }
    }

    return _img_xds
    '''
Example #3
def make_psf_with_gcf(mxds, gcf_dataset, img_dataset, grid_parms, norm_parms,
                      vis_sel_parms, img_sel_parms):
    """
    Creates a cube or continuum point spread function (psf) image from the user specified visibility, uvw and imaging weight data. A gridding convolution function (gcf_dataset), primary beam image (img_dataset) and a primary beam weight image (img_dataset) must be supplied.
    
    Parameters
    ----------
    mxds : xarray.core.dataset.Dataset
        Input multi-xarray Dataset with global data.
    gcf_dataset : xarray.core.dataset.Dataset
         Input gridding convolution dataset.
    img_dataset : xarray.core.dataset.Dataset
         Input image dataset.
    grid_parms : dictionary
    grid_parms['image_size'] : list of int, length = 2
        The image size (no padding).
    grid_parms['cell_size']  : list of number, length = 2, units = arcseconds
        The image cell size.
    grid_parms['chan_mode'] : {'continuum'/'cube'}, default = 'continuum'
        Create a continuum or cube image.
    grid_parms['fft_padding'] : number, acceptable range [1,100], default = 1.2
        The factor that determines how much the gridded visibilities are padded before the fft is done.
    norm_parms : dictionary
    norm_parms['norm_type'] : {'none'/'flat_noise'/'flat_sky'}, default = 'flat_sky'
         Gridded (and FT'd) images represent the PB-weighted sky image.
         Qualitatively it can be approximated as two instances of the PB
         applied to the sky image (one naturally present in the data
         and one introduced during gridding via the convolution functions).
         norm_type='flat_noise' : Divide the raw image by sqrt(sel_parms['weight_pb']) so that
             the input to the minor cycle represents the product of the sky and PB. The noise
             is 'flat' across the region covered by each PB.
         norm_type='flat_sky' : Divide the raw image by sel_parms['weight_pb'] so that the input
             to the minor cycle represents only the sky. The noise is higher in the outer
             regions of the primary beam where the sensitivity is low.
         norm_type='none' : No normalization after gridding and FFT.
    sel_parms : dictionary
    sel_parms['uvw'] : str, default ='UVW'
        The name of uvw data variable that will be used to grid the visibilities.
    sel_parms['data'] : str, default = 'DATA'
        The name of the visibility data to be gridded.
    sel_parms['imaging_weight'] : str, default ='IMAGING_WEIGHT'
        The name of the imaging weights to be used.
    sel_parms['psf'] : str, default ='PSF'
        The created point spread function image name.
    sel_parms['sum_weight'] : str, default ='PSF_SUM_WEIGHT'
        The created sum of weights name.
    sel_parms['psf_fit'] : str, default ='PSF_FIT'
        The created PSF beam fit parameters name.
    sel_parms['pb'] : str, default ='PB'
         The primary beam image to use for normalization.
    sel_parms['weight_pb'] : str, default ='WEIGHT_PB'
         The primary beam weight image to use for normalization.
    Returns
    -------
    img_dataset : xarray.core.dataset.Dataset
        The image dataset now contains the created PSF, the sum of weights, and the fitted PSF beam parameters.
    """
    print(
        '######################### Start make_psf_with_gcf #########################'
    )
    import numpy as np
    from numba import jit
    import time
    import math
    import dask.array.fft as dafft
    import xarray as xr
    import dask.array as da
    import matplotlib.pylab as plt
    import dask
    import copy, os
    from numcodecs import Blosc
    from itertools import cycle

    from cngi._utils._check_parms import _check_sel_parms, _check_existence_sel_parms
    from ._imaging_utils._check_imaging_parms import _check_grid_parms, _check_norm_parms
    #from ._imaging_utils._gridding_convolutional_kernels import _create_prolate_spheroidal_kernel, _create_prolate_spheroidal_kernel_1D
    from ._imaging_utils._standard_grid import _graph_standard_grid
    from ._imaging_utils._remove_padding import _remove_padding
    from ._imaging_utils._aperture_grid import _graph_aperture_grid
    from ._imaging_utils._normalize import _normalize
    from cngi.image import make_empty_sky_image
    from cngi.image import fit_gaussian

    #Deep copy so that inputs are not modified
    _mxds = mxds.copy(deep=True)
    _img_dataset = img_dataset.copy(deep=True)
    _vis_sel_parms = copy.deepcopy(vis_sel_parms)
    _img_sel_parms = copy.deepcopy(img_sel_parms)
    _grid_parms = copy.deepcopy(grid_parms)
    _norm_parms = copy.deepcopy(norm_parms)

    ##############Parameter Checking and Set Defaults##############
    assert (
        'xds' in _vis_sel_parms
    ), "######### ERROR: xds must be specified in sel_parms"  #Can't have a default since xds names are not fixed.
    _vis_dataset = _mxds.attrs[_vis_sel_parms['xds']]

    assert (_check_grid_parms(_grid_parms)
            ), "######### ERROR: grid_parms checking failed"
    assert (_check_norm_parms(_norm_parms)
            ), "######### ERROR: norm_parms checking failed"

    #Check vis data_group
    _check_sel_parms(_vis_dataset, _vis_sel_parms)

    #Check img data_group

    _check_sel_parms(_img_dataset,
                     _img_sel_parms,
                     new_or_modified_data_variables={
                         'sum_weight': 'PSF_SUM_WEIGHT',
                         'psf': 'PSF',
                         'psf_fit': 'PSF_FIT'
                     },
                     required_data_variables={
                         'pb': 'PB',
                         'weight_pb': 'WEIGHT_PB'
                     },
                     append_to_in_id=False)
    #'pb':'PB','weight_pb':'WEIGHT_PB',
    #print('did this work',_img_sel_parms)

    _grid_parms['grid_weights'] = False
    _grid_parms['do_psf'] = True
    _grid_parms['oversampling'] = np.array(gcf_dataset.oversampling)

    grids_and_sum_weights = _graph_aperture_grid(_vis_dataset, gcf_dataset,
                                                 _grid_parms, _vis_sel_parms)
    uncorrected_dirty_image = dafft.fftshift(dafft.ifft2(dafft.ifftshift(
        grids_and_sum_weights[0], axes=(0, 1)),
                                                         axes=(0, 1)),
                                             axes=(0, 1))

    #Remove Padding
    #print('grid sizes',_grid_parms['image_size_padded'][0], _grid_parms['image_size_padded'][1])
    uncorrected_dirty_image = _remove_padding(
        uncorrected_dirty_image, _grid_parms['image_size']).real * (
            _grid_parms['image_size_padded'][0] *
            _grid_parms['image_size_padded'][1])

    #print(_img_sel_parms)
    normalized_image = _normalize(uncorrected_dirty_image,
                                  grids_and_sum_weights[1], img_dataset,
                                  gcf_dataset, 'forward', _norm_parms,
                                  _img_sel_parms)

    normalized_image = normalized_image / normalized_image[
        _grid_parms['image_center'][0], _grid_parms['image_center'][1], :, :]

    if _grid_parms['chan_mode'] == 'continuum':
        freq_coords = [da.mean(_vis_dataset.coords['chan'].values)]
        chan_width = da.from_array([da.mean(_vis_dataset['chan_width'].data)],
                                   chunks=(1, ))
        imag_chan_chunk_size = 1
    elif _grid_parms['chan_mode'] == 'cube':
        freq_coords = _vis_dataset.coords['chan'].values
        chan_width = _vis_dataset['chan_width'].data
        imag_chan_chunk_size = _vis_dataset.DATA.chunks[2][0]

    ###Create Image Dataset
    chunks = _vis_dataset.DATA.chunks
    n_imag_pol = chunks[3][0]

    #coords = {'d0': np.arange(_grid_parms['image_size'][0]), 'd1': np.arange(_grid_parms['image_size'][1]),
    #          'chan': freq_coords, 'pol': np.arange(n_imag_pol), 'chan_width' : ('chan',chan_width)}
    #img_dataset = img_dataset.assign_coords(coords)
    #img_dataset[_sel_parms['sum_weight']] = xr.DataArray(grids_and_sum_weights[1], dims=['chan','pol'])
    #img_dataset[_sel_parms['image']] = xr.DataArray(normalized_image, dims=['d0', 'd1', 'chan', 'pol'])

    phase_center = _grid_parms['phase_center']
    image_size = _grid_parms['image_size']
    cell_size = _grid_parms['cell_size']
    phase_center = _grid_parms['phase_center']

    pol_coords = _vis_dataset.pol.data
    time_coords = [_vis_dataset.time.mean().data]

    _img_dataset = make_empty_sky_image(_img_dataset, phase_center, image_size,
                                        cell_size, freq_coords, chan_width,
                                        pol_coords, time_coords)

    _img_dataset[_img_sel_parms['data_group_out']
                 ['sum_weight']] = xr.DataArray(
                     grids_and_sum_weights[1][None, :, :],
                     dims=['time', 'chan', 'pol'])
    _img_dataset[_img_sel_parms['data_group_out']['psf']] = xr.DataArray(
        normalized_image[:, :, None, :, :],
        dims=['l', 'm', 'time', 'chan', 'pol'])
    _img_dataset.attrs['data_groups'][0] = {
        **_img_dataset.attrs['data_groups'][0],
        **{
            _img_sel_parms['data_group_out']['id']:
            _img_sel_parms['data_group_out']
        }
    }

    #list_xarray_data_variables = [img_dataset[_sel_parms['image']],img_dataset[_sel_parms['sum_weight']]]
    #return _store(img_dataset,list_xarray_data_variables,_storage_parms)
    _img_dataset = fit_gaussian(
        _img_dataset,
        dv=_img_sel_parms['data_group_out']['psf'],
        beam_set_name=_img_sel_parms['data_group_out']['psf_fit'])

    print(
        '#########################  Created graph for make_psf_with_gcf #########################'
    )
    return _img_dataset
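
# ---------------------------------------------------------------------------
# A minimal usage sketch for make_psf_with_gcf (all objects and values are
# hypothetical). The gcf_dataset would typically come from
# make_gridding_convolution_function, and img_dataset from make_mosaic_pb, so
# that the PB and WEIGHT_PB data variables required for normalization exist.
#
# norm_parms = {'norm_type': 'flat_sky'}
# img_dataset = make_psf_with_gcf(mxds, gcf_dataset, img_dataset, grid_parms,
#                                 norm_parms, {'xds': 'xds0'}, {})
# ---------------------------------------------------------------------------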
Example #4
def make_mosaic_pb(mxds, gcf_dataset, img_dataset, vis_sel_parms,
                   img_sel_parms, grid_parms):
    """
    The make_mosaic_pb function currently supports rotationally symmetric airy disk primary beams. Primary beams can be generated for any number of dishes.
    The list_dish_diameters and list_blockage_diameters must be specified for each dish (see make_gridding_convolution_function).
    
    Parameters
    ----------
    vis_dataset : xarray.core.dataset.Dataset
        Input visibility dataset.
    gcf_dataset : xarray.core.dataset.Dataset
        Input gridding convolution function dataset.
    img_dataset : xarray.core.dataset.Dataset
        Input image dataset.
    make_pb_parms : dictionary
    make_pb_parms['function'] : {'airy'}, default='airy'
        Only the airy disk function is currently supported.
    grid_parms['image_size'] : list of int, length = 2
        The image size (no padding).
    grid_parms['cell_size']  : list of number, length = 2, units = arcseconds
        The image cell size.
    make_pb_parms['list_dish_diameters'] : list of number
        The list of dish diameters.
    make_pb_parms['list_blockage_diameters'] : list of number
        The list of blockage diameters for each dish.
    vis_sel_parms : dictionary
    vis_sel_parms['xds'] : str
        The xds within the mxds to use when calculating the mosaic primary beam.
    vis_sel_parms['data_group_in_id'] : int, default = first id in xds.data_groups
        The data group in the xds to use.
    img_sel_parms : dictionary
    img_sel_parms['data_group_in_id'] : int, default = first id in xds.data_groups
        The data group in the image xds to use.
    img_sel_parms['pb'] : str, default ='PB'
        The mosaic primary beam.
    img_sel_parms['weight_pb'] : str, default ='WEIGHT_PB'
        The weight image.
    img_sel_parms['weight_pb_sum_weight'] : str, default ='WEIGHT_PB_SUM_WEIGHT'
        The sum of weight calculated when gridding the gcfs to create the weight image.
    Returns
    -------
    img_xds : xarray.core.dataset.Dataset
    """
    print(
        '######################### Start make_mosaic_pb #########################'
    )

    #from ngcasa._ngcasa_utils._store import _store
    #from ngcasa._ngcasa_utils._check_parms import _check_storage_parms, _check_sel_parms, _check_existence_sel_parms
    from cngi._utils._check_parms import _check_sel_parms, _check_existence_sel_parms
    from ._imaging_utils._check_imaging_parms import _check_grid_parms, _check_mosaic_pb_parms
    from ._imaging_utils._aperture_grid import _graph_aperture_grid
    import dask.array.fft as dafft
    import matplotlib.pylab as plt
    import numpy as np
    import dask.array as da
    import copy
    import xarray as xr
    from ._imaging_utils._remove_padding import _remove_padding
    from ._imaging_utils._normalize import _normalize
    from cngi.image import make_empty_sky_image
    import dask

    #Deep copy so that inputs are not modified
    _mxds = mxds.copy(deep=True)
    _img_dataset = img_dataset.copy(deep=True)
    _vis_sel_parms = copy.deepcopy(vis_sel_parms)
    _img_sel_parms = copy.deepcopy(img_sel_parms)
    _grid_parms = copy.deepcopy(grid_parms)

    ##############Parameter Checking and Set Defaults##############
    assert (
        'xds' in _vis_sel_parms
    ), "######### ERROR: xds must be specified in sel_parms"  #Can't have a default since xds names are not fixed.
    _vis_dataset = _mxds.attrs[_vis_sel_parms['xds']]

    assert (_check_grid_parms(_grid_parms)
            ), "######### ERROR: grid_parms checking failed"

    #Check vis data_group
    _check_sel_parms(_vis_dataset, _vis_sel_parms)
    #print(_vis_sel_parms)

    #Check img data_group
    _check_sel_parms(_img_dataset,
                     _img_sel_parms,
                     new_or_modified_data_variables={
                         'pb': 'PB',
                         'weight_pb': 'WEIGHT_PB',
                         'weight_pb_sum_weight': 'WEIGHT_PB_SUM_WEIGHT'
                     },
                     append_to_in_id=True)
    #print('did this work',_img_sel_parms)

    _grid_parms['grid_weights'] = True
    _grid_parms['do_psf'] = False
    #_grid_parms['image_size_padded'] = _grid_parms['image_size']
    _grid_parms['oversampling'] = np.array(gcf_dataset.attrs['oversampling'])
    grids_and_sum_weights = _graph_aperture_grid(_vis_dataset, gcf_dataset,
                                                 _grid_parms, _vis_sel_parms)

    #grids_and_sum_weights = _graph_aperture_grid(_vis_dataset,gcf_dataset,_grid_parms)
    weight_image = _remove_padding(
        dafft.fftshift(dafft.ifft2(dafft.ifftshift(grids_and_sum_weights[0],
                                                   axes=(0, 1)),
                                   axes=(0, 1)),
                       axes=(0, 1)), _grid_parms['image_size']).real * (
                           _grid_parms['image_size_padded'][0] *
                           _grid_parms['image_size_padded'][1])

    #############Move this to Normalizer#############
    def correct_image(weight_image, sum_weights):
        sum_weights_copy = copy.deepcopy(
            sum_weights
        )  ##Don't mutate inputs, therefore do deep copy (https://docs.dask.org/en/latest/delayed-best-practices.html).
        sum_weights_copy[sum_weights_copy == 0] = 1
        weight_image = (weight_image / sum_weights_copy[None, None, :, :])
        return weight_image

    weight_image = da.map_blocks(correct_image,
                                 weight_image,
                                 grids_and_sum_weights[1],
                                 dtype=np.double)
    mosaic_primary_beam = da.sqrt(np.abs(weight_image))
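    # The gridded weight image approximates the square of the primary beam
    # response, so the mosaic primary beam is its square root.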

    if _grid_parms['chan_mode'] == 'continuum':
        freq_coords = [da.mean(_vis_dataset.coords['chan'].values)]
        chan_width = da.from_array([da.mean(_vis_dataset['chan_width'].data)],
                                   chunks=(1, ))
        imag_chan_chunk_size = 1
    elif _grid_parms['chan_mode'] == 'cube':
        freq_coords = _vis_dataset.coords['chan'].values
        chan_width = _vis_dataset['chan_width'].data
        imag_chan_chunk_size = _vis_dataset.DATA.chunks[2][0]

    phase_center = _grid_parms['phase_center']
    image_size = _grid_parms['image_size']
    cell_size = _grid_parms['cell_size']

    pol_coords = _vis_dataset.pol.data
    time_coords = [_vis_dataset.time.mean().data]

    _img_dataset = make_empty_sky_image(_img_dataset, phase_center, image_size,
                                        cell_size, freq_coords, chan_width,
                                        pol_coords, time_coords)

    _img_dataset[_img_sel_parms['data_group_out']['pb']] = xr.DataArray(
        mosaic_primary_beam[:, :, None, :, :],
        dims=['l', 'm', 'time', 'chan', 'pol'])
    _img_dataset[_img_sel_parms['data_group_out']['weight_pb']] = xr.DataArray(
        weight_image[:, :, None, :, :], dims=['l', 'm', 'time', 'chan', 'pol'])
    _img_dataset[_img_sel_parms['data_group_out']
                 ['weight_pb_sum_weight']] = xr.DataArray(
                     grids_and_sum_weights[1][None, :, :],
                     dims=['time', 'chan', 'pol'])
    _img_dataset.attrs['data_groups'][0] = {
        **_img_dataset.attrs['data_groups'][0],
        **{
            _img_sel_parms['data_group_out']['id']:
            _img_sel_parms['data_group_out']
        }
    }

    #list_xarray_data_variables = [_img_dataset[_sel_parms['pb']],_img_dataset[_sel_parms['weight']]]
    #return _store(_img_dataset,list_xarray_data_variables,_storage_parms)

    print(
        '#########################  Created graph for make_mosaic_pb #########################'
    )
    return _img_dataset
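
# ---------------------------------------------------------------------------
# A minimal usage sketch for make_mosaic_pb (hypothetical objects and values):
#
# gcf_dataset = make_gridding_convolution_function(mxds, gcf_parms, grid_parms,
#                                                  {'xds': 'xds0'})
# img_dataset = make_mosaic_pb(mxds, gcf_dataset, img_dataset,
#                              {'xds': 'xds0'}, {}, grid_parms)
# ---------------------------------------------------------------------------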
Example #5
def make_gridding_convolution_function(mxds, gcf_parms, grid_parms, sel_parms):
    """
    Currently creates a gcf to correct for the primary beams of antennas and supports heterogeneous arrays (antennas with different dish sizes).
    Only the airy disk and ALMA airy disk models are implemented.
    In the future support will be added for beam squint, pointing corrections, w projection, and including a prolate spheroidal term.
    
    Parameters
    ----------
    vis_dataset : xarray.core.dataset.Dataset
        Input visibility dataset.
    gcf_parms : dictionary
    gcf_parms['function'] : {'alma_airy'/'airy'}, default = 'alma_airy'
        The primary beam model used (a function of the dish diameter and blockage diameter).
    gcf_parms['list_dish_diameters']  : list of number, units = meter
        A list of unique antenna dish diameters.
    gcf_parms['list_blockage_diameters']  : list of number, units = meter
        A list of unique feed blockage diameters (must be the same length as gcf_parms['list_dish_diameters']).
    gcf_parms['unique_ant_indx']  : list of int
        A list that has an index into the gcf_parms['list_dish_diameters'] and gcf_parms['list_blockage_diameters'] lists for each antenna.
    gcf_parms['image_phase_center']  : list of number, length = 2, units = radians
        The mosaic image phase center.
    gcf_parms['a_chan_num_chunk']  : int, default = 3
        The number of chunks in the channel dimension of the gridding convolution function data variable.
    gcf_parms['oversampling']  : list of int, length = 2, default = [10,10]
        The oversampling of the gridding convolution function.
    gcf_parms['max_support']  : list of int, length = 2, default = [15,15]
        The maximum allowable support of the gridding convolution function.
    gcf_parms['support_cut_level']  : number, default = 0.025
        The attenuation at which to truncate the gridding convolution function.
    gcf_parms['chan_tolerance_factor']  : number, default = 0.005
        The fractional bandwidth at which the frequency dependence of the primary beam can be ignored; it determines the number of frequencies for which to calculate a gridding convolution function. The number of channels equals the fractional bandwidth divided by gcf_parms['chan_tolerance_factor'] (for example, a fractional bandwidth of 0.02 yields 4 channels).
    grid_parms : dictionary
    grid_parms['image_size'] : list of int, length = 2
        The image size (no padding).
    grid_parms['cell_size']  : list of number, length = 2, units = arcseconds
        The image cell size.
    Returns
    -------
    gcf_dataset : xarray.core.dataset.Dataset
        The dataset containing the gridding convolution kernels and their support sizes, the baseline/chan/pol maps, and the per-field phase gradients.
    """
    print(
        '######################### Start make_gridding_convolution_function #########################'
    )

    from ._imaging_utils._check_imaging_parms import _check_pb_parms
    from cngi._utils._check_parms import _check_sel_parms, _check_existence_sel_parms
    from ._imaging_utils._check_imaging_parms import _check_grid_parms, _check_gcf_parms
    from ._imaging_utils._gridding_convolutional_kernels import _create_prolate_spheroidal_kernel_2D, _create_prolate_spheroidal_image_2D
    from ._imaging_utils._remove_padding import _remove_padding
    import numpy as np
    import dask.array as da
    import copy, os
    import xarray as xr
    import itertools
    import dask
    import dask.array.fft as dafft
    import time

    import matplotlib.pylab as plt

    #Deep copy so that inputs are not modified
    _mxds = mxds.copy(deep=True)
    _gcf_parms = copy.deepcopy(gcf_parms)
    _grid_parms = copy.deepcopy(grid_parms)
    _sel_parms = copy.deepcopy(sel_parms)

    ##############Parameter Checking and Set Defaults##############
    assert (
        'xds' in _sel_parms
    ), "######### ERROR: xds must be specified in sel_parms"  #Can't have a default since xds names are not fixed.
    _vis_dataset = _mxds.attrs[_sel_parms['xds']]

    _check_sel_parms(_vis_dataset, _sel_parms)

    #_gcf_parms['basline_ant'] = np.unique([_vis_dataset.ANTENNA1.max(axis=0), _vis_dataset.ANTENNA2.max(axis=0)], axis=0).T
    _gcf_parms['basline_ant'] = np.array(
        [_vis_dataset.ANTENNA1.values, _vis_dataset.ANTENNA2.values]).T

    _gcf_parms['freq_chan'] = _vis_dataset.chan.values
    _gcf_parms['pol'] = _vis_dataset.pol.values
    _gcf_parms['vis_data_chunks'] = _vis_dataset.DATA.chunks

    _gcf_parms['field_phase_dir'] = mxds.FIELD.PHASE_DIR[:,
                                                         0, :].data.compute()
    field_id = mxds.FIELD.field_id.data  #.compute()

    #print(_gcf_parms['field_phase_dir'])
    #_gcf_parms['field_phase_dir'] = np.array(global_dataset.FIELD_PHASE_DIR.values[:,:,vis_dataset.attrs['ddi']])

    assert (_check_gcf_parms(_gcf_parms)
            ), "######### ERROR: gcf_parms checking failed"
    assert (_check_grid_parms(_grid_parms)
            ), "######### ERROR: grid_parms checking failed"

    if _gcf_parms['function'] == 'airy':
        from ._imaging_utils._make_pb_symmetric import _airy_disk_rorder
        pb_func = _airy_disk_rorder
    elif _gcf_parms['function'] == 'alma_airy':
        from ._imaging_utils._make_pb_symmetric import _alma_airy_disk_rorder
        pb_func = _alma_airy_disk_rorder
    else:
        assert (
            False
        ), "######### ERROR: Only the airy and alma_airy functions have been implemented"

    #For now only a_term works
    _gcf_parms['a_term'] = True
    _gcf_parms['ps_term'] = False

    _gcf_parms['resize_conv_size'] = (_gcf_parms['max_support'] +
                                      1) * _gcf_parms['oversampling']
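    # The convolution kernels are computed on the padded image grid and later
    # resized to (max_support + 1) * oversampling pixels per axis.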
    #resize_conv_size = _gcf_parms['resize_conv_size']

    if _gcf_parms['ps_term'] == True:
        '''
        ps_term = _create_prolate_spheroidal_kernel_2D(_gcf_parms['oversampling'],np.array([7,7])) #This is only used with a_term == False. Support is hardcoded to 7 until old ps code is replaced by a general function.
        center = _grid_parms['image_center']
        center_embed = np.array(ps_term.shape)//2
        ps_term_padded = np.zeros(_grid_parms['image_size'])
        ps_term_padded[center[0]-center_embed[0]:center[0]+center_embed[0],center[1]-center_embed[1] : center[1]+center_embed[1]] = ps_term
        ps_term_padded_ifft = dafft.fftshift(dafft.ifft2(dafft.ifftshift(da.from_array(ps_term_padded))))

        ps_image = da.from_array(_remove_padding(_create_prolate_spheroidal_image_2D(_grid_parms['image_size_padded']),_grid_parms['image_size']),chunks=_grid_parms['image_size'])

        #Effecively no mapping needed if ps_term == True and a_term == False
        cf_baseline_map = np.zeros((len(_gcf_parms['basline_ant']),),dtype=int)
        cf_chan_map = np.zeros((len(_gcf_parms['freq_chan']),),dtype=int)
        cf_pol_map = np.zeros((len(_gcf_parms['pol']),),dtype=int)
        '''

    if _gcf_parms['a_term'] == True:
        n_unique_ant = len(_gcf_parms['list_dish_diameters'])

        cf_baseline_map, pb_ant_pairs = create_cf_baseline_map(
            _gcf_parms['unique_ant_indx'], _gcf_parms['basline_ant'],
            n_unique_ant)

        cf_chan_map, pb_freq = create_cf_chan_map(
            _gcf_parms['freq_chan'], _gcf_parms['chan_tolerance_factor'])
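        # Channels whose fractional frequency separation is below
        # chan_tolerance_factor share one convolution function; cf_chan_map
        # maps each data channel to the entry of pb_freq used for it.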
        #print('****',pb_freq)
        pb_freq = da.from_array(
            pb_freq,
            chunks=np.ceil(len(pb_freq) / _gcf_parms['a_chan_num_chunk']))

        cf_pol_map = np.zeros(
            (len(_gcf_parms['pol']), ), dtype=int
        )  #create_cf_pol_map(), currently treating all pols the same
        pb_pol = da.from_array(np.array([0]), 1)

        n_chunks_in_each_dim = [pb_freq.numblocks[0], pb_pol.numblocks[0]]
        iter_chunks_indx = itertools.product(
            np.arange(n_chunks_in_each_dim[0]),
            np.arange(n_chunks_in_each_dim[1]))
        chan_chunk_sizes = pb_freq.chunks
        pol_chunk_sizes = pb_pol.chunks

        #print(pb_freq, pb_pol,pol_chunk_sizes)
        list_baseline_pb = []
        list_weight_baseline_pb_sqrd = []
        for c_chan, c_pol in iter_chunks_indx:
            #print('chan, pol ',c_chan,c_pol)
            _gcf_parms['ipower'] = 1
            delayed_baseline_pb = dask.delayed(make_baseline_patterns)(
                pb_freq.partitions[c_chan], pb_pol.partitions[c_pol],
                dask.delayed(pb_ant_pairs), dask.delayed(pb_func),
                dask.delayed(_gcf_parms), dask.delayed(_grid_parms))

            list_baseline_pb.append(
                da.from_delayed(
                    delayed_baseline_pb,
                    (len(pb_ant_pairs), chan_chunk_sizes[0][c_chan],
                     pol_chunk_sizes[0][c_pol],
                     _grid_parms['image_size_padded'][0],
                     _grid_parms['image_size_padded'][1]),
                    dtype=np.double))

            _gcf_parms['ipower'] = 2
            delayed_weight_baseline_pb_sqrd = dask.delayed(
                make_baseline_patterns)(pb_freq.partitions[c_chan],
                                        pb_pol.partitions[c_pol],
                                        dask.delayed(pb_ant_pairs),
                                        dask.delayed(pb_func),
                                        dask.delayed(_gcf_parms),
                                        dask.delayed(_grid_parms))

            list_weight_baseline_pb_sqrd.append(
                da.from_delayed(
                    delayed_weight_baseline_pb_sqrd,
                    (len(pb_ant_pairs), chan_chunk_sizes[0][c_chan],
                     pol_chunk_sizes[0][c_pol],
                     _grid_parms['image_size_padded'][0],
                     _grid_parms['image_size_padded'][1]),
                    dtype=np.double))

        baseline_pb = da.concatenate(list_baseline_pb, axis=1)
        weight_baseline_pb_sqrd = da.concatenate(list_weight_baseline_pb_sqrd,
                                                 axis=1)

#    x = baseline_pb.compute()
#    print("&*&*&*&",x.shape)
#    plt.figure()
#    plt.imshow(x[0,0,0,240:260,240:260])
#    plt.show()

#Combine patterns and fft to obtain the gridding convolutional kernel
#print(weight_baseline_pb_sqrd)

    dataset_dict = {}
    list_xarray_data_variables = []
    if (_gcf_parms['a_term'] == True) and (_gcf_parms['ps_term'] == True):
        conv_kernel = da.real(
            dafft.fftshift(dafft.fft2(dafft.ifftshift(ps_term_padded_ifft *
                                                      baseline_pb,
                                                      axes=(3, 4)),
                                      axes=(3, 4)),
                           axes=(3, 4)))
        conv_weight_kernel = da.real(
            dafft.fftshift(dafft.fft2(dafft.ifftshift(weight_baseline_pb_sqrd,
                                                      axes=(3, 4)),
                                      axes=(3, 4)),
                           axes=(3, 4)))

        list_conv_kernel = []
        list_weight_conv_kernel = []
        list_conv_support = []
        iter_chunks_indx = itertools.product(
            np.arange(n_chunks_in_each_dim[0]),
            np.arange(n_chunks_in_each_dim[1]))
        for c_chan, c_pol in iter_chunks_indx:
            delayed_kernels_and_support = dask.delayed(
                resize_and_calc_support)(
                    conv_kernel.partitions[:, c_chan, c_pol, :, :],
                    conv_weight_kernel.partitions[:, c_chan, c_pol, :, :],
                    dask.delayed(_gcf_parms), dask.delayed(_grid_parms))
            list_conv_kernel.append(
                da.from_delayed(
                    delayed_kernels_and_support[0],
                    (len(pb_ant_pairs), chan_chunk_sizes[0][c_chan],
                     pol_chunk_sizes[0][c_pol],
                     _gcf_parms['resize_conv_size'][0],
                     _gcf_parms['resize_conv_size'][1]),
                    dtype=np.double))
            list_weight_conv_kernel.append(
                da.from_delayed(
                    delayed_kernels_and_support[1],
                    (len(pb_ant_pairs), chan_chunk_sizes[0][c_chan],
                     pol_chunk_sizes[0][c_pol],
                     _gcf_parms['resize_conv_size'][0],
                     _gcf_parms['resize_conv_size'][1]),
                    dtype=np.double))
            list_conv_support.append(
                da.from_delayed(
                    delayed_kernels_and_support[2],
                    (len(pb_ant_pairs), chan_chunk_sizes[0][c_chan],
                     pol_chunk_sizes[0][c_pol], 2),
                    dtype=int))

        conv_kernel = da.concatenate(list_conv_kernel, axis=1)
        weight_conv_kernel = da.concatenate(list_weight_conv_kernel, axis=1)
        conv_support = da.concatenate(list_conv_support, axis=1)

        dataset_dict['SUPPORT'] = xr.DataArray(
            conv_support,
            dims=['conv_baseline', 'conv_chan', 'conv_pol', 'xy'])
        dataset_dict['PS_CORR_IMAGE'] = xr.DataArray(ps_image, dims=['l', 'm'])
        dataset_dict['WEIGHT_CONV_KERNEL'] = xr.DataArray(
            weight_conv_kernel,
            dims=['conv_baseline', 'conv_chan', 'conv_pol', 'u', 'v'])
    elif (_gcf_parms['a_term'] == False) and (_gcf_parms['ps_term'] == True):
        support = np.array([7, 7])
        dataset_dict['SUPPORT'] = xr.DataArray(
            support[None, None, None, :],
            dims=['conv_baseline', 'conv_chan', 'conv_pol', 'xy'])
        conv_kernel = np.zeros((1, 1, 1, _gcf_parms['resize_conv_size'][0],
                                _gcf_parms['resize_conv_size'][1]))
        center = _gcf_parms['resize_conv_size'] // 2
        center_embed = np.array(ps_term.shape) // 2
        conv_kernel[0, 0, 0,
                    center[0] - center_embed[0]:center[0] + center_embed[0],
                    center[1] - center_embed[1]:center[1] +
                    center_embed[1]] = ps_term
        dataset_dict['PS_CORR_IMAGE'] = xr.DataArray(ps_image, dims=['l', 'm'])
        ##Enabled for test
        #dataset_dict['WEIGHT_CONV_KERNEL'] = xr.DataArray(conv_kernel, dims=['conv_baseline','conv_chan','conv_pol','u','v'])
    elif (_gcf_parms['a_term'] == True) and (_gcf_parms['ps_term'] == False):
        conv_kernel = da.real(
            dafft.fftshift(dafft.fft2(dafft.ifftshift(baseline_pb,
                                                      axes=(3, 4)),
                                      axes=(3, 4)),
                           axes=(3, 4)))
        conv_weight_kernel = da.real(
            dafft.fftshift(dafft.fft2(dafft.ifftshift(weight_baseline_pb_sqrd,
                                                      axes=(3, 4)),
                                      axes=(3, 4)),
                           axes=(3, 4)))

        #        x = conv_weight_kernel.compute()
        #        print("&*&*&*&",x.shape)
        #        plt.figure()
        #        #plt.imshow(x[0,0,0,240:260,240:260])
        #        plt.imshow(x[0,0,0,:,:])
        #        plt.show()

        list_conv_kernel = []
        list_weight_conv_kernel = []
        list_conv_support = []
        iter_chunks_indx = itertools.product(
            np.arange(n_chunks_in_each_dim[0]),
            np.arange(n_chunks_in_each_dim[1]))
        for c_chan, c_pol in iter_chunks_indx:
            delayed_kernels_and_support = dask.delayed(
                resize_and_calc_support)(
                    conv_kernel.partitions[:, c_chan, c_pol, :, :],
                    conv_weight_kernel.partitions[:, c_chan, c_pol, :, :],
                    dask.delayed(_gcf_parms), dask.delayed(_grid_parms))
            list_conv_kernel.append(
                da.from_delayed(
                    delayed_kernels_and_support[0],
                    (len(pb_ant_pairs), chan_chunk_sizes[0][c_chan],
                     pol_chunk_sizes[0][c_pol],
                     _gcf_parms['resize_conv_size'][0],
                     _gcf_parms['resize_conv_size'][1]),
                    dtype=np.double))
            list_weight_conv_kernel.append(
                da.from_delayed(
                    delayed_kernels_and_support[1],
                    (len(pb_ant_pairs), chan_chunk_sizes[0][c_chan],
                     pol_chunk_sizes[0][c_pol],
                     _gcf_parms['resize_conv_size'][0],
                     _gcf_parms['resize_conv_size'][1]),
                    dtype=np.double))
            list_conv_support.append(
                da.from_delayed(
                    delayed_kernels_and_support[2],
                    (len(pb_ant_pairs), chan_chunk_sizes[0][c_chan],
                     pol_chunk_sizes[0][c_pol], 2),
                    dtype=int))

        conv_kernel = da.concatenate(list_conv_kernel, axis=1)
        weight_conv_kernel = da.concatenate(list_weight_conv_kernel, axis=1)
        conv_support = da.concatenate(list_conv_support, axis=1)

        #        x = weight_conv_kernel.compute()
        #        print("&*&*&*&",x.shape)
        #        plt.figure()
        #        #plt.imshow(x[0,0,0,240:260,240:260])
        #        plt.imshow(x[0,0,0,:,:])
        #        plt.show()

        dataset_dict['SUPPORT'] = xr.DataArray(
            conv_support,
            dims=['conv_baseline', 'conv_chan', 'conv_pol', 'xy'])
        dataset_dict['WEIGHT_CONV_KERNEL'] = xr.DataArray(
            weight_conv_kernel,
            dims=['conv_baseline', 'conv_chan', 'conv_pol', 'u', 'v'])
        dataset_dict['PS_CORR_IMAGE'] = xr.DataArray(da.from_array(
            np.ones(_grid_parms['image_size']),
            chunks=_grid_parms['image_size']),
                                                     dims=['l', 'm'])
    else:
        assert (
            False
        ), "######### ERROR: At least 'a_term' or 'ps_term' must be true."

    ###########################################################
    #Make phase gradient (one for each field)
    field_phase_dir = _gcf_parms['field_phase_dir']
    field_phase_dir = da.from_array(
        field_phase_dir,
        chunks=(np.ceil(len(field_phase_dir) / _gcf_parms['a_chan_num_chunk']),
                2))
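    # By the Fourier shift theorem, each field's pointing offset from the image
    # phase center becomes a linear phase gradient across the (u,v) convolution
    # kernel; one gradient image is produced per field.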

    phase_gradient = da.blockwise(make_phase_gradient,
                                  ("n_field", "n_x", "n_y"),
                                  field_phase_dir, ("n_field", "2"),
                                  gcf_parms=_gcf_parms,
                                  grid_parms=_grid_parms,
                                  dtype=complex,
                                  new_axes={
                                      "n_x": _gcf_parms['resize_conv_size'][0],
                                      "n_y": _gcf_parms['resize_conv_size'][1]
                                  })

    ###########################################################

    #coords = {'baseline': np.arange(n_unique_ant), 'chan': pb_freq, 'pol' : pb_pol, 'u': np.arange(resize_conv_size[0]), 'v': np.arange(resize_conv_size[1]), 'xy':np.arange(2), 'field':np.arange(field_phase_dir.shape[0]),'l':np.arange(_gridding_convolution_parms['imsize'][0]),'m':np.arange(_gridding_convolution_parms['imsize'][1])}

    #coords = { 'conv_chan': pb_freq, 'conv_pol' : pb_pol, 'u': np.arange(resize_conv_size[0]), 'v': np.arange(resize_conv_size[1]), 'xy':np.arange(2), 'field':np.arange(field_phase_dir.shape[0]),'l':np.arange(_gridding_convolution_parms['imsize'][0]),'m':np.arange(_gridding_convolution_parms['imsize'][1])}

    coords = {
        'u': np.arange(_gcf_parms['resize_conv_size'][0]),
        'v': np.arange(_gcf_parms['resize_conv_size'][1]),
        'xy': np.arange(2),
        'field_id': field_id,
        'l': np.arange(_grid_parms['image_size'][0]),
        'm': np.arange(_grid_parms['image_size'][1])
    }

    dataset_dict['CF_BASELINE_MAP'] = xr.DataArray(
        cf_baseline_map,
        dims=('baseline')).chunk(_gcf_parms['vis_data_chunks'][1])
    dataset_dict['CF_CHAN_MAP'] = xr.DataArray(
        cf_chan_map, dims=('chan')).chunk(_gcf_parms['vis_data_chunks'][2])
    dataset_dict['CF_POL_MAP'] = xr.DataArray(cf_pol_map, dims=('pol')).chunk(
        _gcf_parms['vis_data_chunks'][3])

    dataset_dict['CONV_KERNEL'] = xr.DataArray(conv_kernel,
                                               dims=('conv_baseline',
                                                     'conv_chan', 'conv_pol',
                                                     'u', 'v'))
    dataset_dict['PHASE_GRADIENT'] = xr.DataArray(phase_gradient,
                                                  dims=('field_id', 'u', 'v'))

    #print(field_id)
    gcf_dataset = xr.Dataset(dataset_dict, coords=coords)
    gcf_dataset.attrs['cell_uv'] = 1 / (_grid_parms['image_size_padded'] *
                                        _grid_parms['cell_size'] *
                                        _gcf_parms['oversampling'])
    gcf_dataset.attrs['oversampling'] = _gcf_parms['oversampling']

    #list_xarray_data_variables = [gcf_dataset['A_TERM'],gcf_dataset['WEIGHT_A_TERM'],gcf_dataset['A_SUPPORT'],gcf_dataset['WEIGHT_A_SUPPORT'],gcf_dataset['PHASE_GRADIENT']]
    #return _store(gcf_dataset,list_xarray_data_variables,_storage_parms)

    print(
        '#########################  Created graph for make_gridding_convolution_function #########################'
    )

    return gcf_dataset
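
# ---------------------------------------------------------------------------
# A minimal usage sketch for make_gridding_convolution_function (hypothetical
# values; diameters in meters, unique_ant_indx with one entry per antenna):
#
# gcf_parms = {'function': 'alma_airy',
#              'list_dish_diameters': [12.0, 7.0],
#              'list_blockage_diameters': [0.75, 0.75],
#              'unique_ant_indx': unique_ant_indx,
#              'image_phase_center': [2.0, -0.5]}   # radians, hypothetical
# gcf_dataset = make_gridding_convolution_function(mxds, gcf_parms, grid_parms,
#                                                  {'xds': 'xds0'})
# ---------------------------------------------------------------------------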
Example #6
def self_cal(vis_mxds, solve_parms, sel_parms):
    """
    .. todo::
        This function is not yet implemented
    
    Calculate antenna gain solutions according to the parameters in solve_parms.
    The input dataset has been pre-averaged/processed and the model visibilities exist.
    
    Iteratively solve the system of equations g_i g_j* = V_data_ij/V_model_ij  for all ij.
    Construct a separate solution for each timestep and channel in the input dataset.
    
    Options :
    
    amp, phase or both
    solution type (?) G-term, D-term, etc...
    Data array for which to calculate solutions. Default='DATA'
    
    TBD :
    
    Single method with options for solutions of different types ?
    Or, separate methods for G/B, D, P etc.. : solve_B, solve_D, solve_B, etc...
          
    Returns
    -------
    
    """

    print('######################### Start self_cal #########################')
    import numpy as np
    from numba import jit
    import time
    import math
    import dask.array.fft as dafft
    import xarray as xr
    import dask.array as da
    import matplotlib.pylab as plt
    import dask
    import copy, os
    from numcodecs import Blosc
    from itertools import cycle
    import itertools
    from ._calibration_utils._check_calibration_parms import _check_self_cal
    from cngi._utils._check_parms import _check_sel_parms, _check_existence_sel_parms

    _mxds = vis_mxds.copy(deep=True)
    _sel_parms = copy.deepcopy(sel_parms)
    _solve_parms = copy.deepcopy(solve_parms)
    
    assert('xds' in _sel_parms), "######### ERROR: xds must be specified in sel_parms" #Can't have a default since xds names are not fixed.
    _vis_xds = _mxds.attrs[_sel_parms['xds']]
    
    assert(_check_self_cal(_solve_parms)), "######### ERROR: solve_parms checking failed"

    _check_sel_parms(_vis_xds,_sel_parms,new_or_modified_data_variables={'corrected_data':'CORRECTED_DATA','corrected_data_weight':'CORRECTED_DATA_WEIGHT','corrected_flag':'CORRECTED_FLAG','flag_info':'FLAG_INFO'})
    
    # data_groups with model_data are not handled correctly by the converter.
    _sel_parms["data_group_in"]["model_data"] = "MODEL_DATA"
    _sel_parms["data_group_out"]["model_data"] = "MODEL_DATA"

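    # Collapse the baseline, chan and pol chunking to single chunks; only the
    # time axis stays chunked for the per-chunk gain solve below.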
    chunk_sizes = list(_vis_xds[_sel_parms["data_group_in"]["data"]].chunks)
    chunk_sizes[1] = (np.sum(chunk_sizes[1]),)
    chunk_sizes[2] = (np.sum(chunk_sizes[2]),)
    chunk_sizes[3] = (np.sum(chunk_sizes[3]),)
    n_pol = _vis_xds.dims['pol']
 
    #assert n_chunks_in_each_dim[3] == 1, "Chunking is not allowed on pol dim."
    n_chunks_in_each_dim = list(_vis_xds[_sel_parms["data_group_in"]["data"]].data.numblocks)
    n_chunks_in_each_dim[1] = 1 #baseline
    n_chunks_in_each_dim[2] = 1 #chan
    n_chunks_in_each_dim[3] = 1 #pol

    #Iterate over time, baseline, chan and pol chunks (only time is actually chunked here)
    iter_chunks_indx = itertools.product(np.arange(n_chunks_in_each_dim[0]), np.arange(n_chunks_in_each_dim[1]),
                                         np.arange(n_chunks_in_each_dim[2]), np.arange(n_chunks_in_each_dim[3]))
                                         

    vis_corrected_list = _ndim_list(n_chunks_in_each_dim)
    weight_corrected_list = _ndim_list(n_chunks_in_each_dim)
    flag_corrected_list = _ndim_list(n_chunks_in_each_dim)
    finfo_list = _ndim_list(n_chunks_in_each_dim)
  
    # Build graph
    for c_time, c_baseline, c_chan, c_pol in iter_chunks_indx:
        #c_time, c_baseline, c_chan, c_pol
        cal_solution_chunk = dask.delayed(_gain_selfcal_chunk)(
            _vis_xds[_sel_parms["data_group_in"]["data"]].data.partitions[c_time, :, :, :],
            _vis_xds[_sel_parms["data_group_in"]["model_data"]].data.partitions[c_time, :, :, :],
            _vis_xds[_sel_parms["data_group_in"]["weight"]].data.partitions[c_time, :, :, :],
            _vis_xds[_sel_parms["data_group_in"]["flag"]].data.partitions[c_time,  :, :, :],
            _vis_xds["ANTENNA1"].data.partitions[:],
            _vis_xds["ANTENNA2"].data.partitions[:],
            dask.delayed(_solve_parms))
            
        
        #print(cal_solution_chunk)
        vis_corrected_list[c_time][c_baseline][c_chan][c_pol] = da.from_delayed(cal_solution_chunk[0],(chunk_sizes[0][c_time],chunk_sizes[1][c_baseline],chunk_sizes[2][c_chan],chunk_sizes[3][c_pol]),dtype=np.complex128)
        
        weight_corrected_list[c_time][c_baseline][c_chan][c_pol] = da.from_delayed(cal_solution_chunk[1],(chunk_sizes[0][c_time],chunk_sizes[1][c_baseline],chunk_sizes[2][c_chan],chunk_sizes[3][c_pol]),dtype=np.complex128)
        
        flag_corrected_list[c_time][c_baseline][c_chan][c_pol] = da.from_delayed(cal_solution_chunk[2],(chunk_sizes[0][c_time],chunk_sizes[1][c_baseline],chunk_sizes[2][c_chan],chunk_sizes[3][c_pol]),dtype=np.complex128)
        
        finfo_list[c_time][c_baseline][c_chan][c_pol] = da.from_delayed(cal_solution_chunk[3],(3,),dtype=np.complex128)
        
    _vis_xds[_sel_parms['data_group_out']['corrected_data']] = xr.DataArray(da.block(vis_corrected_list),dims=_vis_xds[_sel_parms['data_group_in']['data']].dims).chunk(_vis_xds[_sel_parms["data_group_in"]["data"]].chunks)
    _vis_xds[_sel_parms['data_group_out']['corrected_data_weight']] = xr.DataArray(da.block(weight_corrected_list),dims=_vis_xds[_sel_parms['data_group_in']['data']].dims).chunk(_vis_xds[_sel_parms["data_group_in"]["data"]].chunks)
    _vis_xds[_sel_parms['data_group_out']['corrected_flag']] = xr.DataArray(da.block(flag_corrected_list),dims=_vis_xds[_sel_parms['data_group_in']['weight']].dims).chunk(_vis_xds[_sel_parms["data_group_in"]["data"]].chunks)
    
    #Will this be added to the data group model?
    _vis_xds[_sel_parms['data_group_out']['flag_info']] = xr.DataArray(np.sum(da.block(finfo_list),axis=(0,1,2)),dims=['flag_info'])
    #_vis_xds.attrs['flag_info'] = xr.DataArray(np.sum(da.block(finfo_list),axis=(0,1,2)),dims={'flag_info':3})

    
    #Update data_group
    _vis_xds.attrs['data_groups'][0] = {**_vis_xds.attrs['data_groups'][0], **{_sel_parms['data_group_out']['id']:_sel_parms['data_group_out']}}
    
    print('######################### Created self_cal graph #########################')
    return _mxds
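
The per-chunk solver _gain_selfcal_chunk is referenced above but not shown. Below is a minimal sketch, not the library's implementation, of the iteration the docstring describes: solve g_i g_j* = V_data_ij / V_model_ij for one timestep and channel. The damped averaging update and the omission of weights and flags are simplifying assumptions.

import numpy as np

def toy_gain_solve(vis, model, ant1, ant2, n_ant, n_iter=50):
    # vis, model: (n_baseline,) complex; ant1, ant2: (n_baseline,) antenna indices.
    ratio = vis / model                              # g_i g_j* per baseline ij
    gains = np.ones(n_ant, dtype=np.complex128)
    for _ in range(n_iter):
        acc = np.zeros(n_ant, dtype=np.complex128)
        counts = np.zeros(n_ant)
        for b in range(len(ratio)):
            i, j = ant1[b], ant2[b]
            # From g_i g_j* = ratio: g_i = ratio * g_j / |g_j|^2, and conjugate for g_j.
            acc[i] += ratio[b] * gains[j] / np.abs(gains[j]) ** 2
            acc[j] += np.conj(ratio[b]) * gains[i] / np.abs(gains[i]) ** 2
            counts[i] += 1
            counts[j] += 1
        gains = 0.5 * (gains + acc / np.maximum(counts, 1))   # damped update
    return gains  # determined up to an overall phase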
Example #7
def make_pb(img_xds, pb_parms, grid_parms, sel_parms):
    """
    The make_pb function currently supports rotationally symmetric airy disk primary beams. Primary beams can be generated for any number of dishes.
    The pb_parms['list_dish_diameters'] and pb_parms['list_blockage_diameters'] must be specified for each dish.
    
    Parameters
    ----------
    img_xds : xarray.core.dataset.Dataset
        Input image dataset.
    pb_parms : dictionary
    pb_parms['list_dish_diameters'] : list of number
        The list of dish diameters.
    pb_parms['list_blockage_diameters'] : list of number
        The list of blockage diameters for each dish.
    pb_parms['function'] : {'alma_airy','airy'}, default='alma_airy'
        Only Airy disk functions are currently supported.
    grid_parms : dictionary
    grid_parms['image_size'] : list of int, length = 2
        The image size (no padding).
    grid_parms['cell_size']  : list of number, length = 2, units = arcseconds
        The image cell size.
    grid_parms['chan_mode'] : {'continuum'/'cube'}, default = 'continuum'
        Create a continuum or cube image.
    grid_parms['fft_padding'] : number, acceptable range [1,100], default = 1.2
        The factor that determines how much the gridded visibilities are padded before the fft is done.
    sel_parms : dictionary
    sel_parms['pb'] : str, default = 'PB'
        The created PB name.
    Returns
    -------
    img_xds : xarray.core.dataset.Dataset
        The image dataset with the created primary beam (PB) added.
    """
    from cngi._utils._check_parms import _check_sel_parms, _check_existence_sel_parms
    from ._imaging_utils._check_imaging_parms import _check_pb_parms, _check_grid_parms
    import numpy as np
    import dask.array as da
    import copy, os
    import xarray as xr
    import matplotlib.pylab as plt

    print('######################### Start make_pb #########################')

    _img_xds = img_xds.copy(deep=True)
    _grid_parms = copy.deepcopy(grid_parms)
    _pb_parms = copy.deepcopy(pb_parms)
    _sel_parms = copy.deepcopy(sel_parms)

    #Check img data_group
    _check_sel_parms(_img_xds,
                     _sel_parms,
                     new_or_modified_data_variables={'pb': 'PB'},
                     append_to_in_id=True)

    assert (_check_pb_parms(_img_xds, _pb_parms)
            ), "######### ERROR: pb_parms checking failed"
    assert (_check_grid_parms(_grid_parms)
            ), "######### ERROR: grid_parms checking failed"

    #parameter check
    #cube continuum check

    if _pb_parms['function'] == 'airy':
        from ._imaging_utils._make_pb_symmetric import _airy_disk
        pb_func = _airy_disk
    elif _pb_parms['function'] == 'alma_airy':
        from ._imaging_utils._make_pb_symmetric import _alma_airy_disk
        pb_func = _alma_airy_disk
    else:
        assert False, "######### ERROR: Only the 'airy' and 'alma_airy' functions are currently supported."

    _pb_parms['ipower'] = 2
    _pb_parms['center_indx'] = []

    chan_chunk_size = _img_xds.chan_width.chunks[0]
    freq_coords = da.from_array(_img_xds.coords['chan'].values,
                                chunks=(chan_chunk_size))

    pol = _img_xds.pol.values  #don't want chunking here

    chunksize = (_grid_parms['image_size'][0], _grid_parms['image_size'][1],
                 chan_chunk_size, len(pol),
                 len(_pb_parms['list_dish_diameters']))

    #print(freq_coords.chunksize)
    #print(chan_chunk_size)
    #print(chunksize)

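    # pb_func maps each 1-D chan block of freq_coords to a 5-D
    # (l, m, chan, pol, dish_type) block; new_axis lists the four added axes.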
    pb = da.map_blocks(pb_func,
                       freq_coords,
                       pol,
                       _pb_parms,
                       _grid_parms,
                       chunks=chunksize,
                       new_axis=[0, 1, 3, 4],
                       dtype=np.double)

    ## Add PB to img_xds

    _img_xds[_sel_parms['data_group_out']['pb']] = xr.DataArray(
        pb[:, :, None, :, :, :],
        dims=['l', 'm', 'time', 'chan', 'pol', 'dish_type'])
    _img_xds = _img_xds.assign_coords(
        {'dish_type': np.arange(len(_pb_parms['list_dish_diameters']))})
    _img_xds.attrs['data_groups'][0] = {
        **_img_xds.attrs['data_groups'][0],
        **{
            _sel_parms['data_group_out']['id']: _sel_parms['data_group_out']
        }
    }

    print(
        '######################### Created graph for make_pb #########################'
    )
    return _img_xds
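
The imported _airy_disk and _alma_airy_disk are not shown here. A rough sketch of an unblocked Airy power beam for a single dish and frequency, consistent with pb_parms['ipower'] = 2 squaring the voltage pattern; the function name and the omission of blockage are assumptions:

import numpy as np
from scipy.special import j1

def toy_airy_beam(radius_rad, dish_diameter_m, freq_hz):
    # radius_rad: array of angular offsets from the beam center, in radians.
    c = 299792458.0
    x = np.pi * dish_diameter_m * np.sin(np.asarray(radius_rad, dtype=float)) * freq_hz / c
    voltage = np.ones_like(x)
    nz = x != 0
    voltage[nz] = 2.0 * j1(x[nz]) / x[nz]   # Airy voltage pattern, unity at the center
    return voltage ** 2                     # ipower = 2: power beam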
Example #8
def direction_rotate(mxds, rotation_parms, sel_parms):
    """
    Rotate uvw coordinates and phase rotate visibilities. For a joint mosaic rotation_parms['common_tangent_reprojection'] must be true.
    The specified phase center and the field phase centers are assumed to be in the same frame.
    East-west arrays, ephemeris objects and objects in the near field are not supported.
    
    Parameters
    ----------
    mxds : xarray.core.dataset.Dataset
        Input vis.zarr multi dataset.
    rotation_parms : dictionary
    rotation_parms['new_phase_center'] : list of number, length = 2, units = radians
       The phase center to rotate to (right ascension and declination).
    rotation_parms['common_tangent_reprojection']  : bool, default = True
       If true common tangent reprojection is used (should be true if a joint mosaic image is being created).
    rotation_parms['single_precision'] : bool, default = True
       If rotation_parms['single_precision'] is true then the output visibilities are cast from 128 bit complex to 64 bit complex. Mathematical operations are always done in double precision.
    sel_parms : dict
    sel_parms['data_group_in'] : dict, default = vis_dataset.data_group[0][0]
        Only the id has to be specified
        The names of the input data and uvw data variables.
        For example sel_parms['data_group_in'] = {'id':'1', 'data':'DATA','uvw':'UVW'}.
    sel_parms['data_group_out'] : dict, default = {**_vis_dataset.data_group[0],**{'id':str(new_data_group_id),'uvw':'UVW_ROT','data':'DATA_ROT','field_id':rotation_parms['new_phase_center']}}
        The names of the new data and uvw data variables that have been direction rotated.
        For example sel_parms['data_group_out'] = {'id':2,'data':'DATA_ROT','uvw':'UVW_ROT'}.
        The new_data_group_id is the next available id.
    Returns
    -------
    mxds : xarray.core.dataset.Dataset
        The input multi-xarray Dataset with the rotated uvw and visibility data added to the selected xds.
    """
    #Based on CASACORE measures/Measures/UVWMachine.cc and CASA code/synthesis/TransformMachines2/FTMachine.cc::girarUVW

    print(
        '######################### Start direction_rotate #########################'
    )

    import copy
    import numpy as np
    import dask.array as da
    import xarray as xr
    from cngi._utils._check_parms import _check_sel_parms, _check_existence_sel_parms
    from ._imaging_utils._check_imaging_parms import _check_rotation_parms
    import time
    import dask

    #start_time = time.time()
    #Deep copy so that inputs are not modified
    _mxds = mxds.copy(deep=True)
    _sel_parms = copy.deepcopy(sel_parms)
    _rotation_parms = copy.deepcopy(rotation_parms)

    ##############Parameter Checking and Set Defaults##############

    assert (_check_rotation_parms(_rotation_parms)
            ), "######### ERROR: rotation_parms checking failed"

    assert (
        'xds' in _sel_parms
    ), "######### ERROR: xds must be specified in sel_parms"  #Can't have a default since xds names are not fixed.
    _vis_dataset = _mxds.attrs[_sel_parms['xds']]

    #{'uvw':'UVW_ROT','data':'DATA_ROT','properties':{'new_phase_center':_rotation_parms['new_phase_center']}
    _check_sel_parms(_vis_dataset,
                     _sel_parms,
                     new_or_modified_data_variables={
                         'uvw': 'UVW_ROT',
                         'data': 'DATA_ROT'
                     })
    #If only uvw is to be modified drop data
    #print('copy and check',time.time()-start_time)

    #################################################################

    #start_time = time.time()
    #Calculate rotation matrices for each field (not parallelized)
    #n_field number of fields in _vis_dataset
    #uvw_rotmat n_field x 3 x 3
    #phase_rotation n_field x 3
    #rot_field_id n_field
    uvw_rotmat, phase_rotation, rot_field_id = calc_rotation_mats(
        _vis_dataset, _mxds.FIELD, _rotation_parms)
    #print('calc_rotation_mats',time.time()-start_time)

    #start_time = time.time()
    #Apply rotation matrix to uvw
    #uvw time x baseline x 3
    uvw = da.map_blocks(apply_rotation_matrix,
                        _vis_dataset[_sel_parms['data_group_in']['uvw']].data,
                        _vis_dataset.FIELD_ID.data[:, :, None],
                        uvw_rotmat,
                        rot_field_id,
                        dtype=np.double)
    #print('apply_rotation_matrix',time.time()-start_time)

    #start_time = time.time()
    #Phase shift vis data
    #vis_rot time x baseline x chan x pol
    chan_chunk_size = _vis_dataset[_sel_parms['data_group_in']
                                   ['data']].chunks[2]
    freq_chan = da.from_array(_vis_dataset.coords['chan'].values,
                              chunks=(chan_chunk_size))
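    # Singleton axes (None) broadcast uvw, FIELD_ID and freq_chan against the
    # (time, baseline, chan, pol) visibility blocks inside apply_phasor.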
    vis_rot = da.map_blocks(
        apply_phasor,
        _vis_dataset[_sel_parms['data_group_in']['data']].data,
        uvw[:, :, :, None],
        _vis_dataset.FIELD_ID.data[:, :, None, None],
        freq_chan[None, None, :, None],
        phase_rotation,
        rot_field_id,
        _rotation_parms['common_tangent_reprojection'],
        _rotation_parms['single_precision'],
        dtype=np.complex128)
    #print('apply_phasor',time.time()-start_time)

    #Add new datavariables
    _vis_dataset[_sel_parms['data_group_out']['uvw']] = xr.DataArray(
        uvw, dims=_vis_dataset[_sel_parms['data_group_in']['uvw']].dims)
    _vis_dataset[_sel_parms['data_group_out']['data']] = xr.DataArray(
        vis_rot, dims=_vis_dataset[_sel_parms['data_group_in']['data']].dims)

    #Update data_group
    _vis_dataset.attrs['data_groups'][0] = {
        **_vis_dataset.attrs['data_groups'][0],
        **{
            _sel_parms['data_group_out']['id']: _sel_parms['data_group_out']
        }
    }

    print(
        '######################### Created graph for direction_rotate #########################'
    )
    return _mxds
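
apply_phasor is not shown above. A minimal sketch of the per-visibility phase shift it implies: multiply each visibility by a phasor built from the dot product of its uvw with the phase_rotation vector, scaled by frequency over c. The sign convention and the single-field simplification are assumptions:

import numpy as np

def toy_apply_phasor(vis, uvw, phase_rotation, freq_chan):
    # vis: (n_time, n_baseline, n_chan) complex visibilities
    # uvw: (n_time, n_baseline, 3) in metres; phase_rotation: (3,) for one field
    c = 299792458.0
    path = np.einsum('tbk,k->tb', uvw, phase_rotation)   # path-length offset, metres
    phasor = np.exp(-2j * np.pi * path[:, :, None] * freq_chan[None, None, :] / c)
    return vis * phasor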
Example #9
def make_grid(vis_mxds, img_xds, grid_parms, vis_sel_parms, img_sel_parms):
    """
    Grids the visibility data using a prolate spheroidal convolution function and stores the gridded visibilities and the sum of weights in the image dataset.
    
    Parameters
    ----------
    vis_mxds : xarray.core.dataset.Dataset
        Input multi-xarray Dataset with global data.
    img_xds : xarray.core.dataset.Dataset
        Input image dataset.
    grid_parms : dictionary
    grid_parms['image_size'] : list of int, length = 2
        The image size (no padding).
    grid_parms['cell_size']  : list of number, length = 2, units = arcseconds
        The image cell size.
    grid_parms['chan_mode'] : {'continuum'/'cube'}, default = 'continuum'
        Create a continuum or cube image.
    grid_parms['fft_padding'] : number, acceptable range [1,100], default = 1.2
        The factor that determines how much the gridded visibilities are padded before the fft is done.
    vis_sel_parms : dictionary
    vis_sel_parms['xds'] : str
        The xds within the mxds to use to calculate the imaging weights for.
    vis_sel_parms['data_group_in_id'] : int, default = first id in xds.data_groups
        The data group in the xds to use.
    img_sel_parms : dictionary
    img_sel_parms['data_group_in_id'] : int, default = first id in xds.data_groups
        The data group in the image xds to use.
    img_sel_parms['grid'] : str, default ='GRID'
        The created grid name.
    img_sel_parms['sum_weight'] : str, default ='SUM_WEIGHT'
        The created sum of weights name.
    Returns
    -------
    img_xds : xarray.core.dataset.Dataset
        The image_dataset will contain the created grid and the sum of weights.
    """
    print('######################### Start make_grid #########################')
    import numpy as np
    from numba import jit
    import time
    import math
    import dask.array.fft as dafft
    import xarray as xr
    import dask.array as da
    import matplotlib.pylab as plt
    import dask
    import copy, os
    from numcodecs import Blosc
    from itertools import cycle
    
    from cngi._utils._check_parms import _check_sel_parms, _check_existence_sel_parms
    from ._imaging_utils._check_imaging_parms import _check_grid_parms
    from ._imaging_utils._gridding_convolutional_kernels import _create_prolate_spheroidal_kernel, _create_prolate_spheroidal_kernel_1D
    from ._imaging_utils._standard_grid import _graph_standard_grid
    from ._imaging_utils._remove_padding import _remove_padding
    from ._imaging_utils._aperture_grid import _graph_aperture_grid
    from cngi.image import make_empty_sky_image
    
    #print('****',sel_parms,'****')
    _mxds = vis_mxds.copy(deep=True)
    _img_xds = img_xds.copy(deep=True)
    _vis_sel_parms = copy.deepcopy(vis_sel_parms)
    _img_sel_parms = copy.deepcopy(img_sel_parms)
    _grid_parms = copy.deepcopy(grid_parms)

    ##############Parameter Checking and Set Defaults##############
    assert(_check_grid_parms(_grid_parms)), "######### ERROR: grid_parms checking failed"
    assert('xds' in _vis_sel_parms), "######### ERROR: xds must be specified in sel_parms" #Can't have a default since xds names are not fixed.
    _vis_xds = _mxds.attrs[_vis_sel_parms['xds']]
    
    #Check vis data_group
    _check_sel_parms(_vis_xds,_vis_sel_parms)
    
    #Check img data_group
    _check_sel_parms(_img_xds,_img_sel_parms,new_or_modified_data_variables={'sum_weight':'SUM_WEIGHT','grid':'GRID'},append_to_in_id=True)

    ##################################################################################
    
    # Creating gridding kernel
    _grid_parms['oversampling'] = 100
    _grid_parms['support'] = 7
    
    cgk, correcting_cgk_image = _create_prolate_spheroidal_kernel(_grid_parms['oversampling'], _grid_parms['support'], _grid_parms['image_size_padded'])
    cgk_1D = _create_prolate_spheroidal_kernel_1D(_grid_parms['oversampling'], _grid_parms['support'])
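    # cgk_1D is passed to the gridder below; correcting_cgk_image (the
    # image-domain taper of the kernel) is not applied in make_grid, so the
    # returned grid is left uncorrected.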
    
    _grid_parms['complex_grid'] = True
    _grid_parms['do_psf'] = False
    grids_and_sum_weights = _graph_standard_grid(_vis_xds, cgk_1D, _grid_parms, _vis_sel_parms)
    
    
    if _grid_parms['chan_mode'] == 'continuum':
        freq_coords = [da.mean(_vis_xds.coords['chan'].values)]
        chan_width = da.from_array([da.mean(_vis_xds['chan_width'].data)],chunks=(1,))
        imag_chan_chunk_size = 1
    elif _grid_parms['chan_mode'] == 'cube':
        freq_coords = _vis_xds.coords['chan'].values
        chan_width = _vis_xds['chan_width'].data
        imag_chan_chunk_size = _vis_xds.DATA.chunks[2][0]
    
    phase_center = _grid_parms['phase_center']
    image_size = _grid_parms['image_size']
    cell_size = _grid_parms['cell_size']
    
    pol_coords = _vis_xds.pol.data
    time_coords = [_vis_xds.time.mean().data]
    
    _img_xds = make_empty_sky_image(_img_xds,phase_center,image_size,cell_size,freq_coords,chan_width,pol_coords,time_coords)
    
    _img_xds[_img_sel_parms['data_group_out']['sum_weight']] = xr.DataArray(grids_and_sum_weights[1][None,:,:], dims=['time','chan','pol'])
    _img_xds[_img_sel_parms['data_group_out']['grid']] = xr.DataArray(grids_and_sum_weights[0][:,:,None,:,:], dims=['u', 'v', 'time', 'chan', 'pol'])
    _img_xds.attrs['data_groups'][0] = {**_img_xds.attrs['data_groups'][0],**{_img_sel_parms['data_group_out']['id']:_img_sel_parms['data_group_out']}}
    
    
    print('######################### Created graph for make_grid #########################')
    return _img_xds
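
make_grid stops at the gridded visibilities. A rough sketch of the remaining steps that would turn one (u, v) plane of GRID and its SUM_WEIGHT into a dirty image, an inverse FFT plus a sum-of-weights normalization; it deliberately omits the prolate spheroidal correction (correcting_cgk_image above) and the removal of fft padding, so it is illustrative only:

import numpy as np

def toy_grid_to_dirty_image(grid, sum_weight):
    # grid: (n_u, n_v) complex gridded visibilities for one chan/pol plane.
    image = np.fft.fftshift(np.fft.ifft2(np.fft.ifftshift(grid))).real
    # Undo the 1/N scaling of ifft2 and normalize by the sum of gridded weights.
    return image * grid.size / sum_weight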
Example #10
def make_imaging_weight(vis_mxds, imaging_weights_parms, grid_parms,
                        sel_parms):
    """
    Creates the imaging weight data variable that has dimensions time x baseline x chan x pol (matches the visibility data variable).
    The weight density can be averaged over channels or calculated independently for each channel using imaging_weights_parms['chan_mode'].
    The following imaging weighting schemes are supported 'natural', 'uniform', 'briggs', 'briggs_abs'.
    The grid_parms['image_size'] and grid_parms['cell_size'] should usually be the same values that will be used for subsequent synthesis blocks (for example making the psf).
    To achieve something similar to 'superuniform' weighting in CASA tclean, grid_parms['image_size'] and grid_parms['cell_size'] can be varied relative to the values used in subsequent synthesis blocks.
    
    Parameters
    ----------
    vis_mxds : xarray.core.dataset.Dataset
        Input multi-xarray Dataset with global data.
    imaging_weights_parms : dictionary
    imaging_weights_parms['weighting'] : {'natural', 'uniform', 'briggs', 'briggs_abs'}, default = 'natural'
        Weighting scheme used for creating the imaging weights.
    imaging_weights_parms['robust'] : number, acceptable range [-2,2], default = 0.5
        Robustness parameter for Briggs weighting.
        robust = -2.0 maps to uniform weighting.
        robust = +2.0 maps to natural weighting.
    imaging_weights_parms['briggs_abs_noise'] : number, default=1.0
        Noise parameter for imaging_weights_parms['weighting']='briggs_abs' mode weighting.
    grid_parms : dictionary
    grid_parms['image_size'] : list of int, length = 2
        The image size (no padding).
    grid_parms['cell_size']  : list of number, length = 2, units = arcseconds
        The image cell size.
    grid_parms['chan_mode'] : {'continuum'/'cube'}, default = 'continuum'
        Create a continuum or cube image.
    grid_parms['fft_padding'] : number, acceptable range [1,100], default = 1.2
        The factor that determines how much the gridded visibilities are padded before the fft is done.
    sel_parms : dictionary
    sel_parms['xds'] : str
        The xds within the mxds to use to calculate the imaging weights for.
    sel_parms['data_group_in_id'] : int, default = first id in xds.data_groups
        The data group in the xds to use.
    sel_parms['data_group_out_id'] : int, default = sel_parms['data_group_in_id']
        The output data group. The default appends the imaging weight to the input data group.
    sel_parms['imaging_weight'] : str, default ='IMAGING_WEIGHT'
        The name that will be used for the imaging weight data variable.
    Returns
    -------
    vis_xds : xarray.core.dataset.Dataset
        The vis_xds will contain a new data variable for the imaging weights; its name is defined by the input parameter sel_parms['imaging_weight'].
    """
    print(
        '######################### Start make_imaging_weight #########################'
    )
    import time
    import math
    import xarray as xr
    import dask.array as da
    import matplotlib.pylab as plt
    import dask.array.fft as dafft
    import dask
    import copy, os
    from numcodecs import Blosc
    from itertools import cycle
    import zarr

    from cngi._utils._check_parms import _check_sel_parms, _check_existence_sel_parms
    from ._imaging_utils._check_imaging_parms import _check_imaging_weights_parms, _check_grid_parms

    #Deep copy so that inputs are not modified
    _mxds = vis_mxds.copy(deep=True)
    _imaging_weights_parms = copy.deepcopy(imaging_weights_parms)
    _grid_parms = copy.deepcopy(grid_parms)
    _sel_parms = copy.deepcopy(sel_parms)

    ##############Parameter Checking and Set Defaults##############
    assert (
        'xds' in _sel_parms
    ), "######### ERROR: xds must be specified in sel_parms"  #Can't have a default since xds names are not fixed.
    _vis_xds = _mxds.attrs[_sel_parms['xds']]

    assert _vis_xds.dims['pol'] <= 2, "Full polarization is not supported."

    _check_sel_parms(
        _vis_xds,
        _sel_parms,
        new_or_modified_data_variables={'imaging_weight': 'IMAGING_WEIGHT'},
        append_to_in_id=True)

    assert (_check_imaging_weights_parms(_imaging_weights_parms)
            ), "######### ERROR: imaging_weights_parms checking failed"
    if _imaging_weights_parms['weighting'] != 'natural':
        assert (_check_grid_parms(_grid_parms)
                ), "######### ERROR: grid_parms checking failed"
    else:
        #If natural weighting reuse weight
        _sel_parms['data_group_out']['imaging_weight'] = _sel_parms[
            'data_group_in']['weight']
        _vis_xds.attrs['data_groups'][0] = {
            **_vis_xds.attrs['data_groups'][0],
            **{
                _sel_parms['data_group_out']['id']: _sel_parms['data_group_out']
            }
        }

        print(
            "Since weighting is natural input weight will be reused as imaging weight."
        )
        print(
            '######################### Created graph for make_imaging_weight #########################'
        )
        return _mxds

    #################################################################
    _vis_xds[_sel_parms['data_group_out']['imaging_weight']] = _vis_xds[
        _sel_parms['data_group_in']['weight']]
    _sel_parms['data_group_in']['imaging_weight'] = _sel_parms[
        'data_group_out']['imaging_weight']
    calc_briggs_weights(_vis_xds, _imaging_weights_parms, _grid_parms,
                        _sel_parms)

    #print(_vis_xds)
    _vis_xds.attrs['data_groups'][0] = {
        **_vis_xds.attrs['data_groups'][0],
        **{
            _sel_parms['data_group_out']['id']: _sel_parms['data_group_out']
        }
    }

    print(
        '######################### Created graph for make_imaging_weight #########################'
    )
    return _mxds
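
calc_briggs_weights is not shown above. A sketch of the standard Briggs formula it presumably implements: grid the natural weights to get a weight density W_k per uv cell, compute f^2 = (5 * 10^-robust)^2 / (sum(W_k^2) / sum(w_i)), then scale each visibility weight by 1 / (1 + W_k * f^2). The flattened uv indexing here is a simplification:

import numpy as np

def toy_briggs_weights(natural_weights, uv_cell_index, n_uv_cells, robust=0.5):
    # natural_weights: (n_vis,); uv_cell_index: (n_vis,) flattened uv cell per visibility.
    W = np.zeros(n_uv_cells)
    np.add.at(W, uv_cell_index, natural_weights)    # gridded weight density W_k
    f2 = (5.0 * 10.0 ** (-robust)) ** 2 / (np.sum(W ** 2) / np.sum(natural_weights))
    return natural_weights / (1.0 + W[uv_cell_index] * f2)

# robust = -2 approaches uniform weighting and robust = +2 approaches natural,
# matching the docstring's description of imaging_weights_parms['robust'].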