Code example #1
def commonbeam(major1, minor1, pa1, major2, minor2, pa2):
    """
    Create the smallest bounding ellipse around two other ellipses.
    Give ellipse dimensions as astropy units quantities.
    """
    major1 = ucheck(major1, unit=u.deg)
    minor1 = ucheck(minor1, unit=u.deg)
    pa1 = ucheck(pa1, unit=u.deg)
    major2 = ucheck(major2, unit=u.deg)
    minor2 = ucheck(minor2, unit=u.deg)
    pa2 = ucheck(pa2, unit=u.deg)

    somebeams = Beams([major1.to(u.arcsec), major2.to(u.arcsec)]*u.arcsec,
                      [minor1.to(u.arcsec), minor2.to(u.arcsec)]*u.arcsec,
                      [pa1, pa2]*u.deg)

    for tolerance in (1e-4, 5e-5, 1e-5, 1e-6, 1e-7):
        try:
            common = somebeams.common_beam(tolerance=tolerance)
            break
        except BeamError:
            continue
    else:
        # no tolerance succeeded; raise rather than fall through with 'common' undefined
        raise BeamError("Could not find a common beam at any tolerance.")

    new_major = common._major
    new_minor = common._minor
    new_pa = common._pa

    return new_major.to(u.deg), new_minor.to(u.deg), new_pa
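A minimal usage sketch for the helper above (hypothetical beam values; it assumes astropy.units, radio_beam's Beams/BeamError, and the project's ucheck() helper are all importable in the same module):

import astropy.units as u

# hypothetical input beams: 10" x 7" at PA 30 deg and 9" x 8" at PA 120 deg
maj, minr, pa = commonbeam(10 * u.arcsec, 7 * u.arcsec, 30 * u.deg,
                           9 * u.arcsec, 8 * u.arcsec, 120 * u.deg)
print(maj.to(u.arcsec), minr.to(u.arcsec), pa)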
Code example #2
    def common_beam(self, circbeam=True):
        """
        Return parameters of the smallest common beam
        Parameters
        ----------
        circbeam: bool, optional. Default True - force beam circular

        Returns
        -------
        bmaj, bmin, bpa: beam in deg

        """
        if circbeam:
            maxmaj = np.max([image.get_beam()[0] for image in self.images])
            target_beam = [maxmaj * 1.01, maxmaj * 1.01,
                           0.]  # add 1% to prevent crash in convolution
        else:
            from radio_beam import Beams
            my_beams = Beams([image.get_beam()[0]
                              for image in self.images] * u.deg,
                             [image.get_beam()[1]
                              for image in self.images] * u.deg,
                             [image.get_beam()[2]
                              for image in self.images] * u.deg)
            common_beam = my_beams.common_beam()
            target_beam = [
                common_beam.major.to_value('deg'),
                common_beam.minor.to_value('deg'),
                common_beam.pa.to_value('deg')
            ]
        return target_beam
Code example #3
File: utils.py  Project: mccbc/nrao
def commonbeam(major1, minor1, pa1, major2, minor2, pa2):
    """
    Create the smallest bounding ellipse around two other ellipses.
    Give ellipse dimensions as astropy units quantities.
    """
    somebeams = Beams([major1.to(u.arcsec), major2.to(u.arcsec)] * u.arcsec,
                      [minor1.to(u.arcsec), minor2.to(u.arcsec)] * u.arcsec,
                      [pa1.to(u.deg), pa2.to(u.deg)] * u.deg)
    common = somebeams.common_beam()
    new_major = common._major
    new_minor = common._minor
    new_pa = common._pa
    return new_major.to(u.deg), new_minor.to(u.deg), new_pa
Code example #4
def test_VRODS_with_beams():

    exp_beams = Beams(np.arange(1, twelve_qty_1d.size + 1) * u.arcsec)

    p = VaryingResolutionOneDSpectrum(twelve_qty_1d, copy=False, beams=exp_beams)
    assert (p.beams == exp_beams).all()

    new_beams = Beams(np.arange(2, twelve_qty_1d.size + 2) * u.arcsec)

    p = p.with_beams(new_beams)
    assert np.all(p.beams == new_beams)
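For readers outside the spectral-cube test suite, here is a minimal standalone sketch of the same construction (hypothetical data; twelve_qty_1d above is a test fixture, and the import path may vary between spectral-cube versions):

import numpy as np
import astropy.units as u
from astropy.wcs import WCS
from radio_beam import Beams
from spectral_cube.lower_dimensional_structures import VaryingResolutionOneDSpectrum

data = np.ones(12) * u.Jy                    # hypothetical 12-channel spectrum
beams = Beams(np.arange(1, 13) * u.arcsec)   # one beam per channel
spec = VaryingResolutionOneDSpectrum(data, wcs=WCS(naxis=1), beams=beams)
assert np.all(spec.beams == beams)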
Code example #5
def getmaxbeam(files, verbose=False):
    """Get largest beam
    """
    beams = []
    for file in files:
        header = fits.getheader(file, memmap=True)
        beam = Beam.from_fits_header(header)
        beams.append(beam)

    beams = Beams([beam.major.value for beam in beams] * u.deg,
                  [beam.minor.value for beam in beams] * u.deg,
                  [beam.pa.value for beam in beams] * u.deg)

    return beams.common_beam()
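An equivalent sketch (hypothetical file names) that avoids the `.value * u.deg` round-trip by passing the Beam quantities straight to Beams:

import astropy.units as u
from astropy.io import fits
from radio_beam import Beam, Beams

files = ['chan0001.fits', 'chan0002.fits']   # hypothetical inputs
beam_list = [Beam.from_fits_header(fits.getheader(f)) for f in files]
beams = Beams(major=u.Quantity([b.major for b in beam_list]),
              minor=u.Quantity([b.minor for b in beam_list]),
              pa=u.Quantity([b.pa for b in beam_list]))
common = beams.common_beam()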
Code example #6
File: utils.py  Project: apertif/amosaic
def get_common_psf(self, veri, format='fits'):
    """
    Common psf for the list of fits files
    """
    beams = []

    if format == 'fits':
        bmajes = []
        bmines = []
        bpas = []
        for f in veri:
            ih = pyfits.getheader(f)
            bmajes.append(ih['BMAJ'])
            bmines.append(ih['BMIN'])
            bpas.append(ih['BPA'])
        bmajarr = np.array(bmajes)
        bminarr = np.array(bmines)
        bpaarr = np.array(bpas)
        for i in range(0, len(bmajes) - 1):
            ni = i + 1
            beams = Beams((bmajarr[[i, ni]]) * u.deg, (bminarr[[i, ni]]) * u.deg, bpaarr[[i, ni]] * u.deg)
            common = commonbeam.commonbeam(beams)
            bmajarr[ni] = common.major / u.deg
            bminarr[ni] = common.minor / u.deg
            bpaarr[ni] = common.pa / u.deg
    elif format == 'array':
        bmajes = np.empty(0)
        bmines = np.empty(0)
        bpas = np.empty(0)
        for b in range(40):
            for sb in range(self.pol_start_sb, self.pol_end_sb + 1):
                if veri[b,sb]:
                    bparams = get_param(self, 'polarisation_B' + str(b).zfill(2) +
                                        '_targetbeams_qu_beamparams')
                    bmajes = np.append(bmajes, bparams[:, 0, 0][sb])
                    bmines = np.append(bmines, bparams[:, 1, 0][sb])
                    bpas = np.append(bpas, bparams[:, 2, 0][sb])
        bmajarr = bmajes[~pd.isnull(bmajes)]
        bminarr = bmines[~pd.isnull(bmines)]
        bpaarr = bpas[~pd.isnull(bpas)]
        for i in range(0, len(bmajarr) - 1):
            ni = i + 1
            beams = Beams((bmajarr[[i,ni]]/3600.0) * u.deg, (bminarr[[i,ni]]/3600.0) * u.deg, bpaarr[[i,ni]] * u.deg)
            common = commonbeam.commonbeam(beams)
            bmajarr[ni] = (common.major / u.deg) * 3600.0
            bminarr[ni] = (common.minor / u.deg) * 3600.0
            bpaarr[ni] = common.pa / u.deg
    common = Beam.__new__(Beam, major=common.major * 1.01, minor=common.minor * 1.01, pa=common.pa)
    print('Increased final smallest common beam by 1 %')
    print('The final smallest common beam is ' + str(common))
    return common
Code example #7
def test_VRODS_slice_with_beams():

    exp_beams = Beams(np.arange(1, twelve_qty_1d.size + 1) * u.arcsec)

    p = VaryingResolutionOneDSpectrum(twelve_qty_1d, copy=False,
                                      wcs=WCS(naxis=1),
                                      beams=exp_beams)

    assert np.all(p[:5].beams == exp_beams[:5])
Code example #8
def test_VRODS_wrong_beams_shape():
    '''
    Check that passing Beams with a different shape than the data
    is caught.
    '''
    exp_beams = Beams(np.arange(1, 4) * u.arcsec)

    # a mismatched number of beams should be rejected on construction
    # (ValueError assumed here; the exact exception type may differ)
    with pytest.raises(ValueError):
        p = VaryingResolutionOneDSpectrum(twelve_qty_1d, copy=False,
                                          beams=exp_beams)
Code example #9
File: amos.py  Project: apertif/amosaic
def get_common_psf(fitsfiles):
    """ common psf for the list of fits files """
    beams = []
    bmajes = []
    bmines = []
    bpas = []
    for f in fitsfiles:
        ih = fits.getheader(f)
        bmajes.append(ih['BMAJ'])
        bmines.append(ih['BMIN'])
        bpas.append(ih['BPA'])
        beam = Beam.from_fits_header(ih)
        beams.append(beam)
    beams = Beams(bmajes * u.deg, bmines * u.deg, bpas * u.deg)
    common = beams.common_beam()
    smallest = beams.smallest_beam()
    logging.info('Smallest PSF: %s', smallest)
    logging.info('Common PSF: %s', common)
    return common
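A hedged follow-up sketch showing how the returned common PSF might be applied to one of the input images (hypothetical file names; assumes 2-D Jy/beam data):

import astropy.units as u
from astropy.convolution import convolve_fft
from astropy.io import fits
from radio_beam import Beam

common = get_common_psf(['image0.fits', 'image1.fits'])   # from the excerpt above
hdu = fits.open('image0.fits')[0]                          # hypothetical input
beam = Beam.from_fits_header(hdu.header)
pixscale = abs(hdu.header['CDELT2']) * u.deg
kernel = common.deconvolve(beam).as_kernel(pixscale)       # convolving beam -> kernel
smoothed = convolve_fft(hdu.data, kernel, allow_huge=True)
smoothed *= (common.sr / beam.sr).decompose().value        # rescale the Jy/beam flux scale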
Code example #10
    def from_hdu(hdu):
        '''
        Return a OneDSpectrum from a FITS HDU or HDU list.
        '''

        if isinstance(hdu, HDUList):
            hdul = hdu
            hdu = hdul[0]
        else:
            hdul = HDUList([hdu])

        if not len(hdu.data.shape) == 1:
            raise ValueError("HDU must contain one-dimensional data.")

        meta = {}

        mywcs = wcs.WCS(hdu.header)

        if "BUNIT" in hdu.header:
            unit = convert_bunit(hdu.header["BUNIT"])
            meta["BUNIT"] = hdu.header["BUNIT"]
        else:
            unit = None

        beams_table = cube_utils.try_load_beams(hdul)

        if beams_table is not None:
            # Convert to a beams object from the table
            beams = Beams(
                major=u.Quantity(beams_table['BMAJ'], u.arcsec),
                minor=u.Quantity(beams_table['BMIN'], u.arcsec),
                pa=u.Quantity(beams_table['BPA'], u.deg),
                meta=[{
                    key: row[key]
                    for key in beams_table.names
                    if key not in ('BMAJ', 'BPA', 'BMIN')
                } for row in beams_table],
            )
            self = VaryingResolutionOneDSpectrum(hdu.data,
                                                 unit=unit,
                                                 wcs=mywcs,
                                                 meta=meta,
                                                 header=hdu.header,
                                                 beams=beams)
        else:
            beam = cube_utils.try_load_beam(hdu.header)
            self = OneDSpectrum(hdu.data,
                                unit=unit,
                                wcs=mywcs,
                                meta=meta,
                                header=hdu.header,
                                beam=beam)

        return self
Code example #11
    def __new__(cls,
                value,
                beams=None,
                read_beam=False,
                goodbeams_mask=None,
                **kwargs):
        self = super(VaryingResolutionOneDSpectrum,
                     cls).__new__(cls, value, **kwargs)
        assert hasattr(self, '_fill_value')

        if beams is None:
            if "beams" in self.meta:
                beams = self.meta['beams']
            elif read_beam:
                beams = cube_utils.try_load_beams(self.header)
                if beams is None:
                    warnings.warn("Cannot load beams table from header.",
                                  BeamWarning)

        if beams is not None:
            if isinstance(beams, BinTableHDU):
                beam_data_table = beams.data
            elif isinstance(beams, FITS_rec):
                beam_data_table = beams
            else:
                beam_data_table = None

            if beam_data_table is not None:
                beams = Beams(
                    major=u.Quantity(beam_data_table['BMAJ'], u.arcsec),
                    minor=u.Quantity(beam_data_table['BMIN'], u.arcsec),
                    pa=u.Quantity(beam_data_table['BPA'], u.deg),
                    meta=[{
                        key: row[key]
                        for key in beam_data_table.names
                        if key not in ('BMAJ', 'BPA', 'BMIN')
                    } for row in beam_data_table],
                )
            self.beams = beams
            self.meta['beams'] = beams

        if goodbeams_mask is not None:
            self.goodbeams_mask = goodbeams_mask

        self._cache = {}

        return self
Code example #12
def test_VRODS_arith_with_beams():

    exp_beams = Beams(np.arange(1, twelve_qty_1d.size + 1) * u.arcsec)

    p = VaryingResolutionOneDSpectrum(twelve_qty_1d, copy=False, beams=exp_beams)

    p2 = p + p

    assert hasattr(p2, '_wcs')
    assert p2.wcs == p.wcs
    assert np.all(p2.value==2)
    assert np.all(p2.beams == exp_beams)

    p2 = p - p

    assert hasattr(p2, '_wcs')
    assert p2.wcs == p.wcs
    assert np.all(p2.value==0)
    assert np.all(p2.beams == exp_beams)
Code example #13
File: makecube.py  Project: gheald/quocka
def getmaxbeam(data_dict,
               band,
               cutoff=15 * u.arcsec,
               tolerance=0.0001,
               nsamps=200,
               epsilon=0.0005,
               verbose=False,
               debug=False):
    """Find common beam.

    Arguments:
        data_dict {dict} -- Dict containing fits files.
        band {int} -- ATCA band name.

    Keyword Arguments:
        cutoff {Quantity} -- Flag beams with a major axis above this (default: {15 * u.arcsec})
        tolerance {float} -- See common_beam (default: {0.0001})
        nsamps {int} -- See common_beam (default: {200})
        epsilon {float} -- See common_beam (default: {0.0005})
        verbose {bool} -- Verbose output (default: {False})
        debug {bool} -- Show debugging plots (default: {False})

    Returns:
        beam_dict {dict} -- Beam and frequency data.
    """
    files = data_dict[band]
    stokes = ['i', 'q', 'u', 'v']
    beam_dict = {}
    for stoke in stokes:
        beams = []
        freqs = []
        for file in files[stoke]:
            header = fits.getheader(file, memmap=True)
            freqs.append(header['CRVAL3'])
            beam = Beam.from_fits_header(header)
            beams.append(beam)
        beams = Beams([beam.major.value for beam in beams] * u.deg,
                      [beam.minor.value for beam in beams] * u.deg,
                      [beam.pa.value for beam in beams] * u.deg)
        flags = beams.major > cutoff
        beam_dict.update({
            stoke + '_beams': beams,
            stoke + '_freqs': np.array(freqs) * u.Hz,
            stoke + '_flags': flags
        })
    if debug:
        plt.figure()
        plt.title(band)
        for stoke in stokes:
            idx = [not flag for flag in beam_dict[stoke + '_flags']]
            plt.plot(beam_dict[stoke + '_freqs'][idx],
                     beam_dict[stoke + '_beams'].major.to(u.arcsec)[idx],
                     '.',
                     alpha=0.5,
                     label=stoke + '--BMAJ')

            # plot BMIN alongside BMAJ for each Stokes (kept inside the loop)
            plt.plot(beam_dict[stoke + '_freqs'][idx],
                     beam_dict[stoke + '_beams'].minor.to(u.arcsec)[idx],
                     '.',
                     alpha=0.5,
                     label=stoke + '--BMIN')
        plt.xlabel('Frequency [Hz]')
        plt.ylabel('Beam size [arcsec]')
        plt.legend()
        plt.show()

    bmaj = []
    bmin = []
    bpa = []
    for stoke in stokes:
        bmaj += list(beam_dict[f'{stoke}_beams'].major.
                     value[~beam_dict[f'{stoke}_flags']])
        bmin += list(beam_dict[f'{stoke}_beams'].minor.
                     value[~beam_dict[f'{stoke}_flags']])
        bpa += list(
            beam_dict[f'{stoke}_beams'].pa.value[~beam_dict[f'{stoke}_flags']])

    big_beams = Beams(bmaj * u.deg, bmin * u.deg, bpa * u.deg)

    try:
        cmn_beam = big_beams.common_beam(tolerance=tolerance,
                                         epsilon=epsilon,
                                         nsamps=nsamps)
    except BeamError:
        if verbose:
            print("Couldn't find common beam with defaults")
            print("Trying again with smaller tolerance")
        cmn_beam = big_beams.common_beam(tolerance=tolerance * 0.1,
                                         epsilon=epsilon,
                                         nsamps=nsamps)

    cmn_beam = Beam(
        major=my_ceil(cmn_beam.major.to(u.arcsec).value, precision=1) *
        u.arcsec,
        minor=my_ceil(cmn_beam.minor.to(u.arcsec).value, precision=1) *
        u.arcsec,
        pa=round_up(cmn_beam.pa.to(u.deg), decimals=2))

    target_header = fits.getheader(data_dict[band]['i'][0], memmap=True)
    dx = target_header['CDELT1'] * -1 * u.deg
    dy = target_header['CDELT2'] * u.deg
    grid = dy
    conbeams = [cmn_beam.deconvolve(beam) for beam in big_beams]

    # Check that convolving beam will be nyquist sampled
    min_samps = []
    for b_idx, conbeam in enumerate(conbeams):
        # Get maj, min, pa
        samp = conbeam.minor / grid.to(u.arcsec)
        if samp < 2:
            min_samps.append([samp, b_idx])

    if len(min_samps) > 0:
        print('Adjusting common beam to be sampled by grid!')
        worst_idx = np.argmin([samp[0] for samp in min_samps], axis=0)
        samp_cor_fac, idx = 2 / \
            min_samps[worst_idx][0], int(
                min_samps[worst_idx][1])
        conbeam = conbeams[idx]
        major = conbeam.major
        minor = conbeam.minor * samp_cor_fac
        pa = conbeam.pa
        # Check for small major!
        if major < minor:
            major = minor
            pa = 0 * u.deg

        cor_beam = Beam(major, minor, pa)
        if verbose:
            print('Smallest common beam is:', cmn_beam)
        cmn_beam = big_beams[idx].convolve(cor_beam)
        cmn_beam = Beam(
            major=my_ceil(cmn_beam.major.to(u.arcsec).value, precision=1) *
            u.arcsec,
            minor=my_ceil(cmn_beam.minor.to(u.arcsec).value, precision=1) *
            u.arcsec,
            pa=round_up(cmn_beam.pa.to(u.deg), decimals=2))
        if verbose:
            print('Smallest common Nyquist sampled beam is:', cmn_beam)
    if debug:
        from matplotlib.patches import Ellipse
        pixscale = 1 * u.arcsec
        fig = plt.figure()
        ax = plt.gca()
        for beam in big_beams:
            ellipse = Ellipse(
                (0, 0),
                width=(beam.major.to(u.deg) / pixscale).to(
                    u.dimensionless_unscaled).value,
                height=(beam.minor.to(u.deg) / pixscale).to(
                    u.dimensionless_unscaled).value,
                # PA is 90 deg offset from x-y axes by convention
                # (it is angle from NCP)
                angle=(beam.pa + 90 * u.deg).to(u.deg).value,
                edgecolor='k',
                fc='None',
                lw=1,
                alpha=0.1)
            ax.add_artist(ellipse)
        ellipse = Ellipse(
            (0, 0),
            width=(cmn_beam.major.to(u.deg) / pixscale).to(
                u.dimensionless_unscaled).value,
            height=(cmn_beam.minor.to(u.deg) / pixscale).to(
                u.dimensionless_unscaled).value,
            # PA is 90 deg offset from x-y axes by convention
            # (it is angle from NCP)
            angle=(cmn_beam.pa + 90 * u.deg).to(u.deg).value,
            edgecolor='r',
            fc='None',
            lw=2,
            alpha=1,
        )
        ax.add_artist(ellipse)
        label = f"BMAJ={cmn_beam.major.to(u.arcsec).round()}, BMIN={cmn_beam.minor.to(u.arcsec).round()}, BPA={cmn_beam.pa.to(u.deg).round()}"
        plt.plot([np.nan], [np.nan], 'r', label=label)
        plt.xlim(-0.2 * 60, 0.2 * 60)
        plt.ylim(-0.2 * 60, 0.2 * 60)
        plt.xlabel(r'$\Delta$ RA [arcsec]')
        plt.ylabel(r'$\Delta$ DEC [arcsec]')
        plt.legend()
        plt.show()

    beam_dict.update({'common_beam': cmn_beam})
    return beam_dict
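The quocka scripts above call two small helpers, my_ceil() and round_up(), that are not included in the excerpt. A hypothetical sketch of what they might look like, inferred only from how they are called above:

import numpy as np

def my_ceil(value, precision=0):
    """Hypothetical: ceil a float at the given number of decimal places."""
    factor = 10 ** precision
    return np.ceil(value * factor) / factor

def round_up(quantity, decimals=0):
    """Hypothetical: round an astropy Quantity up at the given number of decimals."""
    factor = 10 ** decimals
    return np.ceil(quantity.value * factor) / factor * quantity.unit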
Code example #14
        factors.append(factor)
    factor = np.array(factors)
    return factor


for suffix in ('mean', 'max'):

    results = []
    Ulines = []

    for fn in glob.glob(f"spectra/*.{suffix}.fits"):
        sp = pyspeckit.Spectrum(fn)

        # this is a bit hackier than I like
        # we'll do all our measurements in Kelvin!
        beams = Beams.from_fits_bintable(fits.open(fn)[1])
        factors = jtok_factors(beams, sp.xarr.to(u.GHz))
        sp.data = sp.data * factors
        sp.unit = u.K

        # want km/s - reference will be ~middle of SPW
        sp.xarr.convert_to_unit(u.km / u.s)

        med = np.nanmedian(sp.data)

        mad = stats.mad_std(sp.data - med)
        detections = (sp.data - med) > 5 * mad

        labels, ct = label(detections)

        for labelid in range(1, ct + 1):
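The excerpt above is truncated and relies on a jtok_factors() helper defined earlier in the same file. A hypothetical reconstruction (not the author's actual code) of such a helper using radio_beam's Beam.jtok():

import numpy as np

def jtok_factors(beams, freqs):
    """Hypothetical: per-channel Jy/beam -> K conversion factors."""
    factors = []
    for beam, freq in zip(beams, freqs):
        factors.append(beam.jtok(freq).value)   # K per (Jy/beam) at this frequency
    return np.array(factors)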
Code example #15
def load_casa_image(filename,
                    skipdata=False,
                    memmap=True,
                    skipvalid=False,
                    skipcs=False,
                    target_cls=None,
                    use_dask=None,
                    **kwargs):
    """
    Load a cube (into memory?) from a CASA image. By default it will transpose
    the cube into a 'python' order and drop degenerate axes. These options can
    be suppressed. The object holds the coordsys object from the image in
    memory.
    """

    if use_dask is None:
        use_dask = True

    if not use_dask:
        raise ValueError(
            "Loading CASA datasets is not possible with use_dask=False")

    from .core import StringWrapper
    if isinstance(filename, StringWrapper):
        filename = filename.value

    # read in the data
    if not skipdata:
        data = image_to_dask(filename, memmap=memmap)

    # CASA stores validity of data as a mask
    if skipvalid:
        valid = None
    else:
        try:
            valid = image_to_dask(filename, memmap=memmap, mask=True)
        except FileNotFoundError:
            valid = None

    # transpose is dealt with within the cube object

    # read in coordinate system object

    desc = getdesc(filename)

    casa_cs = desc['_keywords_']['coords']

    if 'units' in desc['_keywords_']:
        unit = desc['_keywords_']['units']
    else:
        unit = ''

    imageinfo = desc['_keywords_']['imageinfo']

    if 'perplanebeams' in imageinfo:
        beam_ = {'beams': imageinfo['perplanebeams']}
        beam_['nStokes'] = beam_['beams'].pop('nStokes')
        beam_['nChannels'] = beam_['beams'].pop('nChannels')
        beam_['beams'] = {
            key: {
                '*0': value
            }
            for key, value in list(beam_['beams'].items())
        }
    elif 'restoringbeam' in imageinfo:
        beam_ = imageinfo['restoringbeam']
    else:
        beam_ = {}

    wcs = coordsys_to_astropy_wcs(casa_cs)

    del casa_cs

    if 'major' in beam_:
        beam = Beam(
            major=u.Quantity(beam_['major']['value'],
                             unit=beam_['major']['unit']),
            minor=u.Quantity(beam_['minor']['value'],
                             unit=beam_['minor']['unit']),
            pa=u.Quantity(beam_['positionangle']['value'],
                          unit=beam_['positionangle']['unit']),
        )
    elif 'beams' in beam_:
        bdict = beam_['beams']
        if beam_['nStokes'] > 1:
            raise NotImplementedError()
        nbeams = len(bdict)
        assert nbeams == beam_['nChannels']
        stokesidx = '*0'

        majors = [
            u.Quantity(bdict['*{0}'.format(ii)][stokesidx]['major']['value'],
                       bdict['*{0}'.format(ii)][stokesidx]['major']['unit'])
            for ii in range(nbeams)
        ]
        minors = [
            u.Quantity(bdict['*{0}'.format(ii)][stokesidx]['minor']['value'],
                       bdict['*{0}'.format(ii)][stokesidx]['minor']['unit'])
            for ii in range(nbeams)
        ]
        pas = [
            u.Quantity(
                bdict['*{0}'.format(ii)][stokesidx]['positionangle']['value'],
                bdict['*{0}'.format(ii)][stokesidx]['positionangle']['unit'])
            for ii in range(nbeams)
        ]

        beams = Beams(major=u.Quantity(majors),
                      minor=u.Quantity(minors),
                      pa=u.Quantity(pas))
    else:
        warnings.warn("No beam information found in CASA image.", BeamWarning)

    # don't need this yet
    # stokes = get_casa_axis(temp_cs, wanttype="Stokes", skipdeg=False,)

    #    if stokes == None:
    #        order = np.arange(self.data.ndim)
    #    else:
    #        order = []
    #        for ax in np.arange(self.data.ndim+1):
    #            if ax == stokes:
    #                continue
    #            order.append(ax)

    #    self.casa_cs = ia.coordsys(order)

    # This should work, but coordsys.reorder() has a bug
    # on the error checking. JIRA filed. Until then the
    # axes will be reversed from the original.

    # if transpose == True:
    #    new_order = np.arange(self.data.ndim)
    #    new_order = new_order[-1*np.arange(self.data.ndim)-1]
    #    print new_order
    #    self.casa_cs.reorder(new_order)

    meta = {'filename': filename, 'BUNIT': unit}

    if wcs.naxis == 3:
        data, wcs_slice = cube_utils._orient(data, wcs)

        if valid is not None:
            valid, _ = cube_utils._orient(valid, wcs)
            mask = BooleanArrayMask(valid, wcs_slice)
        else:
            mask = None

        if 'beam' in locals():
            cube = DaskSpectralCube(data,
                                    wcs_slice,
                                    mask,
                                    meta=meta,
                                    beam=beam)
        elif 'beams' in locals():
            cube = DaskVaryingResolutionSpectralCube(data,
                                                     wcs_slice,
                                                     mask,
                                                     meta=meta,
                                                     beams=beams)
        else:
            cube = DaskSpectralCube(data, wcs_slice, mask, meta=meta)
        # with #592, this is no longer true
        # we've already loaded the cube into memory because of CASA
        # limitations, so there's no reason to disallow operations
        # cube.allow_huge_operations = True
        if mask is not None:
            assert cube.mask.shape == cube.shape

    elif wcs.naxis == 4:
        if valid is not None:
            valid, _ = cube_utils._split_stokes(valid, wcs)
        data, wcs = cube_utils._split_stokes(data, wcs)
        mask = {}
        for component in data:
            data_, wcs_slice = cube_utils._orient(data[component], wcs)
            if valid is not None:
                valid_, _ = cube_utils._orient(valid[component], wcs)
                mask[component] = BooleanArrayMask(valid_, wcs_slice)
            else:
                mask[component] = None

            if 'beam' in locals():
                data[component] = DaskSpectralCube(data_,
                                                   wcs_slice,
                                                   mask[component],
                                                   meta=meta,
                                                   beam=beam)
            elif 'beams' in locals():
                data[component] = DaskVaryingResolutionSpectralCube(
                    data_, wcs_slice, mask[component], meta=meta, beams=beams)
            else:
                data[component] = DaskSpectralCube(data_,
                                                   wcs_slice,
                                                   mask[component],
                                                   meta=meta)

            data[component].allow_huge_operations = True

        cube = StokesSpectralCube(stokes_data=data)
        if mask['I'] is not None:
            assert cube.I.mask.shape == cube.shape
            assert wcs_utils.check_equality(cube.I.mask._wcs, cube.wcs)
    else:
        raise ValueError("CASA image has {0} dimensions, and therefore "
                         "is not readable by spectral-cube.".format(wcs.naxis))

    from .core import normalize_cube_stokes
    return normalize_cube_stokes(cube, target_cls=target_cls)
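In practice this loader is usually reached through spectral-cube's public read interface rather than by calling load_casa_image() directly. A minimal sketch (hypothetical file name; the use_dask keyword only exists in newer spectral-cube releases):

from spectral_cube import SpectralCube

cube = SpectralCube.read('my_image.image', format='casa_image', use_dask=True)
# a per-plane beam table becomes cube.beams; a single restoring beam becomes cube.beam
print(cube.beams if hasattr(cube, 'beams') else cube.beam)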
Code example #16
File: casa_image.py  Project: yxlinaqua/spectral-cube
def load_casa_image(filename,
                    skipdata=False,
                    skipvalid=False,
                    skipcs=False,
                    target_cls=None,
                    **kwargs):
    """
    Load a cube (into memory?) from a CASA image. By default it will transpose
    the cube into a 'python' order and drop degenerate axes. These options can
    be suppressed. The object holds the coordsys object from the image in
    memory.
    """

    from .core import StringWrapper
    if isinstance(filename, StringWrapper):
        filename = filename.value

    try:
        import casatools
        iatool = casatools.image
    except ImportError:
        try:
            from taskinit import iatool
        except ImportError:
            raise ImportError(
                "Could not import CASA (casac) and therefore cannot read CASA .image files"
            )

    ia = iatool()

    # use the ia tool to get the file contents
    try:
        ia.open(filename, cache=False)
    except AssertionError as ex:
        if 'must be of cReqPath type' in str(ex):
            raise IOError("File {0} not found.  Error was: {1}".format(
                filename, str(ex)))
        else:
            raise ex

    # read in the data
    if not skipdata:
        arrdata = ArraylikeCasaData(filename)
        # CASA data are apparently transposed.
        data = dask.array.from_array(arrdata,
                                     chunks=arrdata.chunksize,
                                     name=filename)

    # CASA stores validity of data as a mask
    if not skipvalid:
        boolarr = ArraylikeCasaData(filename, ia_kwargs={'getmask': True})
        valid = dask.array.from_array(boolarr,
                                      chunks=boolarr.chunksize,
                                      name=filename + ".mask")

    # transpose is dealt with within the cube object

    # read in coordinate system object
    casa_cs = ia.coordsys()

    unit = ia.brightnessunit()

    beam_ = ia.restoringbeam()

    ia.done()
    ia.close()

    wcs = wcs_casa2astropy(ia, casa_cs)

    del casa_cs
    del ia

    if 'major' in beam_:
        beam = Beam(
            major=u.Quantity(beam_['major']['value'],
                             unit=beam_['major']['unit']),
            minor=u.Quantity(beam_['minor']['value'],
                             unit=beam_['minor']['unit']),
            pa=u.Quantity(beam_['positionangle']['value'],
                          unit=beam_['positionangle']['unit']),
        )
    elif 'beams' in beam_:
        bdict = beam_['beams']
        if beam_['nStokes'] > 1:
            raise NotImplementedError()
        nbeams = len(bdict)
        assert nbeams == beam_['nChannels']
        stokesidx = '*0'

        majors = [
            u.Quantity(bdict['*{0}'.format(ii)][stokesidx]['major']['value'],
                       bdict['*{0}'.format(ii)][stokesidx]['major']['unit'])
            for ii in range(nbeams)
        ]
        minors = [
            u.Quantity(bdict['*{0}'.format(ii)][stokesidx]['minor']['value'],
                       bdict['*{0}'.format(ii)][stokesidx]['minor']['unit'])
            for ii in range(nbeams)
        ]
        pas = [
            u.Quantity(
                bdict['*{0}'.format(ii)][stokesidx]['positionangle']['value'],
                bdict['*{0}'.format(ii)][stokesidx]['positionangle']['unit'])
            for ii in range(nbeams)
        ]

        beams = Beams(major=u.Quantity(majors),
                      minor=u.Quantity(minors),
                      pa=u.Quantity(pas))
    else:
        warnings.warn("No beam information found in CASA image.", BeamWarning)

    # don't need this yet
    # stokes = get_casa_axis(temp_cs, wanttype="Stokes", skipdeg=False,)

    #    if stokes == None:
    #        order = np.arange(self.data.ndim)
    #    else:
    #        order = []
    #        for ax in np.arange(self.data.ndim+1):
    #            if ax == stokes:
    #                continue
    #            order.append(ax)

    #    self.casa_cs = ia.coordsys(order)

    # This should work, but coordsys.reorder() has a bug
    # on the error checking. JIRA filed. Until then the
    # axes will be reversed from the original.

    # if transpose == True:
    #    new_order = np.arange(self.data.ndim)
    #    new_order = new_order[-1*np.arange(self.data.ndim)-1]
    #    print new_order
    #    self.casa_cs.reorder(new_order)

    meta = {'filename': filename, 'BUNIT': unit}

    if wcs.naxis == 3:
        data, wcs_slice = cube_utils._orient(data, wcs)
        valid, _ = cube_utils._orient(valid, wcs)

        mask = BooleanArrayMask(valid, wcs_slice)
        if 'beam' in locals():
            cube = SpectralCube(data, wcs_slice, mask, meta=meta, beam=beam)
        elif 'beams' in locals():
            cube = VaryingResolutionSpectralCube(data,
                                                 wcs_slice,
                                                 mask,
                                                 meta=meta,
                                                 beams=beams)
        else:
            cube = SpectralCube(data, wcs_slice, mask, meta=meta)
        # with #592, this is no longer true
        # we've already loaded the cube into memory because of CASA
        # limitations, so there's no reason to disallow operations
        # cube.allow_huge_operations = True
        assert cube.mask.shape == cube.shape

    elif wcs.naxis == 4:
        valid, _ = cube_utils._split_stokes(valid, wcs)
        data, wcs = cube_utils._split_stokes(data, wcs)
        mask = {}
        for component in data:
            data_, wcs_slice = cube_utils._orient(data[component], wcs)
            valid_, _ = cube_utils._orient(valid[component], wcs)
            mask[component] = BooleanArrayMask(valid_, wcs_slice)

            if 'beam' in locals():
                data[component] = SpectralCube(data_,
                                               wcs_slice,
                                               mask[component],
                                               meta=meta,
                                               beam=beam)
            elif 'beams' in locals():
                data[component] = VaryingResolutionSpectralCube(
                    data_, wcs_slice, mask[component], meta=meta, beams=beams)
            else:
                data[component] = SpectralCube(data_,
                                               wcs_slice,
                                               mask[component],
                                               meta=meta)

            data[component].allow_huge_operations = True

        cube = StokesSpectralCube(stokes_data=data)
        assert cube.I.mask.shape == cube.shape
        assert wcs_utils.check_equality(cube.I.mask._wcs, cube.wcs)
    else:
        raise ValueError("CASA image has {0} dimensions, and therefore "
                         "is not readable by spectral-cube.".format(wcs.naxis))

    from .core import normalize_cube_stokes
    return normalize_cube_stokes(cube, target_cls=target_cls)
Code example #17
File: makebigcube.py  Project: gheald/quocka
def getmaxbeam(file_dict,
               tolerance=0.0001,
               nsamps=200,
               epsilon=0.0005,
               verbose=False):
    """Find common beam

    Arguments:
        file_dict {dict} -- Filenames for each bandcube.

    Keyword Arguments:
        tolerance {float} -- See common_beam (default: {0.0001})
        nsamps {int} -- See common_beam (default: {200})
        epsilon {float} -- See common_beam (default: {0.0005})
        verbose {bool} -- Verbose output (default: {False})

    Returns:
        cmn_beam {Beam} -- Common beam
    """
    if verbose:
        print('Finding common beam...')
    stokes = ['i', 'q', 'u', 'v']
    beam_dict = {}
    beams = []
    for stoke in stokes:
        for i, file in enumerate(file_dict[stoke]):
            header = fits.getheader(file, memmap=True)
            if stoke == 'i' and i == 0:
                target_header = header
            beam = Beam.from_fits_header(header)
            beams.append(beam)
    beams = Beams([beam.major.value for beam in beams] * u.deg,
                  [beam.minor.value for beam in beams] * u.deg,
                  [beam.pa.value for beam in beams] * u.deg)

    try:
        cmn_beam = beams.common_beam(tolerance=tolerance,
                                     epsilon=epsilon,
                                     nsamps=nsamps)
    except BeamError:
        if verbose:
            print("Couldn't find common beam with defaults")
            print("Trying again with smaller tolerance")
        cmn_beam = beams.common_beam(tolerance=tolerance * 0.1,
                                     epsilon=epsilon,
                                     nsamps=nsamps)
    cmn_beam = Beam(
        major=my_ceil(cmn_beam.major.to(u.arcsec).value, precision=0) *
        u.arcsec,
        minor=my_ceil(cmn_beam.minor.to(u.arcsec).value, precision=0) *
        u.arcsec,
        pa=round_up(cmn_beam.pa.to(u.deg), decimals=2))
    dx = target_header['CDELT1'] * -1 * u.deg
    dy = target_header['CDELT2'] * u.deg
    assert abs(dx) == abs(dy)
    grid = dy
    conbeams = [cmn_beam.deconvolve(beam) for beam in beams]

    # Check that convolving beam will be nyquist sampled
    min_samps = []
    for b_idx, conbeam in enumerate(conbeams):
        # Get maj, min, pa
        samp = conbeam.minor / grid.to(u.arcsec)
        if samp < 2:
            min_samps.append([samp, b_idx])

    if len(min_samps) > 0:
        print('Adjusting common beam to be sampled by grid!')
        worst_idx = np.argmin([samp[0] for samp in min_samps], axis=0)
        samp_cor_fac, idx = 2 / \
            min_samps[worst_idx][0], int(
                min_samps[worst_idx][1])
        conbeam = conbeams[idx]
        major = conbeam.major
        minor = conbeam.minor * samp_cor_fac
        pa = conbeam.pa
        # Check for small major!
        if major < minor:
            major = minor
            pa = 0 * u.deg

        cor_beam = Beam(major, minor, pa)
        if verbose:
            print('Smallest common beam is:', cmn_beam)
        cmn_beam = beams[idx].convolve(cor_beam)
        cmn_beam = Beam(
            major=my_ceil(cmn_beam.major.to(u.arcsec).value, precision=1) *
            u.arcsec,
            minor=my_ceil(cmn_beam.minor.to(u.arcsec).value, precision=1) *
            u.arcsec,
            pa=round_up(cmn_beam.pa.to(u.deg), decimals=2))
        if verbose:
            print('Smallest common Nyquist sampled beam is:', cmn_beam)
    return cmn_beam
Code example #18
            assert err_image.get_beam() == image.get_beam()
            all_errors.append(err_image)
        else:
            raise FileNotFoundError(
                f'SI error image {si_err_image} does not exist.')
    #####################################################
    # find the smallest common beam
    if args.beam is None:
        if args.circbeam:
            maxmaj = np.max([b[0] for b in all_beams])
            target_beam = [maxmaj * 1.01, maxmaj * 1.01,
                           0.]  # add 1% to prevent crash in convolution
        else:
            from radio_beam import Beams
            my_beams = Beams([b[0] for b in all_beams] * u.deg,
                             [b[1] for b in all_beams] * u.deg,
                             [b[2] for b in all_beams] * u.deg)
            common_beam = my_beams.common_beam()
            target_beam = [
                common_beam.major.value, common_beam.minor.value,
                common_beam.pa.value
            ]
    else:
        target_beam = [
            args.beam[0] / 3600., args.beam[1] / 3600., args.beam[2]
        ]

    logging.info('Final beam: %.1f" %.1f" (pa %.1f deg)' \
        % (target_beam[0]*3600., target_beam[1]*3600., target_beam[2]))

    #####################################################
Code example #19
def line_ids(fn):
    results = []
    Ulines = []

    sp = pyspeckit.Spectrum(fn)

    # this is a bit hackier than I like
    # we'll do all our measurements in Kelvin!
    beams = Beams.from_fits_bintable(fits.open(fn)[1])
    factors = jtok_factors(beams, sp.xarr.to(u.GHz))
    sp.data = sp.data * factors
    sp.unit = u.K

    # want km/s - reference will be ~middle of SPW
    sp.xarr.convert_to_unit(u.km / u.s)

    med = np.nanmedian(sp.data)

    mad = stats.mad_std(sp.data - med)
    detections = (sp.data - med) > 5 * mad

    labels, ct = label(detections)

    for labelid in range(1, ct + 1):
        ssp = sp[labels == labelid]
        try:
            ssp.xarr.convert_to_unit(u.GHz)
            ssp.specfit()
            ssp.specfit.parinfo
            frq = ssp.specfit.parinfo['SHIFT0'].value * ssp.xarr.unit
        except Exception as ex:
            print(ex)
            frq = ssp.xarr.to(u.GHz).mean()
        sq = Splatalogue.query_lines(
            frq * (1 + 0 / 3e5),
            frq * (1 + 75 / 3e5),  # 30/3e5 original lower bound
            only_astronomically_observed=True)
        if len(sq) > 0:
            tbl = utils.minimize_table(sq)
            try:
                total_intensity = ssp.data.sum() * np.abs(
                    ssp.xarr.to(u.km / u.s).cdelt())
            except ValueError:
                total_intensity = ssp.data.sum() * np.abs(
                    sp.xarr.to(u.km / u.s).cdelt())
            peak_intensity = ssp.data.max()
            tbl.add_column(Column(data=total_intensity, name='TotalIntensity'))
            tbl.add_column(Column(data=peak_intensity, name='PeakIntensity'))
            tbl.add_column(Column(data=mad, name='RMS'))
            tbl.add_column(
                Column(data=u.Quantity((-(frq.to(u.GHz).value - tbl['Freq']) /
                                        tbl['Freq'] * constants.c),
                                       u.km / u.s),
                       name='Velocity'))
            #             print(tbl.pprint(max_width=200))
            results.append(tbl)
        else:
            log.warning(f"Frequency {frq.to(u.GHz)} had no hits")
            Ulines.append(frq)

    try:
        match_table = table.vstack(results)
    except ValueError:
        pass
    else:
        #         match_table.remove_column('QNs')
        match_table = table.unique(match_table, keys='Species')
    match_table.sort('Freq')
    print(match_table.pprint(max_width=200))
    print(match_table['Species', 'Freq', 'Velocity'])
    #     match_table.write(f"line_fit_table_{suffix}.ipac", format='ascii.ipac', overwrite=True)
    return match_table
Code example #20
        exportfits(imagename=file, fitsimage=file + '.fits')
    else:
        print(file + '.fits' + ' already exists. Not running exportfits.')

fits_files = glob.glob(data_path + '*.fits')

psf_file = [s for s in fits_files if ".psf.fits" in s][0]
orig_resid = [s for s in fits_files if ".residual.fits" in s][0]
model = [s for s in fits_files if ".model.fits" in s][0]

hdu = fits.open(psf_file)
data = hdu[0].data
header = hdu[0].header
beams = hdu[1]
beams_table = beams.data
beams = Beams.from_fits_bintable(beams)  #convert to radio_beam object
hdu.close()

center = np.unravel_index(np.argmax(data[0, plot_channel, :, :]),
                          data[0, plot_channel, :, :].shape)
cy, cx = center

cutout = data[0, plot_channel, cy - max_npix_peak:cy + max_npix_peak + 1,
              cx - max_npix_peak:cx + max_npix_peak + 1]

shape = cutout.shape
sy, sx = shape
Y, X = np.mgrid[0:sy, 0:sx]

center = np.unravel_index(np.argmax(cutout), cutout.shape)
cy, cx = center
Code example #21
File: casa_image.py  Project: folguinch/spectral-cube
def load_casa_image(filename, skipdata=False,
                    skipvalid=False, skipcs=False, **kwargs):
    """
    Load a cube (into memory?) from a CASA image. By default it will transpose
    the cube into a 'python' order and drop degenerate axes. These options can
    be suppressed. The object holds the coordsys object from the image in
    memory.
    """

    try:
        import casatools
        ia = casatools.image()
    except ImportError:
        try:
            from taskinit import iatool
            ia = iatool()
        except ImportError:
            raise ImportError("Could not import CASA (casac) and therefore cannot read CASA .image files")

    # use the ia tool to get the file contents
    ia.open(filename)

    # read in the data
    if not skipdata:
        # CASA data are apparently transposed.
        data = ia.getchunk().transpose()

    # CASA stores validity of data as a mask
    if not skipvalid:
        valid = ia.getchunk(getmask=True).transpose()

    # transpose is dealt with within the cube object

    # read in coordinate system object
    casa_cs = ia.coordsys()

    wcs = wcs_casa2astropy(casa_cs)

    unit = ia.brightnessunit()

    beam_ = ia.restoringbeam()
    if 'major' in beam_:
        beam = Beam(major=u.Quantity(beam_['major']['value'], unit=beam_['major']['unit']),
                    minor=u.Quantity(beam_['minor']['value'], unit=beam_['minor']['unit']),
                    pa=u.Quantity(beam_['positionangle']['value'], unit=beam_['positionangle']['unit']),
                   )
    elif 'beams' in beam_:
        bdict = beam_['beams']
        if beam_['nStokes'] > 1:
            raise NotImplementedError()
        nbeams = len(bdict)
        assert nbeams == beam_['nChannels']
        stokesidx = '*0'

        majors = [u.Quantity(bdict['*{0}'.format(ii)][stokesidx]['major']['value'],
                             bdict['*{0}'.format(ii)][stokesidx]['major']['unit']) for ii in range(nbeams)]
        minors = [u.Quantity(bdict['*{0}'.format(ii)][stokesidx]['minor']['value'],
                             bdict['*{0}'.format(ii)][stokesidx]['minor']['unit']) for ii in range(nbeams)]
        pas = [u.Quantity(bdict['*{0}'.format(ii)][stokesidx]['positionangle']['value'],
                          bdict['*{0}'.format(ii)][stokesidx]['positionangle']['unit']) for ii in range(nbeams)]

        beams = Beams(major=u.Quantity(majors),
                      minor=u.Quantity(minors),
                      pa=u.Quantity(pas))
    else:
        warnings.warn("No beam information found in CASA image.",
                      BeamWarning)


    # don't need this yet
    # stokes = get_casa_axis(temp_cs, wanttype="Stokes", skipdeg=False,)

    #    if stokes == None:
    #        order = np.arange(self.data.ndim)
    #    else:
    #        order = []
    #        for ax in np.arange(self.data.ndim+1):
    #            if ax == stokes:
    #                continue
    #            order.append(ax)

    #    self.casa_cs = ia.coordsys(order)

        # This should work, but coordsys.reorder() has a bug
        # on the error checking. JIRA filed. Until then the
        # axes will be reversed from the original.

        # if transpose == True:
        #    new_order = np.arange(self.data.ndim)
        #    new_order = new_order[-1*np.arange(self.data.ndim)-1]
        #    print new_order
        #    self.casa_cs.reorder(new_order)

    # close the ia tool
    ia.close()

    meta = {'filename': filename,
            'BUNIT': unit}


    if wcs.naxis == 3:
        mask = BooleanArrayMask(np.logical_not(valid), wcs)
        if 'beam' in locals():
            cube = SpectralCube(data, wcs, mask, meta=meta, beam=beam)
        elif 'beams' in locals():
            cube = VaryingResolutionSpectralCube(data, wcs, mask, meta=meta, beams=beams)
        else:
            cube = SpectralCube(data, wcs, mask, meta=meta)
        # we've already loaded the cube into memory because of CASA
        # limitations, so there's no reason to disallow operations
        cube.allow_huge_operations = True

    elif wcs.naxis == 4:
        data, wcs = cube_utils._split_stokes(data, wcs)
        mask = {}
        for component in data:
            data_, wcs_slice = cube_utils._orient(data[component], wcs)
            mask[component] = LazyMask(np.isfinite, data=data[component],
                                       wcs=wcs_slice)

            if 'beam' in locals():
                data[component] = SpectralCube(data_, wcs_slice, mask[component],
                                               meta=meta, beam=beam)
            elif 'beams' in locals():
                data[component] = VaryingResolutionSpectralCube(data_,
                                                                wcs_slice,
                                                                mask[component],
                                                                meta=meta,
                                                                beams=beams)
            else:
                data[component] = SpectralCube(data_, wcs_slice, mask[component],
                                               meta=meta)

            data[component].allow_huge_operations = True


        cube = StokesSpectralCube(stokes_data=data)

    return cube
Code example #22
def main(pool, args, verbose=True):
    # Fix up outdir
    if args.mpi:
        from mpi4py import MPI
        mpiComm = MPI.COMM_WORLD
        n_cores = mpiComm.Get_size()
        #mpiRank = mpiComm.Get_rank()
    outdir = args.outdir
    if outdir is not None:
        if outdir[-1] == '/':
            outdir = outdir[:-1]
    else:
        outdir = '.'

    files = glob(args.infile)
    if files == []:
        raise Exception('No files found!')

    beams = []
    nchans = []
    datadict = {}
    masks = []
    for i, file in enumerate(files):
        # Set up files
        datadict[f"cube_{i}"] = {}
        datadict[f"cube_{i}"]["filename"] = file
        # Get metadata
        header = fits.getheader(file)
        dxas = header['CDELT1'] * -1 * u.deg
        datadict[f"cube_{i}"]["dx"] = dxas
        dyas = header['CDELT2'] * u.deg
        datadict[f"cube_{i}"]["dy"] = dyas
        # Get beam info
        dirname = os.path.dirname(file)
        basename = os.path.basename(file)
        beamlog = f"{dirname}/beamlog.{basename}".replace('.fits', '.txt')
        datadict[f"cube_{i}"]["beamlog"] = beamlog
        beam, nchan = getbeams(beamlog, verbose=verbose)
        # Find bad chans
        cube = SpectralCube.read(file)
        mask = cube[:, cube.shape[1] // 2, cube.shape[2] // 2].mask.view()
        masks.append(mask)
        # Record beams
        beams.append(beam)
        nchans.append(nchan)
    beams = np.array(beams)
    nchans = np.array(nchans)
    # Do some masking: flag channels where either beam axis is recorded as zero
    badchans = (beams['BMAJarcsec'] == 0) | (beams['BMINarcsec'] == 0)
    beams['BMAJarcsec'][badchans] = np.nan
    beams['BMINarcsec'][badchans] = np.nan

    totalmask = sum(masks) > 0

    for i, _ in enumerate(beams['BMAJarcsec']):
        beams['BMAJarcsec'][i][totalmask] = np.nan
        beams['BMINarcsec'][i][totalmask] = np.nan
        beams['BPAdeg'][i][totalmask] = np.nan
        datadict[f"cube_{i}"]["oldbeams"] = Beams(
            beams['BMAJarcsec'][i].ravel() * u.arcsec,
            beams['BMINarcsec'][i].ravel() * u.arcsec,
            beams['BPAdeg'][i].ravel() * u.deg)

    if args.masklist is not None:
        masklist = np.loadtxt(args.masklist) == 1
        for i, _ in enumerate(beams['BMAJarcsec']):
            beams['BMAJarcsec'][i][masklist] = np.nan
            beams['BMINarcsec'][i][masklist] = np.nan
            beams['BPAdeg'][i][masklist] = np.nan
            datadict[f"cube_{i}"]["oldbeams"] = Beams(
                beams['BMAJarcsec'][i].ravel() * u.arcsec,
                beams['BMINarcsec'][i].ravel() * u.arcsec,
                beams['BPAdeg'][i].ravel() * u.deg)

        # for chan in masklist:

    if not all(elem == nchans[0] for elem in nchans):
        raise Exception('Unequal channel count in beamlogs!')

    beamlst = Beams(beams['BMAJarcsec'].ravel() * u.arcsec,
                    beams['BMINarcsec'].ravel() * u.arcsec,
                    beams['BPAdeg'].ravel() * u.deg)

    big_beam = beamlst[~np.isnan(beamlst)].common_beam()
    if verbose:
        print('Largest common beam is', big_beam)
    # Parse args
    bmaj = args.bmaj
    bmin = args.bmin
    bpa = args.bpa

    # Set to largest
    if bpa is None and bmin is None and bmaj is None:
        bpa = big_beam.pa.to(u.deg)
    else:
        bpa = 0 * u.deg
    if bmaj is None:
        bmaj = round_up(big_beam.major.to(u.arcsec))
    elif bmaj * u.arcsec < round_up(big_beam.major.to(u.arcsec)):
        raise Exception('Selected BMAJ is too small!')
    else:
        bmaj *= u.arcsec
    if bmin is None:
        bmin = round_up(big_beam.minor.to(u.arcsec))
    elif bmin * u.arcsec < round_up(big_beam.minor.to(u.arcsec)):
        raise Exception('Selected BMIN is too small!')
    else:
        bmin *= u.arcsec

    new_beam = Beam(bmaj, bmin, bpa)
    if verbose:
        print('Final beam is', new_beam)

    for key in tqdm(datadict.keys(), desc='Working on cubes separately'):
        conbms, facs = getfacs(datadict[key], new_beam, verbose=False)
        cube = SpectralCube.read(datadict[key]["filename"])
        # Set up output file
        outname = "sm." + os.path.basename(datadict[key]["filename"])
        outfile = f'{outdir}/{outname}'
        if verbose:
            print(f'Initialising to {outfile}')
        if not os.path.isfile(outfile):
            copyfile(datadict[key]["filename"], outfile, verbose=True)

        cubedict = datadict[key]
        cubedict["conbeams"] = conbms
        cubedict["sfactors"] = facs

        if not args.mpi:
            n_cores = args.n_cores
        width_max = n_cores
        width = cpu_to_use(width_max, cube.shape[0])
        n_chunks = cube.shape[0] // width

        for i in trange(n_chunks,
                        disable=(not verbose),
                        desc='Smoothing in chunks'):
            start = i * width
            stop = start + width

            func = functools.partial(worker, start=start, cubedict=cubedict)
            arr_out = list(pool.map(func, [idx for idx in range(width)]))
            arr_out = np.array(arr_out)

            with fits.open(outfile, mode='update', memmap=True) as outfh:
                outfh[0].data[start:stop, 0, :, :] = arr_out[:]
                outfh.flush()

        if verbose:
            print('Updating header...')
        with fits.open(outfile, mode='update', memmap=True) as outfh:
            outfh[0].header = new_beam.attach_to_header(outfh[0].header)
            outfh.flush()
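A hedged sketch of what the per-channel worker used above might do (the real worker() and getfacs() helpers are project-specific and not shown): deconvolve the old beam from the target beam, build a convolution kernel, and smooth one plane.

import astropy.units as u
from astropy.convolution import convolve_fft

def smooth_plane(plane, old_beam, new_beam, pixscale):
    """Hypothetical: convolve one Jy/beam image plane from old_beam up to new_beam."""
    con_beam = new_beam.deconvolve(old_beam)                 # beam to convolve with
    kernel = con_beam.as_kernel(pixscale)                    # pixscale as an angular Quantity
    fac = (new_beam.sr / old_beam.sr).decompose().value      # Jy/beam rescaling factor
    return convolve_fft(plane, kernel, allow_huge=True) * fac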