Example #1
def check_spec(f, length):
    cube = SpectralCube.read(f)
    spec_length = len(cube.spectral_axis)
    # print(cube.shape)
    out = numpy.zeros((length, cube.shape[1], cube.shape[2]))
    out[:] = numpy.nan
    if spec_length < length:
        print('Spectral axis smaller than ' + str(length) + ' channels')
        print('Correcting Spectral Axis')
        to_add = length - spec_length
        for index, x in numpy.ndenumerate(cube[0, :, :]):
            spec = cube[:, index[0], index[1]]
            rms = numpy.std(numpy.concatenate([spec[0:50], spec[-50:]]))
            noise = numpy.random.normal(scale=rms, size=to_add)
            spec2 = numpy.concatenate([
                noise[0:int(len(noise) / 2)], spec, noise[int(len(noise) / 2):]
            ])
            out[:, index[0], index[1]] = spec2
        # correct the header variables
        cube.header['NAXIS3'] = length
        # Move CRPIX3 over by the number of channels
        # added to left of spectrum
        cube.header['CRPIX3'] = cube.header['CRPIX3'] + int(to_add / 2)
        cube = SpectralCube(data=out, wcs=cube.wcs)
        f = f.split('.fits')[0] + '_clover.fits'
        cube.write(f, overwrite=True)
    elif spec_length > length:
        to_remove = spec_length - length
        cube = cube[int(to_remove / 2.):-int(round(to_remove / 2.)), :, :]
        f = f.split('.fits')[0] + '_clover.fits'
        cube.write(f, overwrite=True)
    return f
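A minimal usage sketch (the file name and channel count below are hypothetical; numpy and SpectralCube are assumed to be imported as in the function):

from spectral_cube import SpectralCube

# Pad or trim the cube so its spectral axis has exactly 1024 channels;
# check_spec returns the name of the corrected ('_clover') file, or the
# original name if no correction was needed.
fixed_name = check_spec('region_nh3_11.fits', 1024)
print(SpectralCube.read(fixed_name).shape)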
Example #2
def rebase_multi(filename, nproc=8, mask_percent=0.4, blorder_max=3, window_size=31):
	"""  
 Returns a baseline-subtracted cube. Can be run with parallel processes.    
   
 filename = name of datacube to process (including its path)
 nproc = number of parallel processes desired
 mask_percent = percentage of pixels to select for baseline fitting
 blorder_max = largest order polynomial to fit (fit from blorder_max down to order of 1) 
	"""
	cube = SpectralCube.read(filename)

	queue = pprocess.Queue(limit=nproc, continuous=1)
	calc = queue.manage(rebase)
	tic = time.time()

	# create cube to store rebaselined data
	cube_out = np.zeros(cube.shape) * np.nan
	pixels = cube.shape[1] * cube.shape[2]

	counter = 0
	for i in np.array_split(range(cube.shape[1]), nproc):
		calc(i, data=cube, mask_percent=mask_percent, blorder_max=blorder_max, window_size=window_size)

	for i, j, ss in queue:
		cube_out[:, i, j] = ss
		counter += 1
		print(str(counter) + ' of ' + str(pixels) + ' pixels completed', end='\r')
		sys.stdout.flush()
	print("\n %f s for parallel computation." % (time.time() - tic))
	
	cube_final = SpectralCube(data=cube_out, wcs=cube.wcs, header=cube.header)
	cube_final.write(filename[0:-5] + '_rebase_multi.fits', format='fits', overwrite=True)
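A hedged usage sketch (hypothetical path); pprocess and the rebase worker it manages are assumed to be importable alongside this function:

# Baseline-subtract a cube with 4 worker processes. The result is written to
# '<input name>_rebase_multi.fits' next to the input file.
rebase_multi('data/ngc1333_hc5n.fits', nproc=4, mask_percent=0.4,
             blorder_max=3, window_size=31)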
Example #3
def cubefit_gen(cube,
                ncomp=2,
                paraname=None,
                modname=None,
                chisqname=None,
                guesses=None,
                errmap11name=None,
                multicore=None,
                mask_function=None,
                snr_min=3.0,
                linename="oneone",
                momedgetrim=True,
                saveguess=False,
                **kwargs):
    '''
    Perform an n-velocity-component fit on the GAS ammonia 1-1 data.
    (This is intended to be the function to call for all future code once it
    has proven to be reliable.)
    # note: the method can probably be renamed to cubefit()

    Parameters
    ----------
    cube : str or SpectralCube
        The file name of the ammonia 1-1 cube, or a SpectralCube object
    ncomp : int
        The number of components one wishes to fit. Default is 2
    paraname : str
        The output file name for the fitted parameter maps

    Returns
    -------
    pcube : 'pyspeckit.cubes.SpectralCube.Cube'
        Pyspeckit cube object containing both the fit and the original data cube
    '''

    if hasattr(cube, 'spectral_axis'):
        pcube = pyspeckit.Cube(cube=cube)

    else:
        cubename = cube
        cube = SpectralCube.read(cubename)
        pcube = pyspeckit.Cube(filename=cubename)

    pcube.unit = "K"

    # the following check on rest-frequency may not be necessary for GAS, but better safe than sorry
    # note: this assumes the data cube has the right units
    if np.isnan(cube._wcs.wcs.restfrq):
        # Specify the rest frequency if not present
        cube = cube.with_spectral_unit(u.Hz,
                                       rest_value=freq_dict[linename] * u.Hz)
    cube = cube.with_spectral_unit(u.km / u.s, velocity_convention='radio')

    if np.isnan(pcube.wcs.wcs.restfrq):
        # Specify the rest frequency if not present
        pcube.xarr.refX = freq_dict[linename] * u.Hz
    pcube.xarr.velocity_convention = 'radio'

    # always register the fitter just in case different lines are used
    fitter = ammv.nh3_multi_v_model_generator(n_comp=ncomp,
                                              linenames=[linename])
    pcube.specfit.Registry.add_fitter('nh3_multi_v', fitter, fitter.npars)
    print "number of parameters is {0}".format(fitter.npars)
    print "the line to fit is {0}".format(linename)

    # Specify a width for the expected velocity range in the data
    #v_peak_hwidth = 3.0 # km/s (should be sufficient for GAS Orion, but may not be enough for KEYSTONE)
    v_peak_hwidth = 4.0  # km/s (should be sufficient for GAS Orion, but may not be enough for KEYSTONE)

    if errmap11name is not None:
        errmap11 = fits.getdata(errmap11name)
    else:
        # a quick way to estimate RMS as long as the noise dominates the spectrum by channels
        mask_finite = np.isfinite(cube._data)
        errmap11 = mad_std(cube._data[mask_finite], axis=0)
        print "median rms: {0}".format(np.nanmedian(errmap11))

    snr = cube.filled_data[:].value / errmap11
    peaksnr = np.nanmax(snr, axis=0)

    # the snr map will inevitably be noisy, so apply a little smoothing
    kernel = Gaussian2DKernel(1)
    peaksnr = convolve(peaksnr, kernel)

    # trim the edges by 3 pixels to guess the location of the peak emission
    footprint_mask = np.any(np.isfinite(cube._data), axis=0)

    if footprint_mask.size > 1000 and momedgetrim:
        print("trimming the edges to make moment maps")
        footprint_mask = binary_erosion(footprint_mask, disk(3))

    # the following function is copied directly from GAS
    def default_masking(snr, snr_min=5.0):
        planemask = (snr > snr_min)
        if planemask.size > 100:
            planemask = remove_small_objects(planemask, min_size=40)
            planemask = opening(planemask, disk(1))
        return (planemask)

    if 'maskmap' in kwargs:
        planemask = kwargs['maskmap']
    elif mask_function is None:
        planemask = default_masking(peaksnr, snr_min=snr_min)
    else:
        planemask = mask_function(peaksnr, snr_min=snr_min)

    print "planemask size: {0}, shape: {1}".format(planemask[planemask].size,
                                                   planemask.shape)

    # masking
    mask = np.isfinite(cube._data) * planemask * footprint_mask

    print "mask size: {0}, shape: {1}".format(mask[mask].size, mask.shape)

    maskcube = cube.with_mask(mask.astype(bool))
    maskcube = maskcube.with_spectral_unit(u.km / u.s,
                                           velocity_convention='radio')

    if guesses is not None:
        v_guess = guesses[::4]
        v_guess[v_guess == 0] = np.nan
    else:
        v_guess = np.nan

    if np.isfinite(v_guess).sum() > 0:
        v_guess = v_guess[np.isfinite(v_guess)]
        v_median = np.median(v_guess)
        print "The median of the user provided velocities is: {0}".format(
            v_median)
        m0, m1, m2 = main_hf_moments(maskcube,
                                     window_hwidth=v_peak_hwidth,
                                     v_atpeak=v_median)
    else:
        m0, m1, m2 = main_hf_moments(maskcube, window_hwidth=v_peak_hwidth)
        v_median = np.median(m1[np.isfinite(m1)])
        print "median velocity: {0}".format(v_median)

        if False:
            # save the moment maps for diagnostic purposes
            hdr_new = copy.deepcopy(pcube.header)
            hdr_new['CDELT3'] = 1
            hdr_new['CTYPE3'] = 'FITPAR'
            hdr_new['CRVAL3'] = 0
            hdr_new['CRPIX3'] = 1

            savename = "{0}_moments.fits".format(
                os.path.splitext(paraname)[0], "parameter_maps")
            fitcubefile = fits.PrimaryHDU(data=np.array([m0, m1, m2]),
                                          header=hdr_new)
            fitcubefile.writeto(savename, overwrite=True)

    # zero out the NaN values so that np.nanargmax(m0) operates smoothly
    # (this side-steps the sum vs. nansum issue, though it may not be ideal)
    m0[np.isnan(m0)] = 0.0

    # define acceptable v range based on the provided or determined median velocity
    vmax = v_median + v_peak_hwidth
    vmin = v_median - v_peak_hwidth

    # find the location of the peak signal (to determine the first pixel to fit if nearest neighbour method is used)
    peakloc = np.nanargmax(m0)
    ymax, xmax = np.unravel_index(peakloc, m0.shape)

    # set the fit parameter limits (consistent with GAS DR1)
    Texmin = 3.0  # K; a more reasonable lower limit (5 K T_kin, 1e3 cm^-3 density, 1e13 cm^-2 column, 3km/s sigma)
    Texmax = 40  # K; DR1 T_k for Orion A is < 35 K. T_k = 40 at 1e5 cm^-3, 1e15 cm^-2, and 0.1 km/s yields Tex = 37K
    sigmin = 0.07  # km/s
    sigmax = 2.5  # km/s; for Larson's law, a 10pc cloud has sigma = 2.6 km/s
    taumax = 100.0  # a reasonable upper limit for GAS data. At 10K and 1e5 cm^-3 & 3e15 cm^-2 -> 70
    taumin = 0.2  # note: at 1e3 cm^-3, 1e13 cm^-2, 1 km/s linewidth, 40 K -> 0.15
    eps = 0.001  # a small perturbation that can be used in guesses

    # get the guesses based on moment maps
    # tex and tau guesses are chosen to reflect low-density, diffuse gas that is likely to have low SNR
    gg = moment_guesses(m1, m2, ncomp, sigmin=sigmin, moment0=m0)

    if guesses is None:
        guesses = gg

    else:
        # fill in the blanks with moment guesses
        guesses[guesses == 0] = np.nan
        gmask = np.isfinite(guesses)
        guesses[~gmask] = gg[~gmask]

        # fill in the failed sigma guesses with moment guesses
        gmask = guesses[1::4] < sigmin
        guesses[1::4][gmask] = gg[1::4][gmask]

        print "user provided guesses accepted"

    # The guesses should be fine in the first case, but just in case, make sure the guesses are confined within the
    # appropriate limits
    guesses[::4][guesses[::4] > vmax] = vmax
    guesses[::4][guesses[::4] < vmin] = vmin
    guesses[1::4][guesses[1::4] > sigmax] = sigmax
    guesses[1::4][guesses[1::4] < sigmin] = sigmin + eps
    guesses[2::4][guesses[2::4] > Texmax] = Texmax
    guesses[2::4][guesses[2::4] < Texmin] = Texmin
    guesses[3::4][guesses[3::4] > taumax] = taumax
    guesses[3::4][guesses[3::4] < taumin] = taumin

    if saveguess:
        # save the guesses for diagnostic purposes
        hdr_new = copy.deepcopy(pcube.header)
        hdr_new['CDELT3'] = 1
        hdr_new['CTYPE3'] = 'FITPAR'
        hdr_new['CRVAL3'] = 0
        hdr_new['CRPIX3'] = 1

        savedir = "{0}/{1}".format(path.dirname(paraname), "guesses")

        try:
            os.makedirs(savedir)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

        savename = "{0}_guesses.fits".format(
            path.splitext(paraname)[0], "parameter_maps")
        savename = "{0}/{1}".format(savedir, path.basename(savename))

        fitcubefile = fits.PrimaryHDU(data=guesses, header=hdr_new)
        fitcubefile.writeto(savename, overwrite=True)

    # set some of the fiteach() inputs to that used in GAS DR1 reduction
    if 'integral' not in kwargs:
        kwargs['integral'] = False

    if 'verbose_level' not in kwargs:
        kwargs['verbose_level'] = 3

    if 'signal_cut' not in kwargs:
        kwargs['signal_cut'] = 2

    # Now fit the cube. (Note: the function inputs are consistent with GAS DR1 whenever possible)
    print('start fit')

    # use SNR masking if not provided
    if 'maskmap' not in kwargs:
        print("mask mask!")
        kwargs['maskmap'] = planemask * footprint_mask

    if np.sum(kwargs['maskmap']) < 1:
        print("[WARNING]: maskmap has no pixel, no fitting will be performed")
        return pcube
    elif np.sum(np.isfinite(guesses)) < 1:
        print("[WARNING]: guesses has no pixel, no fitting will be performed")
        return pcube

    pcube.fiteach(fittype='nh3_multi_v',
                  guesses=guesses,
                  start_from_point=(xmax, ymax),
                  use_neighbor_as_guess=False,
                  limitedmax=[True, True, True, True] * ncomp,
                  maxpars=[vmax, sigmax, Texmax, taumax] * ncomp,
                  limitedmin=[True, True, True, True] * ncomp,
                  minpars=[vmin, sigmin, Texmin, taumin] * ncomp,
                  multicore=multicore,
                  **kwargs)

    if paraname is not None:
        save_pcube(pcube, paraname, ncomp=ncomp)

    if modname is not None:
        model = SpectralCube(pcube.get_modelcube(),
                             pcube.wcs,
                             header=cube.header)
        model.write(modname, overwrite=True)

    if chisqname is not None:
        chisq = get_chisq(cube, pcube.get_modelcube(), expand=20)
        chisqfile = fits.PrimaryHDU(data=chisq,
                                    header=cube.wcs.celestial.to_header())
        chisqfile.writeto(chisqname, overwrite=True)

    return pcube
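A usage sketch with hypothetical file names; cubefit_gen equally accepts a SpectralCube object in place of the file name, as its first branch shows:

# Fit two velocity components to an ammonia 1-1 cube, saving the parameter
# maps and the model cube (hypothetical file names).
pcube = cubefit_gen('ngc1333_nh3_11.fits',
                    ncomp=2,
                    paraname='ngc1333_2vcomp_para.fits',
                    modname='ngc1333_2vcomp_model.fits',
                    multicore=4,
                    snr_min=3.0)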
Example #4
def get_multiV_models(paraname,
                      refcubename,
                      n_comp=2,
                      savename=None,
                      snrname=None,
                      rms=0.15,
                      rmspath=None,
                      linename="oneone"):
    '''
    Creates a fits file containing the model cubes of individual components stacked into a hypercube
    :param paraname: name of the fitted parameter-map FITS file
    :param refcubename: name of the reference data cube that the parameters were fitted to
    :param n_comp: number of velocity components in the parameter file
    :param savename: base name for the per-component model cube files
    :param snrname: output file name for the peak signal-to-noise cube
    :param rms: uniform rms value to adopt if no rms map is provided
    :param rmspath: file name of an rms map
    :return: array of model cubes, one per component
    '''

    para, hdr = fits.getdata(paraname, header=True)

    pcube = pyspeckit.Cube(refcubename)
    xarr = pcube.xarr

    cubes = [pcube.cube.copy() for i in np.arange(n_comp)]
    cubes = np.array(cubes)
    cubes[:] = np.nan

    # remove the error components
    n_para = n_comp * 4
    para = para[:n_para]
    assert para.shape[0] == n_para

    yy, xx = np.indices(para.shape[1:])
    nanvals = np.any(~np.isfinite(para), axis=0)
    isvalid = np.any(para, axis=0) & ~nanvals
    valid_pixels = zip(xx[isvalid], yy[isvalid])

    def model_a_pixel(xy):
        x, y = int(xy[0]), int(xy[1])
        models = [
            ammonia._ammonia_spectrum(xarr.as_unit('GHz'),
                                      tex=tex,
                                      tau_dict={linename: tau},
                                      width=width,
                                      xoff_v=vel,
                                      fortho=0.0,
                                      line_names=[linename])
            for vel, width, tex, tau in zip(para[::4, y, x], para[1::4, y, x],
                                            para[2::4, y, x], para[3::4, y, x])
        ]
        cubes[:, :, y, x] = models

    for xy in ProgressBar(list(valid_pixels)):
        print(int(xy[0]), int(xy[1]))
        model_a_pixel(xy)

    if savename is not None:
        f_name, f_extension = path.splitext(savename)
        for i, data in enumerate(cubes):
            fname = "{0}_v{1}_{2}".format(f_name, i, f_extension)
            model = SpectralCube(data, pcube.wcs, header=pcube.header)
            model.write(fname, overwrite=True)

    if snrname is not None:
        # calculate the peak temperature
        Tpeak = np.array([np.nanmax(cube, axis=0) for cube in cubes])

        if rmspath is not None:
            rmsdata = fits.getdata(rmspath)
            if rmsdata.shape == Tpeak[0].shape:
                rms = rmsdata
            else:
                print "[WARNING]: The shape of the rms map ({0}) does not match the shape of the emission map {1}." \
                      " An uniform rms value of: {2} has been adopted instead".format(rmsdata.shape, Tpeak[0].shape, rms)

        snr = Tpeak / rms
        snrfile = fits.PrimaryHDU(data=snr, header=pcube.header)

        for i in np.arange(n_comp * 8) + 1:
            key = 'PLANE{0}'.format(i)
            if key in hdr:
                hdr.remove(key)

        snrfile.header.set('CDELT3', 1)
        snrfile.header.set('CTYPE3', 'FITPAR')
        snrfile.header.set('PLANE1', 'SNR_0')
        snrfile.header.set('PLANE2', 'SNR_1')
        snrfile.header.set('NAXIS3', n_comp * 8)
        snrfile.writeto(snrname, overwrite=True)

    return cubes
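A usage sketch that pairs with the fitter above; all file names are hypothetical:

# Rebuild per-component model cubes from a saved parameter map and the cube
# it was fitted to; one model file is written per velocity component.
cubes = get_multiV_models('ngc1333_2vcomp_para.fits',
                          'ngc1333_nh3_11.fits',
                          n_comp=2,
                          savename='ngc1333_2vcomp_model.fits',
                          snrname='ngc1333_2vcomp_snr.fits',
                          rms=0.15)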
Example #5
                    pa_bounds=pa_bounds_n,
                    verbose=True,
                    how='cube')

bin_centers, total_spectrum_co_radial_s, num_pixels_s = \
    radial_stacking(gal, co_cube, dr=dr,
                    max_radius=max_radius,
                    pa_bounds=pa_bounds_s,
                    verbose=True,
                    how='cube')

spec_shape = co_cube.shape[0]

rot_stack = SpectralCube(data=total_spectrum_co_radial.T.reshape((spec_shape, bin_centers.size, 1)),
                         wcs=co_cube.wcs)
rot_stack.write(co_stackpath("rotation_stacked_radial_{}.fits".format(wstring)),
                overwrite=True)
rot_stack_n = SpectralCube(data=total_spectrum_co_radial_n.T.reshape((spec_shape, bin_centers.size, 1)),
                           wcs=co_cube.wcs)
rot_stack_n.write(co_stackpath("rotation_stacked_radial_north_{}.fits".format(wstring)),
                  overwrite=True)
rot_stack_s = SpectralCube(data=total_spectrum_co_radial_s.T.reshape((spec_shape, bin_centers.size, 1)),
                           wcs=co_cube.wcs)
rot_stack_s.write(co_stackpath("rotation_stacked_radial_south_{}.fits".format(wstring)),
                  overwrite=True)

# Separately save the number of pixels in each bin
np.save(co_stackpath("radial_stacking_pixelsinbin_{}.npy").format(wstring), num_pixels)
np.save(co_stackpath("radial_stacking_pixelsinbin_north_{}.npy").format(wstring), num_pixels_n)
np.save(co_stackpath("radial_stacking_pixelsinbin_south_{}.npy").format(wstring), num_pixels_s)

Example #6
def cleansplit(filename,
               galaxy=None,
               Vwindow=650 * u.km / u.s,
               Vgalaxy=300 * u.km / u.s,
               blorder=3,
               HanningLoops=0,
               maskfile=None,
               circleMask=True,
               edgeMask=False,
               weightCut=0.2,
               spectralSetup=None,
               spatialSmooth=1.0):
    """
    Takes a raw DEGAS cube and produces individual cubes for each
    spectral line.
    
    Parameters
    ---------
    filename : str
        The file to split.
    
    Keywords
    --------
    galaxy : Galaxy object
        Currently unused
    Vwindow : astropy.Quantity
        Width of the window in velocity units
    Vgalaxy : astropy.Quantity
        Line of sight velocity of the galaxy centre
    blorder : int
        Baseline order
    HanningLoops : int
        Number of times to smooth and resample the data
    maskfile : str
        FITS mask file used to look up the baseline regions per spectrum
    circleMask : bool
        Whether to trim the cube with a circular mask centred on the galaxy
    edgeMask : bool
        Whether to apply an edge mask based on the weights
    weightCut : float
        Minimum weight value to include in the data
    spatialSmooth : float
        Factor to increase the (linear) beam size by in a convolution.
    spectralSetup : str
        String to determine how we set up the spectrum
        'hcn_hcop' -- split based on HCN/HCO+ setup
        '13co_c18o' -- split based on 13CO/C18O setup
        '12co' -- don't split; assume single line
    """

    Cube = SpectralCube.read(filename)
    CatalogFile = get_pkg_data_filename('./data/dense_survey.cat',
                                        package='degas')
    Catalog = Table.read(CatalogFile, format='ascii')

    # Find which galaxy in our catalog corresponds to the object we
    # are mapping
    if galaxy is None:
        RABound, DecBound = Cube.world_extrema
        match = np.zeros_like(Catalog, dtype=bool)
        for index, row in enumerate(Catalog):
            galcoord = SkyCoord(row['RA'],
                                row['DEC'],
                                unit=(u.hourangle, u.deg))
            if (galcoord.ra < RABound[1] and galcoord.ra > RABound[0]
                    and galcoord.dec < DecBound[1]
                    and galcoord.dec > DecBound[0]):
                match[index] = True
        MatchRow = Catalog[match]
        galcoord = SkyCoord(MatchRow['RA'],
                            MatchRow['DEC'],
                            unit=(u.hourangle, u.deg))
        Galaxy = MatchRow['NAME'].data[0]
        print("Catalog Match with " + Galaxy)
        V0 = MatchRow['CATVEL'].data[0] * u.km / u.s

    # Check spectral setups.  Use the max frequencies present to
    # determine which spectral setup we used if not specified.
    if spectralSetup is None:
        if (Cube.spectral_axis.max() > 105 * u.GHz
                and Cube.spectral_axis.max() < 113 * u.GHz):
            warnings.warn("assuming 13CO/C18O spectral setup")
            spectralSetup = '13CO_C18O'
            filestr = '13co_c18o'
        if (Cube.spectral_axis.max() > 82 * u.GHz
                and Cube.spectral_axis.max() < 90 * u.GHz):
            warnings.warn("assuming HCN/HCO+ spectral setup")
            spectralSetup = 'HCN_HCO+'
            filestr = 'hcn_hcop'
        if (Cube.spectral_axis.max() > 113 * u.GHz):
            warnings.warn("assuming 12CO spectral setup")
            spectralSetup = '12CO'
            filestr = '12co'

    if spectralSetup == '13CO_C18O':
        CEighteenO = Cube.with_spectral_unit(u.km / u.s,
                                             velocity_convention='radio',
                                             rest_value=109.78217 * u.GHz)
        ThirteenCO = Cube.with_spectral_unit(u.km / u.s,
                                             velocity_convention='radio',
                                             rest_value=110.20135 * u.GHz)
        CubeList = (CEighteenO, ThirteenCO)
        LineList = ('C18O', '13CO')

    elif spectralSetup == 'HCN_HCO+':
        HCN = Cube.with_spectral_unit(u.km / u.s,
                                      velocity_convention='radio',
                                      rest_value=88.631847 * u.GHz)
        HCOp = Cube.with_spectral_unit(u.km / u.s,
                                       velocity_convention='radio',
                                       rest_value=89.188518 * u.GHz)
        CubeList = (HCN, HCOp)
        LineList = ('HCN', 'HCOp')

    elif spectralSetup == '12CO':
        TwelveCO = Cube.with_spectral_unit(u.km / u.s,
                                           velocity_convention='radio',
                                           rest_value=115.27120180 * u.GHz)
        CubeList = (TwelveCO, )
        LineList = ('12CO', )

    for ThisCube, ThisLine in zip(CubeList, LineList):
        if circleMask:
            x0, y0, _ = ThisCube.wcs.wcs_world2pix(galcoord.ra, galcoord.dec,
                                                   0, 0)
            ThisCube = circletrim(ThisCube,
                                  filename.replace('.fits', '_wts.fits'),
                                  x0,
                                  y0,
                                  weightCut=weightCut)
        if edgeMask:
            ThisCube = edgetrim(ThisCube,
                                filename.replace('.fits', '_wts.fits'),
                                weightCut=weightCut)

        # Trim each cube to the specified velocity range
        ThisCube = ThisCube.spectral_slab(V0 - Vwindow, V0 + Vwindow)
        ThisCube.write(Galaxy + '_' + ThisLine + '.fits', overwrite=True)
        StartChan = ThisCube.closest_spectral_channel(V0 - Vgalaxy)
        EndChan = ThisCube.closest_spectral_channel(V0 + Vgalaxy)

        if maskfile is not None:
            maskLookup = buildMaskLookup(maskfile)
            shp = ThisCube.shape
            TmpCube = ThisCube.with_spectral_unit(u.Hz)
            spaxis = TmpCube.spectral_axis
            spaxis = spaxis.value
            data = ThisCube.filled_data[:].value
            for y in np.arange(shp[1]):
                for x in np.arange(shp[2]):
                    spectrum = data[:, y, x]
                    if np.any(np.isnan(spectrum)):
                        continue
                    coords = ThisCube.world[:, y, x]
                    mask = maskLookup(coords[2].value, coords[1].value, spaxis)
                    spectrum = robustBaseline(spectrum,
                                              blorder=blorder,
                                              baselineIndex=~mask)
                    data[:, y, x] = spectrum
            ThisCube = SpectralCube(data * ThisCube.unit,
                                    ThisCube.wcs,
                                    header=ThisCube.header,
                                    meta={'BUNIT': ThisCube.header['BUNIT']})
            ThisCube.write(Galaxy + '_' + ThisLine +
                           '_rebase{0}.fits'.format(blorder),
                           overwrite=True)
        else:
            gbtpipe.Baseline.rebaseline(Galaxy + '_' + ThisLine + '.fits',
                                        baselineRegion=[
                                            slice(0, StartChan, 1),
                                            slice(EndChan, ThisCube.shape[0],
                                                  1)
                                        ],
                                        blorder=blorder)
        ThisCube = SpectralCube.read(Galaxy + '_' + ThisLine +
                                     '_rebase{0}'.format(blorder) + '.fits')
        # Smooth
        Kern = Kernel1D(array=np.array([0.5, 1.0, 0.5]))
        for i in range(HanningLoops):
            ThisCube = ThisCube.spectral_smooth(Kern)
            ThisCube = ThisCube[::2, :, :]

        # Spatial Smooth
        if spatialSmooth > 1.0:
            newBeam = Beam(major=ThisCube.beam.major * spatialSmooth,
                           minor=ThisCube.beam.minor * spatialSmooth)
            ThisCube = ThisCube.convolve_to(newBeam)
            smoothstr = '_smooth{0}'.format(spatialSmooth)
        else:
            smoothstr = ''

        # Final Writeout
        ThisCube.write(Galaxy + '_' + ThisLine + '_rebase{0}'.format(blorder) +
                       smoothstr + '_hanning{0}.fits'.format(HanningLoops),
                       overwrite=True)
Example #7
def measure_dendrogram_properties(dend=None, cube303=cube303,
                                  cube321=cube321, cube13co=cube13co,
                                  cube18co=cube18co, noise_cube=noise_cube,
                                  sncube=sncube,
                                  suffix="",
                                  last_index=None,
                                  plot_some=True,
                                  line='303',
                                  write=True):

    assert (cube321.shape == cube303.shape == noise_cube.shape ==
            cube13co.shape == cube18co.shape == sncube.shape)
    assert sncube.wcs is cube303.wcs is sncube.mask._wcs

    metadata = {}
    metadata['data_unit'] = u.K
    metadata['spatial_scale'] =  7.2 * u.arcsec
    metadata['beam_major'] =  30 * u.arcsec
    metadata['beam_minor'] =  30 * u.arcsec
    metadata['wavelength'] =  218.22219*u.GHz
    metadata['velocity_scale'] = u.km/u.s
    metadata['wcs'] = cube303.wcs

    keys = [
            'density_chi2',
            'expected_density',
            'dmin1sig_chi2',
            'dmax1sig_chi2',
            'column_chi2',
            'expected_column',
            'cmin1sig_chi2',
            'cmax1sig_chi2',
            'temperature_chi2',
            'expected_temperature',
            'tmin1sig_chi2',
            'tmax1sig_chi2',
            'eratio321303',
            'ratio321303',
            'logh2column',
            'elogh2column',
            'logabundance',
            'elogabundance',
           ]
    obs_keys = [
            'Stot303',
            'Smin303',
            'Smax303',
            'Stot321',
            'Smean303',
            'Smean321',
            'npix',
            'e303',
            'e321',
            'r321303',
            'er321303',
            '13cosum',
            'c18osum',
            '13comean',
            'c18omean',
            's_ntotal',
            'index',
            'is_leaf',
            'parent',
            'root',
            'lon',
            'lat',
            'vcen',
            'higaldusttem',
            'reff',
            'dustmass',
            'dustmindens',
            'bad',
            #'tkin_turb',
    ]
    columns = {k:[] for k in (keys+obs_keys)}

    log.debug("Initializing dendrogram temperature fitting loop")

    # FORCE wcs to match
    # (technically should reproject here)
    cube13co._wcs = cube18co._wcs = cube303.wcs
    cube13co.mask._wcs = cube18co.mask._wcs = cube303.wcs

    if line == '303':
        maincube = cube303
    elif line == '321':
        maincube = cube321
    else:
        raise ValueError("Unrecognized line: {0}".format(line))

    # Prepare an array to hold the fitted temperatures
    tcubedata = np.empty(maincube.shape, dtype='float32')
    tcubedata[:] = np.nan
    tcubeleafdata = np.empty(maincube.shape, dtype='float32')
    tcubeleafdata[:] = np.nan


    nbad = 0

    catalog = ppv_catalog(dend, metadata)
    pb = ProgressBar(len(catalog))
    for ii,row in enumerate(catalog):
        structure = dend[row['_idx']]
        assert structure.idx == row['_idx'] == ii
        dend_obj_mask = BooleanArrayMask(structure.get_mask(), wcs=cube303.wcs)
        dend_inds = structure.indices()

        view = (slice(dend_inds[0].min(), dend_inds[0].max()+1),
                slice(dend_inds[1].min(), dend_inds[1].max()+1),
                slice(dend_inds[2].min(), dend_inds[2].max()+1),)
        #view2 = cube303.subcube_slices_from_mask(dend_obj_mask)
        submask = dend_obj_mask[view]
        #assert np.count_nonzero(submask.include()) == np.count_nonzero(dend_obj_mask.include())

        sn = sncube[view].with_mask(submask)
        sntot = sn.sum().value
        #np.testing.assert_almost_equal(sntot, structure.values().sum(), decimal=0)

        c303 = cube303[view].with_mask(submask)
        c321 = cube321[view].with_mask(submask)
        co13sum = cube13co[view].with_mask(submask).sum().value
        co18sum = cube18co[view].with_mask(submask).sum().value
        if hasattr(co13sum,'__len__'):
            raise TypeError(".sum() applied to an array has yielded a non scalar.")

        npix = submask.include().sum()
        assert npix == structure.get_npix()
        Stot303 = c303.sum().value
        if np.isnan(Stot303):
            raise ValueError("NaN in cube.  This can't happen: the data from "
                             "which the dendrogram was derived can't have "
                             "NaN pixels.")
        Smax303 = c303.max().value
        Smin303 = c303.min().value

        Stot321 = c321.sum().value
        if npix == 0:
            raise ValueError("npix=0. This is impossible.")
        Smean303 = Stot303/npix
        if Stot303 <= 0 and line=='303':
            raise ValueError("The 303 flux is <=0.  This isn't possible because "
                             "the dendrogram was derived from the 303 data with a "
                             "non-zero threshold.")
        elif Stot303 <= 0 and line=='321':
            Stot303 = 0
            Smean303 = 0
        elif Stot321 <= 0 and line=='321':
            raise ValueError("The 321 flux is <=0.  This isn't possible because "
                             "the dendrogram was derived from the 321 data with a "
                             "non-zero threshold.")
        if np.isnan(Stot321):
            raise ValueError("NaN in 321 line")
        Smean321 = Stot321/npix

        #error = (noise_cube[view][submask.include()]).sum() / submask.include().sum()**0.5
        var = ((noise_cube[dend_obj_mask.include()]**2).sum() / npix**2)
        error = var**0.5
        if np.isnan(error):
            raise ValueError("error is nan: this is impossible by definition.")

        if line == '321' and Stot303 == 0:
            r321303 = np.nan
            er321303 = np.nan
        elif Stot321 < 0:
            r321303 = error / Smean303
            er321303 = (r321303**2 * (var/Smean303**2 + 1))**0.5
        else:
            r321303 = Stot321 / Stot303
            er321303 = (r321303**2 * (var/Smean303**2 + var/Smean321**2))**0.5

        for c in columns:
            assert len(columns[c]) == ii

        columns['index'].append(row['_idx'])
        columns['s_ntotal'].append(sntot)
        columns['Stot303'].append(Stot303)
        columns['Smax303'].append(Smax303)
        columns['Smin303'].append(Smin303)
        columns['Stot321'].append(Stot321)
        columns['Smean303'].append(Smean303)
        columns['Smean321'].append(Smean321)
        columns['npix'].append(npix)
        columns['e303'].append(error)
        columns['e321'].append(error)
        columns['r321303'].append(r321303)
        columns['er321303'].append(er321303)
        columns['13cosum'].append(co13sum)
        columns['c18osum'].append(co18sum)
        columns['13comean'].append(co13sum/npix)
        columns['c18omean'].append(co18sum/npix)
        columns['is_leaf'].append(structure.is_leaf)
        columns['parent'].append(structure.parent.idx if structure.parent else -1)
        columns['root'].append(get_root(structure).idx)
        s_main = maincube._data[dend_inds]
        x,y,z = maincube.world[dend_inds]
        lon = ((z.value-(360*(z.value>180)))*s_main).sum()/s_main.sum()
        lat = (y*s_main).sum()/s_main.sum()
        vel = (x*s_main).sum()/s_main.sum()
        columns['lon'].append(lon)
        columns['lat'].append(lat.value)
        columns['vcen'].append(vel.value)

        mask2d = dend_obj_mask.include().max(axis=0)[view[1:]]
        logh2column = np.log10(np.nanmean(column_regridded.data[view[1:]][mask2d]) * 1e22)
        if np.isnan(logh2column):
            log.info("Source #{0} has NaNs".format(ii))
            logh2column = 24
        elogh2column = elogabundance
        columns['higaldusttem'].append(np.nanmean(dusttem_regridded.data[view[1:]][mask2d]))

        r_arcsec = row['radius']*u.arcsec
        reff = (r_arcsec*(8.5*u.kpc)).to(u.pc, u.dimensionless_angles())
        mass = ((10**logh2column*u.cm**-2)*np.pi*reff**2*2.8*constants.m_p).to(u.M_sun)
        density = (mass/(4/3.*np.pi*reff**3)/constants.m_p/2.8).to(u.cm**-3)

        columns['reff'].append(reff.value)
        columns['dustmass'].append(mass.value)
        columns['dustmindens'].append(density.value)
        mindens = np.log10(density.value)
        if mindens < 3:
            mindens = 3

        if (r321303 < 0 or np.isnan(r321303)) and line != '321':
            raise ValueError("Ratio <0: This can't happen any more because "
                             "if either num/denom is <0, an exception is "
                             "raised earlier")
            #for k in columns:
            #    if k not in obs_keys:
            #        columns[k].append(np.nan)
        elif (r321303 < 0 or np.isnan(r321303)) and line == '321':
            for k in keys:
                columns[k].append(np.nan)
        else:
            # Replace negatives for fitting
            if Smean321 <= 0:
                Smean321 = error
            mf.set_constraints(ratio321303=r321303, eratio321303=er321303,
                               #ratio321322=ratio2, eratio321322=eratio2,
                               logh2column=logh2column, elogh2column=elogh2column,
                               logabundance=logabundance, elogabundance=elogabundance,
                               taline303=Smean303, etaline303=error,
                               taline321=Smean321, etaline321=error,
                               mindens=mindens,
                               linewidth=10)
            row_data = mf.get_parconstraints()
            row_data['ratio321303'] = r321303
            row_data['eratio321303'] = er321303

            for k in row_data:
                columns[k].append(row_data[k])

            # Exclude bad velocities from cubes
            if row['v_cen'] < -80e3 or row['v_cen'] > 180e3:
                # Skip: there is no real structure down here
                nbad += 1
                is_bad = True
            else:
                is_bad = False
                tcubedata[dend_obj_mask.include()] = row_data['expected_temperature']
                if structure.is_leaf:
                    tcubeleafdata[dend_obj_mask.include()] = row_data['expected_temperature']

            columns['bad'].append(is_bad)

            width = row['v_rms']*u.km/u.s
            lengthscale = reff

            #REMOVED in favor of despotic version done in dendrograms.py
            # we use the analytic version here; the despotic version is
            # computed elsewhere (with appropriate gcor factors)
            #columns['tkin_turb'].append(heating.tkin_all(10**row_data['density_chi2']*u.cm**-3,
            #                                             width,
            #                                             lengthscale,
            #                                             width/lengthscale,
            #                                             columns['higaldusttem'][-1]*u.K,
            #                                             crir=0./u.s))

        if len(set(len(c) for k,c in columns.items())) != 1:
            print("Columns are different lengths.  This is not allowed.")
            import ipdb; ipdb.set_trace()

        for c in columns:
            assert len(columns[c]) == ii+1

        if plot_some and not is_bad and ((ii - nbad) % 100 == 0 or ii - nbad < 50):
            try:
                log.info("T: [{tmin1sig_chi2:7.2f},{expected_temperature:7.2f},{tmax1sig_chi2:7.2f}]"
                         "  R={ratio321303:8.4f}+/-{eratio321303:8.4f}"
                         "  Smean303={Smean303:8.4f} +/- {e303:8.4f}"
                         "  Stot303={Stot303:8.2e}  npix={npix:6d}"
                         .format(Smean303=Smean303, Stot303=Stot303,
                                 npix=npix, e303=error, **row_data))

                pl.figure(1)
                pl.clf()
                mf.denstemplot()
                pl.savefig(fpath("dendrotem/diagnostics/{0}_{1}.png".format(suffix,ii)))
                pl.figure(2).clf()
                mf.parplot1d_all(levels=[0.68268949213708585])
                pl.savefig(fpath("dendrotem/diagnostics/1dplot{0}_{1}.png".format(suffix,ii)))
                pl.draw()
                pl.show()
            except Exception as ex:
                print(ex)
        else:
            pb.update(ii+1)

        if last_index is not None and ii >= last_index:
            break

    if last_index is not None:
        catalog = catalog[:last_index+1]

    for k in columns:
        if k not in catalog.keys():
            catalog.add_column(table.Column(name=k, data=columns[k]))

    for mid,lo,hi,letter in (('expected_temperature','tmin1sig_chi2','tmax1sig_chi2','t'),
                             ('expected_density','dmin1sig_chi2','dmax1sig_chi2','d'),
                             ('expected_column','cmin1sig_chi2','cmax1sig_chi2','c')):
        catalog.add_column(table.Column(name='elo_'+letter,
                                        data=catalog[mid]-catalog[lo]))
        catalog.add_column(table.Column(name='ehi_'+letter,
                                        data=catalog[hi]-catalog[mid]))

    if write:
        catalog.write(tpath('PPV_H2CO_Temperature{0}.ipac'.format(suffix)), format='ascii.ipac')

    # Note that there are overlaps in the catalog, which means that ORDER MATTERS
    # in the above loop.  I haven't yet checked whether large scale overwrites
    # small or vice-versa; it may be that both views of the data are interesting.
    tcube = SpectralCube(data=tcubedata, wcs=cube303.wcs,
                         mask=cube303.mask, meta={'unit':'K'},
                         header=cube303.header,
                        )
    tcubeleaf = SpectralCube(data=tcubeleafdata, wcs=cube303.wcs,
                         mask=cube303.mask, meta={'unit':'K'},
                         header=cube303.header,
                        )

    if write:
        log.info("Writing TemperatureCube")
        outpath = 'TemperatureCube_DendrogramObjects{0}.fits'
        tcube.write(hpath(outpath.format(suffix)),
                    overwrite=True)

        outpath_leaf = 'TemperatureCube_DendrogramObjects{0}_leaves.fits'
        tcubeleaf.write(hpath(outpath_leaf.format(suffix)),
                    overwrite=True)


    return catalog, tcube
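A hedged calling sketch: the dendrogram file name is hypothetical, and the module-level objects the keyword defaults point at (cube303, cube321, cube13co, cube18co, noise_cube, sncube) plus the fitter mf are assumed to be set up elsewhere in the module:

from astrodendro import Dendrogram

dend = Dendrogram.load_from('dendrogram_303.hdf5')  # hypothetical file
catalog, tcube = measure_dendrogram_properties(dend=dend, suffix='_sm',
                                               line='303', write=True)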
Example #8
def recipe_phangs_noise(incube=None,
                        outfile=None,
                        mask=None,
                        noise_kwargs=None,
                        return_spectral_cube=False,
                        overwrite=False):
    """

    Wrap noise_cube with a set of preferred parameters for the
    PHANGS-ALMA CO work.
    
    Parameters:
    -----------
    
    cube : np.array

        Array of data (floats)
    
    Keywords:
    ---------
    
    mask : np.bool

        Boolean array with False indicating where data can be used in
        the noise estimate. (i.e., True is signal).

    """

    # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
    # Error checking and work out inputs
    # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%

    if isinstance(incube, SpectralCube):
        cube = incube
    elif isinstance(incube, str):
        cube = SpectralCube.read(incube)
    else:
        logger.error("Input must be a SpectralCube object or a filename.")
        # without a valid cube there is nothing to estimate the noise from
        raise TypeError("Input must be a SpectralCube object or a filename.")

    # Initialize an empty kwargs dictionary
    if noise_kwargs is None:
        noise_kwargs = {}

    # If no box is specified, default to one about two beams across
    if 'box' not in noise_kwargs:
        pixels_per_beam = cube.pixels_per_beam
        box = np.ceil(2.5 * pixels_per_beam**0.5)
        noise_kwargs['box'] = box

    # Default to an odd bandpass smoothing window
    if 'bandpass_smooth_window' not in noise_kwargs:
        spectral_smooth = np.ceil(cube.shape[0] / 5) // 2 * 2 + 1
        noise_kwargs['bandpass_smooth_window'] = spectral_smooth

    if 'spec_box' not in noise_kwargs:
        noise_kwargs['spec_box'] = 5

    if 'iterations' not in noise_kwargs:
        noise_kwargs['iterations'] = 4

    # Accept the mask as a SpectralCube object or as a filename
    if mask is not None:
        if isinstance(mask, SpectralCube):
            noise_kwargs['mask'] = mask
        elif isinstance(mask, str):
            noise_kwargs['mask'] = SpectralCube.read(mask)
        else:
            logger.error(
                "Mask must be a SpectralCube object or a filename or None.")

    # Fill in the mask if it hasn't already been filled in.
    if 'mask' not in noise_kwargs:

        # Check if a non-trivial signal mask is attached to the cube
        if (np.sum(cube.mask.include()) < np.sum(
                np.isfinite(cube.filled_data[:].value))):
            noise_kwargs['mask'] = cube.mask.include()
        else:
            noise_kwargs['mask'] = None

    # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
    # Run the noise estimate
    # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%

    data = cube.filled_data[:].value
    badmask = np.isnan(data)
    badmask = nd.binary_dilation(badmask,
                                 structure=nd.generate_binary_structure(3, 2))
    data[badmask] = np.nan
    rms = noise_cube(data, **noise_kwargs)

    # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
    # Write or return as requested
    # &%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%

    # In this case can avoid a recast
    if not return_spectral_cube and (outfile is None):
        return (rms)

    # Recast from numpy array to spectral cube
    header = cube.header
    header['DATAMIN'] = np.nanmin(rms)
    header['DATAMAX'] = np.nanmax(rms)
    header['COMMENT'] = 'Produced with PHANGS-ALMA pipeline version ' + version
    if tableversion:
        header['COMMENT'] = ('Galaxy properties from PHANGS sample table '
                             'version ' + tableversion)
    rms = SpectralCube(rms,
                       wcs=cube.wcs,
                       header=header,
                       meta={'BUNIT': cube.header['BUNIT']})

    # Write to disk, if desired
    if outfile is not None:
        rms.write(outfile, overwrite=overwrite)

    if return_spectral_cube:
        return (rms)
    else:
        return (rms.filled_data[:].value)
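A usage sketch with hypothetical file names:

# Estimate a PHANGS-style noise cube, write it to disk, and get the result
# back as a SpectralCube rather than a bare array.
rms_cube = recipe_phangs_noise(incube='ngc3621_co21.fits',
                               outfile='ngc3621_co21_noise.fits',
                               return_spectral_cube=True,
                               overwrite=True)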
Example #9
            logabundance=logabundance,
            elogabundance=elogabundance,
            taline303=ta303.value,
            etaline303=err,
            taline321=ta321.value,
            etaline321=err,
            linewidth=linewidth)
        row_data = mf.get_parconstraints()
        tcube[z, y, x] = row_data['temperature_chi2']
        row_data['ratio303321'] = rat
        row_data['eratio303321'] = erat

        if ii % 100 == 0 or ii < 50:
            log.info(
                "T: [{tmin1sig_chi2:7.2f},{temperature_chi2:7.2f},{tmax1sig_chi2:7.2f}]  R={ratio303321:6.2f}+/-{eratio303321:6.2f}"
                .format(**row_data))
        else:
            pb.update(ii)
        tcube.flush()
    else:
        pb.update(ii)

tcube[tcube == 0] = np.nan
tCube = SpectralCube(tcube,
                     cube303.wcs,
                     mask=BooleanArrayMask(np.isfinite(tcube),
                                           wcs=cube303.wcs))
tCube.write(hpath('chi2_temperature_cube.fits'), overwrite=True)

print()
Example #10
log.info("Stacking CO with HI centroid. 2 * beam")
bin_centers, total_spectrum_co_radial, num_pixels = \
    radial_stacking(gal, co_cube, dr=dr,
                    max_radius=max_radius,
                    pa_bounds=None,
                    verbose=verbose,
                    how='cube')

spec_shape = co_cube.shape[0]

cent_stack = SpectralCube(data=total_spectrum_co_radial.T.reshape(
    (spec_shape, bin_centers.size, 1)),
                          wcs=co_cube.wcs)
cent_stack.write(co_stackpath(
    "centroid_stacked_radial_{}.fits".format(wstring)),
                 overwrite=True)

# Separately save the number of pixels in each bin
np.save(
    co_stackpath("radial_stacking_pixelsinbin_{}.npy").format(wstring),
    num_pixels)

# Save the total profiles over the inner 7 kpc

total_spectrum_co = total_spectrum_co_radial.sum(0)

oned_wcs = co_cube[:, 0, 0].wcs
OneDSpectrum(total_spectrum_co.value,
             unit=total_spectrum_co.unit,
             wcs=oned_wcs).write(co_stackpath(
Example #11
        rcubedata[structure.get_mask()] = r321303
        tcubedata[structure.get_mask()] = pwtem(np.array([r321303]))

        pb.update(ii+1)

    # Note that there are overlaps in the catalog, which means that ORDER MATTERS
    # in the above loop.  I haven't yet checked whether large scale overwrites
    # small or vice-versa; it may be that both views of the data are interesting.
    tcube = SpectralCube(data=tcubedata, wcs=cubeA.wcs,
                         mask=cubeA.mask, meta={'unit':'K'},
                         header=cubeA.header,
                        )

    outpath = 'TemperatureCube_DendrogramObjects{0}_Piecewise.fits'.format(sm)
    tcube.write(hpath(outpath), overwrite=True)

    rcube = SpectralCube(data=rcubedata, wcs=cubeA.wcs,
                         mask=cubeA.mask, meta={'unit':'K'},
                         header=cubeA.header,
                        )

    outpath = 'RatioCube_DendrogramObjects{0}.fits'.format(sm)
    rcube.write(hpath(outpath), overwrite=True)

    max_temcube = tcube.max(axis=0)
    max_temcube.hdu.writeto(hpath('TemperatureCube_DendrogramObjects{0}_Piecewise_max.fits'.format(sm)), clobber=True)
    max_rcube = rcube.max(axis=0)
    max_rcube.hdu.writeto(hpath('RatioCube_DendrogramObjects{0}_Piecewise_max.fits'.format(sm)), clobber=True)

    mean_temcube = tcube.mean(axis=0)
Example #12
class Data3D(Data):
    """ Creates a data cube structure.

    Attributes:
        address: file name.
        data: the data cube.
    """
    def __init__(self, address):
        """Defines a new data cube object.

        Parameters:
            address (str): file name.
        """
        super(Data3D, self).__init__(address)
        self.logger = get_logger(__name__)

    def load(self):
        """Load the data cube"""
        try:
            self.data = SpectralCube.read(self.address)
        except Exception:
            self.logger.warning('Trying to fix the cube')
            cube = fits.open(self.address)[0]
            cube.header['CUNIT3'] = 'm/s'
            cube.header['CRVAL3'] = cube.header['CRVAL3'] * 1.E3
            cube.header['CDELT3'] = cube.header['CDELT3'] * 1.E3
            self.data = SpectralCube(cube.data, WCS(cube.header))
            self.logger.info('Cube fixed')

    def save(self, filename=None, overwrite=True):
        """Save the data cube"""
        self.data.write(filename or self.address, overwrite=overwrite)

    @property
    def wcs(self):
        """Return the WCS with the position data."""
        return self.data.wcs.sub(['longitude', 'latitude'])

    def get_coord(self, xpix, ypix, frame='fk5'):
        """Return the (ra, dec) coordinates of the input pixel location.

        Parameters:
            xpix (float): x-position of the coordinate.
            ypix (float): y-position of the coordinate.
            frame (str, default=fk5): sky frame projection.

        Returns:
            coord (astropy.SkyCoord): sky coordinate of the input location.

        Note:
            xpix and ypix are zero based.
        """
        ra, dec = self.wcs.all_pix2world([[xpix, ypix]], 0)[0]
        return SkyCoord(ra=ra * u.degree, dec=dec * u.degree, frame=frame)

    def get_pixel(self, coord):
        crd = [[coord.ra.degree, coord.dec.degree]]
        return self.wcs.all_world2pix(crd, 0)[0]
        #return self.wcs.world_to_pixel(coord)

    def get_spectrum(self, coord=None, pixel=None):
        if coord is not None:
            xy = self.get_pixel(coord)
        elif pixel is not None:
            xy = pixel
        else:
            raise ValueError('Could not determine position')
        x, y = map(int, xy)
        self.logger.info('Extracting spectrum at pixel: %i, %i', x, y)
        return self.data[:, y, x]

    def get_avg_spectrum(self, coord, r=None):
        # Position
        xy = self.get_pixel(coord)
        x, y = map(int, xy)
        self.logger.info('Region centered at pixel: %i, %i', x, y)

        # Multiple beams
        bmaj = 0.
        bmin = 0.
        for beam in self.data.beams:
            bmaj += beam.major.to(u.deg).value
            bmin += beam.minor.to(u.deg).value
        bmaj = bmaj / len(self.data.beams)
        bmin = bmin / len(self.data.beams)

        # Get mask
        mask = image_circular_mask(self.data,
                                   xy=[x, y],
                                   r=r,
                                   bmin=bmin,
                                   bmaj=bmaj)

        # Get spectrum
        maskedcube = self.data.with_mask(mask)
        return maskedcube.mean(axis=(-2, -1))

    def rotate(self, angle, source, centre=(0, 0), mode='bilinear', **kwargs):
        """Rotate the cube.

        Parameters
            angle: angle of rotation in degrees.
            centre: centre of rotation.
            mode: interpolation mode.
        """
        #old_centre = self.centre

        # Rotate the cube
        aux = None
        for j, slc in enumerate(self.data.unmasked_data[:]):
            rotated = rotate(slc, angle, centre=centre, mode=mode)
            if aux is None:
                aux = np.array([rotated])
            else:
                aux = np.append(aux, [rotated], axis=0)

        # Create a new fits file
        hdu = fits.PrimaryHDU(aux)
        hdu.header = self.data.header
        hdu.header['COMMENT'] = 'Rotated %.3f deg, center %i, %i' % \
                ((angle,)+centre)
        self.logger.info('Image rotated %.1f deg, center %i, %i', angle,
                         *centre)

        # Redefine the reference pixel
        hdu.header['CRPIX1'] = aux.shape[2] / 2. + .5
        hdu.header['CRPIX2'] = aux.shape[1] / 2. + .5
        hdu.header['CRVAL1'] = source.position.ra.to(u.deg).value
        hdu.header['CRVAL2'] = source.position.dec.to(u.deg).value

        # Redefine header rotation
        #if 'CROTA2' in hdu.header:
        #    #if hdu.header['CROTA2']==angle:
        #    self.logger.info('Deleting rotation keywords from header')
        try:
            del hdu.header['CROTA2']
            del hdu.header['CROTA1']
        except KeyError:
            pass
            #else:
            #    self.logger.info('Changing rotation keywords from header')
            #    hdu.header['CROTA2'] = hdu.header['CROTA2']-angle
        #else:
        #    self.logger.info('Setting rotation header keywords')
        #    hdu.header['CROTA1'] = 0
        #    hdu.header['CROTA2'] = angle

        return hdu
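A usage sketch for the class (hypothetical file name and coordinates):

from astropy.coordinates import SkyCoord
import astropy.units as u

# Load a cube and extract the spectrum at a sky position.
data = Data3D('source_cube.fits')
data.load()
coord = SkyCoord(ra=52.265 * u.deg, dec=31.268 * u.deg, frame='fk5')
spec = data.get_spectrum(coord=coord)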
Example #13
def buildmasks(filename,
               nChan=2000,
               width=2e9,
               outdir=None,
               grow_v=0,
               grow_xy=0,
               setups=['HCN_HCO+', '13CO_C18O', '12CO'],
               emissionfile=None,
               galname=None,
               zsource=0):
    """Builds masks for use in DEGAS imaging pipeline. 

    Parameters
    ----------

    filename : str
        FITS filename of spectral cube mask. The file should be a
        binary mask with True / 1 indicating emission and False / 0
        otherwise.  This assumes the cube has a spectral axis in
        velocity and that the cube or has the metadata required to
        convert to velocity.  Note there is no checking of the
        spectral frame (LSRK, LSRD, BARY) and the conversion assumes
        radio Doppler convention.
  
    nChan : int
        Number of channels in output mask.  This should be larger than
        the number of channels in the DEGAS bandpass (1024)

    width : float
        Spectral width in Hz of the resulting mask.  This should be
        larger than the GBT bandwidth used (usually 1.5 GHz for DEGAS)

    outdir : str
        Directory for output masks to be stored in

    grow_v : int
        Spectrally grow the mask by this number of channels

    grow_xy : int
        Spatially grow the mask by this number of pixels

    """

    if outdir is None:
        outdir = os.environ['DEGASDIR'] + 'masks/'

    if not os.access(outdir, os.W_OK):
        try:
            os.mkdir(outdir)
            print('Made directory {0}'.format(outdir))
        except OSError:
            try:
                os.mkdir('/'.join(
                    (outdir.split('/'))[0:-1]
                ))  # there may be a safer way to do this with os.path.split
                os.mkdir(outdir)
                print('Made directory {0}'.format(outdir))
            except Exception:
                warnings.warn('Unable to make output directory ' + outdir)
                raise
        except Exception:
            warnings.warn('Unable to make output directory ' + outdir)
            raise

    c = 299792.458  # speed of light in km/s
    # Read in original cube, ensure in velocity space
    s = SpectralCube.read(filename)
    s = s.with_spectral_unit(u.km / u.s, velocity_convention='radio')
    if galname is None:
        galname = os.path.split(filename)[1].split('_')[0]

    vmid = s.spectral_axis[len(s.spectral_axis) // 2].value
    if emissionfile:
        cube = SpectralCube.read(emissionfile)
        cube = cube.with_spectral_unit(u.km / u.s, velocity_convention='radio')
        s = cube.with_mask(s > 0 * s.unit)
        s = s.with_fill_value(0 * s.unit)
        dtype = float
        outtype = float
    else:
        dtype = bool
        outtype = np.uint8

    # HCN_HCO+
    # Build a mask with a spectral width of 2 GHz and the same spatial
    # dimensions as the original mask
    if 'HCN_HCO+' in setups:
        s_hcn = s.with_spectral_unit(u.Hz, rest_value=88.631847 * u.GHz)
        s_hcop = s.with_spectral_unit(u.Hz, rest_value=89.188518 * u.GHz)

        mask = np.zeros((nChan, s.shape[1], s.shape[2]), dtype=outtype)
        hdr = s_hcn.wcs.to_header()
        hdr['CRPIX3'] = 1000
        hdr['CDELT3'] = width / nChan
        hdr['CRVAL3'] = (89.188518 + 88.631847) / 2 * 1e9 * (1 - vmid / c)
        hdr['NAXIS'] = 3
        # FITS axes run opposite to the numpy array shape (nChan, ny, nx)
        hdr['NAXIS1'] = mask.shape[2]
        hdr['NAXIS2'] = mask.shape[1]
        hdr['NAXIS3'] = mask.shape[0]
        hdr['SIMPLE'] = 'T'
        hdr['BITPIX'] = 8
        hdr['EXTEND'] = 'T'

        hdr = deduplicate_keywords(hdr)
        w = wcs.WCS(hdr)
        maskcube = SpectralCube(mask, w, header=hdr)
        for zz in range(nChan):
            nu = maskcube.spectral_axis[zz]
            _, _, zz_hcn = s_hcn.wcs.wcs_world2pix(hdr['CRVAL1'],
                                                   hdr['CRVAL2'], nu, 0)
            zz_hcn = int(zz_hcn)
            _, _, zz_hcop = s_hcop.wcs.wcs_world2pix(hdr['CRVAL1'],
                                                     hdr['CRVAL2'], nu, 0)
            zz_hcop = int(zz_hcop)
            if 0 <= zz_hcn < s_hcn.shape[0]:
                mask[zz, :, :] = np.array(s_hcn.filled_data[zz_hcn, :, :],
                                          dtype=dtype)
            if 0 <= zz_hcop < s_hcop.shape[0]:
                # Combine rather than overwrite in case both lines land
                # in the same output channel.
                mask[zz, :, :] = np.maximum(
                    mask[zz, :, :],
                    np.array(s_hcop.filled_data[zz_hcop, :, :], dtype=dtype))
        if grow_v > 0 and dtype is bool:
            mask = binary_dilation(
                mask, np.ones((int(2 * grow_v + 1), 1, 1), dtype=dtype))

        if grow_xy > 0 and dtype is bool:
            mask = binary_dilation(
                mask,
                np.ones((1, int(2 * grow_xy + 1), int(2 * grow_xy + 1)),
                        dtype=dtype))

        maskcube = SpectralCube(mask.astype(outtype), w, header=hdr)
        maskcube.write(outdir + galname + '.hcn_hcop.mask.fits',
                       overwrite=True)

    # C18O/13CO
    # Build a mask with a spectral width of 2 GHz and the same spatial
    # dimensions as the original mask

    if '13CO_C18O' in setups:
        s_13co = s.with_spectral_unit(u.Hz, rest_value=110.20135 * u.GHz)
        s_c18o = s.with_spectral_unit(u.Hz, rest_value=109.78217 * u.GHz)

        mask = np.zeros((nChan, s.shape[1], s.shape[2]), dtype=outtype)
        hdr = s_13co.wcs.to_header()
        hdr['CRPIX3'] = 1000
        hdr['CDELT3'] = width / nChan
        hdr['CRVAL3'] = (110.20135 + 109.78217) / 2 * 1e9 * (1 - vmid / c)
        hdr['NAXIS'] = 3
        hdr['NAXIS1'] = mask.shape[2]  # numpy axis order is (spec, y, x)
        hdr['NAXIS2'] = mask.shape[1]
        hdr['NAXIS3'] = mask.shape[0]
        hdr['SIMPLE'] = 'T'
        hdr['BITPIX'] = 8
        hdr['EXTEND'] = 'T'
        hdr = deduplicate_keywords(hdr)
        w = wcs.WCS(hdr)

        maskcube = SpectralCube(mask, w, header=hdr)
        for zz in range(nChan):
            nu = maskcube.spectral_axis[zz]
            _, _, zz_13co = s_13co.wcs.wcs_world2pix(hdr['CRVAL1'],
                                                     hdr['CRVAL2'], nu, 0)
            zz_13co = int(zz_13co)
            _, _, zz_c18o = s_c18o.wcs.wcs_world2pix(hdr['CRVAL1'],
                                                     hdr['CRVAL2'], nu, 0)
            zz_c18o = int(zz_c18o)
            if 0 <= zz_13co < s_13co.shape[0]:
                mask[zz, :, :] = np.array(s_13co.filled_data[zz_13co, :, :],
                                          dtype=dtype)
            if 0 <= zz_c18o < s_c18o.shape[0]:
                # Combine rather than overwrite in case both lines land
                # in the same output channel.
                mask[zz, :, :] = np.maximum(
                    mask[zz, :, :],
                    np.array(s_c18o.filled_data[zz_c18o, :, :], dtype=dtype))
        if grow_v > 0 and dtype is bool:
            mask = binary_dilation(
                mask, np.ones((int(2 * grow_v + 1), 1, 1), dtype=dtype))
        if grow_xy > 0 and dtype is bool:
            mask = binary_dilation(
                mask,
                np.ones((1, int(2 * grow_xy + 1), int(2 * grow_xy + 1)),
                        dtype=dtype))

        maskcube = SpectralCube(mask.astype(outtype), w, header=hdr)
        maskcube.write(outdir + galname + '.13co_c18o.mask.fits',
                       overwrite=True)

    # 12CO
    # Build a mask with a spectral width of 2 GHz and the same spatial
    # dimensions as the original mask
    if '12CO' in setups:
        s_12co = s.with_spectral_unit(u.Hz, rest_value=115.271204 * u.GHz)

        mask = np.zeros((nChan, s.shape[1], s.shape[2]), dtype=outtype)
        hdr = s_12co.wcs.to_header()
        hdr['CRPIX3'] = 1000
        hdr['CDELT3'] = width / nChan
        hdr['CRVAL3'] = (115.271204) * 1e9 * (1 - vmid / c)
        hdr['NAXIS'] = 3
        hdr['NAXIS1'] = mask.shape[2]  # numpy axis order is (spec, y, x)
        hdr['NAXIS2'] = mask.shape[1]
        hdr['NAXIS3'] = mask.shape[0]
        hdr['SIMPLE'] = 'T'
        hdr['BITPIX'] = 8
        hdr['EXTEND'] = 'T'
        hdr = deduplicate_keywords(hdr)
        w = wcs.WCS(hdr)

        maskcube = SpectralCube(mask, w, header=hdr)
        for zz in range(nChan):
            nu = maskcube.spectral_axis[zz]
            _, _, zz_12co = s_12co.wcs.wcs_world2pix(hdr['CRVAL1'],
                                                     hdr['CRVAL2'], nu, 0)
            zz_12co = int(zz_12co)
            if 0 <= zz_12co < s_12co.shape[0]:
                mask[zz, :, :] = np.array(s_12co.filled_data[zz_12co, :, :],
                                          dtype=dtype)
        if grow_v > 0 and dtype is bool:
            mask = binary_dilation(
                mask, np.ones((int(2 * grow_v + 1), 1, 1), dtype=dtype))
        if grow_xy > 0 and dtype is bool:
            mask = binary_dilation(
                mask,
                np.ones((1, int(2 * grow_xy + 1), int(2 * grow_xy + 1)),
                        dtype=dtype))

        maskcube = SpectralCube(mask.astype(outtype), w, header=hdr)
        maskcube.write(outdir + galname + '.12co.mask.fits', overwrite=True)
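
# Usage sketch (not from the source): build 2 GHz masks for two DEGAS
# setups from a hypothetical binary mask cube; the filename and output
# directory below are illustrative assumptions.
buildmasks('NGC4038_mask.fits',
           outdir='masks/',
           setups=['HCN_HCO+', '12CO'],
           grow_v=2,
           grow_xy=1)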
Exemple #14
0
def cubemask(infile,
             outfile,
             peakCut=5.0,
             lowCut=3.0,
             minBeamFrac=1.0,
             minNchan=1.0,
             skipChan=None,
             threeD=False,
             noise3D=False,
             outDir='./mask'):
    """ Creates a mask for a cube
    
    Parameters
    ----------

    infile : str
          input file

    output file : str
          output file

    lowCut : float
          Minimum signal-to-noise to expand the initial mask down to

    peakCut : float
          Minimum signal-to-noise for initial mask

    """

    from astrodendro import Dendrogram
    from astrodendro import pruning as p
    from matplotlib import pyplot as plt
    from scipy import ndimage

    # read in the data.
    cube = SpectralCube.read(infile)

    # set up cube and initialize mask
    cubedata = cube.unmasked_data[:, :, :].value
    mask = np.zeros(cube.shape, dtype=bool)

    # get info about cube
    nchan = cube.spectral_axis.size  #number of channels

    # calculate noise
    if noise3D:
        # inputs based on PHANGS defaults.
        noise = noise_cube(cubedata,
                           box=40,
                           spec_box=5,
                           bandpass_smooth_order=2,
                           iterations=3)
        #SpectralCube(noise, cube.wcs, beam=cube.beam).write(os.path.join(outDir,outfile).replace('.fits','_noise.fits'),format='fits',overwrite=True)
    else:
        # single value for the whole cube, already scaled
        noise = cube.mad_std().value

    sncube = cubedata / noise  # operate on the S/N cube to make everything easier

    #SpectralCube(sncube, cube.wcs, beam=cube.beam).write(os.path.join(outDir,outfile).replace('.fits','_sncube.fits'),format='fits',overwrite=True)

    if threeD:
        # compute dendrogram with a min threshold (input), 1sigma contrast,
        # 1 beamsize as lower limit and a peak value lower limit(input)
        # all distinct regions will need to have a peak value > peakCut*sigma,
        # or else it will be merged
        d = Dendrogram.compute(sncube,
                               min_value=lowCut,
                               min_delta=1.0,
                               min_npix=minBeamFrac * cube.pixels_per_beam *
                               minNchan,
                               is_independent=p.min_peak(peakCut))

        for t in d.trunk:
            mask = mask | t.get_mask()

    else:
        for chan in np.arange(nchan):
            d = Dendrogram.compute(sncube[chan, :, :],
                                   min_value=lowCut,
                                   min_delta=1.0,
                                   min_npix=minBeamFrac * cube.pixels_per_beam,
                                   is_independent=p.min_peak(peakCut))

            for t in d.trunk:
                mask[chan, :, :] = mask[chan, :, :] | t.get_mask()

    # blank channels you want to skip
    if skipChan:
        for chan in skipChan:
            mask[chan, :, :] = False

    # fill in holes in individual channels
    for chan in np.arange(nchan):
        mask[chan, :, :] = ndimage.binary_fill_holes(
            mask[chan, :, :])  # fill holes in the mask

    maskhead = cube.header
    maskhead['BUNIT'] = ''
    cubemask = SpectralCube(data=mask.astype('short'),
                            wcs=cube.wcs,
                            header=maskhead)
    cubemask.write(os.path.join(outDir, outfile),
                   format='fits',
                   overwrite=True)
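
# Usage sketch (not from the source): dendrogram-based masking that
# keeps regions peaking above 5 sigma and expands them down to 3 sigma;
# the filenames are illustrative assumptions.
cubemask('ngc1234_co21.fits',
         'ngc1234_co21_mask.fits',
         peakCut=5.0,
         lowCut=3.0,
         noise3D=True,
         outDir='./mask')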
Exemple #15
0
    # (opening of this example truncated in the source)
    np.save(
        co_stackpath(
            "radial_stacking_pixelsinbin_north_{0}_sigmacut_{1}.npy").format(
                wstring, level), num_pixels_n)
    np.save(
        co_stackpath(
            "radial_stacking_pixelsinbin_south_{0}_sigmacut_{1}.npy").format(
                wstring, level), num_pixels_s)

    spec_shape = co_cube_peakvel.shape[0]

    peakvel_stack = SpectralCube(
        data=total_spectrum_co_radial_peakvel.T.reshape(
            (spec_shape, bin_centers.size, 1)),
        wcs=co_cube_peakvel.wcs)
    peakvel_stack.write(co_stackpath(
        "peakvel_stacked_radial_{0}_sigmacut_{1}.fits".format(wstring, level)),
                        overwrite=True)

    peakvel_stack_n = SpectralCube(
        data=total_spectrum_co_radial_peakvel_n.T.reshape(
            (spec_shape, bin_centers.size, 1)),
        wcs=co_cube_peakvel.wcs)
    peakvel_stack_n.write(co_stackpath(
        "peakvel_stacked_radial_north_{0}_sigmacut_{1}.fits".format(
            wstring, level)),
                          overwrite=True)
    peakvel_stack_s = SpectralCube(
        data=total_spectrum_co_radial_peakvel_s.T.reshape(
            (spec_shape, bin_centers.size, 1)),
        wcs=co_cube_peakvel.wcs)
    peakvel_stack_s.write(co_stackpath(
        "peakvel_stacked_radial_south_{0}_sigmacut_{1}.fits".format(
            wstring, level)),
                          overwrite=True)
Exemple #16
0
        # (opening of this example truncated in the source)
        pb.update(ii + 1)

    # Note that there are overlaps in the catalog, which means that ORDER MATTERS
    # in the above loop.  I haven't yet checked whether large scale overwrites
    # small or vice-versa; it may be that both views of the data are interesting.
    tcube = SpectralCube(
        data=tcubedata,
        wcs=cubeA.wcs,
        mask=cubeA.mask,
        meta={'unit': 'K'},
        header=cubeA.header,
    )

    outpath = 'TemperatureCube_DendrogramObjects{0}_Piecewise.fits'.format(sm)
    tcube.write(hpath(outpath), overwrite=True)

    rcube = SpectralCube(
        data=rcubedata,
        wcs=cubeA.wcs,
        mask=cubeA.mask,
        meta={'unit': 'K'},
        header=cubeA.header,
    )

    outpath = 'RatioCube_DendrogramObjects{0}.fits'.format(sm)
    rcube.write(hpath(outpath), overwrite=True)

    max_temcube = tcube.max(axis=0)
    max_temcube.hdu.writeto(hpath(
        'TemperatureCube_DendrogramObjects{0}_Piecewise_max.fits'.format(sm)),
        overwrite=True)  # (call truncated in the source; closed here)
Exemple #17
0
# (opening of this example truncated in the source; the loop header is
#  reconstructed from the surviving argument names and the loop body,
#  so the names before column_flat are assumptions)
for ii, ((z, y, x), rat, erat, col, ta303, ta321, err) in enumerate(
        zip(zyx_coords, ratio_flat, eratio_flat, column_flat, utline303,
            utline321, unoise)):
    if tcube[z,y,x] == 0:
        logh2column = np.log10(col)+22

        mf.set_constraints(ratio303321=rat, eratio303321=erat,
                           #ratio321322=ratio2, eratio321322=eratio2,
                           logh2column=logh2column, elogh2column=elogh2column,
                           logabundance=logabundance, elogabundance=elogabundance,
                           taline303=ta303.value, etaline303=err,
                           taline321=ta321.value, etaline321=err,
                           linewidth=linewidth)
        row_data = mf.get_parconstraints()
        tcube[z,y,x] = row_data['temperature_chi2']
        row_data['ratio303321'] = rat
        row_data['eratio303321'] = erat

        if ii % 100 == 0 or ii < 50:
            log.info("T: [{tmin1sig_chi2:7.2f},{temperature_chi2:7.2f},{tmax1sig_chi2:7.2f}]  R={ratio303321:6.2f}+/-{eratio303321:6.2f}".format(**row_data))
        else:
            pb.update(ii)
        tcube.flush()
    else:
        pb.update(ii)

tcube[tcube==0] = np.nan
tCube = SpectralCube(tcube, cube303.wcs, mask=BooleanArrayMask(np.isfinite(tcube), wcs=cube303.wcs))
tCube.write(hpath('chi2_temperature_cube.fits'), overwrite=True)

print()
Exemple #18
0
def regridData(baseCubeFits, otherDataFits, outDir, mask=False):
    '''
    regrids one data set to match the wcs of the base data set, which
    is assumed to be a cube. The regridded data set can be either 2d
    or 3d.
    '''

    # open the base cube
    try:
        baseCube = SpectralCube.read(baseCubeFits)
    except Exception:
        print("Can't read in " + baseCubeFits + ".")
        raise

    # determine how many dimensions the other data sets have
    f = fits.open(otherDataFits)
    ndim = f[0].header['NAXIS']
    f.close()

    # output image name
    newFits = os.path.join(
        outDir,
        os.path.basename(otherDataFits).replace('.fits', '_regrid.fits'))

    # now regrid images appropriately
    if ndim == 3:

        # read in cube
        otherCube = SpectralCube.read(otherDataFits)

        # interpolate velocity axis. This needs to be done first.
        regridCube = otherCube.spectral_interpolate(baseCube.spectral_axis)

        newCube = regridCube.reproject(baseCube.header)

        if mask:
            # if greater than 0.0 set value to 1. otherwise 0.
            newdata = np.where(newCube.unitless_filled_data[:, :, :] > 0.0, 1,
                               0)
            finalCube = SpectralCube(newdata, newCube.wcs, mask=baseCube.mask)

        else:
            newdata = newCube.filled_data[:, :, :]
            finalCube = SpectralCube(newdata, newCube.wcs, mask=baseCube.mask)

        finalCube.write(newFits, overwrite=True)

    elif ndim == 2:

        # regrid image
        newcube = reproject_interp(
            otherDataFits,
            output_projection=baseCube.wcs.dropaxis(2),
            shape_out=baseCube.wcs.dropaxis(2).array_shape,
            order='nearest-neighbor',
            return_footprint=False)

        if mask:
            # set to 1 where greater than 0.0.
            newdata = np.where(newcube > 0.0, 1.0, 0.0)
        else:
            newdata = newcube

        # get mask on original data
        baseMask = baseCube.get_mask_array()
        totalBaseMask = baseMask.max(axis=0)

        newdata[np.invert(totalBaseMask)] = np.nan

        # write out regridded data
        fits.writeto(newFits,
                     newdata,
                     baseCube.wcs.dropaxis(2).to_header(),
                     overwrite=True)

    else:
        print("Number of dimensions of other data set is not 2 or 3.")
Exemple #19
0
def gauss_fitter(region='Cepheus_L1251',
                 snr_min=3.0,
                 mol='C2S',
                 vmin=5.0,
                 vmax=10.0,
                 convolve=False,
                 use_old_conv=False,
                 multicore=1,
                 file_extension=None):
    """
    	Fit a Gaussian to non-NH3 emission lines from GAS.
    	It creates a cube for the best-fit Gaussian, a cube 
    	for the best-fit Gaussian with noise added back into 
    	the spectrum, and a parameter map of Tpeak, Vlsr, and FWHM
    
    	Parameters
    	----------
    	region : str
        	Name of region to reduce
    	snr_min : float
        	Lowest signal-to-noise pixels to include in the line-fitting
    	mol : str
        	name of molecule to fit
   	vmin : numpy.float
        	Minimum centroid velocity, in km/s.
    	vmax : numpy.float
        	Maximum centroid velocity, in km/s.
    	convolve : bool or float
        	If not False, specifies the beam-size to convolve the original map with
		Beam-size must be given in arcseconds
    	use_old_conv : bool
        	If True, use an already convolved map with name:
		region + '_' + mol + file_extension + '_conv.fits'
		This convolved map must be in units of km/s
    	multicore : int
		Maximum number of simultaneous processes desired
	file_extension: str
		filename extension 
    	"""
    if file_extension:
        root = file_extension
    else:
        # root = 'base{0}'.format(blorder)
        root = 'all'

    molecules = ['C2S', 'HC7N_22_21', 'HC7N_21_20', 'HC5N']

    MolFile = '{0}/{0}_{2}_{1}.fits'.format(region, root, mol)
    ConvFile = '{0}/{0}_{2}_{1}_conv.fits'.format(region, root, mol)
    GaussOut = '{0}/{0}_{2}_{1}_gauss_cube.fits'.format(region, root, mol)
    GaussNoiseOut = '{0}/{0}_{2}_{1}_gauss_cube_noise.fits'.format(
        region, root, mol)
    ParamOut = '{0}/{0}_{2}_{1}_param_cube.fits'.format(region, root, mol)

    # Load the spectral cube and convert to velocity units
    cube = SpectralCube.read(MolFile)
    cube_km = cube.with_spectral_unit(u.km / u.s, velocity_convention='radio')

    # If desired, convolve map with larger beam
    # or load previously created convolved cube
    if convolve:
        cube = SpectralCube.read(MolFile)
        cube_km_1 = cube.with_spectral_unit(u.km / u.s,
                                            velocity_convention='radio')
        beam = radio_beam.Beam(major=convolve * u.arcsec,
                               minor=convolve * u.arcsec,
                               pa=0 * u.deg)
        cube_km = cube_km_1.convolve_to(beam)
        cube_km.write(ConvFile, format='fits', overwrite=True)
    if use_old_conv:
        cube_km = SpectralCube.read(ConvFile)

    # Define the spectral axis in km/s
    spectra_x_axis_kms = np.array(cube_km.spectral_axis)

    # Find the channel range corresponding to vmin and vmax
    # -- This is a hold-over from when I originally set up the code to
    #    use a channel range rather than velocity range.
    #    Can change later, but this should work for now.
    low_channel = np.where(spectra_x_axis_kms <= vmax
                           )[0][0] + 1  # Add ones to change index to channel
    high_channel = np.where(spectra_x_axis_kms >= vmin
                            )[0][-1] + 1  # Again, hold-over from older setup
    peak_channels = [low_channel, high_channel]

    # Create cubes for storing the fitted Gaussian profiles
    # and the Gaussians with noise added back into the spectrum
    header = cube_km.header
    cube_gauss = np.array(cube_km.unmasked_data[:, :, :])
    cube_gauss_noise = np.array(cube_km.unmasked_data[:, :, :])
    shape = np.shape(cube_gauss)

    # Set up a cube for storing fitted parameters
    param_cube = np.zeros((6, shape[1], shape[2]))
    param_header = cube_km.header

    # Define the Gaussian profile
    def p_eval(x, a, x0, sigma):
        return a * np.exp(-(x - x0)**2 / (2 * sigma**2))

    # Create some arrays full of NaNs,
    # to be used in output cubes if fits fail
    nan_array = np.empty(shape[0])  # For gauss cubes
    nan_array[:] = np.nan
    nan_array2 = np.empty(param_cube.shape[0])  # For param cubes
    nan_array2[:] = np.nan

    # Loop through each pixel and find those
    # with SNR above snr_min
    x = []
    y = []
    pixels = 0
    for (i, j), value in np.ndenumerate(cube_gauss[0]):
        spectra = np.array(cube_km.unmasked_data[:, i, j])
        if not np.all(np.isnan(spectra)):
            rms = np.nanstd(
                np.append(spectra[0:(peak_channels[0] - 1)],
                          spectra[(peak_channels[1] + 1):len(spectra)]))
            if (max(spectra[peak_channels[0]:peak_channels[1]]) /
                    rms) > snr_min:
                pixels += 1
                x.append(i)
                y.append(j)
        else:
            cube_gauss[:, i, j] = nan_array
            param_cube[:, i, j] = nan_array2
            cube_gauss_noise[:, i, j] = nan_array
    print(str(pixels) + ' Pixels above SNR=' + str(snr_min))

    # Define a Gaussian fitting function for each pixel
    # i, j are the x,y coordinates of the pixel being fit
    def pix_fit(i, j):
        spectra = np.array(cube_km.unmasked_data[:, i, j])
        # Use the peak brightness Temp within specified channel
        # range as the initial guess for Gaussian height
        max_ch = np.argmax(spectra[peak_channels[0]:peak_channels[1]])
        Tpeak = spectra[peak_channels[0]:peak_channels[1]][max_ch]
        # Use the velocity of the brightness Temp peak as
        # initial guess for Gaussian mean
        vpeak = spectra_x_axis_kms[peak_channels[0]:peak_channels[1]][max_ch]
        rms = np.std(
            np.append(spectra[0:(peak_channels[0] - 1)],
                      spectra[(peak_channels[1] + 1):len(spectra)]))
        err1 = np.zeros(shape[0]) + rms
        # Create a noise spectrum based on rms of off-line channels
        # This will be added to best-fit Gaussian to obtain a noisy Gaussian
        noise = np.random.normal(0., rms, len(spectra_x_axis_kms))
        # Define initial guesses for Gaussian fit
        guess = [Tpeak, vpeak, 0.3]  # [height, mean, sigma]
        try:
            coeffs, covar_mat = curve_fit(p_eval,
                                          xdata=spectra_x_axis_kms,
                                          ydata=spectra,
                                          p0=guess,
                                          sigma=err1,
                                          maxfev=500)
            gauss = np.array(
                p_eval(spectra_x_axis_kms, coeffs[0], coeffs[1], coeffs[2]))
            noisy_gauss = np.array(
                p_eval(spectra_x_axis_kms, coeffs[0], coeffs[1],
                       coeffs[2])) + noise
            params = np.append(coeffs, (covar_mat[0][0]**0.5, covar_mat[1][1]**
                                        0.5, covar_mat[2][2]**0.5))
            # params = ['Tpeak', 'VLSR','sigma','Tpeak_err','VLSR_err','sigma_err']

            # Don't accept fit if fitted parameters are non-physical or too uncertain
            if (params[0] < 0.01) or (params[3] > 1.0) or (
                    params[2] < 0.05) or (params[5] > 0.5) or (params[4] >
                                                               0.75):
                noisy_gauss = nan_array
                gauss = nan_array
                params = nan_array2

            # Don't accept fit if the SNR for fitted spectrum is less than SNR threshold
            #if max(gauss)/rms < snr_min:
            #	noisy_gauss = nan_array
            #	gauss = nan_array
            #	params = nan_array2

        except RuntimeError:
            noisy_gauss = nan_array
            gauss = nan_array
            params = nan_array2

        return i, j, gauss, params, noisy_gauss

    # Parallel computation:
    nproc = multicore  # maximum number of simultaneous processes desired
    queue = pprocess.Queue(limit=nproc)
    calc = queue.manage(pprocess.MakeParallel(pix_fit))
    tic = time.time()
    counter = 0

    # Uncomment to see some plots of the fitted spectra
    #for i,j in zip(x,y):
    #pix_fit(i,j)
    #plt.plot(spectra_x_axis_kms, spectra, color='blue', drawstyle='steps')
    #plt.plot(spectra_x_axis_kms, gauss, color='red')
    #plt.show()
    #plt.close()

    # Begin parallel computations
    # Store the best-fit Gaussians and parameters
    # in their correct positions in the previously created cubes
    for i, j in zip(x, y):
        calc(i, j)
    for i, j, gauss_spec, parameters, noisy_gauss_spec in queue:
        cube_gauss[:, i, j] = gauss_spec
        param_cube[:, i, j] = parameters
        cube_gauss_noise[:, i, j] = noisy_gauss_spec
        counter += 1
        print(str(counter) + ' of ' + str(pixels) + ' pixels completed',
              end='\r')
        sys.stdout.flush()
    print("\n %f s for parallel computation." % (time.time() - tic))

    # Save final cubes
    # These will be in km/s units.
    # Spectra will have larger values to the left, lower values to right
    cube_final_gauss = SpectralCube(data=cube_gauss,
                                    wcs=cube_km.wcs,
                                    header=cube_km.header)
    cube_final_gauss.write(GaussOut, format='fits', overwrite=True)
    cube_final_gauss_noise = SpectralCube(data=cube_gauss_noise,
                                          wcs=cube_km.wcs,
                                          header=cube_km.header)
    cube_final_gauss_noise.write(GaussNoiseOut, format='fits', overwrite=True)

    # Construct appropriate header for param_cube
    param_header['NAXIS3'] = len(nan_array2)
    param_header['WCSAXES'] = 3
    param_header['CRPIX3'] = 1
    param_header['CDELT3'] = 1
    param_header['CRVAL3'] = 0
    param_header['PLANE1'] = 'Tpeak'
    param_header['PLANE2'] = 'VLSR'
    param_header['PLANE3'] = 'sigma'
    param_header['PLANE4'] = 'Tpeak_err'
    param_header['PLANE5'] = 'VLSR_err'
    param_header['PLANE6'] = 'sigma_err'

    fits.writeto(ParamOut, param_cube, header=param_header, overwrite=True)
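
# Usage sketch (not from the source): fit Gaussians to C2S emission in
# a GAS region over a 5-10 km/s centroid window with 4 processes; the
# region name and file layout follow the GAS conventions assumed above.
gauss_fitter(region='Cepheus_L1251',
             mol='C2S',
             vmin=5.0,
             vmax=10.0,
             snr_min=3.0,
             multicore=4)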
Exemple #20
0
def rebaseline(filename, blorder=3,
               baselineRegion=[slice(0, 262, 1), slice(-512, 0, 1)],
               windowFunction=None, blankBaseline=False,
               flagSpike=True, v0=None, **kwargs):
    """
    Rebaseline a data cube using robust regression of Legendre polynomials.

    Parameters
    ----------
    filename : string
        FITS filename of the data cube
    blorder : int
        Order of the polynomial to fit to the data
    baselineRegion : list
        List of slices defining the default region of the spectrum, in
        channels, to be used for the baseline fitting.
    windowFunction : function
        Name of function to be used that will accept spectrum data, and
        velocity axis and will return a binary mask of the channels to be used
        in the  baseline fitting.  Extra **kwargs are passed to windowFunction
        to do with as it must.
    blankBaseline : boolean
        Blank the baseline region on a per-spectrum basis

    Returns
    -------
    Nothing.  A new FITS file is written out with the suffix 'rebaseN' where N
    is the baseline order

    """
    cube = SpectralCube.read(filename)
    originalUnit = cube.spectral_axis.unit
    cube = cube.with_spectral_unit(u.km / u.s, velocity_convention='radio')
    spaxis = cube.spectral_axis.to(u.km / u.s).value

    goodposition = np.isfinite(cube.apply_numpy_function(np.max, axis=0))
    y, x = np.where(goodposition)
    outcube = np.zeros(cube.shape) * np.nan
    RegionName = (filename.split('_'))[0]

    if hasattr(windowFunction, '__call__'):
        catalog = catalogs.GenerateRegions()

    nuindex = np.arange(cube.shape[0])
    runmin = nuindex[-1]
    runmax = nuindex[0]

    for thisy, thisx in console.ProgressBar(list(zip(y, x))):
        spectrum = cube[:, thisy, thisx].value

        if v0 is not None:
            baselineIndex = windowFunction(spectrum, spaxis,
                                           v0=v0, **kwargs)
        elif hasattr(windowFunction, '__call__'):
            _, Dec, RA = cube.world[0, thisy, thisx]
            # This determines a v0 appropriate for the region
            v0 = VlsrByCoord(RA.value, Dec.value, RegionName,
                             regionCatalog=catalog)
            baselineIndex = windowFunction(spectrum, spaxis,
                                           v0=v0, **kwargs)
        else:
            baselineIndex = np.zeros_like(spectrum, dtype=bool)
            for ss in baselineRegion:
                baselineIndex[ss] = True

        runmin = np.min([nuindex[baselineIndex].min(), runmin])
        runmax = np.max([nuindex[baselineIndex].max(), runmax])

        # Use channel-to-channel difference as the noise value.
        if flagSpike:
            jumps = (spectrum - np.roll(spectrum, -1))
            noise = mad1d(jumps) * 2**(-0.5)
            baselineIndex *= (np.abs(jumps) < 5 * noise)
            noise = mad1d((spectrum -
                           np.roll(spectrum, -2))[baselineIndex]) * 2**(-0.5)
        else:
            noise = mad1d((spectrum -
                           np.roll(spectrum, -2))[baselineIndex]) * 2**(-0.5)

        if blankBaseline:
            spectrum = robustBaseline(spectrum, baselineIndex,
                                      blorder=blorder,
                                      noiserms=noise)
            spectrum[baselineIndex] = np.nan
            outcube[:, thisy, thisx] = spectrum
        else:
            outcube[:, thisy, thisx] = robustBaseline(spectrum, baselineIndex,
                                                      blorder=blorder,
                                                      noiserms=noise)

    outsc = SpectralCube(outcube, cube.wcs, header=cube.header)
    outsc = outsc[runmin:runmax, :, :]  # cut beyond baseline edges
    # Return to original spectral unit
    outsc = outsc.with_spectral_unit(originalUnit)
    outsc.write(filename.replace('.fits', '_rebase{0}.fits'.format(blorder)),
                overwrite=True)
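
# Usage sketch (not from the source): fit and subtract a third-order
# Legendre baseline using the default fixed channel windows; the
# filename is an illustrative assumption, and the output gains a
# '_rebase3' suffix.
rebaseline('B18_NH3_11.fits', blorder=3, blankBaseline=False)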
Exemple #21
0
def measure_dendrogram_properties(dend=None,
                                  cube303=cube303,
                                  cube321=cube321,
                                  cube13co=cube13co,
                                  cube18co=cube18co,
                                  noise_cube=noise_cube,
                                  sncube=sncube,
                                  suffix="",
                                  last_index=None,
                                  plot_some=True,
                                  line='303',
                                  write=True):

    assert (cube321.shape == cube303.shape == noise_cube.shape ==
            cube13co.shape == cube18co.shape == sncube.shape)
    assert sncube.wcs is cube303.wcs is sncube.mask._wcs

    metadata = {}
    metadata['data_unit'] = u.K
    metadata['spatial_scale'] = 7.2 * u.arcsec
    metadata['beam_major'] = 30 * u.arcsec
    metadata['beam_minor'] = 30 * u.arcsec
    metadata['wavelength'] = 218.22219 * u.GHz
    metadata['velocity_scale'] = u.km / u.s
    metadata['wcs'] = cube303.wcs

    keys = [
        'density_chi2',
        'expected_density',
        'dmin1sig_chi2',
        'dmax1sig_chi2',
        'column_chi2',
        'expected_column',
        'cmin1sig_chi2',
        'cmax1sig_chi2',
        'temperature_chi2',
        'expected_temperature',
        'tmin1sig_chi2',
        'tmax1sig_chi2',
        'eratio321303',
        'ratio321303',
        'logh2column',
        'elogh2column',
        'logabundance',
        'elogabundance',
    ]
    obs_keys = [
        'Stot303',
        'Smin303',
        'Smax303',
        'Stot321',
        'Smean303',
        'Smean321',
        'npix',
        'e303',
        'e321',
        'r321303',
        'er321303',
        '13cosum',
        'c18osum',
        '13comean',
        'c18omean',
        's_ntotal',
        'index',
        'is_leaf',
        'parent',
        'root',
        'lon',
        'lat',
        'vcen',
        'higaldusttem',
        'reff',
        'dustmass',
        'dustmindens',
        'bad',
        #'tkin_turb',
    ]
    columns = {k: [] for k in (keys + obs_keys)}

    log.debug("Initializing dendrogram temperature fitting loop")

    # FORCE wcs to match
    # (technically should reproject here)
    cube13co._wcs = cube18co._wcs = cube303.wcs
    cube13co.mask._wcs = cube18co.mask._wcs = cube303.wcs

    if line == '303':
        maincube = cube303
    elif line == '321':
        maincube = cube321
    else:
        raise ValueError("Unrecognized line: {0}".format(line))

    # Prepare an array to hold the fitted temperatures
    tcubedata = np.empty(maincube.shape, dtype='float32')
    tcubedata[:] = np.nan
    tcubeleafdata = np.empty(maincube.shape, dtype='float32')
    tcubeleafdata[:] = np.nan

    nbad = 0

    catalog = ppv_catalog(dend, metadata)
    pb = ProgressBar(len(catalog))
    for ii, row in enumerate(catalog):
        structure = dend[row['_idx']]
        assert structure.idx == row['_idx'] == ii
        dend_obj_mask = BooleanArrayMask(structure.get_mask(), wcs=cube303.wcs)
        dend_inds = structure.indices()

        view = (
            slice(dend_inds[0].min(), dend_inds[0].max() + 1),
            slice(dend_inds[1].min(), dend_inds[1].max() + 1),
            slice(dend_inds[2].min(), dend_inds[2].max() + 1),
        )
        #view2 = cube303.subcube_slices_from_mask(dend_obj_mask)
        submask = dend_obj_mask[view]
        #assert np.count_nonzero(submask.include()) == np.count_nonzero(dend_obj_mask.include())

        sn = sncube[view].with_mask(submask)
        sntot = sn.sum().value
        #np.testing.assert_almost_equal(sntot, structure.values().sum(), decimal=0)

        c303 = cube303[view].with_mask(submask)
        c321 = cube321[view].with_mask(submask)
        co13sum = cube13co[view].with_mask(submask).sum().value
        co18sum = cube18co[view].with_mask(submask).sum().value
        if hasattr(co13sum, '__len__'):
            raise TypeError(
                ".sum() applied to an array has yielded a non scalar.")

        npix = submask.include().sum()
        assert npix == structure.get_npix()
        Stot303 = c303.sum().value
        if np.isnan(Stot303):
            raise ValueError("NaN in cube.  This can't happen: the data from "
                             "which the dendrogram was derived can't have "
                             "NaN pixels.")
        Smax303 = c303.max().value
        Smin303 = c303.min().value

        Stot321 = c321.sum().value
        if npix == 0:
            raise ValueError("npix=0. This is impossible.")
        Smean303 = Stot303 / npix
        if Stot303 <= 0 and line == '303':
            raise ValueError(
                "The 303 flux is <=0.  This isn't possible because "
                "the dendrogram was derived from the 303 data with a "
                "non-zero threshold.")
        elif Stot303 <= 0 and line == '321':
            Stot303 = 0
            Smean303 = 0
        elif Stot321 <= 0 and line == '321':
            raise ValueError(
                "The 321 flux is <=0.  This isn't possible because "
                "the dendrogram was derived from the 321 data with a "
                "non-zero threshold.")
        if np.isnan(Stot321):
            raise ValueError("NaN in 321 line")
        Smean321 = Stot321 / npix

        #error = (noise_cube[view][submask.include()]).sum() / submask.include().sum()**0.5
        var = ((noise_cube[dend_obj_mask.include()]**2).sum() / npix**2)
        error = var**0.5
        if np.isnan(error):
            raise ValueError("error is nan: this is impossible by definition.")

        if line == '321' and Stot303 == 0:
            r321303 = np.nan
            er321303 = np.nan
        elif Stot321 < 0:
            r321303 = error / Smean303
            er321303 = (r321303**2 * (var / Smean303**2 + 1))**0.5
        else:
            r321303 = Stot321 / Stot303
            er321303 = (r321303**2 *
                        (var / Smean303**2 + var / Smean321**2))**0.5

        for c in columns:
            assert len(columns[c]) == ii

        columns['index'].append(row['_idx'])
        columns['s_ntotal'].append(sntot)
        columns['Stot303'].append(Stot303)
        columns['Smax303'].append(Smax303)
        columns['Smin303'].append(Smin303)
        columns['Stot321'].append(Stot321)
        columns['Smean303'].append(Smean303)
        columns['Smean321'].append(Smean321)
        columns['npix'].append(npix)
        columns['e303'].append(error)
        columns['e321'].append(error)
        columns['r321303'].append(r321303)
        columns['er321303'].append(er321303)
        columns['13cosum'].append(co13sum)
        columns['c18osum'].append(co18sum)
        columns['13comean'].append(co13sum / npix)
        columns['c18omean'].append(co18sum / npix)
        columns['is_leaf'].append(structure.is_leaf)
        columns['parent'].append(
            structure.parent.idx if structure.parent else -1)
        columns['root'].append(get_root(structure).idx)
        s_main = maincube._data[dend_inds]
        x, y, z = maincube.world[dend_inds]
        lon = ((z.value - (360 *
                           (z.value > 180))) * s_main).sum() / s_main.sum()
        lat = (y * s_main).sum() / s_main.sum()
        vel = (x * s_main).sum() / s_main.sum()
        columns['lon'].append(lon)
        columns['lat'].append(lat.value)
        columns['vcen'].append(vel.value)

        mask2d = dend_obj_mask.include().max(axis=0)[view[1:]]
        logh2column = np.log10(
            np.nanmean(column_regridded.data[view[1:]][mask2d]) * 1e22)
        if np.isnan(logh2column):
            log.info("Source #{0} has NaNs".format(ii))
            logh2column = 24
        elogh2column = elogabundance
        columns['higaldusttem'].append(
            np.nanmean(dusttem_regridded.data[view[1:]][mask2d]))

        r_arcsec = row['radius'] * u.arcsec
        reff = (r_arcsec * (8.5 * u.kpc)).to(u.pc, u.dimensionless_angles())
        mass = ((10**logh2column * u.cm**-2) * np.pi * reff**2 * 2.8 *
                constants.m_p).to(u.M_sun)
        density = (mass / (4 / 3. * np.pi * reff**3) / constants.m_p / 2.8).to(
            u.cm**-3)

        columns['reff'].append(reff.value)
        columns['dustmass'].append(mass.value)
        columns['dustmindens'].append(density.value)
        mindens = np.log10(density.value)
        if mindens < 3:
            mindens = 3

        if (r321303 < 0 or np.isnan(r321303)) and line != '321':
            raise ValueError("Ratio <0: This can't happen any more because "
                             "if either num/denom is <0, an exception is "
                             "raised earlier")
            #for k in columns:
            #    if k not in obs_keys:
            #        columns[k].append(np.nan)
        elif (r321303 < 0 or np.isnan(r321303)) and line == '321':
            for k in keys:
                columns[k].append(np.nan)
        else:
            # Replace negatives for fitting
            if Smean321 <= 0:
                Smean321 = error
            mf.set_constraints(
                ratio321303=r321303,
                eratio321303=er321303,
                #ratio321322=ratio2, eratio321322=eratio2,
                logh2column=logh2column,
                elogh2column=elogh2column,
                logabundance=logabundance,
                elogabundance=elogabundance,
                taline303=Smean303,
                etaline303=error,
                taline321=Smean321,
                etaline321=error,
                mindens=mindens,
                linewidth=10)
            row_data = mf.get_parconstraints()
            row_data['ratio321303'] = r321303
            row_data['eratio321303'] = er321303

            for k in row_data:
                columns[k].append(row_data[k])

            # Exclude bad velocities from cubes
            if row['v_cen'] < -80e3 or row['v_cen'] > 180e3:
                # Skip: there is no real structure down here
                nbad += 1
                is_bad = True
            else:
                is_bad = False
                tcubedata[
                    dend_obj_mask.include()] = row_data['expected_temperature']
                if structure.is_leaf:
                    tcubeleafdata[dend_obj_mask.include(
                    )] = row_data['expected_temperature']

            columns['bad'].append(is_bad)

            width = row['v_rms'] * u.km / u.s
            lengthscale = reff

            #REMOVED in favor of despotic version done in dendrograms.py
            # we use the analytic version here; the despotic version is
            # computed elsewhere (with appropriate gcor factors)
            #columns['tkin_turb'].append(heating.tkin_all(10**row_data['density_chi2']*u.cm**-3,
            #                                             width,
            #                                             lengthscale,
            #                                             width/lengthscale,
            #                                             columns['higaldusttem'][-1]*u.K,
            #                                             crir=0./u.s))

        if len(set(len(c) for k, c in columns.items())) != 1:
            print("Columns are different lengths.  This is not allowed.")
            import ipdb
            ipdb.set_trace()

        for c in columns:
            assert len(columns[c]) == ii + 1

        if plot_some and not is_bad and ((ii - nbad) % 100 == 0
                                         or ii - nbad < 50):
            try:
                log.info(
                    "T: [{tmin1sig_chi2:7.2f},{expected_temperature:7.2f},{tmax1sig_chi2:7.2f}]"
                    "  R={ratio321303:8.4f}+/-{eratio321303:8.4f}"
                    "  Smean303={Smean303:8.4f} +/- {e303:8.4f}"
                    "  Stot303={Stot303:8.2e}  npix={npix:6d}".format(
                        Smean303=Smean303,
                        Stot303=Stot303,
                        npix=npix,
                        e303=error,
                        **row_data))

                pl.figure(1)
                pl.clf()
                mf.denstemplot()
                pl.savefig(
                    fpath("dendrotem/diagnostics/{0}_{1}.png".format(
                        suffix, ii)))
                pl.figure(2).clf()
                mf.parplot1d_all(levels=[0.68268949213708585])
                pl.savefig(
                    fpath("dendrotem/diagnostics/1dplot{0}_{1}.png".format(
                        suffix, ii)))
                pl.draw()
                pl.show()
            except Exception as ex:
                print(ex)
        else:
            pb.update(ii + 1)

        if last_index is not None and ii >= last_index:
            break

    if last_index is not None:
        catalog = catalog[:last_index + 1]

    for k in columns:
        if k not in catalog.keys():
            catalog.add_column(table.Column(name=k, data=columns[k]))

    for mid, lo, hi, letter in (('expected_temperature', 'tmin1sig_chi2',
                                 'tmax1sig_chi2', 't'),
                                ('expected_density', 'dmin1sig_chi2',
                                 'dmax1sig_chi2', 'd'),
                                ('expected_column', 'cmin1sig_chi2',
                                 'cmax1sig_chi2', 'c')):
        catalog.add_column(
            table.Column(name='elo_' + letter,
                         data=catalog[mid] - catalog[lo]))
        catalog.add_column(
            table.Column(name='ehi_' + letter,
                         data=catalog[hi] - catalog[mid]))

    if write:
        catalog.write(tpath('PPV_H2CO_Temperature{0}.ipac'.format(suffix)),
                      format='ascii.ipac')

    # Note that there are overlaps in the catalog, which means that ORDER MATTERS
    # in the above loop.  I haven't yet checked whether large scale overwrites
    # small or vice-versa; it may be that both views of the data are interesting.
    tcube = SpectralCube(
        data=tcubedata,
        wcs=cube303.wcs,
        mask=cube303.mask,
        meta={'unit': 'K'},
        header=cube303.header,
    )
    tcubeleaf = SpectralCube(
        data=tcubeleafdata,
        wcs=cube303.wcs,
        mask=cube303.mask,
        meta={'unit': 'K'},
        header=cube303.header,
    )

    if write:
        log.info("Writing TemperatureCube")
        outpath = 'TemperatureCube_DendrogramObjects{0}.fits'
        tcube.write(hpath(outpath.format(suffix)), overwrite=True)

        outpath_leaf = 'TemperatureCube_DendrogramObjects{0}_leaves.fits'
        tcubeleaf.write(hpath(outpath_leaf.format(suffix)), overwrite=True)

    return catalog, tcube
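
# Usage sketch (not from the source): the keyword defaults bind the
# module-level cubes (cube303, cube321, ...), so only the dendrogram
# and bookkeeping arguments are passed; 'dend' is assumed to be an
# astrodendro Dendrogram computed elsewhere.
catalog, tcube = measure_dendrogram_properties(dend=dend,
                                               suffix='_sm',
                                               line='303',
                                               write=True)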
Exemple #22
0
# (opening of this example truncated in the source)
np.save(
    hi_stackpath("radial_stacking_pixelsinbin_north_{0}_noCO.npy").format(
        wstring), num_pixels_n)
np.save(
    hi_stackpath("radial_stacking_pixelsinbin_south_{0}_noCO.npy").format(
        wstring), num_pixels_s)

spec_shape = hi_cube_peakvel.shape[0]

peakvel_stack = SpectralCube(data=total_spectrum_hi_radial_peakvel.T.reshape(
    (spec_shape, bin_centers.size, 1)),
                             wcs=hi_cube_peakvel.wcs)
peakvel_stack = peakvel_stack.with_mask(
    np.ones_like(peakvel_stack, dtype='bool'))
peakvel_stack.write(hi_stackpath(
    "peakvel_stacked_radial_{0}_noCO.fits".format(wstring)),
                    overwrite=True)

peakvel_stack_n = SpectralCube(
    data=total_spectrum_hi_radial_peakvel_n.T.reshape(
        (spec_shape, bin_centers.size, 1)),
    wcs=hi_cube_peakvel.wcs)
peakvel_stack_n = peakvel_stack_n.with_mask(
    np.ones_like(peakvel_stack_n, dtype='bool'))
peakvel_stack_n.write(hi_stackpath(
    "peakvel_stacked_radial_north_{0}_noCO.fits".format(wstring)),
                      overwrite=True)
peakvel_stack_s = SpectralCube(
    data=total_spectrum_hi_radial_peakvel_s.T.reshape(
        (spec_shape, bin_centers.size, 1)),
    wcs=hi_cube_peakvel.wcs)
# (the closing write was truncated in the source; completed here to
#  match the northern stack above)
peakvel_stack_s = peakvel_stack_s.with_mask(
    np.ones_like(peakvel_stack_s, dtype='bool'))
peakvel_stack_s.write(hi_stackpath(
    "peakvel_stacked_radial_south_{0}_noCO.fits".format(wstring)),
                      overwrite=True)
Exemple #23
0
def rebaseline(filename,
               blorder=3,
               baselineRegion=[slice(0, 800, 1),
                               slice(-800, 0, 1)],
               windowFunction=None,
               blankBaseline=False,
               flagSpike=True,
               v0=None,
               VlsrByCoord=None,
               verbose=False,
               **kwargs):
    """
    Rebaseline a data cube using robust regression of Legendre polynomials.

    Parameters
    ----------
    filename : string
        FITS filename of the data cube
    blorder : int
        Order of the polynomial to fit to the data
    baselineRegion : list
        List of slices defining the default region of the spectrum, in
        channels, to be used for the baseline fitting.
    windowFunction : function
        Name of function to be used that will accept spectrum data, and
        velocity axis and will return a binary mask of the channels to be used
        in the  baseline fitting.  Extra **kwargs are passed to windowFunction
        to do with as it must.
    blankBaseline : boolean
        Blank the baseline region on a per-spectrum basis
    VlsrByCoord : function
        Function returning a systemic velocity given an RA, Dec, and
        region name
    verbose : bool
        If True, show a progress bar

    Returns
    -------
    Nothing.  A new FITS file is written out with the suffix 'rebaseN' where N
    is the baseline order

    """
    cube = SpectralCube.read(filename)
    originalUnit = cube.spectral_axis.unit
    cube = cube.with_spectral_unit(u.km / u.s, velocity_convention='radio')
    spaxis = cube.spectral_axis.to(u.km / u.s).value

    goodposition = np.isfinite(cube.apply_numpy_function(np.max, axis=0))
    y, x = np.where(goodposition)
    outcube = np.zeros(cube.shape) * np.nan
    RegionName = (filename.split('/'))[-1]
    RegionName = (RegionName.split('_'))[0]
    nuindex = np.arange(cube.shape[0])
    runmin = nuindex[-1]
    runmax = nuindex[0]
    if verbose:
        pb = console.ProgressBar(len(y))
    for thisy, thisx in zip(y, x):
        spectrum = cube[:, thisy, thisx].value

        if v0 is not None:
            baselineIndex = windowFunction(spectrum, spaxis, v0=v0, **kwargs)
        elif hasattr(windowFunction, '__call__') and \
                hasattr(VlsrByCoord, '__call__'):
            _, Dec, RA = cube.world[0, thisy, thisx]
            # This determines a v0 appropriate for the region
            v0 = VlsrByCoord(RA.value, Dec.value, RegionName, **kwargs)
            baselineIndex = windowFunction(spectrum, spaxis, v0=v0, **kwargs)
        else:
            baselineIndex = np.zeros_like(spectrum, dtype=bool)
            for ss in baselineRegion:
                baselineIndex[ss] = True

        runmin = np.min([nuindex[baselineIndex].min(), runmin])
        runmax = np.max([nuindex[baselineIndex].max(), runmax])

        # Use channel-to-channel difference as the noise value.
        if flagSpike:
            jumps = (spectrum - np.roll(spectrum, -1))
            noise = mad1d(jumps) * 2**(-0.5)
            baselineIndex *= (np.abs(jumps) < 5 * noise)
            noise = mad1d(
                (spectrum - np.roll(spectrum, -2))[baselineIndex]) * 2**(-0.5)
        else:
            noise = mad1d(
                (spectrum - np.roll(spectrum, -2))[baselineIndex]) * 2**(-0.5)

        if blankBaseline:
            spectrum = robustBaseline(spectrum,
                                      baselineIndex,
                                      blorder=blorder,
                                      noiserms=noise)
            spectrum[baselineIndex] = np.nan
            outcube[:, thisy, thisx] = spectrum
        else:
            outcube[:, thisy, thisx] = robustBaseline(spectrum,
                                                      baselineIndex,
                                                      blorder=blorder,
                                                      noiserms=noise)
        if verbose:
            pb.update()
    outsc = SpectralCube(outcube,
                         cube.wcs,
                         header=cube.header,
                         meta={'BUNIT': cube.header['BUNIT']})
    outsc = outsc[runmin:runmax, :, :]  # cut beyond baseline edges

    # Return to original spectral unit
    outsc = outsc.with_spectral_unit(originalUnit)
    outsc.write(filename.replace('.fits', '_rebase{0}.fits'.format(blorder)),
                overwrite=True)
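
# Usage sketch (not from the source): this variant can window the
# baseline around a per-position VLSR. Both callables below are
# illustrative stand-ins for the pipeline's own window and
# velocity-lookup functions.
def window_sketch(spectrum, spaxis, v0=0.0, width=5.0, **kwargs):
    # hypothetical: use channels more than `width` km/s away from v0
    return np.abs(spaxis - v0) > width

def vlsr_sketch(ra, dec, region, **kwargs):
    return 4.0  # hypothetical constant VLSR in km/s

rebaseline('L1688_NH3_11.fits', blorder=3,
           windowFunction=window_sketch, VlsrByCoord=vlsr_sketch,
           verbose=True)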
Exemple #24
0
def MakeRoundBeam(incube, outfile=None, overwrite=True):
    '''
    This takes a FITS file or a SpectralCube and convolves it to a
    single round beam, the largest major axis among the planes.

    Parameters
    ----------
    incube : `str` or `SpectralCube`
       Input spectral cube (filename or object)
    outfile : `str`
       Optional output filename; if given, the cube is written out and
       nothing is returned
    overwrite : `bool`
       Overwrite an existing output file

    Returns
    -------
    cube : `SpectralCube`

    '''
    if isinstance(incube, str):
        cube = SpectralCube.read(incube)
    else:
        cube = incube

    if not isinstance(cube, VaryingResolutionSpectralCube):
        warnings.warn("No information about multiple beams")
        return (None)

    beams = cube.beams
    major_axes = np.array([bm.major.to(u.deg).value for bm in beams])
    target_beamsize = np.array(major_axes.max())
    target_beam = Beam(major=target_beamsize * u.deg,
                       minor=target_beamsize * u.deg,
                       pa=0.0 * u.deg)
    print("Target beam is : {}".format(target_beam))

    # Let's assume square pixels
    pixsize = cube.wcs.pixel_scale_matrix[1, 1]
    fwhm2sigma = np.sqrt(8 * np.log(2))

    output = np.zeros(cube.shape)

    with console.ProgressBar(cube.shape[0]) as bar:

        for ii, plane in enumerate(cube.filled_data[:]):
            this_beam = beams[ii]
            conv_beam = target_beam - this_beam

            majpix = conv_beam.major.value / pixsize / fwhm2sigma
            minpix = conv_beam.minor.value / pixsize / fwhm2sigma

            output[ii, :, :] = ftconvolve(plane,
                                          major=majpix,
                                          minor=minpix,
                                          angle=conv_beam.pa.value)

            bar.update()

    hdr = copy.copy(cube.header)
    hdr['CASAMBM'] = False
    hdr['BMAJ'] = float(target_beam.major.value)
    hdr['BMIN'] = float(target_beam.major.value)
    hdr['BPA'] = 0.0
    outcube = SpectralCube(output, cube.wcs, header=hdr)
    if outfile:
        outcube.write(outfile, overwrite=overwrite)
        return None
    return (outcube)
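
# Usage sketch (not from the source): convolve a cube with per-plane
# beams to the largest round beam; the filename is an illustrative
# assumption.
MakeRoundBeam('vla_hi_cube.fits', outfile='vla_hi_cube_round.fits')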
Exemple #25
0
def residual_cube(cubename, fitfile=None,
                  expand=20, writemodel=False, fileprefix=None,
                  filesuffix=None,
                  writeresidual=False, writechisq=True):
    """This function generates products for evaluating the goodness of
    fit for a cold_ammonia model.  Either a parameter cube name must
    be passed or the prefix/suffixes of the files.

    Parameters
    ----------

    cubename : str
        Name of the original data file
    
    Keywords
    --------
    fitfile : str
        Name of the parameter file produced by the cube fitter
    fileprefix : str
        Prefix of file name before the parameter name (e.g., for
        L1688_N_NH3_DR1_rebase3_flag.fits, this would be 'L1688_').
    filesuffix : str
        Suffix of the file name after the parameter name (e.g., for
        L1688_N_NH3_DR1_rebase3_flag.fits, this would be
        '_DR1_rebase3_flag').
    expand : int
        Expands the region where the residual is evaluated by this
        many channels in the spectral dimension
    writemodel : bool
        Setting to True writes out a model cube of the ammonia fit
    writeresidual : bool
        Setting to True writes out a residual cube
    writechisq : bool
        Setting to True writes out a map of the reduced chi-squared
        goodness of fit.

    Returns
    -------
        None
    """
    try:
        if fitfile is None:
            tkin = fits.getdata(fileprefix + 'Tkin' + filesuffix + '.fits')
            tex = fits.getdata(fileprefix + 'Tex' + filesuffix + '.fits')
            sigma = fits.getdata(fileprefix + 'Sigma' + filesuffix + '.fits')
            column = fits.getdata(fileprefix + 'N_NH3' + filesuffix + '.fits')
            v0 = fits.getdata(fileprefix + 'Vlsr' + filesuffix + '.fits')
            fortho = np.zeros_like(v0)
        else:
            hdu = fits.open(fitfile)
            fitparams = hdu[0].data
            tkin = fitparams[0, :, :]
            tex = fitparams[1, :, :]
            column = fitparams[2, :, :]
            sigma = fitparams[3, :, :]
            v0 = fitparams[4, :, :]
            fortho = fitparams[5, :,:]
    except (NameError, TypeError):
        warnings.warn("Either fitfile or fileprefix/filesuffix" +
                      " must be specified")
        raise

    cube = SpectralCube.read(cubename)
    cube  = cube.with_spectral_unit(u.km/u.s,velocity_convention='radio')
    model = np.zeros(cube.shape)

    yy, xx = np.where(column>0)
    cube = cube.with_spectral_unit(u.Hz)
    spaxis = pyspeckit.spectrum.units.SpectroscopicAxis(cube.spectral_axis)
    for y, x in console.ProgressBar(list(zip(yy, xx))):
        fit = ammonia.cold_ammonia(spaxis, tkin[y, x], tex=tex[y, x], 
                                   ntot=column[y, x], width=sigma[y, x],
                                   xoff_v=v0[y, x], fortho=fortho[y, x])
        model[:, y, x] = fit

    mask = model > 0
    residual = cube.filled_data[:].value-model

    # This calculates chisq over the region where the fit is non-zero
    # plus a buffer of size set by the expand keyword.

    selem = np.ones(expand, dtype=bool)
    selem.shape += (1, 1,)
    mask = nd.binary_dilation(mask, selem)
    mask = mask.astype(float)
    chisq = np.sum((residual * mask)**2, axis=0) / np.sum(mask, axis=0)

    # This produces a robust estimate of the RMS along every line of sight:
    diff = residual - np.roll(residual, 2, axis=0)
    rms = 1.4826 * np.nanmedian(np.abs(diff), axis=0) / 2**0.5

    chisq /= rms**2

    root = (cubename.split('.'))[0]
    if writechisq:
        hdu = fits.PrimaryHDU(chisq, cube.wcs.celestial.to_header())
        hdu.writeto(root + '_chisq.fits', overwrite=True)
    if writeresidual:
        newcube = SpectralCube(residual,cube.wcs,header=cube.header)
        newcube.write(root+'_residual.fits', overwrite=True)
    if writemodel:
        model = SpectralCube(model, cube.wcs, header=cube.header)
        model.write(root + '_model.fits', overwrite=True)
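
# Usage sketch (not from the source): evaluate a cold_ammonia fit by
# writing a reduced chi-squared map and a model cube; the filenames
# are illustrative assumptions.
residual_cube('L1688_NH3_11.fits',
              fitfile='L1688_parameter_cube.fits',
              expand=20,
              writemodel=True,
              writechisq=True)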