Example #1
def line_flux2(catalog, line_name='13co10',
               asgn=datadir + 'COHRS_all_asgn.fits',
               cubefile=datadir + 'GRS_13CO_all.fits'):

    flux = Column(np.zeros(len(catalog)),name=line_name)

    asgn = SpectralCube.read(asgn)
    linefile = SpectralCube.read(cubefile)

    previous_file=''
    fill_data=None
    previous_cube_file=''
    for idx, obj in enumerate(catalog):
        if obj['orig_file'] != previous_cube_file:
            print "Pulling line subcube for {0}".format(obj['orig_file'])
            subx1 = obj['orig_file'].split('_')[2]
            subx2 = obj['orig_file'].split('_')[3]
            subcube = linefile[:, :, int(subx1):int(subx2)]
            fill_cube_data = (subcube.filled_data[:].value)
            previous_cube_file = obj['orig_file']
        
        outtuple = sparse_mask(obj, asgn,
                               previous_file=previous_file,
                               fill_data=fill_data)
        previous_file, fill_data, zcld, ycld, xcld = outtuple
        if len(xcld)>0:
            flux[idx] = np.nansum(fill_cube_data[zcld, ycld, xcld])
    catalog.add_column(flux)
    return catalog
Example #2
    def __init__(self, highres, lowres):
        super(MultiResObs, self).__init__()
        self.highres = SpectralCube.read(highres)
        self.lowres = SpectralCube.read(lowres)

        self.highres_convolved = None
        self.lowres_convolved = None

        self.lowbeam = self.lowres.beam
        self.highbeam = self.highres.beam

        self.combined_beam = self.lowbeam.convolve(self.highbeam)
Example #3
def cubegen(ymin,ymax,xmin,xmax, deltaX=30):
	"""Generates a subcube of the specified dimensions from the .fits files,
	   for 12CO and 13CO. Returns the subcubes for 12CO and 13CO, respectively.

	   Argument format: "(ymin,ymax, xmin,xmax.)"
	   ^ These are the parameters of the desired subcubes."""

	cube12 = SpectralCube.read("paws-30m-12co10-23as-cube.fits")
	cube13 = SpectralCube.read("paws-30m-13co10-23as-cube.fits")
	subcube12 = cube12[:,ymin:ymax,xmin:xmax]
	subcube13 = cube13[:,ymin:ymax,xmin:xmax]

	return subcube12,subcube13
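
A minimal usage sketch of this helper, assuming the two PAWS cubes named above sit in the working directory (the pixel ranges here are arbitrary):

sub12, sub13 = cubegen(100, 200, 300, 400)
print(sub12.shape, sub13.shape)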
Example #4
def test_qglue():
    from spectral_cube import SpectralCube
    cube = SpectralCube.read(os.path.join(DATA, 'cube_3d.fits'))
    data = parse_data(cube, 'x')[0]
    assert data.label == 'x'
    data['STOKES I']
    assert data.shape == (2, 3, 4)
Example #5
def cube_w11(region='IC348'):
    if region == 'IC348':
        OneOneFile = 'IC348mm/IC348mm-11_cvel_clean_rob05.fits'
        TwoTwoFile = 'IC348mm/IC348mm-11_cvel_clean_rob05.fits'
        vmin=7.4
        vmax=10.0
    elif region == 'IRAS03282':
        OneOneFile = 'IRAS03282/IRAS03282-11_cvel_clean_rob05.fits'
        TwoTwoFile = 'IRAS03282/IRAS03282-11_cvel_clean_rob05.fits'
        vmin=6.0
        vmax=8.5
    elif region == 'L1451mm':
        OneOneFile = 'L1451mm/L1451MM-11_cvel_clean_rob05.fits'
        TwoTwoFile = 'L1451mm/L1451MM-11_cvel_clean_rob05.fits'
        vmin=3.2
        vmax=4.9
    cube = SpectralCube.read(OneOneFile)
    vcube = cube.with_spectral_unit(u.km/u.s, rest_value=freq11, velocity_convention='radio')
    slab = vcube.spectral_slab( vmax*u.km/u.s, vmin*u.km/u.s)
    w11=slab.moment( order=0, axis=0)
    #beam = Beam.from_fits_header(fits.getheader(OneOneFile))
    # Next line is to solve bug in spectralcube:
    # it should be something like this in line 2234 of spectral_cube.py:
    # ```
    # if axis == 0 and self._meta['beam'] is not None:
    #     meta = { blabla, 'beam':self._meta['beam']}
    # else:
    #     meta = { blabla}
    # ```
    w11._meta['beam'] = slab.beam
    w11.write(OneOneFile.replace('.fits','_w11.fits'), overwrite=True)
Example #6
def writeplanes(save_name='/mnt/work/erosolow/GRS_13CO_all.fits'):
    spatial_template = fits.open('INTEG/COHRS_RELEASE1_FULL_INTEG.fit')
    spectral_template = SpectralCube.read('reprojected.fits')

    # Smoosh astrometry components together
    spatial_header = spatial_template[0].header
    spectral_header = spectral_template.header

    new_header = spatial_header.copy()
    new_header["NAXIS"] = 3
    for keyword in ['NAXIS3', 'CRVAL3', 'CDELT3','CRPIX3','CUNIT3']:
        new_header[keyword] = spectral_header[keyword]
    new_header['BMAJ'] = 14./3600
    new_header['BMIN'] = 14./3600
    new_header['BPA'] = 0.00
    
    if os.path.exists(save_name):
        raise Exception("The file name {} already "
                        "exists".format(save_name))

    # Open a file and start filling this with planes.
    output_fits = fits.StreamingHDU(save_name, new_header)
    # Again, set up a common vel axis and spin out
    vel = np.linspace(-30, 160, 191)
    for v in vel:
        output_fits.write(fits.getdata(planesdir +
                                       'GRSPLANE_{0}'.format(v) +
                          '.fits'))

    output_fits.close()
Example #7
def FirstLook_Cepheus():
    print("Now NH3(1,1)")
    a_rms = [  0, 135, 290, 405, 505, 665]
    b_rms = [ 70, 245, 350, 455, 625, 740]
    index_rms=first_look.create_index( a_rms, b_rms)
    index_peak=np.arange(350,410)
    file_in='Cepheus/Cepheus_NH3_11.fits'
    # 1st order polynomial
    file_out=file_in.replace('.fits','_base1.fits')
    file_new=first_look.baseline( file_in, file_out, index_clean=index_rms, polyorder=1)
    first_look.peak_rms( file_new, index_rms=index_rms, index_peak=index_peak)
    print("Now NH3(2,2)")
    linelist = ['NH3_22','NH3_33','C2S','HC5N','HC7N_21_20','HC7N_22_21']
    vsys = -3.8*u.km/u.s
    throw = 2.0*u.km/u.s
    for line in linelist:
        file_in = 'Cepheus/Cepheus_{0}.fits'.format(line)
        s = SpectralCube.read(file_in)
        s = s.with_spectral_unit(u.km/u.s,velocity_convention='radio')
        a_rms = [s.closest_spectral_channel(vsys+3*throw),
                 s.closest_spectral_channel(vsys-throw)]
        b_rms = [s.closest_spectral_channel(vsys+throw),
                 s.closest_spectral_channel(vsys-3*throw)]
        index_peak = np.arange(s.closest_spectral_channel(vsys+3*u.km/u.s),
                              s.closest_spectral_channel(vsys-3*u.km/u.s))
        index_rms=first_look.create_index( a_rms, b_rms)

        file_out=file_in.replace('.fits','_base1.fits')
        file_new=first_look.baseline( file_in, file_out, 
                                      index_clean=index_rms, polyorder=1)
        first_look.peak_rms( file_new, index_rms=index_rms, 
                             index_peak=index_peak)
Example #8
def moments(cube_fits, line_values, line_names, moment, save_file=False):
    """
    cube: str
        The datacube in fits format to open

    line_values: list of floats
        The wavelengths of the lines. Note: in general, if moment=0 the required wavelength should be in air; if moment=1 it should be in vacuum.

    line_names: list of str
        The identifier of the lines

    moment: 0 or 1

    save_file: bool, optional
        Set to True if the result is to be saved as a fits file. Default is False.

    example:
        moment = moments('cube.fits', [4861.33, 6562.8], ['Hb', 'Ha'], moment=0)
    """


    print(line_values, line_names)
    cube=SpectralCube.read(cube_fits)

    for line,stri in zip(line_values,line_names):
        if moment==0:
            mom = cube.spectral_slab((line-3)*u.AA, (line+3)*u.AA).sum(axis=0)
            if save_file==True:
                mom.hdu.writeto(str(stri)+'_moment0.fits',overwrite=True)
        if moment==1:    
            mom = cube.with_spectral_unit(u.km/u.s, rest_value=line*u.AA,velocity_convention='optical').spectral_slab(-300*u.km/u.s,300*u.km/u.s).moment1()
            if save_file==True:
                mom.hdu.writeto(str(stri)+'_moment1.fits',overwrite=True)

    return mom
Example #9
def write_skycoord_table(data, cube_ref, **kwargs):
    """
    Writes out a text file with flattened coordinates of the cube
    stacked with input array data. Additional arguments are passed
    to astropy's text writing function.

    TODO: add a useful `names` keyword?

    See astropy.io.ascii.write docstring for more info.

    Parameters
    ----------
    data : array-like structure of the same xy-grid as cube_ref.

    cube_ref : a cube file to get the coordinate grid from.

    """
    from astropy.table import Table
    from astropy.io import ascii
    from spectral_cube import SpectralCube

    cube = SpectralCube.read(cube_ref)

    flat_coords = [cube.spatial_coordinate_map[i].flatten() for i in [1,0]]
    # TODO: finish this up for multiple components
    #n_repeat = np.prod(np.array(data).shape)%np.prod(cube.shape[1:])+1

    table = Table(np.vstack(flat_coords +
        [np.array(xy_slice).flatten() for xy_slice in data]).T)

    ascii.write(table, **kwargs)
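
A hedged usage sketch: write out a moment-0 map next to its sky coordinates (the file names are hypothetical; extra keywords such as output and overwrite pass straight through to astropy.io.ascii.write):

from spectral_cube import SpectralCube
mom0 = SpectralCube.read('cube.fits').moment(order=0).value
write_skycoord_table([mom0], 'cube.fits', output='mom0_coords.txt', overwrite=True)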
Example #10
def S2_drawM33(vmin=40,vmax=80, deltaX=40, deltaV=6, deltadeltaX=10, deltadeltaV=1):
	"""Activates S2_draw with each of the .py file's subcube selections,
	   with the same args as S2_arrayM33.

	   Argument format: "(vmin=40,vmax=80, deltaX=40, deltaV=6, deltadeltaX=10,
	   deltadeltaV=1).

	   These MUST match the args/kwargs used in S2_arrayM33!"""

	galaxyname = 'M33'
	filename = 'm33.co21_iram_CLEANED'

	cube = SpectralCube.read(filename+".fits")

	pixelwidthDEG = cube.header['CDELT2']			# The width of each pixel, in degrees.
	distancePC = 840000.0			# The distance to M33 (the galaxy this .fits file covers), in parsecs.  (???) Is this number accurate, though?
	pixelwidthPC = pixelwidthDEG*np.pi/180.0*distancePC	# The width of each pixel, in pc.


	ymin = np.array([350,600,650,525,300,250])	# These are the minimum "y" values of the regions that we're dealing with.
	ymax = np.array([550,800,850,725,500,450])	# These are the corresponding maximum "y" values of these regions.
	xmin = np.array([500,100,400,288,200,550])	# These are the corresponding minimum "x" values of these regions.
	xmax = np.array([700,300,600,488,400,750])	# These are the corresponding maximum "x" values of these regions. (Example: The first region has ymin=350, ymax=550, xmin=500, xmax=700.)
	sets = np.ravel(ymin.shape)[0]		# This is the number of regions that we're dealing with.

	for i in range(0,sets):
		S2_draw(vmin,vmax,ymin[i],ymax[i],xmin[i],xmax[i],deltaX,deltaV,deltadeltaX,deltadeltaV,filename,galaxyname)
Example #11
def select_cloud(idxarray, cloudcat):
    for idx in idxarray:
        entry = cloudcat[idx]
        asgn = SpectralCube.read(cohrsdir+'FINALASGNS/'+
                                 entry['orig_file']+
                                 '_fasgn.fits')

        data = SpectralCube.read(cohrsdir+'DATA/'+
                                 entry['orig_file']+
                                 '.fits')
        mask = (asgn == entry['_idx'] *
                u.dimensionless_unscaled)
        cube = data.with_mask(mask)
        cube = cube.minimal_subcube()
        cube.write('cohrscld_{0}'.format(entry['_idx'])+'.fits',
                   overwrite=True)
Example #12
def subcubes_from_ds9(cube, region_file='../nro_maps/SouthShells.reg', pad_factor=1., shape='exact'):
    """
    Extracts subcubes using the ds9 region file.
    
    Parameters
    ----------
    cube : SpectralCube, str
        The cube to be chopped. Must be type spectral_cube.SpectralCube or str filename.
    region_file : str
        Path to a ds9 region file.
    pad_factor : float, optional
        Expand the subcube around the region by this factor.
    shape : {'square', 'exact'}
        The shape of the subcube returned. 'square' returns the
        smallest square subcube that contains the region.
        'exact' returns only the pixels contained within the region.
    
    Returns
    -------
    subcubes : list of SpectralCube, or a single SpectralCube if only one region is found
    """
    from spectral_cube import SpectralCube
    import pyregion

    try:
        #If cube is a str filename, read a SpectralCube.
        cube = SpectralCube.read(cube)
    except ValueError:
        pass

    if shape == 'square':
        import astropy.units as u
        subcube_list = []
        region_list = pyregion.open(region_file)
        for region in region_list:
            half_width = region.coord_list[2] * pad_factor * u.deg
            ra_center = region.coord_list[0] * u.deg
            dec_center = region.coord_list[1] * u.deg
            ra_range = [ra_center - half_width, ra_center + half_width]
            dec_range = [dec_center - half_width, dec_center + half_width]
            #print(ra_range, dec_range)
            subcube_list.append(cube.subcube(ra_range[1], ra_range[0], dec_range[0], dec_range[1]))
    if shape == 'exact':
        region_list = pyregion.open(region_file)
        subcube_list = []
        for region in region_list:
            
            if pad_factor != 1.:
                new_string = '{};{}({},{},{}")'.format(region.coord_format, region.name,
                                region.coord_list[0], region.coord_list[1],
                                region.coord_list[2]*3600.*pad_factor)
                region = pyregion.parse(new_string)[0]
                
            subcube_list.append(cube.subcube_from_ds9region(pyregion.ShapeList([region])))
    if len(subcube_list) == 1:
        return subcube_list[0]
    else:
        return subcube_list
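
A usage sketch under the docstring's assumptions (the cube and region file names are placeholders):

shells = subcubes_from_ds9('12co_cube.fits', region_file='shells.reg',
                           pad_factor=1.2, shape='exact')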
Example #13
def cubegen(vmin,vmax,ymin,ymax,xmin,xmax, filename = "paws_norot", drawmap = False, mapname="3Dcube"):
	"""
	Returns a subcube of the specified dimensions from the .fits file.
	Also displays the subcube as it appears on the galaxy map if drawmap=True.


	Parameters:
	-----------
	vmin,...,xmax : int
		Parameters used in relevant xi map.
		WARNING: Selecting too large of a vmax-vmin will hugely increase
		processing time in later calculations.
	filename : str
		Name of the .paws data file.
		"paws_norot" for M51, "m33.co21_iram_CLEANED" for M33.
	drawmap : bool
		Enables or disables drawing the subcube Tmax map.
	galaxyname : str
		Name of the galaxy.
		'M51' for M51, 'M33' for M33.
	mapname : str
		Name of the saved image of the subcube's Tmax map, if
		drawmap==True.


	Returns:
	-----------
	subcube : spectral cube (?)
		The data inside the selected subcube.
"""

	cube = SpectralCube.read(filename+".fits")
	data = cube.filled_data[:]   # Pulls "cube"'s information (position, spectral info (?)) into a 3D Numpy array.
	yshape = data.shape[1]/2.0
	xshape = data.shape[2]/2.0

	pixelwidthDEG = cube.header['CDELT2']			# The width of each pixel, in degrees.
	if (filename =='m33.co21_iram_CLEANED') or (filename =='m33.co21_iram_CLEANED_smooth') or (filename =='m33.co21_iram_CLEANED_blank'):	# Checks if the galaxy's Header file contains its distance.
		distancePC = 840000.0				# The distance to the galaxy that M33's .fits file deals with, in parsecs. ONLY works on the CLEANED file!
	else:
		distancePC = cube.header['DIST']		# The distance to the galaxy that M51's .fits file deals with, in parsecs.  (???) Is this number accurate, though?
	pixelwidthPC = pixelwidthDEG*np.pi/180.0*distancePC	# The width of each pixel, in pc.

	subcube = cube[vmin:vmax,ymin:ymax,xmin:xmax]
	if drawmap == True:
		plt.figure(1)
		plt.imshow(np.nanmax(data[vmin:vmax,ymin:ymax,xmin:xmax].value,axis=0), extent=[(xmin-xshape)*pixelwidthPC,(xmax-xshape)*pixelwidthPC, \
			   (ymin-yshape)*pixelwidthPC,(ymax-yshape)*pixelwidthPC], origin='lower')
		fig = matplotlib.pyplot.gcf()
		#fig.set_size_inches(5, 5)	# Enlarges the image so as to prevent squishing.
		plt.xlabel('Distance from Centre in x-direction (pc)')
		plt.ylabel('Distance from Centre in y-direction (pc)')

		plt.savefig('galaxy_'+mapname+'.png')
		plt.clf()			# Clears the image after saving.

	return subcube
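
A call sketch, assuming the default 'paws_norot.fits' cube is available and the same module-level imports (numpy, matplotlib) that the function itself relies on; the channel and pixel ranges are arbitrary:

subcube = cubegen(40, 80, 400, 500, 400, 600, filename='paws_norot',
                  drawmap=True, mapname='test_region')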
Example #14
def summary_plot(filelist):
    for thisfile in filelist:
        s = SpectralCube.read(thisfile)
        outfile = thisfile.replace('.fits','_summary.png')
        mom0 = s.moment0()
        f = aplpy.FITSFigure(mom0.hdu)
        f.show_colorscale()
        f.show_colorbar()
        f.save(outfile)
Example #15
def FirstLook_NGC1333():
    print("Now NH3(1,1)")
    a_rms = [  0, 158, 315, 428, 530, 693, 751]
    b_rms = [ 60, 230, 327, 438, 604, 735, 760]
    index_rms=first_look.create_index( a_rms, b_rms)
    index_peak=np.arange(326,430)
    file_in='NGC1333/NGC1333_NH3_11.fits'
    # 1st order polynomial
    file_out=file_in.replace('.fits','_base1.fits')
    file_new=first_look.baseline( file_in, file_out, index_clean=index_rms, polyorder=1)
    first_look.peak_rms( file_new, index_rms=index_rms, index_peak=index_peak)
    print("Now NH3(2,2)")
    a_rms = [  0, 190, 360, 600]
    b_rms = [70, 300, 470, 640]
    index_rms=first_look.create_index( a_rms, b_rms)
    index_peak=np.arange(380,520)
    # file_in='NGC1333/NGC1333_NH3_22.fits'
    # # 1st order polynomial
    # file_out=file_in.replace('.fits','_base1.fits')
    # file_new=first_look.baseline( file_in, file_out, index_clean=index_rms, polyorder=1)
    # first_look.peak_rms( file_new, index_rms=index_rms, index_peak=index_peak)
    ## 2nd order polynomial
    #file_out=file_in.replace('.fits','_base2.fits')
    #file_new=first_look.baseline( file_in, file_out, index_clean=index_rms, polyorder=2)
    #first_look.peak_rms( file_new, index_rms=index_rms, index_peak=index_peak)
    #
    # print("Now NH3(3,3)")
    # a_rms = [ 10, 190, 420]
    # b_rms = [70, 360, 500]
    # index_rms=first_look.create_index( a_rms, b_rms)
    # index_peak=np.arange(410,540)
    # file_in='NGC1333/NGC1333_NH3_33.fits'
    # 1st order polynomial
    # file_out=file_in.replace('.fits','_base1.fits')
    # file_new=first_look.baseline( file_in, file_out, index_clean=index_rms, polyorder=1)
    # first_look.peak_rms( file_new, index_rms=index_rms, index_peak=index_peak)
    linelist = ['NH3_22','NH3_33','C2S','HC5N','HC7N_21_20','HC7N_22_21']
    vsys = 8.5*u.km/u.s
    throw = 8*u.km/u.s
    for line in linelist:
        file_in = 'NGC1333/NGC1333_{0}.fits'.format(line)
        s = SpectralCube.read(file_in)
        s = s.with_spectral_unit(u.km/u.s,velocity_convention='radio')
        a_rms = [s.closest_spectral_channel(vsys+2*throw),
                 s.closest_spectral_channel(vsys-throw)]
        b_rms = [s.closest_spectral_channel(vsys+throw),
                 s.closest_spectral_channel(vsys-2*throw)]
        index_peak = np.arange(s.closest_spectral_channel(vsys+3*u.km/u.s),
                              s.closest_spectral_channel(vsys-3*u.km/u.s))
        index_rms=first_look.create_index( a_rms, b_rms)

        file_out=file_in.replace('.fits','_base1.fits')
        file_new=first_look.baseline( file_in, file_out, 
                                      index_clean=index_rms, polyorder=1)
        first_look.peak_rms( file_new, index_rms=index_rms, 
                             index_peak=index_peak)
Example #16
def dendropix(fileprefix='SgrB2_b3_12M.HC3N'):
    cube = SpectralCube.read(dpath('{0}.image.pbcor.contsub.fits'.format(fileprefix))).minimal_subcube()
    noise = cube.spectral_slab(-200*u.km/u.s, -100*u.km/u.s).std(axis=0)
    keep_mask = cube.max(axis=0) > noise

    tblfile = tpath('{0}.dendrotable.ecsv'.format(fileprefix))
    if os.path.exists(tblfile):
        table = Table.read(tblfile, format='ascii.ecsv')
    else:
        table = Table([Column(name='xpix'),
                       Column(name='ypix'),
                       Column(name='zpix'),
                       Column(name='lon'),
                       Column(name='lat'),
                       Column(name='velo'),
                       Column(name='peakval'),])

    xpyp_done = set(zip(table['xpix'], table['ypix']))
    all_keepers = list(zip(*np.where(keep_mask)))
    xpyp = [x for x in all_keepers if x not in xpyp_done]
    print(len(xpyp), len(all_keepers), len(xpyp_done))
    if len(xpyp_done) > 0:
        assert len(xpyp) < len(all_keepers)


    for ii,(ypix,xpix) in enumerate(ProgressBar(xpyp)):

        data = cube[:,ypix,xpix].value

        error = noise[ypix,xpix].value
        # alternative:
        #error = stats.sigma_clipped_stats(data)[2]

        D = astrodendro.Dendrogram.compute(data, min_value=0,
                                           min_delta=2*error, min_npix=7,
                                           is_independent=astrodendro.pruning.min_peak(5*error))
        if not D.leaves:
            table.add_row([xpix,ypix,]+[np.nan]*5)
            del D
            continue

        #peaks = [S.get_peak()[0][0] for S in D]
        #peak_vals = [S.get_peak()[1] for S in D]
        #peaks = [cube.spectral_axis[S.get_peak()[0][0]].to(u.km/u.s).value for S in D]

        for S in D:
            (peak_pix,),peak_val = S.get_peak()
            velo,lat,lon = cube.world[peak_pix, ypix, xpix]
            table.add_row([xpix,ypix,peak_pix,lon,lat,velo,peak_val])

        if ii % 100 == 0:
            table.write(tblfile, format='ascii.ecsv')

        del D
        del S
        del data
Example #17
def plot_overview(cube='../nro_maps/12CO_20161002_FOREST-BEARS_spheroidal_xyb_grid7.5_0.099kms.fits',
 region_file='../nro_maps/SouthShells.reg', mode='peak', plotname='12co_peak_shells.png',
 interactive=False, show_shells=False):
    """
    Show full image with all shells.
    
    Parameters
    ----------
    cube : str, optional
        Filename of the cube to display (an already-loaded SpectralCube also works).
    region_file : str, optional
        Path to the ds9 region file containing the shells.
    mode : str, optional
        'peak' displays the peak-intensity map (maximum along the spectral axis).
    plotname : str, optional
        Filename for the saved figure.
    interactive : bool, optional
        If True, show the figure window instead of saving it.
    show_shells : bool, optional
        If True, overlay the ds9 regions on the image.
    
    """
    try:
        cube = SpectralCube.read(cube)
    except ValueError:
        pass

    if mode == "peak":
        image = cube.max(axis=0)

    fig = plt.figure()
    wcs = WCS(image.header)
    ax = WCSAxes(fig, [0.1,0.1,0.8,0.8], wcs=wcs) 
    fig.add_axes(ax)      
    imgplot = plt.imshow(image.data, cmap=cm.gray, origin='lower', interpolation='none',
        vmin=0., vmax=100)
    cb = plt.colorbar()
    cb.set_label(r'K [T$_{MB}$]')
    plt.title(r"$^{12}$CO Peak")

    if show_shells:
        r = pyregion.open(region_file).as_imagecoord(image.header)
        patch_list, artist_list = r.get_mpl_patches_texts()

        for p in patch_list:
            ax.add_patch(p)
        for t in artist_list:
            ax.add_artist(t)

        pass

    if interactive:
        plt.show()
    else:
        plt.savefig(plotname)
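
A minimal call sketch relying on the function's own default paths (show_shells overlays the ds9 regions on the peak map):

plot_overview(mode='peak', plotname='12co_peak_shells.png',
              show_shells=True, interactive=False)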
Example #18
    def __init__(self, cube, scale=None, spatial_norm=None,
                 spectral_norm=None, beam=None, method="MAD"):
        """
        Construct a new Noise object.

        Parameters
        ----------

        method : {'MAD','STD'}
            Chooses method for estimating noise variance either 'MAD'
            for median absolute deviation and 'STD' for standard
            deviation.  Default: 'MAD'

        """
        if isinstance(cube,SpectralCube):
            self.cube = cube
        elif isinstance(cube, str):
            self.cube = SpectralCube.read(cube)
        else:
            warnings.warn("Noise currently requires a SpectralCube instance.")

        self.spatial_footprint = np.any(self.cube.get_mask_array(), axis=0)

        if beam is None:
            try:
                self.beam = cube.beam
            except AttributeError:
                warnings.warn("cube object has no associated beam. All beam "
                              "operations are disabled.")
                self.beam = None
            self.astropy_beam_flag = False
        else:
            if isinstance(beam, Beam):
                self.astropy_beam_flag = False
            elif isinstance(beam, Kernel2D):
                self.astropy_beam_flag = True
            else:
                warnings.warn("beam must be a radio_beam Beam object or an "
                              "astropy Kernel2D object. All beam operations "
                              "are disabled.")
            self.beam = beam

        # Default to a normal distribution
        self.distribution = ss.norm

        # SUGGESTION: calculate on initialization?

        # Fit the data
        if scale is None:
            self.calculate_scale(method=method)  # [1] is the std. of a Gaussian
            self.spatial_norm = np.ones((self.cube.shape[1], self.cube.shape[2]))
            self.spectral_norm = np.ones((self.cube.shape[0]))

        # Compute the scale_cube
        self.get_scale_cube()
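
A construction sketch, assuming this __init__ belongs to the Noise class used in Example #22 and that scipy.stats is imported as ss in the surrounding module (the cube name is hypothetical):

from spectral_cube import SpectralCube
cube = SpectralCube.read('cube.fits')
noise = Noise(cube, method='MAD')   # estimates the noise scale and builds the scale cube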
Example #19
def line_flux(catalog,
              asgn=datadir + 'COHRS_all_asgn.fits'):

    thco10 = Column(np.zeros(len(catalog)), name='13co10')
    thco32 = Column(np.zeros(len(catalog)), name='13co32')
    c18o32 = Column(np.zeros(len(catalog)), name='c18o32')
    twco32 = Column(np.zeros(len(catalog)), name='12co32')
    asgn = SpectralCube.read(asgn)

    previous_file = ''
    fill_data = None
    
    for idx, obj in enumerate(ProgressBar(catalog)):
        outtuple = sparse_mask(obj, asgn,
                               previous_file=previous_file,
                               fill_data=fill_data)
        
        if obj['orig_file'] != previous_file:
            print "Pulling img tiles for {0}".format(obj['orig_file'])
            subx1 = obj['orig_file'].split('_')[2]
            subx2 = obj['orig_file'].split('_')[3]
            co32cube = (SpectralCube.read(
            './COHRS_tiles/COHRS_{0}_{1}.fits'.format(
                subx1, subx2))).filled_data[:].value
            thco32cube = (SpectralCube.read(
            './CHIPS_13CO_tiles/CHIMPS_13CO_{0}_{1}.fits'.format(
                subx1, subx2))).filled_data[:].value
            c18o32cube = (SpectralCube.read(
            './CHIPS_C18O_tiles/CHIMPS_C18O_{0}_{1}.fits'.format(
                subx1, subx2))).filled_data[:].value
            grscube = (SpectralCube.read(
                './GRS_tiles/GRS_13CO_{0}_{1}.fits'.format(
                    subx1, subx2))).filled_data[:].value

        previous_file, fill_data, z, y, x = outtuple
        thco10[idx] = np.nansum(grscube[z, y, x])
        thco32[idx] = np.nansum(thco32cube[z, y, x])
        c18o32[idx] = np.nansum(c18o32cube[z, y, x])
        twco32[idx] = np.nansum(co32cube[z, y, x])
    catalog.add_columns([thco10, thco32, c18o32, twco32])
    return catalog
Example #20
def to_spectral_cube(data, header):
    '''
    Convert the output from input_data into a SpectralCube.
    '''

    if not HAS_SC:
        raise ValueError("spectral-cube needs to be installed.")

    hdu = fits.PrimaryHDU(data, header)

    return SpectralCube.read(hdu)
Example #21
def cubegen(ymin,ymax,xmin,xmax, deltaX=30):
	"""Generates a subcube of the specified dimensions from the specified
	   .fits file.

	   Argument format: "(ymin,ymax, xmin,xmax.)"
	   ^ These are the parameters of the desired subcube."""

	cube = SpectralCube.read("paws-30m-12co10-23as-cube.fits")
	subcube = cube[:,ymin:ymax,xmin:xmax]

	return subcube
Example #22
    def __init__(self, cube, beam=None, mask=None, method="MAD", compute=True):

        # Initialize cube object
        self.cube = SpectralCube.read(cube)

        if mask is not None:
            _check_mask(mask)
        self.mask = mask

        if beam is not None:
            _check_beam(beam)

        # Initialize noise object
        self.noise = Noise(self.cube, beam=beam, method=method)
Example #23
def binning(f_nam, bin_width=500, thisbin=0):
    """A function creating brightness bins of pixels, and eventualy a map, in the given spectral cube"""
    cube = SpectralCube.read(f_nam)
    cube = cube.with_spectral_unit(u.km/u.s,velocity_convention='radio')
    Tmax = cube.apply_numpy_function(np.nanmax,axis=0) # array of the maximum values in the spectra of each pixel
    baddata = nd.morphology.binary_dilation(np.isnan(Tmax),np.ones((25,25)))
    Tmax[baddata]=0.0
    bin_arr = np.sort(Tmax[np.isfinite(Tmax)])
    bin_arr2 = bin_arr[::-bin_width] # this creates an array of the bin margins, in which every bin has a width of "bin_width"
    bins = np.digitize(Tmax, bin_arr2)
    y, x = np.where(bins==thisbin)
    return y, x
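
A usage sketch (the cube name is hypothetical); thisbin selects which brightness bin's pixel coordinates are returned:

y, x = binning('ngc1333_nh3_11.fits', bin_width=500, thisbin=1)
print(len(y), 'pixels fall in bin 1')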
Example #24
def SampleWithConvolution(file, positions, beam=defaultBeam,
                          order=1, **kwargs):
    s = SpectralCube.read(file)
    spaxis = s.spectral_axis.value
    spaxis.shape += (1,)
    spaxis_ones = np.ones_like(spaxis)
    s2 = s.convolve_to(beam)
    ravals = spaxis_ones * positions.ra.value
    decvals = spaxis_ones * positions.dec.value
    vvals = spaxis * np.ones_like(positions.ra.value)
    x, y, v = s.wcs.all_world2pix(ravals, decvals, vvals, 0)
    output = map_coordinates(s2.filled_data[:], [v, y, x],
                             order=order, **kwargs)
    # import pdb; pdb.set_trace()
    return output
Example #25
def structure_function(input,nScales = 10, noiseScales = 10, spatialMethod = 'contour', spectralMethod = 'interpolate', meanCorrection = False):
    """
    Calculates structure function of molecular line emission cube using PCA

    Parameters
    ----------
    input : SpectralCube or string
       Either the SpectralCube object or load path for the same.
    nScales : int
       Number of size - line width scales to explore.  Defaults to 10.
    noiseScales : int
       Number of scales used for noise estimation.  Defaults to 10.  To suppress 
       noise correction, set to 0.
    spatialMethod, spectralMethod : 'fit', 'interpolate', or 'contour'
       Choose the method used to estimate the 1/e widths of the ACFs.  Defaults to
       'interpolate' for the 1D (spectral) ACF and 'contour' for the 2D (spatial) ACFs.
    meanCorrection : bool
       If True, calculates a proper covariance matrix in the PCA.  If False (default),
       no correction is applied, following the literature approach.

    Returns
    -------
    Size_scale : 1D `numpy` array 
        Measure of size in pixel units
    LineWidth_scale : 1D `numpy` array
        Measure of LineWidth returned in pixel units

    """


    if isinstance(input,SpectralCube):
        cube = input
    elif isinstance(input,str):
        try:
            cube = SpectralCube.read(input)
        except:
            raise
    else:
        raise NotImplementedError

    evals, evec, _ = pca.pca(cube, meanCorrection = meanCorrection)
    imgStack = pca.EigenImages(evec, cube, nScales = nScales)
    acorImg = pca.AutoCorrelateImages(imgStack)
    NoiseACF = pca.NoiseACF(evec, cube ,nScales = noiseScales)
    acorSpec = pca.AutoCorrelateSpectrum(evec, nScales = nScales)
    line_width = pca.WidthEstimate1D(acorSpec, method = spectralMethod)
    size = pca.WidthEstimate2D(acorImg, NoiseACF = NoiseACF, method = spatialMethod)
    return size,line_width
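
A minimal call sketch, assuming the pca module used above is importable and the cube fits in memory:

size, line_width = structure_function('cube.fits', nScales=10, noiseScales=10,
                                      spatialMethod='contour', spectralMethod='interpolate')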
Example #26
def drawM33(vmin=40,vmax=80, deltaX=40, deltaV=6, deltadeltaX=1, deltadeltaV=1,xi_mode=0):
	"""
	Activates "draw" with each of the .py file's subcube selections,
	   with the same args as "arrayM33".

	Parameters:
	-----------
	vmin,...,deltadeltaV : int
		Parameters used in relevant xi map.
		WARNING: Selecting too large of a vmax-vmin will hugely increase
		processing time.
	xi_mode : int
		For xi calculations only. 
		When "xi_mode" is 0, the program will use a cube from the 
		   default .fits file and a "convolved cube" from that same
		   .fits file.
		When "xi_mode" is 1 (OBSOLETE), the program will use ONLY a 
		   cube from the filename +"_blank" .fits file, which is 
		   assumed to have NO NOISE.
		When "xi_mode" is 2, the program functions like "xi_mode==0" 
		   EXCEPT it then subtracts two similar maps that are 
		   assumed to be made entirely of noise.

	Returns:
	-----------
	none

	These MUST match the args/kwargs used in "arrayM33"!
	"""

	galaxyname = 'M33'
	filename = 'm33.co21_iram_CLEANED'

	cube = SpectralCube.read(filename+".fits")

	pixelwidthDEG = cube.header['CDELT2']			# The width of each pixel, in degrees.
	distancePC = 840000.0			# The distance to M33 (the galaxy this .fits file covers), in parsecs.  (???) Is this number accurate, though?
	pixelwidthPC = pixelwidthDEG*np.pi/180.0*distancePC	# The width of each pixel, in pc.


	ymin = np.array([350,600,650,525,300,250])	# These are the minimum "y" values of the regions that we're dealing with.
	ymax = np.array([550,800,850,725,500,450])	# These are the corresponding maximum "y" values of these regions.
	xmin = np.array([500,100,400,288,200,550])	# These are the corresponding minimum "x" values of these regions.
	xmax = np.array([700,300,600,488,400,750])	# These are the corresponding maximum "x" values of these regions. (Example: The first region has ymin=350, ymax=550, xmin=500, xmax=700.)
	sets = np.ravel(ymin.shape)[0]		# This is the number of regions that we're dealing with.

	for i in range(0,sets):
		draw(vmin,vmax,ymin[i],ymax[i],xmin[i],xmax[i],deltaX,deltaV,deltadeltaX,deltadeltaV,filename,galaxyname,xi_mode)
Example #27
def mean_spectra(region,line,file_extension,restFreq,spec_param):
    '''
    Sum spectra over entire mapped region
    Cubes are missing BUNIT header parameter. Fix. 
    '''
    filein = '{0}/{0}_{1}_{2}_trim.fits'.format(region,line,file_extension)
    #add_fits_units(filein,'K')
    cube = SpectralCube.read(filein)
    #trim_edge_cube(cube)
    slice_unmasked = cube.unmasked_data[:,:,:]
    if line == 'NH3_33':
        slice_unmasked[spec_param['mask33_chans'][0]:spec_param['mask33_chans'][1],:,:]=0.
    summed_spectrum = np.nanmean(slice_unmasked,axis=(1,2))
    cube2 = cube.with_spectral_unit(u.km/u.s,velocity_convention='radio',
                                    rest_value=restFreq*u.GHz)
    return summed_spectrum, cube2.spectral_axis
Example #28
def spectra_from_cubefn(cubefn, reg, bins_arcsec, coordinate):
    cube = SpectralCube.read(cubefn)

    pixcoordinate = cube.wcs.celestial.wcs_world2pix(coordinate.ra.deg,
                                                     coordinate.dec.deg,
                                                     0)

    pixscale = (cube.wcs.celestial.pixel_scale_matrix.diagonal()**2).sum()**0.5

    includemask = reg.get_mask(header=cube.wcs.celestial.to_header(),
                               shape=cube.shape[1:])


    spectra = extract_radial_spectrum(cube, pixcoordinate, ~includemask,
                                      radial_bins=bins_arcsec/(pixscale*3600))

    return spectra
Example #29
def tilecube(catalog, infile='COHRS_all.fits', outdir='COHRS_tiles',
             root=None):
    uniqfiles = np.unique(catalog['orig_file'])
    s = SpectralCube.read(infile)
    if not os.path.exists(outdir):
        os.mkdir(outdir)
        
    for thisfile in uniqfiles:
        print "Now processing {0}".format(thisfile)
        xstart = int((thisfile.split('_'))[2])
        xend = int((thisfile.split('_'))[3])
        subcube = s[:,:,xstart:xend]
        if not root:
            root = (infile.split('_'))[0]
        subcube.write(outdir+'/'+root+'_{0}_{1}.fits'.format(xstart,
                                                             xend),
                      overwrite=True)
Example #30
def image_make():
	cube = SpectralCube.read("paws_norot.fits")
	subcube = cube[:,400:500,400:600]

	# 1. Extracting a RECTANGULAR subcube
	# 2. Compute a moment0 map (add up in the spectral direction using `moment0 = subcube.moment(0)` Remember to take `moment0.value` for what follows.
	# 3. Calculate the structure function for a small number of offsets $\delta x =\{-1,0,1\}$ and $\delta y = \{-1,0,1\}$.  Given a map $M(x,y)$
	# 
	# $$ S_2(\delta x, \delta y) = \mathrm{mean}([M(x,y) - M(x+\delta x, y+\delta y)]^2)$$

	moment0 = subcube.moment(0,axis=0)


	dX = 5                      # This is simply the maximum absolute value of "dx". So if dX = 1, then dx = {-1,0,1}.
	dY = np.copy(dX)                      # Same as above, but for "dy". For simplicity, let it be the same as dX.
	nmax = abs(2*dX)+1
	S_2 = np.zeros([nmax,nmax])

	n = moment0.shape[(0)]     # The matrix "M" referred to above has n rows.
	m = moment0.shape[(1)]     # The matrix "M" referred to above has m columns.

	for dx in range (-dX,dX+1):
		for dy in range (-dY,dY+1):

			M = moment0.value         # This is the matrix "M" referred to above.
			P = np.arange(n*m).reshape(n,m) # This will be used to track the shifting "pixels" of M(r) and M(r+dr).
			D = np.zeros([n,m])   # This will be the difference between M(r) and M(r+dr).

			D = M - np.roll(np.roll(M,-dy,axis=0),-dx,axis=1)

			goodpix = (P - np.roll(P,-dy,axis=0) == -dy*m) * (P - np.roll(P,-dx,axis=1) == -dx)
			# Note: The "-dy*m" is because, for P, a pixel's value is separated from that of a
			#        pixel above or below it by exactly m. So, the difference between a pixel's value and
			#        that of a pixel "dy" rows below is simply dy*m.
			# In "goodpix", pixels that have wrapped around are treated as "False".
			goodpix = goodpix.astype('float')
			goodpix[goodpix==0] = np.nan     # Now, we can disregard the wraparound pixels entirely.

			S_2[dy+dY, dx+dX] = np.nanmean((D * goodpix)**2)
		
	
	plt.imshow(S_2, interpolation = 'none', extent = [-dX,dX,-dY,dY])
	plt.colorbar()
	plt.xlabel('dx')
	plt.ylabel('dy')
	plt.savefig('image.png')
Example #31
    def __init__(self, cube, scale=None, moment_method='slice'):
        super(Moments, self).__init__()

        if not spectral_cube_flag:
            raise ImportError("Moments requires the spectral-cube "
                              " to be installed: https://github.com/"
                              "radio-astro-tools/spectral-cube")

        if isinstance(cube, SpectralCube):
            self.cube = cube
            self.save_name = None
        else:
            self.cube = SpectralCube.read(cube)
            # Default save name to the cube name without the suffix.
            self.save_name = ".".join(cube.split(".")[:-1])

        if moment_method not in ['slice', 'cube', 'ray']:
            raise TypeError("Moment method must be 'slice', 'cube', or 'ray'.")
        self.moment_how = moment_method

        self.scale = scale

        self.prop_headers = None
        self.prop_err_headers = None
Example #32
def fixExtraHERAfromAdam(fitsimage,beam=15.0):
    '''
    Purpose: fix up extra HERA data from Adam
    '''

    # switch header from M/S to m/s to fix up wcs read errors with SpectralCube
    f = fits.open(fitsimage)
    f[0].header['CUNIT3'] = 'm/s'
    newimage = fitsimage.replace('.fits','_fixed.fits')
    f.writeto(newimage,overwrite=True)
    f.close()
    
    # open image
    cube = SpectralCube.read(newimage)    
    
    # switch to km/s
    cube_kms = cube.with_spectral_unit(u.km / u.s)

    # smooth
    newBeam = Beam(beam*u.arcsec)
    smoothCube = cube_kms.convolve_to(newBeam)

    # write out
    smoothCube.write(newimage.replace('.fits','_10kms_gauss15.fits'),overwrite=True)
Example #33
def fixHeracles(fitsimage):
    '''

    fixes up image headers for missing values etc so that we can
    regrid appropriately.
 
    '''

    # Using the astropy.io.fits to modify headers 
    # since SpectralCube disapproves of modifying headers as far 
    # as I can tell.
    f = fits.open(fitsimage)

    # Convert VELOCITY to VRAD for Heracles headers.
    if f[0].header['CTYPE3'] == 'VELOCITY':
        f[0].header['CTYPE3'] = 'VRAD' 
        
    newimage = fitsimage.replace('.fits','_fixed.fits')
    f.writeto(newimage,overwrite=True)

    cube = SpectralCube.read(newimage)
    cube_kms = cube.with_spectral_unit(u.km / u.s)

    cube_kms.write(newimage.replace('.fits','_kms.fits'),overwrite=True)
Example #34
def rescale(conv_model, epsilon, residual_image, savename=None,
            export_fits=True):
    if isinstance(residual_image, BaseSpectralCube):
        residual = residual_image
        if savename is None and export_fits:
            raise ValueError("Must specify savename if exporting")
    else:
        residual = SpectralCube.read(residual_image, format='casa_image')
        if savename is None:
            savename = residual_image.replace(".residual",
                                              ".image.rescaled.fits")

    header = conv_model.header

    epsilon = epsilon*u.dimensionless_unscaled

    restor = conv_model.unitless + residual*epsilon[:,None,None]

    if export_fits:
        print("Writing")
        restor.write(savename, overwrite=True)
        print("Done writing")

    return restor
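
A hedged call sketch with hypothetical CASA product names; conv_model is expected to be a SpectralCube of the convolved model and epsilon a per-channel scale-factor array:

import numpy as np
from spectral_cube import SpectralCube
conv_model = SpectralCube.read('field.convmodel.fits')
epsilon = np.ones(conv_model.shape[0])
restored = rescale(conv_model, epsilon, 'field.residual', export_fits=False)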
Example #35
    def update(ii):

        try:
            if os.path.exists(f'{imname}_selfcal{ii-1}_finaliter.image.tt0'):
                cube = SpectralCube.read(f'{imname}_selfcal{ii-1}.image.tt0',
                                         format='casa_image')
                im1.set_data(cube[0].value)
                cube = SpectralCube.read(
                    f'{imname}_selfcal{ii-1}.residual.tt0',
                    format='casa_image')
                im2.set_data(cube[0].value)
                cube = SpectralCube.read(f'{imname}_selfcal{ii-1}.model.tt0',
                                         format='casa_image')
                # assume the beam doesn't change size
                data = convolve_fft(cube[0].value, kernel,
                                    allow_huge=True) * ppbeam
                im3.set_data(data)

                title.set_text(f"Selfcal iteration {ii-1} (final clean)")

                return (im1, im2, im3), (ax1, ax2, ax3)
            if ii == 0:
                return (im1, im2, im3), (ax1, ax2, ax3)
            else:
                cube = SpectralCube.read(f'{imname}_selfcal{ii}.image.tt0',
                                         format='casa_image')
                im1.set_data(cube[0].value)
                cube = SpectralCube.read(f'{imname}_selfcal{ii}.residual.tt0',
                                         format='casa_image')
                im2.set_data(cube[0].value)
                cube = SpectralCube.read(f'{imname}_selfcal{ii}.model.tt0',
                                         format='casa_image')
                # assume the beam doesn't change size
                data = convolve_fft(cube[0].value, kernel,
                                    allow_huge=True) * ppbeam
                im3.set_data(data)

                title.set_text(f"Selfcal iteration {ii}")

                return (im1, im2, im3), (ax1, ax2, ax3)
        except Exception as ex:
            print(ex)
Example #36
import numpy as np
from spectral_cube import SpectralCube as sc
import glob
import astropy.units as u
import matplotlib.pyplot as plt
import time

home = "/blue/adamginsburg/d.jeff/imaging_results/SgrB2DS-CH3OH/"
#plt.close('all')
print('Loading cubes')
#spw3=sc.read("/blue/adamginsburg/d.jeff/imaging_results/SgrB2DS_field1_spw3_cube.image.fits")
spw0 = sc.read(
    "/blue/adamginsburg/d.jeff/imaging_results/SgrB2DS_field1_spw0_cube.image.fits"
)
'''
cube_w3=spw3.wcs

targetworldcrd=[[0,0,0],[2.66835339e+02, -2.83961660e+01, 0]]
targetpixcrd=cube_w.all_world2pix(targetworldcrd,1,ra_dec_order=True)


testonpix=spw3[:,int(round(targetpixcrd[1][1])),int(round(targetpixcrd[1][0]))]

testonreg=spw3[1:(len(spw3)-2),400:900,400:900]
'''
#spw3.allow_huge_operations=True
#spw3medsub=spw3[1:(len(spw3)-2)]
spw0.allow_huge_operations = True
print('Begin bad beam masking')
starttime = time.time()
spw0 = spw0.mask_out_bad_beams(threshold=0.01)
Example #37
def get_psf_secondpeak(fn,
                       show_image=False,
                       min_radial_extent=1.5 * u.arcsec,
                       max_radial_extent=5 * u.arcsec):
    """ REDUNDANT with get_psf_secondpeak, but this one is better

    Process:
        1. Find the first minimum of the PSF by taking the radial profile within 50 pixels
        2. Take the integral of the PSF within that range
        3. Calculate the residual of the PSF minus the CASA-fitted Gaussian beam
        4. Integrate that to get the fraction of flux outside the synthesized
        beam in the main lobe of the dirty beam
        5. Find the peak and the location of the peak residual

    """
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        cube = SpectralCube.read(
            fn, format='casa_image' if not fn.endswith('.fits') else 'fits')
    psfim = cube[0]

    pixscale = wcs.utils.proj_plane_pixel_scales(cube.wcs.celestial)[0] * u.deg

    center = np.unravel_index(np.argmax(psfim), psfim.shape)
    cy, cx = center

    cutout = psfim[cy - 100:cy + 101, cx - 100:cx + 101]
    psfim = cutout
    fullbeam = cube.beam.as_kernel(
        pixscale,
        x_size=201,
        y_size=201,
    )

    shape = cutout.shape
    sy, sx = shape

    Y, X = np.mgrid[0:sy, 0:sx]

    beam = cube.beam

    center = np.unravel_index(np.argmax(cutout), cutout.shape)
    cy, cx = center

    # elliptical version...
    dy = (Y - cy)
    dx = (X - cx)
    costh = np.cos(beam.pa)
    sinth = np.sin(beam.pa)
    rmajmin = beam.minor / beam.major

    rr = ((dx * costh + dy * sinth)**2 / rmajmin**2 +
          (dx * sinth - dy * costh)**2 / 1**2)**0.5

    rbin = (rr).astype(int)

    # assume the PSF first minimum is within 100 pixels of center
    radial_mean = ndimage.mean(cutout**2, labels=rbin, index=np.arange(100))

    # find the first negative peak (approximately); we include anything
    # within this radius as part of the main beam
    first_min_ind = scipy.signal.find_peaks(-radial_mean)[0][0]

    view = (slice(cy - first_min_ind.astype('int'),
                  cy + first_min_ind.astype('int') + 1),
            slice(cx - first_min_ind.astype('int'),
                  cx + first_min_ind.astype('int') + 1))
    data = cutout[view].value
    bm = fullbeam.array[view]
    # the data and beam must be concentric
    # and there must be only one peak location
    # (these checks are to avoid the even-kernel issue in which the center
    # of the beam can have its flux spread over four pixels)
    assert np.argmax(data) == np.argmax(bm)
    assert (bm.max() == bm).sum() == 1

    bmfit_residual = data - bm / bm.max()
    radial_mask = rr[view] < first_min_ind

    psf_integral_firstpeak = (data * radial_mask).sum()
    psf_residual_integral = (bmfit_residual * radial_mask).sum()
    residual_peak = bmfit_residual.max()
    residual_peak_loc = rr[view].flat[bmfit_residual.argmax()]

    peakloc_as = (residual_peak_loc * pixscale).to(u.arcsec)

    # pl.figure(3).clf()
    # bmradmean = ndimage.mean((fullbeam.array/fullbeam.array.max())**2, labels=rbin, index=np.arange(100))
    # pl.plot(radial_mean)
    # pl.plot(bmradmean)
    # pl.figure(1)

    if show_image:
        import pylab as pl
        #pl.clf()

        # this finds the second peak
        # (useful for display)
        outside_first_peak_mask = (rr > first_min_ind) & (fullbeam.array <
                                                          1e-5)
        first_sidelobe_ind = scipy.signal.find_peaks(
            radial_mean * (np.arange(len(radial_mean)) > first_min_ind))[0][0]
        max_sidelobe = cutout[outside_first_peak_mask].max()
        max_sidelobe_loc = cutout[outside_first_peak_mask].argmax()
        r_max_sidelobe = rr[outside_first_peak_mask][max_sidelobe_loc]
        #r_max_sidelobe = first_sidelobe_ind

        # decide how big to make the plot
        if r_max_sidelobe * pixscale < min_radial_extent:
            radial_extent = (min_radial_extent / pixscale).decompose().value
        else:
            radial_extent = r_max_sidelobe
        if radial_extent * pixscale > max_radial_extent:
            radial_extent = (max_radial_extent / pixscale).decompose().value

        log.info(f"radial extent = {radial_extent},  "
                 f"r_max_sidelobe = {r_max_sidelobe}, "
                 "********" if r_max_sidelobe > radial_extent else ""
                 f"first_sidelobe_ind={first_sidelobe_ind}, "
                 f"first_min_ind = {first_min_ind}")

        bm2 = cube.beam.as_kernel(
            pixscale,
            x_size=radial_extent.astype('int') * 2 + 1,
            y_size=radial_extent.astype('int') * 2 + 1,
        )
        view = (slice(cy - radial_extent.astype('int'),
                      cy + radial_extent.astype('int') + 1),
                slice(cx - radial_extent.astype('int'),
                      cx + radial_extent.astype('int') + 1))
        bmfit_residual2 = cutout[view].value - bm2.array / bm2.array.max()

        #extent = np.array([-first_min_ind, first_min_ind, -first_min_ind, first_min_ind])*pixscale.to(u.arcsec).value
        extent = np.array([
            -radial_extent, radial_extent, -radial_extent, radial_extent
        ]) * pixscale.to(u.arcsec).value
        pl.imshow(bmfit_residual2,
                  origin='lower',
                  interpolation='nearest',
                  extent=extent,
                  cmap='gray_r')
        cb = pl.colorbar()
        pl.matplotlib.colorbar.ColorbarBase.add_lines(self=cb,
                                                      levels=[max_sidelobe],
                                                      colors=[(0.1, 0.7, 0.1,
                                                               0.9)],
                                                      linewidths=1)
        pl.contour(bm2.array / bm2.array.max(),
                   levels=[0.1, 0.5, 0.9],
                   colors=['r'] * 3,
                   extent=extent)
        pl.contour(rr[view],
                   levels=[first_min_ind, r_max_sidelobe],
                   linestyles=['--', ':'],
                   colors=[(0.2, 0.2, 1, 0.5), (0.1, 0.7, 0.1, 0.5)],
                   extent=extent)
        pl.xlabel("RA Offset [arcsec]")
        pl.ylabel("Dec Offset [arcsec]")

    return (residual_peak, peakloc_as.value,
            psf_residual_integral / psf_integral_firstpeak)
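
A call sketch with a hypothetical PSF image name; the returned tuple is, roughly, the peak of the PSF-minus-fitted-beam residual, its radius in arcsec, and the ratio of that residual's integral to the main-lobe integral inside the first PSF minimum:

peak, peak_radius_arcsec, sidelobe_fraction = get_psf_secondpeak(
    'field.psf.tt0', show_image=False)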
Example #38
def imstats(fn, reg=None):
    try:
        fh = fits.open(fn)
        data = fh[0].data
        ww = wcs.WCS(fh[0].header)
    except IsADirectoryError:
        cube = SpectralCube.read(fn, format='casa_image')
        data = cube[0].value
        ww = cube.wcs

    mad = mad_std(data, ignore_nan=True)
    peak = np.nanmax(data)
    imsum = np.nansum(data)
    sumgt5sig = np.nansum(data[data > 5 * mad])
    sumgt3sig = np.nansum(data[data > 3 * mad])

    pixscale = wcs.utils.proj_plane_pixel_area(ww) * u.deg**2

    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', category=UserWarning, append=True)
        warnings.filterwarnings('ignore', category=RuntimeWarning, append=True)

        if 'cube' in locals():
            try:
                bm = cube.beam
                ppbeam = (bm.sr / pixscale).decompose()
                assert ppbeam.unit.is_equivalent(u.dimensionless_unscaled)
                ppbeam = ppbeam.value
            except NoBeamError:
                ppbeam = np.nan
                bm = Beam(np.nan)
        else:
            try:
                bm = Beam.from_fits_header(fh[0].header)
                ppbeam = (bm.sr / pixscale).decompose()
                assert ppbeam.unit.is_equivalent(u.dimensionless_unscaled)
                ppbeam = ppbeam.value
            except NoBeamException:
                ppbeam = np.nan
                bm = Beam(np.nan)

        meta = {
            'beam': bm.to_header_keywords(),
            'bmaj': bm.major.to(u.arcsec).value,
            'bmin': bm.minor.to(u.arcsec).value,
            'bpa': bm.pa.value,
            'mad': mad,
            'peak': peak,
            'peak/mad': peak / mad,
            'ppbeam': ppbeam,
            'sum': imsum,
            'fluxsum': imsum / ppbeam,
            'sumgt5sig': sumgt5sig,
            'sumgt3sig': sumgt3sig,
        }

    if reg is not None:
        reglist = regions.read_ds9(reg)
        data = data.squeeze()
        composite_region = reduce(operator.or_, reglist)
        if hasattr(composite_region, 'to_mask'):
            msk = composite_region.to_mask()
        else:
            preg = composite_region.to_pixel(ww.celestial)
            msk = preg.to_mask()
        cutout_pixels = msk.cutout(data)[msk.data.astype('bool')]

        meta['mad_sample'] = mad_std(cutout_pixels, ignore_nan=True)
        meta['std_sample'] = np.nanstd(cutout_pixels)

    if fn.endswith('.image.tt0') or fn.endswith(
            '.image.tt0.fits') or fn.endswith(
                '.image.tt0.pbcor.fits') or fn.endswith('.image.tt0.pbcor'):
        psf_fn = fn.split(".image.tt0")[0] + ".psf.tt0"
    elif fn.endswith('.model.tt0') or fn.endswith(
            '.model.tt0.fits') or fn.endswith(
                '.model.tt0.pbcor.fits') or fn.endswith('.model.tt0.pbcor'):
        psf_fn = fn.split(".model.tt0")[0] + ".psf.tt0"
    elif fn.endswith('.image') or fn.endswith('.image.fits') or fn.endswith(
            '.image.pbcor.fits') or fn.endswith('.image.pbcor'):
        psf_fn = fn.split(".image") + ".psf"
    else:
        raise IOError("Wrong image type passed to imstats: {fn}".format(fn=fn))

    if os.path.exists(psf_fn):
        psf_secondpeak, psf_secondpeak_loc, psf_sidelobe1_fraction = get_psf_secondpeak(
            psf_fn)
        meta['psf_secondpeak'] = psf_secondpeak
        meta['psf_secondpeak_radius'] = psf_secondpeak_loc
        meta['psf_secondpeak_sidelobefraction'] = psf_sidelobe1_fraction
    else:
        meta['psf_secondpeak'] = np.nan
        meta['psf_secondpeak_radius'] = np.nan
        meta['psf_secondpeak_sidelobefraction'] = np.nan

    return meta
Example #39
'''
Save a version of the 14B-088 HI cube centered on the NOEMA cube.
'''

from spectral_cube import SpectralCube
import astropy.units as u

from paths import (noema_co21_file_dict, hi_14B088_data_path,
                   hi_17B162_1kms_data_path)

co_cube = SpectralCube.read(noema_co21_file_dict['Cube'])

hi_cube = SpectralCube.read(
    hi_14B088_data_path(
        "M33_14B-088_HI.clean.image.GBT_feathered.pbcov_gt_0.5_masked.fits"))

# Pad by one beam width
pad_size = (20 * u.arcsec).to(u.deg)

hi_subcube = hi_cube.subcube(xlo=co_cube.longitude_extrema[0] - pad_size,
                             xhi=co_cube.longitude_extrema[1] + pad_size,
                             ylo=co_cube.latitude_extrema[0] - pad_size,
                             yhi=co_cube.latitude_extrema[1] + pad_size,
                             zhi=-180 * u.km / u.s,
                             zlo='min')

hi_subcube = hi_subcube.to(u.K)

hi_subcube.write(
    hi_14B088_data_path(
        "M33_14B-088_HI.clean.image.GBT_feathered.pbcov_gt_0.5_masked_noemaco21_slice.fits",
Example #40
from astropy.utils import data
from astropy.io import fits
from astropy import units as u
from astropy.wcs import WCS
from spectral_cube import SpectralCube
from astroquery.esasky import ESASky
from astroquery.utils import TableList
from reproject import reproject_interp
import matplotlib as mpl
import matplotlib.pyplot as plt
data.conf.remote_timeout = 60

# needed for plotting
myfont=mpl.font_manager.FontProperties(fname=r'C:\Windows\Fonts\simsun.ttc', size=30) # label font
mpl.rcParams.update({'font.size': 20}) # change the font size of all tick labels; other properties can be changed the same way
mpl.rcParams['xtick.direction'] = 'in'
mpl.rcParams['ytick.direction'] = 'in'

co_data = fits.open('1340+005U.fits')                            # Open the FITS file for reading
cube = SpectralCube.read(co_data)                                # Initiate a SpectralCube
co_data.close()                                                  # Close the FITS file - we already read it in and don't need it anymore!
#print(cube)                                                      # inspect the cube header info
#cube[9001, :, :].quicklook(); plt.show()                                      # Slice the cube along the spectral axis, and display a quick image
#cube[:, 46, 46].quicklook(); plt.show()                                      # Extract a single spectrum through the data cube
## or plt.plot(cube[:, 46, 46])

lon_range = [133.75, 134.25] * u.deg                                   # Define desired latitude and longitude range
lat_range = [0.25, 0.75] * u.deg
sub_cube = cube.subcube(xlo=lon_range[0], xhi=lon_range[1], ylo=lat_range[0], yhi=lat_range[1])          # Create a sub_cube cut to these coordinates
sub_cube_slab = sub_cube.spectral_slab(-100. *u.km / u.s, 100. *u.km / u.s)                              # Cut along the Spectral Axis
sub_cube_slab[:,31,31].quicklook(); #plt.show() # why does the first plot call fail, but it works afterwards?
sub_cube_slab[:,31,31].quicklook()
plt.xticks(range(-100000,100000,20000),range(-100,100,20))
plt.xlabel('V(m/s)')
plt.ylabel('T(K)')
Example #41
from spectral_cube import SpectralCube
from astropy import units as u


for line, freq in (('SiO',217.10498*u.GHz),
                   ('HC3N', 218.32472*u.GHz),
                   ('H2CO303', 218.22219*u.GHz),
                  ):

    northcube = SpectralCube.read('/Volumes/passport/alma/w51/longbaseline/W51northcax.SPW0_ALL_medsub_cutout.fits')
    northvcube = northcube.with_spectral_unit(u.km/u.s, rest_value=freq,
                                              velocity_convention='radio')

    northslab = northvcube.spectral_slab(-100*u.km/u.s, 210*u.km/u.s)
    northmed = northslab.median(axis=0)
    northmslab = northslab-northmed

    northsioblue = northmslab.spectral_slab(-32*u.km/u.s, 55*u.km/u.s).moment0()
    northsioblue.write('/Users/adam/work/w51/alma/FITS/longbaseline/{line}_m32to55kms_north.fits'.format(line=line), overwrite=True)

    northsiored = northmslab.spectral_slab(74*u.km/u.s, 118*u.km/u.s).moment0()
    northsiored.write('/Users/adam/work/w51/alma/FITS/longbaseline/{line}_74to118kms_north.fits'.format(line=line), overwrite=True)




    e2cube = SpectralCube.read('/Volumes/passport/alma/w51/longbaseline/W51e2cax.SPW0_ALL_medsub_cutout.fits')
    e2vcube = e2cube.with_spectral_unit(u.km/u.s, rest_value=freq,
                                        velocity_convention='radio')

    e2slab = e2vcube.spectral_slab(-100*u.km/u.s, 210*u.km/u.s)
Ejemplo n.º 42
0
import os
from hf_only_model import hfonly_66_fixed_fitter, hfonly_fitter, sixsix_movinghf_fitter
from spectral_cube import SpectralCube
from astropy import units as u
import numpy as np
import pyspeckit

pyspeckit.fitters.default_Registry.add_fitter('hfonly', hfonly_fitter(), 7)
pyspeckit.fitters.default_Registry.add_fitter('hfonly66',
                                              hfonly_66_fixed_fitter(), 4)
pyspeckit.fitters.default_Registry.add_fitter('sixsix_movinghf',
                                              sixsix_movinghf_fitter(), 5)

cube = SpectralCube.read(
    '/Volumes/passport/W51-GODDI/W51e2_66_baselined-sc-pb.cube.image.fits')
scube = cube[:, 230:307, 205:270]
errmap = scube.spectral_slab(-20 * u.km / u.s, 10 * u.km / u.s).std(axis=0)
mn = scube.min(axis=0).value

guesses = np.empty((7, scube.shape[1], scube.shape[2]))
guesses[0, :, :] = 58
guesses[1, :, :] = 26.9
guesses[2, :, :] = 0.010
guesses[3, :, :] = 1.5
guesses[4, :, :] = 31.4
guesses[5, :, :] = 0.010
guesses[6, :, :] = 1.5

negmask = mn < -0.005
guesses[2, negmask] = -0.1
guesses[5, negmask] = -0.1
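
The snippet ends before the fit itself is launched; a minimal continuation, assuming the intent is to fit the 7-parameter 'hfonly' model registered above, could look like this sketch (not the author's original script):

# Sketch only: wrap the trimmed subcube in a pyspeckit Cube and fit each pixel
# with the 'hfonly' model, seeding from the guesses and error map built above.
pcube = pyspeckit.Cube(cube=scube)
pcube.fiteach(fittype='hfonly', guesses=guesses, errmap=errmap.value,
              start_from_point=(scube.shape[2] // 2, scube.shape[1] // 2),
              multicore=4)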
Ejemplo n.º 43
0
import glob
import os
import pyregion
from astropy import log
from spectral_cube import SpectralCube
from spectral_cube.lower_dimensional_structures import Projection

for fn in glob.glob("*.image.pbcor.fits"):
    print(fn)
    #if fits.getheader(fn)['NAXIS'] <= 2:
    #    print("Skipped {0} because it wasn't a cube".format(fn))
    #    continue
    #if os.path.exists('collapse/argmax/{0}'.format(fn.replace(".image.pbcor.fits","_vmax.fits"))):
    #    print("Skipped {0} because it is done".format(fn))
    #    continue

    modfile = fn.replace(".image.pbcor", ".model")
    if os.path.exists(modfile):
        modcube = SpectralCube.read(modfile)
        modcube.beam_threshold = 100000

    cube = SpectralCube.read(fn)
    cube.beam_threshold = 1
    #cube.allow_huge_operations = True
    mcube = cube.mask_out_bad_beams(0.1)
    mcube.beam_threshold = 1

    stdspec = mcube.mad_std(axis=(1, 2), how='slice')
    stdspec.write("collapse/stdspec/{0}".format(
        fn.replace(".image.pbcor.fits", "_std_spec.fits")),
                  overwrite=True)
    stdspec.quicklook("collapse/stdspec/pngs/{0}".format(
        fn.replace(".image.pbcor.fits", "_std_spec.png")))
mergecubes = [
    'SgrB2_12m_spw1_lines.fits',
    'SgrB2_12m_spw2_lines.fits',
    'SgrB2_12m_spw3_lines.fits',
]

regions = (pyregion.open(rpath('ionizationfront_circle.reg')) +
           pyregion.open(rpath('extraction_regions_n_and_m.reg')) +
           pyregion.open(rpath('ch3cn_large_cores.reg')))

for cubename in mergecubes:
    for reg in regions:
        name = reg.attr[1]['text']
        fname = name.replace(" ", "_").lower()
        reg = pyregion.ShapeList([reg])

        suffix = os.path.splitext(cubename)[0]
        if os.path.exists(spath("{1}_{0}.fits".format(suffix, fname))):
            continue

        cube = SpectralCube.read(dpath(cubename))
        print(cube)
        log.info(name)
        log.info(fname)
        scube = cube.subcube_from_ds9region(reg)
        print(scube)
        spectrum = scube.mean(axis=(1, 2))

        spectrum.hdu.writeto(spath("{1}_{0}.fits".format(suffix, fname)),
                             clobber=True)
        print(spath("{1}_{0}.fits".format(suffix, fname)))
def run_scousepy():
    # Input values for core SCOUSE stages
    # (just put it in the directory, it will get ignored by git)
    datadirectory = '.'
    # The data cube to be analysed
    filename = 'CMZ_3mm_HNCO_60'
    # Fits extension
    fitsfile = os.path.join(datadirectory, filename + '.fits')
    # The range in velocity, x, and y over which to fit
    ppv_vol = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]  # NOTE: not used?
    # Radius for the spectral averaging areas. Map units.
    rsaa = [2.0, 5.0, 8.0]
    # Enter an approximate rms value for the data.
    rms_approx = 0.05
    # Threshold below which all channel values set to 0.0
    sigma_cut = 3.0

    cube = SpectralCube.read(fitsfile).with_spectral_unit(u.km / u.s)

    momzero = cube.with_mask(
        cube > u.Quantity(rms_approx * sigma_cut, cube.unit)).moment0(
            axis=0).value

    # get the coverage / average the subcube spectra
    coverage_coordinates, saa_spectra = [], []
    for r in rsaa:
        cc, ss = stage_1.define_coverage(cube, momzero, r)
        coverage_coordinates.append(cc)
        saa_spectra.append(ss)

    # write fits files for all the averaged spectra
    stage_1.write_averaged_spectra(cube.header, saa_spectra, rsaa)

    # plot multiple coverage areas
    stage_1.plot_rsaa(coverage_coordinates, momzero, rsaa)

    # TODO: PARALLELISE MULTICUBE!!! (after broadcasting MemoryError's are caught)
    npeaks = 1
    npeaks2finesse = {1: [20, 10, 10]}
    multicube_kwargs = dict(
        fits_flist=['saa_cube_r{}.fits'.format(r) for r in rsaa],
        fittype="gaussian",
        # [amplitude_range, velocity_range, sigma_range]
        priors=[[0, 2], [-110, 110], [10, 50]],
        finesse=npeaks2finesse[npeaks],
        npeaks=npeaks,  # priors and finesse can be expanded if need be
        npars=3,
        clip_edges=False,
        model_grid=None,  # we can directly pass an array of spectral models
        # to avoid regenerating the spectral models (`redo=True` forces it anyway)
        model_file="model_grid_x{}.npy".format(npeaks),
        redo=False,
        data_dir=".")

    # remove the spectral model file
    try:
        os.remove(multicube_kwargs["model_file"])
    except FileNotFoundError:
        pass

    spc_list = stage_2.best_guesses_saa(**multicube_kwargs)

    # inspect the guesses suggested:
    for spc in spc_list:
        spc.parcube = spc.best_guesses
        # HACK to allow the guess inspection (no errors on guesses):
        spc.errcube = np.full_like(spc.parcube, 0.1)
        # uuuuh not sure why this is needed
        spc.specfit.fitter._make_parinfo(npeaks=multicube_kwargs['npeaks'])
        spc.specfit.parinfo = spc.specfit.fitter.parinfo

        spc.mapplot()
from spectral_cube import SpectralCube
from astropy import units as u

for line, freq in (
    ('SiO', 217.10498 * u.GHz),
    ('HC3N', 218.32472 * u.GHz),
    ('H2CO303', 218.22219 * u.GHz),
):

    e8cube = SpectralCube.read(
        '/Volumes/passport/alma/w51/longbaseline/W51e8cax.SPW0_ALL_medsub_cutout.fits'
    )
    e8vcube = e8cube.with_spectral_unit(u.km / u.s,
                                        rest_value=freq,
                                        velocity_convention='radio')

    e8slab = e8vcube.spectral_slab(-100 * u.km / u.s, 210 * u.km / u.s)

    e8slab.write(
        '/Volumes/passport/alma/w51/longbaseline/W51e8cax.{0}cutout.fits'.
        format(line))
Ejemplo n.º 47
0
def get_source_spectra(cubefile, varfile, objects, outdir = "spectra/", marzfile = None, tovac = True):
    """
    Extract spectra of sources found using SExtractor
    from datacube.
    Args:
        cubefile (str): A datacube fits file
        varfile (str): Variance datacube fits file
        objects (Table): Table of extracted objects produced
            by sep.extract
        outdir (str, optional): directory to store spectra
        marzfile (str, optional): name of MARZ file to dump
            all spectra into. File creation is skipped if
            a name is not supplied.
        tovac (bool, optional): Convert wavelengths to vacuum.
    Returns:
        speclist (ndarray): A 2D array with an extracted
            spectrum in each row.
        varspeclist (ndarray): Similarly designed array with
            variance information.
        wave (1D Quantity array): Wavelength array.
    """

    # Preliminaries
    nobjs = len(objects)
    cube = SpectralCube.read(cubefile)
    varcube = SpectralCube.read(varfile)

    wave = cube.spectral_axis.value

    # Convert to vacuum wavelengths?
    if tovac:
        wave = _air_to_vac(wave)
    # Prepare an HDU in advance
    wavehdu = fits.ImageHDU(wave)
    wavehdu.header.set('extname', 'WAVELENGTH')

    # Initialize output lists 
    speclist = np.zeros([nobjs, len(wave)])
    varspeclist = np.zeros_like(speclist)

    # Create output folder?
    if not os.path.isdir(outdir):
        os.mkdir(outdir)
    for idx, obj in enumerate(objects):
        spec, varspec = spec_from_ellipse(cube, varcube,
                                          obj['x'], obj['y'],
                                          obj['a'], obj['b'],
                                          obj['theta'], r = 2)

        # Produce spectrum fits file
        spechdu = fits.PrimaryHDU(spec.data, header=spec.header)
        spechdu.header.set('extname', 'SPEC')
        varhdu = fits.ImageHDU(varspec.data, header=varspec.header)
        varhdu.header.set('extname', 'VAR')
        hdulist = fits.HDUList([spechdu, varhdu, wavehdu])
        specfile_name = outdir+str(idx)+"_spec1d.fits"
        hdulist.writeto(specfile_name, overwrite=True)

        # Append spectrum to list
        speclist[idx] = spec.data
        varspeclist[idx] = varspec.data

    if marzfile:
        _make_marz(cube, speclist, varspeclist, objects, outdir+marzfile, tovac=tovac)
    return speclist, varspeclist, wave
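
A hypothetical call to get_source_spectra, assuming the object table comes straight from sep.extract on a white-light image; the file names below are placeholders, not from the original source:

# Sketch only: detect sources with sep on a white-light image, then extract their
# spectra from the datacube and the matching variance cube.
import sep
from astropy.io import fits

white = fits.getdata('whitelight.fits').astype(float)
bkg = sep.Background(white)
objects = sep.extract(white - bkg.back(), 1.5, err=bkg.globalrms)
speclist, varspeclist, wave = get_source_spectra('datacube.fits', 'varcube.fits',
                                                 objects, outdir='spectra/',
                                                 marzfile='marz_in.fits')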
Ejemplo n.º 48
0
def get_img(cubefile, wlow = None, whigh = None, 
            trans_curve = None, how = "cube",
            bkgsub = False, save = None,
            overwrite = False, **bkgsubkw):
    """
    Flatten cube along wavelength and produce a 2D
    image.
    Args:
        cubefile (str): Path to the datacube
        wlow, whigh (Quantity, optional): wavelength 
            limits (with astropy units) to flatten between.
            If nothing is given, the cube is checked for the
            WAVGOOD keywords and flattened between
            them. If they don't exist, it's flattened fully.
        trans_curve (function, optional): transmission
            curve as a function of wavelength. Should be able
            to take vector inputs and produce vector outputs.
            We recommend passing a function produced by a scipy
            interpolation routine. Wavelength is assumed to
            be in angstroms.
        how (str, optional): "cube", "slice" or "ray". How do
            you want to load the cube to memory?
            "cube" loads the whole thing for summing. "slice"
            and "ray" do it slicewise or spectral-ray-wise.
        bkgsub (bool, optional): Subtract background continuum?
        **bkgsubkw: Keyword args to be passed to sep.Background
            for background estimation.
        save (str, optional): Path to file to be
            saved to.
        overwrite (bool, optional): Overwrite existing
            file?
    Returns:
        img (Spectral Cube Projection): Flattened 2D image
    """
    assert how in ["cube", "slice", "ray"], "Invalid summing method. Choose one of 'cube', 'slice' and 'ray'."
    # Read in datacube
    cube = SpectralCube.read(cubefile)

    # Create a truncated cube based on wlow and whigh
    if not wlow:
        try:
            wlow = cube.header['WAVGOOD0']*cube.spectral_axis.unit
        except KeyError:
            wlow = cube.spectral_extrema[0]
    
    if not whigh:
        try:
            whigh = cube.header['WAVGOOD1']*cube.spectral_axis.unit
        except KeyError:
            whigh = cube.spectral_extrema[1]
    
    goodcube = cube.spectral_slab(wlow, whigh)

    # Do you want to use a filter?
    if trans_curve:
        # Compute transmission curve for cube wavelengths
        trans = trans_curve(goodcube.spectral_axis.value)

        # Create a 3D array of trans stacked in the same
        # shape as the cube spatial dimensions.
        # TODO: make this more elegant.
        tt = _spectral_tile(trans, cube)
        goodcube = goodcube*tt
    
    # Make image
    img = goodcube.sum(axis = 0, how = how)
    if bkgsub:
        bkg = sep.Background(img.value, **bkgsubkw)
        img = img - bkg*img.unit
    if save:
        img.write(save, overwrite = overwrite)
    
    return img
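
A hypothetical invocation of get_img, assuming a cube whose spectral axis is wavelength in Angstroms; the file names and limits are placeholders:

# Sketch only: background-subtracted white-light image between 4000 and 5500 Angstrom.
from astropy import units as u

img = get_img('datacube.fits', wlow=4000 * u.AA, whigh=5500 * u.AA,
              how='slice', bkgsub=True, save='whitelight.fits', overwrite=True)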
Ejemplo n.º 49
0
def extract_pv_slice(cube,
                     path,
                     wcs=None,
                     spacing=1.0,
                     order=3,
                     respect_nan=True):
    """
    Given a position-position-velocity cube with dimensions (nv, ny, nx), and
    a path, extract a position-velocity slice.

    Alternative implementations:
        gipsy::sliceview
        karma::kpvslice
        casaviewer::slice

    Parameters
    ----------
    cube : :class:`~numpy.ndarray` or :class:`~spectral_cube.SpectralCube` or str or HDU
        The cube to extract a slice from. If this is a plain
        :class:`~numpy.ndarray` instance, the WCS information can optionally
        be specified with the ``wcs`` parameter. If a string, it should be
        the name of a file containing a spectral cube.
    path : `Path` or list of 2-tuples
        The path along which to define the position-velocity slice. The path
        can contain coordinates defined in pixel or world coordinates.
    wcs : :class:`~astropy.wcs.WCS`, optional
        The WCS information to use for the cube. This should only be
        specified if the ``cube`` parameter is a plain
        :class:`~numpy.ndarray` instance.
    spacing : float
        The position resolution in the final position-velocity slice. This
        can be given in pixel coordinates or as a
        :class:`~astropy.units.Quantity` instance with angle units.
    order : int, optional
        Spline interpolation order when using paths with zero width. Does not
        have any effect for paths with a non-zero width.
    respect_nan : bool, optional
        If set to `False`, NaN values are changed to zero before computing
        the slices. If set to `True`, in the case of line paths a second
        computation is performed to ignore the NaN value while interpolating,
        and set the output values of NaNs to NaN.

    Returns
    -------
    slice : `PrimaryHDU`
        The position-velocity slice, as a FITS HDU object
    """

    if isinstance(cube, (six.string_types, ImageHDU, PrimaryHDU)):
        try:
            from spectral_cube import SpectralCube
            cube = SpectralCube.read(cube)
        except ImportError:
            raise ImportError("spectral_cube package required for working "
                              "with fits data. Install spectral_cube or "
                              "use NumPy arrays")

    if _is_spectral_cube(cube):
        wcs = cube.wcs
        # The fits HEADER will preserve the UNIT, but pvextractor does not care
        # what the flux units are
        cube = cube.filled_data[...].value

    if wcs is not None:
        wcs = sanitize_wcs(wcs)

    if not isinstance(cube, np.ndarray) or wcs is not None:
        scale = get_spatial_scale(wcs)
        if isinstance(spacing, u.Quantity):
            pixel_spacing = (spacing / scale).decompose()
            world_spacing = spacing
        else:
            pixel_spacing = spacing
            world_spacing = spacing * scale
    else:
        if isinstance(spacing, u.Quantity):
            raise TypeError(
                "No WCS has been specified, so spacing should be given in pixels"
            )
        else:
            pixel_spacing = spacing
            world_spacing = None

    # Allow path to be passed in as list of 2-tuples
    if not isinstance(path, paths.Path):
        path = paths.Path(path)

    pv_slice = extract_slice(cube,
                             path,
                             wcs=wcs,
                             spacing=pixel_spacing,
                             order=order,
                             respect_nan=respect_nan)

    # Generate output header
    if wcs is None:
        header = Header()
    else:
        header = slice_wcs(wcs, spatial_scale=world_spacing).to_header()

    # TODO: write path to BinTableHDU

    return PrimaryHDU(data=pv_slice, header=header)
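
For reference, a minimal call of the function above using a pvextractor Path; the pixel-coordinate endpoints and file names are illustrative only:

# Sketch only: extract a position-velocity slice along a two-point pixel path
# with a width of 3 pixels and write it out as FITS.
from pvextractor import Path

pv_path = Path([(15., 20.), (40., 32.)], width=3)
pv_hdu = extract_pv_slice('mycube.fits', pv_path, spacing=1.0)
pv_hdu.writeto('pv_slice.fits', overwrite=True)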
Ejemplo n.º 50
0
    peak_velocity = 115 * u.km / u.s
    # assume ~2 beams...
    distance = (0.07 * u.arcsec * dw51).to(u.km, u.dimensionless_angles())

    timescale = distance / peak_velocity

    print("north Mass rate = {0:0.3g} to {1:0.3g}".format(
        (nsio / cosmic_si_abundance * beam_area * u.Da / 0.739).to(u.M_sun) /
        timescale.to(u.yr),
        (nsio / xsio * beam_area * 2.8 * u.Da).to(u.M_sun) /
        timescale.to(u.yr)))

    north_ds = paths.dpath('longbaseline/W51north_siocube_downsampled.fits')
    if os.path.exists(north_ds):
        sm_sio_cube_north = sm_sio_cube = SpectralCube.read(north_ds)
    else:
        siocube = (SpectralCube.read(
            paths.dpath(
                'longbaseline/linked/W51northcax.SPW0_ALL_medsub_cutout.fits')
        ).with_spectral_unit(u.km / u.s,
                             rest_value=ref_freq,
                             velocity_convention='radio').spectral_slab(
                                 -140 * u.km / u.s, 260 * u.km / u.s))
        fwhm_factor = np.sqrt(8 * np.log(2))
        hanning_factor = 1129 / 977
        current_resolution = np.mean(np.diff(
            siocube.spectral_axis)) * hanning_factor
        target_resolution = 10.0 * u.km / u.s
        pixel_scale = current_resolution
        gaussian_width = ((target_resolution**2 - current_resolution**2)**0.5 /
                          pixel_scale / fwhm_factor)
Ejemplo n.º 51
0
import pyspeckit
from spectral_cube import SpectralCube
from astropy import units as u
import pylab as pl

cube11 = SpectralCube.read(filename='unpb_mosaic_11_trim.fits', allow_huge_operations=True)
cube11.allow_huge_operations=True
cube11 = cube11.to(u.K)
cube11 = cube11.with_spectral_unit(u.km/u.s, velocity_convention='radio')

n11cube = pyspeckit.Cube(cube = cube11)

cube22 = SpectralCube.read('unpb_mosaic_22_trim.fits', allow_huge_operations=True)
cube22.allow_huge_operations=True
cube22 = cube22.to(u.K)
cube22 = cube22.with_spectral_unit(u.km/u.s, velocity_convention='radio')

n22cube = pyspeckit.Cube(cube = cube22)

cube44 = SpectralCube.read('unpb_mosaic_44_trim.fits', allow_huge_operations=True)
cube44.allow_huge_operations=True
cube44 = cube44.to(u.K)
cube44 = cube44.with_spectral_unit(u.km/u.s, velocity_convention='radio')

n44cube = pyspeckit.Cube(cube = cube44)

cube55 = SpectralCube.read('unpb_mosaic_55_trim.fits', allow_huge_operations=True)
cube55.allow_huge_operations=True
cube55 = cube55.to(u.K)
cube55 = cube55.with_spectral_unit(u.km/u.s, velocity_convention='radio')
Ejemplo n.º 52
0
def cleansplit(filename,
               galaxy=None,
               Vwindow=650 * u.km / u.s,
               Vgalaxy=300 * u.km / u.s,
               blorder=3,
               HanningLoops=0,
               maskfile=None,
               circleMask=True,
               edgeMask=False,
               weightCut=0.2,
               spectralSetup=None,
               spatialSmooth=1.0):
    """
    Takes a raw DEGAS cube and produces individual cubes for each
    spectral line.
    
    Parameters
    ---------
    filename : str
        The file to split.
    
    Keywords
    --------
    galaxy : Galaxy object
        Currently unused
    Vwindow : astropy.Quantity
        Width of the window in velocity units
    Vgalaxy : astropy.Quantity
        Line of sight velocity of the galaxy centre
    blorder : int
        Baseline order
    HanningLoops : int
        Number of times to smooth and resample the data
    edgeMask : bool
        Determine whether to apply an edgeMask
    weightCut : float
        Minimum weight value to include in the data
    spatialSmooth : float
        Factor to increase the (linear) beam size by in a convolution.
    spectralSetup : str
        String to determine how we set up the spectrum
        'hcn_hcop' -- split based on HCN/HCO+ setup
        '13co_c18o' -- split based on 13CO/C18O setup
        '12co' -- don't split; assume single line
    """

    Cube = SpectralCube.read(filename)
    CatalogFile = get_pkg_data_filename('./data/dense_survey.cat',
                                        package='degas')
    Catalog = Table.read(CatalogFile, format='ascii')

    # Find which galaxy in our catalog corresponds to the object we
    # are mapping
    if galaxy is None:
        RABound, DecBound = Cube.world_extrema
        match = np.zeros_like(Catalog, dtype=bool)
        for index, row in enumerate(Catalog):
            galcoord = SkyCoord(row['RA'],
                                row['DEC'],
                                unit=(u.hourangle, u.deg))
            if (galcoord.ra < RABound[1] and galcoord.ra > RABound[0]
                    and galcoord.dec < DecBound[1]
                    and galcoord.dec > DecBound[0]):
                match[index] = True
        MatchRow = Catalog[match]
        galcoord = SkyCoord(MatchRow['RA'],
                            MatchRow['DEC'],
                            unit=(u.hourangle, u.deg))
        Galaxy = MatchRow['NAME'].data[0]
        print("Catalog Match with " + Galaxy)
        V0 = MatchRow['CATVEL'].data[0] * u.km / u.s

    # Check spectral setups.  Use the max frequencies present to
    # determine which spectral setup we used if not specified.
    if spectralSetup is None:
        if (Cube.spectral_axis.max() > 105 * u.GHz
                and Cube.spectral_axis.max() < 113 * u.GHz):
            warnings.warn("assuming 13CO/C18O spectral setup")
            spectralSetup = '13CO_C18O'
            filestr = '13co_c18o'
        if (Cube.spectral_axis.max() > 82 * u.GHz
                and Cube.spectral_axis.max() < 90 * u.GHz):
            warnings.warn("assuming HCN/HCO+ spectral setup")
            spectralSetup = 'HCN_HCO+'
            filestr = 'hcn_hcop'
        if (Cube.spectral_axis.max() > 113 * u.GHz):
            warnings.warn("assuming 12CO spectral setup")
            spectralSetup = '12CO'
            filestr = '12co'

    if spectralSetup == '13CO_C18O':
        CEighteenO = Cube.with_spectral_unit(u.km / u.s,
                                             velocity_convention='radio',
                                             rest_value=109.78217 * u.GHz)
        ThirteenCO = Cube.with_spectral_unit(u.km / u.s,
                                             velocity_convention='radio',
                                             rest_value=110.20135 * u.GHz)
        CubeList = (CEighteenO, ThirteenCO)
        LineList = ('C18O', '13CO')

    elif spectralSetup == 'HCN_HCO+':
        HCN = Cube.with_spectral_unit(u.km / u.s,
                                      velocity_convention='radio',
                                      rest_value=88.631847 * u.GHz)
        HCOp = Cube.with_spectral_unit(u.km / u.s,
                                       velocity_convention='radio',
                                       rest_value=89.188518 * u.GHz)
        CubeList = (HCN, HCOp)
        LineList = ('HCN', 'HCOp')

    elif spectralSetup == '12CO':
        TwelveCO = Cube.with_spectral_unit(u.km / u.s,
                                           velocity_convention='radio',
                                           rest_value=115.27120180 * u.GHz)
        CubeList = (TwelveCO, )
        LineList = ('12CO', )

    for ThisCube, ThisLine in zip(CubeList, LineList):
        if circleMask:
            x0, y0, _ = ThisCube.wcs.wcs_world2pix(galcoord.ra, galcoord.dec,
                                                   0, 0)
            ThisCube = circletrim(ThisCube,
                                  filename.replace('.fits', '_wts.fits'),
                                  x0,
                                  y0,
                                  weightCut=weightCut)
        if edgeMask:
            ThisCube = edgetrim(ThisCube,
                                filename.replace('.fits', '_wts.fits'),
                                weightCut=weightCut)

        # Trim each cube to the specified velocity range
        ThisCube = ThisCube.spectral_slab(V0 - Vwindow, V0 + Vwindow)
        ThisCube.write(Galaxy + '_' + ThisLine + '.fits', overwrite=True)
        StartChan = ThisCube.closest_spectral_channel(V0 - Vgalaxy)
        EndChan = ThisCube.closest_spectral_channel(V0 + Vgalaxy)

        if maskfile is not None:
            maskLookup = buildMaskLookup(maskfile)
            shp = ThisCube.shape
            TmpCube = ThisCube.with_spectral_unit(u.Hz)
            spaxis = TmpCube.spectral_axis
            spaxis = spaxis.value
            data = ThisCube.filled_data[:].value
            for y in np.arange(shp[1]):
                for x in np.arange(shp[2]):
                    spectrum = data[:, y, x]
                    if np.any(np.isnan(spectrum)):
                        continue
                    coords = ThisCube.world[:, y, x]
                    mask = maskLookup(coords[2].value, coords[1].value, spaxis)
                    spectrum = robustBaseline(spectrum,
                                              blorder=blorder,
                                              baselineIndex=~mask)
                    data[:, y, x] = spectrum
            ThisCube = SpectralCube(data * ThisCube.unit,
                                    ThisCube.wcs,
                                    header=ThisCube.header,
                                    meta={'BUNIT': ThisCube.header['BUNIT']})
            ThisCube.write(Galaxy + '_' + ThisLine +
                           '_rebase{0}.fits'.format(blorder),
                           overwrite=True)
        else:
            gbtpipe.Baseline.rebaseline(Galaxy + '_' + ThisLine + '.fits',
                                        baselineRegion=[
                                            slice(0, StartChan, 1),
                                            slice(EndChan, ThisCube.shape[0],
                                                  1)
                                        ],
                                        blorder=blorder)
        ThisCube = SpectralCube.read(Galaxy + '_' + ThisLine +
                                     '_rebase{0}'.format(blorder) + '.fits')
        # Smooth
        Kern = Kernel1D(array=np.array([0.5, 1.0, 0.5]))
        for i in range(HanningLoops):
            ThisCube = ThisCube.spectral_smooth(Kern)
            ThisCube = ThisCube[::2, :, :]

        # Spatial Smooth
        if spatialSmooth > 1.0:
            newBeam = Beam(major=ThisCube.beam.major * spatialSmooth,
                           minor=ThisCube.beam.minor * spatialSmooth)
            ThisCube = ThisCube.convolve_to(newBeam)
            smoothstr = '_smooth{0}'.format(spatialSmooth)
        else:
            smoothstr = ''

        # Final Writeout
        ThisCube.write(Galaxy + '_' + ThisLine + '_rebase{0}'.format(blorder) +
                       smoothstr + '_hanning{0}.fits'.format(HanningLoops),
                       overwrite=True)
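
A hypothetical call to cleansplit, assuming the matching '_wts.fits' weight cube sits next to the data and the target is in the bundled DEGAS catalog; the file name is a placeholder:

# Sketch only: split a raw DEGAS cube into per-line cubes, rebaseline with a
# third-order polynomial, and Hanning-smooth once.
cleansplit('NGC2903_HCN_HCOp.fits',
           Vwindow=650 * u.km / u.s,
           blorder=3,
           HanningLoops=1)
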
def MakeRoundBeam(incube, outfile=None, overwrite=True):
    '''
    This takes a FITS file or a SpectralCube and outputs a cube convolved
    to a single round beam whose size is set by the largest per-channel
    major axis.

    Parameters
    ----------
    incube : `string` or `SpectralCube`
       Input spectral cube

    Returns
    -------
    cube : `SpectralCube`

    '''
    if isinstance(incube, str):
        cube = SpectralCube.read(incube)

    if isinstance(incube, VaryingResolutionSpectralCube):
        cube = incube

    if not isinstance(cube, VaryingResolutionSpectralCube):
        warnings.warn("No information about multiple beams")
        return (None)

    beams = cube.beams
    major_axes = np.array([bm.major.to(u.deg).value for bm in beams])
    target_beamsize = np.array(major_axes.max())
    target_beam = Beam(major=target_beamsize * u.deg,
                       minor=target_beamsize * u.deg,
                       pa=0.0 * u.deg)
    print("Target beam is : {}".format(target_beam))

    # Let's assume square pixels
    pixsize = cube.wcs.pixel_scale_matrix[1, 1]
    fwhm2sigma = np.sqrt(8 * np.log(2))

    output = np.zeros(cube.shape)

    with console.ProgressBar(cube.shape[0]) as bar:

        for ii, plane in enumerate(cube.filled_data[:]):
            this_beam = beams[ii]
            conv_beam = target_beam - this_beam

            majpix = conv_beam.major.value / pixsize / fwhm2sigma
            minpix = conv_beam.minor.value / pixsize / fwhm2sigma

            output[ii, :, :] = ftconvolve(plane,
                                          major=majpix,
                                          minor=minpix,
                                          angle=conv_beam.pa.value)

            bar.update()

    hdr = copy.copy(cube.header)
    hdr['CASAMBM'] = False
    hdr['BMAJ'] = float(target_beam.major.value)
    hdr['BMIN'] = float(target_beam.major.value)
    hdr['BPA'] = 0.0
    outcube = SpectralCube(output, cube.wcs, header=hdr)
    if outfile:
        outcube.write(outfile, overwrite=overwrite)
        return None
    return (outcube)
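
A hypothetical use of MakeRoundBeam on a CASA multi-beam cube; the file names are placeholders:

# Sketch only: convolve every channel to the largest per-channel beam and write out.
MakeRoundBeam('mygalaxy_multibeam.fits', outfile='mygalaxy_roundbeam.fits')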
Ejemplo n.º 54
0
# coding: utf-8
from spectral_cube import SpectralCube
from astropy.io import fits
import matplotlib.pyplot as plt
cube = SpectralCube.read('cube_69p2_0227_r3_b80_single_spectra.fits.gz')
cube
model_name = 'library/EMILES_BASTI_BASE_CH_FITS/Ech1.30Zm1.49T03.0000_iTp0.00_baseFe.fits'
hdul = fits.open(model_name)
hdul[0]
hdul[0].data
model_flux = hdul[0].data[:10000]
model_flux
cube_flux = cube[:, 40, 40]
cube_flux
cube_flux.shape
res = cube_flux / model_flux
res
plt.plot(res)
plt.show()
plt.plot(cube_flux)
plt.plot(model_flux * 64157.344)
plt.show()
def make_spw_cube(spw='spw{0}',
                  spwnum=0,
                  fntemplate='OrionSourceI',
                  overwrite_existing=False,
                  bmaj_limits=None,
                  fnsuffix="",
                  filesuffix='image.pbcor.fits',
                  first_endchannel='*',
                  cropends=False,
                  minimize=True,
                  debug_mode=False,
                  add_beam_info=True):
    """
    Parameters
    ----------
    spw : str
        String template for the input/output name
    spwnum : int
        The spectral window number
    fntemplate : str
        Filename template (goes into the glob)
    overwrite_existing : bool
        Overwrite data in the output cube?
    cropends: bool or int
        Number of pixels to crop off the ends of an image
    minimize: bool
        Compute the spatial minimal subcube before building the cube?  Slices
        for all subsequent cubes will be computed from the first cube.
    """
    if debug_mode:
        lvl = log.getEffectiveLevel()
        log.setLevel('DEBUG')

    spw = spw.format(spwnum)

    big_filename = '{1}_{0}{2}_lines.fits'.format(spw, fntemplate, fnsuffix)

    header_fn = glob.glob(
        'OrionSourceI.B3.{0}.lines0-{4}.clarkclean1000.{3}'.format(
            spw, fntemplate, fnsuffix, filesuffix, first_endchannel))
    if len(header_fn) != 1:
        raise ValueError(
            "Found too many or too few matches: {0}".format(header_fn))
    else:
        header_fn = header_fn[0]

    # First set up an empty file
    if not os.path.exists(big_filename):
        log.info("Creating large cube based on header {0}".format(header_fn))

        if minimize:
            cube0 = SpectralCube.read(header_fn)
            slices = cube0.subcube_slices_from_mask(cube0.mask,
                                                    spatial_only=True)
            # use the calculated 3rd dimension, plus the difference of the
            # x and y slices
            #header['NAXIS2'] = slices[1].stop-slices[1].start
            #header['NAXIS1'] = slices[2].stop-slices[2].start
            header = cube0[slices].header
        else:
            header = fits.getheader(header_fn)

        # Make an arbitrary, small data before prepping the header
        data = np.zeros((100, 100), dtype=np.float32)
        hdu = fits.PrimaryHDU(data=data, header=header)
        cdelt_sign = np.sign(hdu.header['CDELT3'])
        # Set the appropriate output size (this can be extracted from the LISTOBS)
        naxis3_in = header['NAXIS3']
        header['NAXIS3'] = nchans_total[spwnum]
        header_wcs = wcs.WCS(fits.getheader(header_fn))
        header_specwcs = header_wcs.sub([wcs.WCSSUB_SPECTRAL])
        if cdelt_sign == -1:
            ind0, ind1 = getinds(header_fn)
            #5/20/2017: redoing some of this, and the text below is frightening but no longer relevant
            # a +1 was on the next line before an edit on 4/10/2017
            # it may have been rendered irrelevant when I included +1
            # channel in each cube?  Not clear - the arithmetic no longer
            # makes sense but is empirically necessary.
            assert ind0 == 0

            # these reindex the cube so that it has an increasing cdelt.
            header['CRPIX3'] = 1  #nchans_total[spwnum]
            header['CRVAL3'] = header_specwcs.wcs_pix2world(
                [nchans_total[spwnum]], 1)[0][0]
            header['CDELT3'] = np.abs(header_specwcs.wcs.cdelt[0])

            # ensure that the new CRVAL evaluated at its own position matches
            # the CRVAL3.  This should be impossible to fail unless WCS itself
            # fails
            newheaderspecwcs = wcs.WCS(header).sub([wcs.WCSSUB_SPECTRAL])
            crval3 = newheaderspecwcs.wcs_pix2world([header['CRPIX3']],
                                                    1)[0][0]
            np.testing.assert_array_almost_equal_nulp(crval3, header['CRVAL3'])

        shape = (header['NAXIS3'], header['NAXIS2'], header['NAXIS1'])

        # Write to disk
        header.tofile(big_filename)
        # Using the 'append' io method, update the *header*
        with open(big_filename, 'rb+') as fobj:
            # Seek past the length of the header, plus the length of the
            # data we want to write.
            # The -1 is to account for the final byte that we are about to
            # write:
            # 'seek' works on bytes, so divide #bits / (bytes/bit)
            fobj.seek(
                len(header.tostring()) + (shape[0] * shape[1] * shape[2] *
                                          int(np.abs(header['BITPIX']) / 8)) -
                1)
            fobj.write(b'\0')

        big_cube = SpectralCube.read(big_filename)
        header_cube = SpectralCube.read(header_fn)
        # in both cases, SpectralCube sorts the extrema
        if cdelt_sign == 1:
            np.testing.assert_array_almost_equal_nulp(
                big_cube.spectral_extrema[0].value,
                header_cube.spectral_extrema[0].value)
            np.testing.assert_array_almost_equal_nulp(
                big_cube.wcs.wcs.cdelt, header_cube.wcs.wcs.cdelt)
        elif cdelt_sign == -1:
            np.testing.assert_array_almost_equal_nulp(
                big_cube.spectral_extrema[1].value,
                header_cube.spectral_extrema[1].value)
            np.testing.assert_array_almost_equal_nulp(
                big_cube.wcs.wcs.cdelt[-1] * -1, header_cube.wcs.wcs.cdelt[-1])

        log.info("Cube creation completed.  Now moving on to populating it.")

    # Find the appropriate files (this is NOT a good way to do this!  Better to
    # provide a list.  But wildcards are quick & easy...)
    fileglob = "OrionSourceI.B3.{0}.lines*{3}".format(spw, fntemplate,
                                                      fnsuffix, filesuffix)
    files = glob.glob(fileglob)
    log.info("Files to be merged with glob {0}: ".format(fileglob))
    log.info(str(files))

    # open the file in update mode (it should have the right dims now)
    hdul = fits.open(big_filename, mode='update')
    main_wcs = wcs.WCS(hdul[0].header).sub([wcs.WCSSUB_SPECTRAL])

    if add_beam_info:
        shape = hdul[0].data.shape[0]
        if len(hdul) > 1 and isinstance(hdul[1], fits.BinTableHDU):
            pass
        else:
            hdul.append(
                fits.BinTableHDU(
                    np.recarray(shape,
                                names=['BMAJ', 'BMIN', 'BPA', 'CHAN', 'POL'],
                                formats=['f4', 'f4', 'f4', 'i4', 'i4'])))

    # sorted so that we deal with zero first, since it has potential to be a problem.
    for fn in ProgressBar(sorted(files)):
        log.info("inds={0} fn={1}".format(getinds(fn), fn))
        ind0, ind1 = getinds(fn)

        # this is not correct...?
        # or maybe it only applies if cropends is set....
        # if ind0 == 0:
        #     ind1 = ind1 + 1

        cdelt = fits.getheader(fn)['CDELT3']
        if 'cdelt_sign' not in locals():
            cdelt_sign = np.sign(cdelt)
            log.warn("cdelt_sign was not defined: overwriting a"
                     " previously-existing file.  "
                     "This may not be what you want; the data could be going "
                     "opposite the parent cube.  Check that the original "
                     "header is OK. sign(CDELT) is now {0}, "
                     "while for the big header it is {1}".format(
                         cdelt_sign,
                         np.sign(fits.getheader(big_filename)['CDELT3'])))

        if cropends:
            # don't crop 1st or last pixel in full cube
            if ind0 > 0:
                log.debug("ind0 going from {0} to {1}".format(
                    ind0, ind0 + cropends))
                ind0 = ind0 + cropends
                if cdelt_sign == 1:
                    dataind0 = cropends
                    log.debug("dataind0 going to {0}".format(cropends))
                else:
                    dataind1 = -cropends
                    log.debug("dataind1 going to {0}".format(-cropends))
            else:
                if cdelt_sign == 1:
                    dataind0 = 0
                    log.debug("dataind0 going to {0}".format(0))
                elif cdelt_sign == -1:
                    log.debug("dataind1 going to {0}".format(None))
                    dataind1 = None

            if (ind1 < nchans_total[spwnum] - 1):
                log.debug("ind1 going from {0} to {1}".format(
                    ind1, ind1 - cropends))
                ind1 = ind1 - cropends
                if cdelt_sign == 1:
                    dataind1 = -cropends
                    log.debug("dataind1 going to {0}".format(-cropends))
                elif cdelt_sign == -1:
                    dataind0 = cropends
                    log.debug("dataind0 going to {0}".format(cropends))
            else:
                if cdelt_sign == 1:
                    dataind1 = None
                else:
                    log.debug("dataind0 going to {0}".format(0))
                    dataind0 = 0
        else:
            dataind0 = 0
            dataind1 = None

        if cdelt_sign == -1:
            log.debug("Reversing indices from {0} {1} to ".format(ind0, ind1))
            ind1, ind0 = (nchans_total[spwnum] - ind0,
                          nchans_total[spwnum] - ind1)
            log.debug("{0} {1}".format(ind0, ind1))
            if ind0 < 0:
                ind0 = 0

        log.info("inds have been remapped to {0}, {1}".format(ind0, ind1))

        plane = hdul[0].data[ind0]
        if np.all(plane == 0) or overwrite_existing:
            log.info("Replacing indices {0}->{2} {1}".format(
                getinds(fn), fn, (ind0, ind1)))

            data = fits.getdata(fn)
            dwcs = wcs.WCS(fits.getheader(fn)).sub([wcs.WCSSUB_SPECTRAL])

            dataind1 = data.shape[0] + (dataind1 or 0)

            # handle the case where I made the indices NOT match the cube...
            # this is really stupid and should be removed because I should have
            # made the input cubes correct.  Oh well.
            if np.abs(ind1 - ind0) < np.abs(dataind1 - dataind0):
                dataind1 = dataind0 + np.abs(ind1 - ind0)

            if cdelt_sign == -1:
                dataind0, dataind1 = dataind1, dataind0
                dwcs0 = dwcs.wcs_pix2world([dataind0 - 1], 0)[0][0]
                dwcs1 = dwcs.wcs_pix2world([dataind1], 0)[0][0]
            else:
                dwcs0 = dwcs.wcs_pix2world([dataind0], 0)[0][0]
                dwcs1 = dwcs.wcs_pix2world([dataind1 - 1], 0)[0][0]
            hwcs0 = main_wcs.wcs_pix2world([ind0], 0)[0][0]
            hwcs1 = main_wcs.wcs_pix2world([ind1 - 1], 0)[0][0]

            if not np.isclose(hwcs0, dwcs0, atol=0.5 * np.abs(cdelt), rtol=0):
                log.error(
                    "current data, big cube indices: {0},{1} and {2},{3}".
                    format(dataind0, dataind1, ind0, ind1))
                raise ValueError(
                    "World coordinates of first pixels do not match: {0} - {1} = {2} ({3} cdelt)"
                    .format(dwcs0, hwcs0, dwcs0 - hwcs0,
                            (dwcs0 - hwcs0) / cdelt))
            if not np.isclose(hwcs1, dwcs1, atol=0.5 * np.abs(cdelt), rtol=0):
                log.error(
                    "current data, big cube indices: {0},{1} and {2},{3}".
                    format(dataind0, dataind1, ind0, ind1))
                raise ValueError(
                    "World coordinates of last pixels do not match: {0} - {1} = {2} ({3} cdelt)"
                    .format(dwcs1, hwcs1, dwcs1 - hwcs1,
                            (dwcs1 - hwcs1) / cdelt))

            if 'slices' not in locals():
                if minimize:
                    log.info("Determining slices")
                    cube0 = SpectralCube.read(header_fn)
                    slices = cube0.subcube_slices_from_mask(cube0.mask,
                                                            spatial_only=True)
                    log.info("Slices are {0}".format(slices))
                else:
                    slices = (slice(None), ) * 3

            if bmaj_limits is not None:
                log.info("Identifying acceptable beams")
                beamtable = fits.open(fn)[1]
                ok_beam = ((beamtable.data['BMAJ'] > bmaj_limits[0]) &
                           (beamtable.data['BMAJ'] < bmaj_limits[1]))
                data[~ok_beam] = np.nan
                log.info("Found {0} bad beams of {1}".format((~ok_beam).sum(),
                                                             ok_beam.size))

            if cdelt_sign == -1:
                if dataind1 == 0:
                    dataslice = slice(dataind0 - 1, None, -1)
                elif dataind1 >= 1:
                    dataslice = slice(dataind0 - 1, dataind1 - 1, -1)
                else:
                    raise ValueError("Something is wrong with dataind0")
            else:
                dataslice = slice(dataind0, dataind1, 1)
            log.info("Dataslice is {0}".format(dataslice))

            assert hdul[0].data[ind0:ind1].shape == data[dataslice, slices[1],
                                                         slices[2]].shape

            if not debug_mode:
                if add_beam_info:
                    log.info("Adding beam information")
                    beamtable = fits.open(fn)[1]
                    hdul[1].data[ind0:ind1] = beamtable.data[dataslice]

                log.info("Inserting data")
                hdul[0].data[ind0:ind1, :, :] = data[dataslice, slices[1],
                                                     slices[2]]
                log.info("Flushing")
                hdul.flush()
                log.info("Done with iteration for {0}".format(fn))

    if debug_mode:
        log.setLevel(lvl)
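
A hypothetical invocation of make_spw_cube, assuming the per-chunk FITS cubes follow the glob pattern used inside the function and that nchans_total and getinds are defined at module level:

# Sketch only: stitch the spw3 channel chunks into one large cube, cropping one
# edge channel from each chunk and computing the minimal spatial subcube first.
make_spw_cube(spw='spw{0}', spwnum=3, fntemplate='OrionSourceI',
              cropends=1, minimize=True, add_beam_info=True)
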
maser_channel = 318

spwfile = field + '_spw_' + str(spw) + '.fits'
directory = directory1 + spwfile

# Position of maser in pixels (use ds9).
x = 486
y = 485

# Cuts spectra to the appropriate length for gaussian fits.

maser_channel1 = (maser_channel) - 1
mc1 = maser_channel1 - 40
mc2 = maser_channel1 + 40

cube = SpectralCube.read(directory)
extracted_spectrum = cube[:, y, x]

sp = pyspeckit.Spectrum(data=extracted_spectrum[mc1:mc2],
                        xarr=extracted_spectrum.spectral_axis[mc1:mc2])
#sp.xarr.convert_to_unit('km/s')
sp.plotter()

sp.specfit(fittype='gaussian')
sp.specfit(fittype='gaussian')

print(directory1 + field + '_' + str(spw) + '_gaussian.png')

cube[maser_channel, :, :].quicklook()

print(sp.specfit(fittype='gaussian'))
Ejemplo n.º 57
0
from __future__ import print_function
from astropy import units as u
import numpy as np
import pyspeckit
from spectral_cube import SpectralCube
import paths
import pyregion

cube11 = SpectralCube.read(paths.adpath('G357.3-003.9-NH3-11-cube.fits'))
cube22 = SpectralCube.read(paths.adpath('G357.3-003.9-NH3-22-cube.fits'))
regions = pyregion.open(paths.rpath('target_fields_8x8.reg'))

sc11 = cube11.subcube_from_ds9region(regions)
sc22 = cube22.subcube_from_ds9region(regions)
sp_s11 = sc11.mean(axis=(1, 2))
sp_s22 = sc22.mean(axis=(1, 2))
print("Integrated line ratio 1-1/2-2: {0}".format(sp_s11.sum() / sp_s22.sum()))

filling_factor = 0.1

sp11 = pyspeckit.Spectrum(data=sp_s11.value / filling_factor,
                          xarr=cube11.spectral_axis,
                          header=cube11.header,
                          xarrkwargs={
                              'refX': cube11.wcs.wcs.restfrq * u.Hz,
                              'velocity_convention': 'radio'
                          })
sp22 = pyspeckit.Spectrum(data=sp_s22.value / filling_factor,
                          xarr=cube22.spectral_axis,
                          header=cube22.header,
                          xarrkwargs={
                              'refX': cube22.wcs.wcs.restfrq * u.Hz,
                              'velocity_convention': 'radio'
                          })
Ejemplo n.º 58
0
                    mod_date = time.ctime(os.path.getmtime(fn))

                    ia.open(fn)
                    hist = ia.history(list=False)
                    history = {
                        x.split(":")[0]: x.split(": ")[1]
                        for x in hist if ':' in x
                    }
                    history.update({
                        x.split("=")[0]: x.split("=")[1].lstrip()
                        for x in hist if '=' in x
                    })
                    ia.close()

                    if os.path.exists(fn + ".fits"):
                        cube = SpectralCube.read(fn + ".fits", use_dask=True)
                        cube.use_dask_scheduler(scheduler,
                                                num_workers=nthreads)
                    else:
                        cube = SpectralCube.read(fn)
                        cube.use_dask_scheduler(scheduler,
                                                num_workers=nthreads)
                        cube = cube.rechunk()
                    if hasattr(cube, 'beam'):
                        beam = cube.beam
                        biggest_beam = beam
                        smallest_beam = beam
                    else:
                        beams = cube.beams
                        # use the middle-ish beam
                        beam = beams[len(beams) // 2]
Ejemplo n.º 59
0
            for spw in (0, 1, 2, 3):

                if 'longbaselines' in name:
                    name = name + "_longbaselines"

                fn = fnt.format(spw)

                vcen = u.Quantity(vcen, u.km / u.s)

                #fn = '/Volumes/external/orion/full_OrionSourceI_B6_spw0_lines_cutout.fits'

                medsubfn = fn.replace(".image.pbcor.fits",
                                      "_medsub.image.pbcor.fits")

                if os.path.exists(medsubfn):
                    medsub = SpectralCube.read(medsubfn)
                    medsub.beam_threshold = 5000
                    if not medsub.wcs.wcs.radesys.lower() == 'icrs':
                        log.exception(
                            "Skipping {0} because of a bad coordinate system.".
                            format(medsubfn))
                        continue

                else:
                    #cube = (SpectralCube.read(fn)[:,515:721,550:714].mask_out_bad_beams(5))
                    cube = (SpectralCube.read(fn).mask_out_bad_beams(5))
                    if not cube.wcs.wcs.radesys.lower() == 'icrs':
                        log.exception(
                            "Skipping {0} because of a bad coordinate system.".
                            format(fn))
                        continue
Ejemplo n.º 60
0
from astropy.io import fits
from astropy.wcs import WCS
from astropy.coordinates import SkyCoord
from astropy.nddata import Cutout2D
from regions import read_ds9
from spectral_cube import SpectralCube

#### smooth the 12CO 2-1 image to 1.1 x 0.8 arcsec and make moment 2 map.

# fitsfile='NGC5257_12CO21_combine_pbcor.fits'
# imagecube=SpectralCube.read(fitsfile)
# imcube=imagecube.with_spectral_unit(u.km/u.s,velocity_convention='radio',rest_value=225.46*10**9*u.Hz)
# beam=Beam(major=1.1*u.arcsec, minor=0.8*u.arcsec, pa=-64.5*u.degree)
# imcube_smooth=imcube.convolve_to(beam)

# rmscube=cube.calc_noise_in_cube(imcube_smooth)
# outcube=cube.find_signal_in_cube(imcube_smooth,rmscube,snr_hi=5)
# outcube.write('NGC5257_12CO21_pbcor_smooth_cube_signal.fits')

fitsfile = 'NGC5257_12CO21_pbcor_smooth_cube_signal.fits'
outcube = SpectralCube.read(fitsfile)
_12CO21_mom2 = outcube.linewidth_sigma()
_12CO21_mom2.write('NGC5257_12CO21_pbcor_smooth_cube_signal_mom2.fits')

#### imregrid the 33 GHz image

imregrid(imagename='NGC5257_33GHz_pbcor_smooth_co21.image',
         template='NGC5257_12CO21_combine_pbcor.mom0',
         output='NGC5257_33GHz_pbcor_smooth_co21_regrid.image')
exportfits(imagename='NGC5257_33GHz_pbcor_smooth_co21_regrid.image',
           fitsimage='NGC5257_33GHz_pbcor_smooth_co21_regrid.fits')