def finalcatalogue(fcube, fcube_var, catname, target_z=None, rest_line=None,
                   cov_poly=None, working_dir='./', fcube_median=None,
                   fcube_odd=None, fcube_even=None, fcube_median_var=None,
                   fcube_odd_var=None, fcube_even_var=None, fcube_orig=None,
                   fsource_img=None, marzred=None, SNcut=[7, 5],
                   DeltaEOSNcut=[0.5, 0.5], SNEOcut=[3, 3], fracnpix=None,
                   derived=True, checkimg=True):
    """
    Process the cubex catalogue to:

    1. compute the actual SN including the covariance term
    2. compute velocity offsets relative to the target redshift
    3. apply cuts on SN, continuum sources, and the fraction of pixels in the segmap
    4. generate inspection images and full spectra

    fcube            -> cube used for cubex extraction
    fcube_var        -> variance cube for cubex extraction
    catname          -> catalogue produced by cubex
    target_z         -> [optional] if set together with rest_line, compute the velocity
                        offset from the target z assuming every detection is at rest_line
    rest_line        -> [optional] see target_z
    cov_poly         -> [optional] third-order polynomial model for the noise, set as an
                        array (N2, N1, N0). If set, covariance is modelled
    working_dir      -> where the cubex output is generated
    fcube_median     -> [optional] median cube for extraction of QA images
    fcube_odd        -> [optional] odd cube for QA images and the even/odd SN cut
    fcube_even       -> [optional] even cube for QA images and the even/odd SN cut
    fcube_median_var -> [optional] associated variance
    fcube_odd_var    -> [optional] associated variance
    fcube_even_var   -> [optional] associated variance
    fcube_orig       -> [optional] if set to the full cube, used for spectral extraction
    fsource_img      -> [optional] if set to an aperture image (and marzred is set),
                        exclude emitters projected against continuum sources of
                        known redshift
    marzred          -> [optional] redshift file for continuum sources, a la marz
                        (see above)
    SNcut            -> array of SN cuts on the main detection that defines the classes.
                        NB: classes are assigned in order of the first condition met.
    DeltaEOSNcut     -> array of fractional changes in even/odd SN allowed
                        (if even/odd cubes are provided)
    SNEOcut          -> array of SN cuts for the even/odd exposures
    fracnpix         -> [optional] if set, cut objects with less than fracnpix of their
                        segmap pixels within a 5x5x5 region
    derived          -> if True, compute derived quantities (SN etc.)
    checkimg         -> if True, generate image cutouts
    """

    #Load cubex catalog
    catalog = read_cubex_catalog(working_dir + catname)

    #create space for a bunch of keywords [some may not be used]
    ksig = Column(np.zeros(len(catalog), dtype=float), name='SNR')
    ksig_odd = Column(np.zeros(len(catalog), dtype=float), name='SNR_odd')
    ksig_even = Column(np.zeros(len(catalog), dtype=float), name='SNR_even')
    ksig_med = Column(np.zeros(len(catalog), dtype=float), name='SNR_med')
    kcov_fac = Column(np.zeros(len(catalog), dtype=float), name='covfac')
    kconfidence = Column(np.zeros(len(catalog), dtype=float), name='confidence')
    kveloffset = Column(np.zeros(len(catalog), dtype=float), name='veloffset')
    kdeltasn = Column(np.zeros(len(catalog), dtype=float), name='EODeltaSN')
    kfraction = Column(np.zeros(len(catalog), dtype=float), name='BoxFraction')
    kcontinuum = Column(np.zeros(len(catalog), dtype=bool), name='OverContinuum')
    catalog.add_columns([ksig, ksig_odd, ksig_even, ksig_med, kcov_fac,
                         kconfidence, kveloffset, kdeltasn, kfraction,
                         kcontinuum])

    #Calculate covariance
    if cov_poly is None:
        covariance = np.ones(len(catalog), dtype=float)
    else:
        size = np.sqrt(catalog['area_isoproj']) * 0.2
        covariance = np.polyval(cov_poly, size)

    #Open cubes
    cube = fits.open(working_dir + fcube)[0].data
    cube_var = fits.open(working_dir + fcube_var)[0].data

    #reconstruct the name of the cubex segmentation cube and open it
    fsegmap = os.path.basename(fcube).split('.fits')[0] + ".Objects_Id.fits"
    segmap = fits.open(working_dir + fsegmap)[0].data

    if (fcube_odd):
        try:
            cube_odd = fits.open(working_dir + fcube_odd)[0].data
            cube_odd_var = fits.open(working_dir + fcube_odd_var)[0].data
        except:
            print('Have you set the right odd cube/variance?')
            exit()
    else:
        cube_odd = None
        cube_odd_var = None

    if (fcube_even):
        try:
            cube_even = fits.open(working_dir + fcube_even)[0].data
            cube_even_var = fits.open(working_dir + fcube_even_var)[0].data
        except:
            print('Have you set the right even cube/variance?')
            exit()
    else:
        cube_even = None
        cube_even_var = None

    if (fcube_median):
        try:
            cube_median = fits.open(working_dir + fcube_median)[0].data
            cube_median_var = fits.open(working_dir + fcube_median_var)[0].data
        except:
            print('Have you set the right median cube/variance?')
            exit()
    else:
        cube_median = None
        cube_median_var = None

    #open the source image
    if (fsource_img):
        apermap = fits.open(fsource_img)[0].data
        try:
            contzcat = ascii.read(marzred, format='csv', header_start=2)
        except:
            print('Have you set the marz redshift file?')
            exit()
    else:
        apermap = None
        contzcat = None

    #now loop over sources and update the catalogue info
    if (derived):
        print("Calculating independent SNR and additional metrics")
        catalog = independent_SNR(catalog, covariance, segmap, cube, cube_var,
                                  cube_med=cube_median, cube_odd=cube_odd,
                                  cube_even=cube_even, var_med=cube_median_var,
                                  var_odd=cube_odd_var, var_even=cube_even_var,
                                  apermap=apermap, contzcat=contzcat)

    #Compute and add the velocity offset
    if (target_z is not None):
        veloff = velocityoffset(catalog['lambda_fluxw'], target_z, rest_line)
        catalog['veloffset'] = veloff

    #Compute EOSN
    if (fcube_even is not None):
        rel_diff_halves = np.abs(catalog['SNR_even'] - catalog['SNR_odd']) / \
            np.minimum(catalog['SNR_even'], catalog['SNR_odd'])
        catalog['EODeltaSN'] = rel_diff_halves

    #Write the full catalogue with SNR and derived quantities
    print("Writing full catalog to disk")
    catalog.write(working_dir + catname.split(".cat")[0] + "_all_SNR.fits",
                  format="fits", overwrite=True)

    #make the simplest cut to the catalogue to reject unwanted sources
    select = catalog['SNR'] >= np.amin(SNcut)
    catalog = catalog[select]

    #loop over classes and assign
    print("Assigning classes")
    for iclass, iSN in enumerate(SNcut):
        #case of SN, DeltaSN, fracnpix, continuum
        if ((fcube_even is not None) & (fracnpix is not None) & (fsource_img is not None)):
            thisclass = ((catalog['SNR'] >= iSN) &
                         (catalog['SNR_odd'] >= SNEOcut[iclass]) &
                         (catalog['SNR_even'] >= SNEOcut[iclass]) &
                         (catalog['EODeltaSN'] <= DeltaEOSNcut[iclass]) &
                         (catalog['OverContinuum'] == False) &
                         (catalog['BoxFraction'] >= fracnpix) &
                         (catalog['confidence'] == 0))
            catalog['confidence'][thisclass] = iclass + 1
        #case of SN, DeltaSN, fracnpix
        elif ((fcube_even is not None) & (fracnpix is not None)):
            thisclass = ((catalog['SNR'] > iSN) &
                         (catalog['SNR_odd'] > SNEOcut[iclass]) &
                         (catalog['SNR_even'] > SNEOcut[iclass]) &
                         (catalog['EODeltaSN'] < DeltaEOSNcut[iclass]) &
                         (catalog['BoxFraction'] > fracnpix) &
                         (catalog['confidence'] == 0))
            catalog['confidence'][thisclass] = iclass + 1
        #case of SN, DeltaSN
        elif (fcube_even is not None):
            thisclass = ((catalog['SNR'] > iSN) &
                         (catalog['SNR_odd'] > SNEOcut[iclass]) &
                         (catalog['SNR_even'] > SNEOcut[iclass]) &
                         (catalog['EODeltaSN'] < DeltaEOSNcut[iclass]) &
                         (catalog['confidence'] == 0))
            catalog['confidence'][thisclass] = iclass + 1
        #remaining cases
        else:
            thisclass = ((catalog['SNR'] > iSN) & (catalog['confidence'] == 0))
            catalog['confidence'][thisclass] = iclass + 1

    #Write the selected catalogue with SNR and derived quantities
    catalog.write(working_dir + catname.split(".cat")[0] + "_select_SNR.fits",
                  format="fits", overwrite=True)

    #make space for checks
    if not os.path.isdir(working_dir + "/objs"):
        os.mkdir(working_dir + "/objs")

    if (checkimg):
        print("Extracting images of sources")
        #loop over detections
        for ii in range(len(catalog)):
            #folder for this object
            objid = catalog['id'][ii]
            objdir = working_dir + "/objs/id{}/".format(objid)
            if not os.path.isdir(objdir):
                os.mkdir(objdir)

            #make images
            hcubelist = [fsegmap, fcube, fcube_median, fcube_odd, fcube_even]
            namelist = [working_dir + thc for thc in hcubelist]
            taglist = ['Image_id{}_det', 'Image_id{}_mean', 'Image_id{}_median',
                       'Image_id{}_half1', 'Image_id{}_half2']
            make_cubex_images(namelist, namelist[0], objid, objdir, taglist,
                              padding=-1)
            taglist = ['Pstamp_id{}_det', 'Pstamp_id{}_mean', 'Pstamp_id{}_median',
                       'Pstamp_id{}_half1', 'Pstamp_id{}_half2']
            make_cubex_images(namelist, namelist[0], objid, objdir, taglist,
                              padding=50)

            #trim the segmap around the object
            x = int(catalog['x_geow'][ii])
            y = int(catalog['y_geow'][ii])
            z = int(catalog['z_geow'][ii])
            mz, my, mx = segmap.shape
            segmapshort = segmap[max(z - 10, 0):min(z + 10, mz),
                                 max(y - 25, 0):min(y + 25, my),
                                 max(x - 25, 0):min(x + 25, mx)]
            savename = objdir + "/segcube.fits"
            hdu = fits.PrimaryHDU(segmapshort)
            hdul = fits.HDUList([hdu])
            hdul.writeto(savename, overwrite=True)

            #Extract the spectrum
            if (fcube_orig is not None):
                savename = objdir + "/spectrum.fits"
                utl.cube2spec(fcube_orig, 0.0, 0.0, 0.0, shape='mask', helio=0,
                              mask=segmap, twod=True, tovac=True,
                              write=savename, idsource=objid)
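
# --- Usage sketch -----------------------------------------------------------
# A minimal example of driving finalcatalogue above. All file names and the
# covariance coefficients are hypothetical placeholders for the products of a
# cubex run, not files or values shipped with this module.
def _example_finalcatalogue():
    finalcatalogue('cube_mean.fits', 'cube_mean_var.fits', 'candidates.cat',
                   target_z=3.09, rest_line=1215.67,    # flag Lya at the field z
                   cov_poly=np.array([0.1, 0.5, 1.0]),  # (N2, N1, N0), made-up values
                   working_dir='./cubex_run/',
                   fcube_odd='cube_half1.fits', fcube_odd_var='cube_half1_var.fits',
                   fcube_even='cube_half2.fits', fcube_even_var='cube_half2_var.fits',
                   SNcut=[7, 5], DeltaEOSNcut=[0.5, 0.5], SNEOcut=[3, 3])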
def findsources(image,cube,check=False,output='.',spectra=False,helio=0,nsig=2.,
                minarea=10.,regmask=None,clean=True,outspec='Spectra',marz=False,
                rphot=False, sname='MUSE'):
    """
    Take a detection image (collapse of a cube), or the median of an RGB, or
    whatever you want (but aligned to the cube) and run a source extractor.
    Uses the SEP utilities http://sep.readthedocs.org/en/stable/

    image   -> fits file of the image to process
    check   -> if true, write a bunch of check images
    output  -> where to dump the output
    cube    -> the cube used to extract spectra
    spectra -> if True, extract spectra in VACUUM wave!!
    helio   -> pass an additional heliocentric correction
    nsig    -> number of sky rms used for source identification
    minarea -> minimum area for extraction
    regmask -> ds9 region file (image) of regions to be masked before extraction [e.g. edges]
    clean   -> clean sources
    outspec -> where to store output spectra
    marz    -> also write spectra in marz format (spectra needs to be True).
               If set to a numerical value, this is used as an r-band magnitude limit.
    rphot   -> perform r-band aperture photometry and add r-band magnitudes to the catalogue
    sname   -> prefix for the source names. Default = MUSE
    """
    import sep
    from astropy.io import fits
    from astropy import wcs
    from astropy import coordinates
    from astropy import units as u
    from astropy import table
    import numpy as np
    import os
    try:
        from mypython.ifu import muse_utils as utl
        from mypython.fits import pyregmask as msk
    except ImportError:
        #legacy import fallback
        from mypython import ifu
        from ifu import muse_utils as utl
        from mypython import fits
        from fits import pyregmask as msk
        from astropy.io import fits
    from shutil import copyfile
    import glob

    #open image
    img=fits.open(image)
    try:
        header=img[1].header
    except:
        header=img[0].header
    imgwcs = wcs.WCS(header)
    try:
        #this is ok for narrow band images
        data=img[1].data
    except:
        #white cubex images
        data=img[0].data
        data=data.byteswap(True).newbyteorder()
    #grab effective dimension
    nex,ney=data.shape
    #close fits
    img.close()

    #create bad pixel mask
    if(regmask):
        Mask=msk.PyMask(ney,nex,regmask,header=img[0].header)
        for ii in range(Mask.nreg):
            Mask.fillmask(ii)
            if(ii == 0):
                badmask=Mask.mask
            else:
                badmask+=Mask.mask
        badmask=1.*badmask
    else:
        badmask=np.zeros((nex,ney))

    if(check):
        print('Dumping badmask')
        hdumain = fits.PrimaryHDU(badmask,header=header)
        hdulist = fits.HDUList([hdumain])
        hdulist.writeto(output+"/badmask.fits",overwrite=True)

    #check the background level, but do not subtract it
    print('Checking background levels')
    bkg = sep.Background(data,mask=badmask)
    print('Residual background level ', bkg.globalback)
    print('Residual background rms ', bkg.globalrms)

    if(check):
        print('Dumping sky...')
        #dump sky properties
        back = bkg.back()
        rms = bkg.rms()
        hdumain = fits.PrimaryHDU(back,header=header)
        hdubk = fits.ImageHDU(back)
        hdurms = fits.ImageHDU(rms)
        hdulist = fits.HDUList([hdumain,hdubk,hdurms])
        hdulist.writeto(output+"/skyprop.fits",overwrite=True)

    #extract sources at nsigma
    thresh = nsig * bkg.globalrms
    objects, segmap=sep.extract(data,thresh,segmentation_map=True,
                                minarea=minarea,clean=clean,mask=badmask,
                                deblend_cont=0.0001)
    print("Extracted {} objects... ".format(len(objects)))

    if(spectra):
        if not os.path.exists(outspec):
            os.makedirs(outspec)

    if((check) | (spectra)):
        #create a detection mask a la cubex
        srcmask=np.zeros((1,data.shape[0],data.shape[1]))
        nbj=1
        print('Generating spectra...')
        #loop over detections
        for obj in objects:
            #init masks (np.bool is deprecated; use the builtin bool)
            tmpmask=np.zeros((data.shape[0],data.shape[1]),dtype=bool)
            tmpmask3d=np.zeros((1,data.shape[0],data.shape[1]),dtype=bool)
            #fill this mask
            sep.mask_ellipse(tmpmask,obj['x'],obj['y'],obj['a'],obj['b'],obj['theta'],r=2)
            tmpmask3d[0,:,:]=tmpmask[:,:]
            srcmask=srcmask+tmpmask3d*nbj
            if(spectra):
                savename="{}/id{}.fits".format(outspec,nbj)
                if not os.path.exists(savename):
                    utl.cube2spec(cube,obj['x'],obj['y'],None,write=savename,
                                  shape='mask',helio=helio,mask=tmpmask3d,tovac=True)
                else:
                    print("{} already exists. Skipping it...".format(savename))
            #go to the next object
            nbj=nbj+1

    if(check):
        print('Dumping source mask...')
        hdumain = fits.PrimaryHDU(srcmask,header=header)
        hdubk = fits.ImageHDU(srcmask)
        hdulist = fits.HDUList([hdumain,hdubk])
        hdulist.writeto(output+"/source.fits",overwrite=True)
        print('Dumping segmentation map')
        hdumain = fits.PrimaryHDU(segmap,header=header)
        hdubk = fits.ImageHDU(segmap)
        hdulist = fits.HDUList([hdumain,hdubk])
        hdulist.writeto(output+"/segmap.fits",overwrite=True)

    #generate source names using the coordinates and the name prefix
    ra, dec = imgwcs.wcs_pix2world(objects['x'], objects['y'],0)
    coord = coordinates.FK5(ra*u.degree, dec*u.degree)
    rastr = coord.ra.to_string(u.hour, precision=2, sep='')
    decstr = coord.dec.to_string(u.degree, precision=1, sep='', alwayssign=True)
    name = [sname+'J{0}{1}'.format(rastr[k], decstr[k]) for k in range(len(rastr))]
    ids = np.arange(len(name))

    #write the source catalogue
    print('Writing catalogue..')
    tab = table.Table(objects)
    tab.add_column(table.Column(name),0,name='name')
    tab.add_column(table.Column(ids),0,name='ID')
    tab.write(output+'/catalogue.fits',overwrite=True)

    #r-band photometry
    if (rphot):
        if not os.path.exists(output+'/Image_R.fits'):
            rimg, rvar, rwcsimg = utl.cube2img(cube, filt=129, write=output+'/Image_R.fits')
        phot_r = sourcephot(output+'/catalogue.fits', output+'/Image_R.fits',
                            output+'/segmap.fits', image)
        phot_r.add_column(table.Column(name),1,name='name')
        tbhdu = fits.open(output+'/catalogue.fits')[1]
        tbhdu2 = fits.BinTableHDU(phot_r)
        hdulist = fits.HDUList([fits.PrimaryHDU(), tbhdu, tbhdu2])
        hdulist.writeto(output+'/catalogue.fits',overwrite=True)

    if((marz) & (spectra)):
        if marz==True:
            #marz is True but no magnitude limit is set: create a marz file
            #for the whole catalogue
            marz_file(image, output+'/catalogue.fits', outspec, output)
        else:
            #create a folder and catalogue with just the sources brighter
            #than the magnitude limit
            if os.path.exists(output + '/spectra_r' + str(marz)):
                files = glob.glob(output + '/spectra_r' + str(marz) +'/*')
                for f in files:
                    os.remove(f)
            else:
                os.mkdir(output + '/spectra_r' + str(marz))
            mag = phot_r['MAGSEG']
            #add in x, y pixels from the original catalogue
            x, y = tbhdu.data['x'], tbhdu.data['y']
            phot_r['x'], phot_r['y'] = x, y
            #add in ra, dec
            img = fits.open(image)
            mywcs = wcs.WCS(img[0].header)
            ra, dec = mywcs.all_pix2world(x,y,0)
            phot_r['RA'] = ra
            phot_r['dec'] = dec
            for i in range(len(mag)):
                if mag[i] < marz:
                    copyfile((output + '/spectra/id' + str(i+1) + '.fits'),
                             (output + '/spectra_r' + str(marz) + '/id' + str(i+1) + '.fits'))
            #write the photometry catalogue, excluding objects fainter than
            #the magnitude limit
            phot_r.remove_rows(phot_r['MAGSEG'] > marz)
            catalogue_lim_name = (output + '/catalogue_r' + str(marz) +'.fits')
            if os.path.exists(catalogue_lim_name):
                os.remove(catalogue_lim_name)
            phot_r.write(catalogue_lim_name)
            outspec = output + '/spectra_r' + str(marz)
            marz_file(image, output+'/catalogue_r' + str(marz) +'.fits', outspec,
                      output, r_lim=marz)

    print('All done')
    return objects
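
# --- Usage sketch -----------------------------------------------------------
# A hypothetical call to the findsources variant above, extracting sources
# from a white-light image and writing marz-format spectra only for objects
# brighter than r = 25 (which requires rphot=True so that MAGSEG exists).
# File names are placeholders.
def _example_findsources_marz():
    findsources('white.fits', 'datacube.fits', check=True, output='./out',
                spectra=True, nsig=2.5, minarea=12.,
                regmask='edges.reg',       # mask cube edges before extraction
                marz=25.0, rphot=True, sname='MUSE')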
def sourceimgspec(cubes, tags, objidmask, specube=None, vacuum=True,
                  outpath='imgspecs', variance=None, scalevar=None):
    """
    Take one or more cubes, together with a 3D segmentation map produced by
    cubex, and generate images and spectra for inspection.

    Cube2Im needs to be installed and in the path.

    cubes     -> list of short cubes (matched to the objid dimension) for image extraction
    tags      -> name tags to be used for the *_img.fits output
    objidmask -> the 3d segmentation map used for extraction
    specube   -> if set, used to extract the spectrum (e.g. the full cube). If not
                 available, the first cube in the list is used for spectrum generation
    vacuum    -> if true, convert wave to vacuum
    outpath   -> where to store products
    variance  -> if set to a list of variance cubes [1st extension], propagate the
                 variance inside the projected 2D images
    scalevar  -> if set to a cubex variance rescale file (z, scalefactor), apply the
                 variance scaling before propagating the variance
    """
    from astropy.io import fits
    import os
    import numpy as np
    import subprocess
    from mypython.ifu import muse_utils as mutl
    from astropy.table import Table

    #sanity checks
    #turn into a list what should be a list and not a string
    #(basestring is Python 2 only; use the builtin str)
    if isinstance(cubes, str):
        cubes = [cubes]
        tags = [tags]

    #check tags match the size of cubes
    if len(cubes) != len(tags):
        raise ValueError("Cubes and tags do not match in size!")

    #set the cube to be used for spectrum extraction
    if specube:
        cubespec = specube
    else:
        cubespec = cubes[0]

    #stash the variance if needed
    allvar = []
    if variance:
        print('Prepare variance data...')
        #check if rescaling is needed
        if scalevar:
            #read the scaling factor
            vscale = Table.read(scalevar, format='ascii')
            #grab the cube dimensions and construct the rescaled variance
            thisv = fits.open(variance[0])
            nl, nx, ny = thisv[0].data.shape
            scale = np.transpose(np.tile(vscale['rescaling_fact'], (ny, nx, 1)))
        else:
            scale = 1.0
        #now store with scaling
        for varname in variance:
            thisv = fits.open(varname)
            allvar.append(thisv[0].data * scale)
            thisv.close()

    #make the output folder
    if not os.path.exists(outpath):
        os.makedirs(outpath)

    #grab the indexes
    segmap = fits.open(objidmask)
    objid = np.max(segmap[0].data)

    print('Loop over IDs...')
    for ii in range(objid):
        #first check if the index is legit
        inside = np.where(segmap[0].data == ii + 1)
        if len(inside[0]) > 1:
            print('Work on ID {}'.format(ii + 1))
            #make the outfolder
            if not os.path.exists(outpath + '/id{}'.format(ii + 1)):
                os.makedirs(outpath + '/id{}'.format(ii + 1))
            currentpath = outpath + '/id{}'.format(ii + 1)
            #next generate images
            for cc, thiscube in enumerate(cubes):
                subprocess.call(["Cube2Im", "-cube", thiscube,
                                 "-out", "{}/{}_img.fits".format(currentpath, tags[cc]),
                                 "-id", "{}".format(ii + 1), "-idcube", objidmask,
                                 "-nl", "-1", "-idpad", "20", "-sbscale", ".true."])
                #append variance if needed
                if variance:
                    #propagate the variance along the lambda axis
                    #find start-end in each dimension
                    ilstart = np.min(inside[0])
                    ilend = np.max(inside[0])
                    dl = ilend - ilstart + 1
                    ixstart = np.min(inside[1])
                    ixend = np.max(inside[1])
                    dx = ixend - ixstart + 1
                    iystart = np.min(inside[2])
                    iyend = np.max(inside[2])
                    dy = iyend - iystart + 1
                    #now build a mask array
                    thismask = np.zeros((dl, dx, dy))
                    thismask[inside[0] - ilstart, inside[1] - ixstart,
                             inside[2] - iystart] = 1.0
                    #build the variance slice
                    varslice = np.zeros((dl, dx, dy))
                    varslice[inside[0] - ilstart, inside[1] - ixstart,
                             inside[2] - iystart] = \
                        allvar[cc][inside[0], inside[1], inside[2]]
                    #now with the flux
                    flux = fits.open(thiscube)
                    fluxslice = np.zeros((dl, dx, dy))
                    fluxslice[inside[0] - ilstart, inside[1] - ixstart,
                              inside[2] - iystart] = \
                        flux[0].data[inside[0], inside[1], inside[2]]
                    flux.close()
                    #compute the integrated flux and SN
                    totflux = np.sum(np.nan_to_num(fluxslice) * thismask) * 1e-20 * 1.25
                    toterr = np.sqrt(np.sum(np.nan_to_num(varslice) * thismask)) * 1e-20 * 1.25
                    print('Flux: {}'.format(totflux))
                    print('S/N: {}'.format(totflux / toterr))
                    #open the image to update
                    imgupdate = fits.open("{}/{}_img.fits".format(currentpath, tags[cc]),
                                          mode='update')
                    imgupdate[0].header['ISOFLUX'] = np.nan_to_num(totflux)
                    imgupdate[0].header['ISOERR'] = np.nan_to_num(toterr)
                    imgupdate[0].header['S2N'] = np.nan_to_num(totflux / toterr)
                    imgupdate.flush()
                    imgupdate.close()
            #finally call spectrum generation
            print('Extracting 1d spectrum from {}'.format(cubespec))
            mutl.cube2spec(cubespec, 0, 0, 0, shape='mask', tovac=vacuum,
                           idsource=ii + 1, mask=segmap[0].data,
                           write='{}/spectrum.fits'.format(currentpath))
        else:
            print('Skip index {} as not in ID map'.format(ii + 1))

    segmap.close()
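
# --- Usage sketch -----------------------------------------------------------
# A hypothetical call to sourceimgspec: images from mean and median cubes,
# spectra from the full-resolution cube, and variance propagated into the
# projected images using a cubex rescaling file. The variance list must be
# ordered like the cube list; all file names are placeholders.
def _example_sourceimgspec():
    sourceimgspec(['cube_mean_short.fits', 'cube_median_short.fits'],
                  ['mean', 'median'], 'cube_mean.Objects_Id.fits',
                  specube='cube_full.fits', outpath='imgspecs',
                  variance=['cube_mean_var.fits', 'cube_median_var.fits'],
                  scalevar='rescale_var.txt')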
def findsources(image, cube, varima=None, check=False, output='./',
                spectra=False, helio=0, nsig=2., minarea=10.,
                deblend_cont=0.0001, regmask=None, invregmask=False,
                fitsmask=None, clean=True, outspec='Spectra', marz=False,
                rphot=False, detphot=False, sname='MUSE'):
    """
    Take a detection image (collapse of a cube), or the median of an RGB, or
    whatever you want (but aligned to the cube) and run a source extractor.
    Uses the SEP utilities http://sep.readthedocs.org/en/stable/

    image        -> fits file of the image to process
    cube         -> the cube used to extract spectra
    varima       -> the noise image corresponding to the science image (std), optional
    check        -> if true, write a bunch of check images
    output       -> where to dump the output
    spectra      -> if True, extract spectra in VACUUM wave!!
    helio        -> pass an additional heliocentric correction
    nsig         -> number of sky rms used for source identification
    minarea      -> minimum area for extraction
    deblend_cont -> deblending contrast passed to sep.extract
    regmask      -> ds9 region file (image) of regions to be masked before
                    extraction [e.g. edges]
    invregmask   -> if True, invert the mask (the region defines the good area)
    fitsmask     -> fits file with a bad-pixel mask, used directly as badmask;
                    overrides regmask
    clean        -> clean sources
    outspec      -> where to store output spectra
    marz         -> also write spectra in marz format (spectra needs to be True).
                    If set to a numerical value, this is used as an r-band
                    magnitude limit.
    detphot      -> perform aperture photometry on the detection image and add
                    the magnitudes to the catalogue
    rphot        -> perform r-band aperture photometry and add r-band magnitudes
                    to the catalogue
    sname        -> prefix for the source names. Default = MUSE
    """
    import sep
    from astropy.io import fits
    from astropy import wcs
    from astropy import coordinates
    from astropy import units as u
    from astropy import table
    import numpy as np
    import os
    from mypython.ifu import muse_utils as utl
    from mypython.fits import pyregmask as msk
    from shutil import copyfile
    import glob

    #open image
    img = fits.open(image)
    header = img[0].header
    imgwcs = wcs.WCS(header)
    try:
        #this is ok for narrow band images
        data = img[1].data
    except:
        #white cubex images
        data = img[0].data
        data = data.byteswap(True).newbyteorder()
    #grab effective dimension
    nex, ney = data.shape
    #close fits
    img.close()

    if (varima):
        var = fits.open(varima)
        try:
            datavar = var[1].data
        except:
            datavar = var[0].data
            datavar = datavar.byteswap(True).newbyteorder()
        #grab effective dimension
        stdx, stdy = datavar.shape
        #close fits
        var.close()
        if (stdx != nex) or (stdy != ney):
            print("The noise image does not have the same dimensions as the science image")
            return -1

    #create bad pixel mask
    if (fitsmask):
        print("Using FITS image for badmask")
        hdumsk = fits.open(fitsmask)
        try:
            badmask = hdumsk[1].data
        except:
            badmask = hdumsk[0].data
            badmask = badmask.byteswap(True).newbyteorder()
    elif (regmask):
        print("Using region file for badmask")
        Mask = msk.PyMask(ney, nex, regmask, header=img[0].header)
        for ii in range(Mask.nreg):
            Mask.fillmask(ii)
            if (ii == 0):
                badmask = Mask.mask
            else:
                badmask += Mask.mask
        badmask = 1. * badmask
    else:
        badmask = np.zeros((nex, ney))

    if (regmask) and (invregmask) and not (fitsmask):
        badmask = 1 - badmask

    if (check):
        print('Dumping badmask')
        hdumain = fits.PrimaryHDU(badmask, header=header)
        hdulist = fits.HDUList([hdumain])
        hdulist.writeto(output + "/badmask.fits", overwrite=True)

    #check the background level, but do not subtract it
    print('Checking background levels')
    bkg = sep.Background(data, mask=badmask)
    print('Residual background level ', bkg.globalback)
    print('Residual background rms ', bkg.globalrms)

    if (check):
        print('Dumping sky...')
        #dump sky properties
        back = bkg.back()
        rms = bkg.rms()
        hdumain = fits.PrimaryHDU(back, header=header)
        hdubk = fits.ImageHDU(back)
        hdurms = fits.ImageHDU(rms)
        hdulist = fits.HDUList([hdumain, hdubk, hdurms])
        hdulist.writeto(output + "/skyprop.fits", overwrite=True)

    if (varima):
        #use an nsigma threshold with a pixel-by-pixel effective threshold
        #based on the variance map
        thresh = nsig
        objects, segmap = sep.extract(data, thresh, var=datavar,
                                      segmentation_map=True, minarea=minarea,
                                      clean=clean, mask=badmask,
                                      deblend_cont=deblend_cont,
                                      deblend_nthresh=32)
    else:
        #extract sources at nsigma, using a constant threshold
        thresh = nsig * bkg.globalrms
        objects, segmap = sep.extract(data, thresh, segmentation_map=True,
                                      minarea=minarea, clean=clean,
                                      mask=badmask, deblend_cont=deblend_cont,
                                      deblend_nthresh=32)

    print("Extracted {} objects... ".format(len(objects)))
    ids = np.arange(len(objects)) + 1

    if (spectra):
        if not os.path.exists(outspec):
            os.makedirs(outspec)

    if ((check) | (spectra)):
        #create a detection mask a la cubex
        srcmask = np.zeros((data.shape[0], data.shape[1]))
        print('Generating spectra...')
        #loop over detections
        for nbj in ids:
            obj = objects[nbj - 1]
            #init mask (np.bool is deprecated; use the builtin bool)
            tmpmask = np.zeros((data.shape[0], data.shape[1]), dtype=bool)
            #fill this mask
            sep.mask_ellipse(tmpmask, obj['x'], obj['y'], obj['a'], obj['b'],
                             obj['theta'], r=2)
            #add to the global mask
            srcmask = srcmask + tmpmask * nbj
            #verify conflicts, resolving them with the segmentation map
            if np.nanmax(srcmask) > nbj:
                blended = (srcmask > nbj)
                srcmask[blended] = segmap[blended]

        #now loop again and extract spectra if required
        if (spectra):
            #verify that the source mask has the same number of objects
            #as the object list
            if not len(np.unique(srcmask[srcmask > 0])) == len(objects):
                print("Mismatch between number of objects and number of spectra to extract.")
            for nbj in ids:
                savename = "{}/id{}.fits".format(outspec, nbj)
                tmpmask3d = np.zeros((1, data.shape[0], data.shape[1]))
                tmpmask3d[0, :, :] = srcmask[:, :]
                tmpmask3d[tmpmask3d != nbj] = 0
                tmpmask3d[tmpmask3d > 0] = 1
                tmpmask3d = np.array(tmpmask3d, dtype=bool)
                utl.cube2spec(cube, None, None, None, write=savename,
                              shape='mask', helio=helio, mask=tmpmask3d,
                              tovac=True)

    if (check):
        print('Dumping source mask...')
        hdumain = fits.PrimaryHDU(srcmask, header=header)
        hdubk = fits.ImageHDU(srcmask)
        hdulist = fits.HDUList([hdumain, hdubk])
        hdulist.writeto(output + "/source.fits", overwrite=True)
        print('Dumping segmentation map')
        hdumain = fits.PrimaryHDU(segmap, header=header)
        hdubk = fits.ImageHDU(segmap)
        hdulist = fits.HDUList([hdumain, hdubk])
        hdulist.writeto(output + "/segmap.fits", overwrite=True)

    #generate source names using the coordinates and the name prefix
    ra, dec = imgwcs.wcs_pix2world(objects['x'], objects['y'], 0)
    coord = coordinates.FK5(ra * u.degree, dec * u.degree)
    rastr = coord.ra.to_string(u.hour, precision=2, sep='', pad=True)
    decstr = coord.dec.to_string(u.degree, precision=1, sep='',
                                 alwayssign=True, pad=True)
    name = [sname + 'J{0}{1}'.format(rastr[k], decstr[k])
            for k in range(len(rastr))]

    #generate a column used to flag the sources to keep in the analysis;
    #True for all sources at this point
    use_source = np.ones_like(name, dtype=bool)

    #write the source catalogue
    print('Writing catalogue..')
    tab = table.Table(objects)
    tab.add_column(table.Column(dec), 0, name='DEC')
    tab.add_column(table.Column(ra), 0, name='RA')
    tab.add_column(table.Column(name), 0, name='name')
    tab.add_column(table.Column(ids), 0, name='ID')
    tab.add_column(table.Column(use_source), name='use_source')
    tab.write(output + '/catalogue.fits', overwrite=True)

    if (detphot):
        #run source photometry on the extraction image
        whiteimg, whitevar, whitewcsimg = utl.cube2img(cube, write=output + '/Image_white.fits')
        phot_det = sourcephot(output + '/catalogue.fits',
                              output + '/Image_white.fits',
                              output + '/segmap.fits', image, zpab=28.35665)
        phot_det.add_column(table.Column(name), 1, name='name')
        tbhdu = fits.open(output + '/catalogue.fits')
        tbhdu.append(fits.BinTableHDU(phot_det))
        tbhdu[-1].header['PHOTBAND'] = 'Detection'
        tbhdu.writeto(output + '/catalogue.fits', overwrite=True)

    #r-band photometry
    if (rphot):
        rimg, rvar, rwcsimg = utl.cube2img(cube, filt=129, write=output + '/Image_R.fits')
        phot_r = sourcephot(output + '/catalogue.fits', output + '/Image_R.fits',
                            output + '/segmap.fits', image)
        phot_r.add_column(table.Column(name), 1, name='name')
        tbhdu = fits.open(output + '/catalogue.fits')
        tbhdu.append(fits.BinTableHDU(phot_r))
        tbhdu[-1].header['PHOTBAND'] = 'SDSS_r'
        tbhdu.writeto(output + '/catalogue.fits', overwrite=True)

    if ((marz) & (spectra)):
        if marz > 10 and (rphot):
            #a numerical magnitude limit was given: flag fainter sources as unused
            #Requires testing
            hdu = fits.open(output + '/catalogue.fits')
            hdu[1].data['use_source'][hdu[2].data['MAGAP'] > marz] = False
            hdu.writeto(output + '/catalogue.fits', overwrite=True)
            marz_file(output + '/catalogue.fits', outspec, output, r_lim=marz)
        else:
            #marz is True but no magnitude limit is set: create a marz file
            #for the whole catalogue
            marz_file(output + '/catalogue.fits', outspec, output)

    print('All done')
    return objects
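
# --- Usage sketch -----------------------------------------------------------
# A hypothetical call to the variance-aware findsources above, using a
# pixel-by-pixel noise image (std) for the detection threshold and running
# photometry on both the detection image and the r band. File names are
# placeholders.
def _example_findsources_varima():
    findsources('detection.fits', 'datacube.fits', varima='detection_std.fits',
                check=True, output='./out', spectra=True, nsig=2.,
                minarea=10., deblend_cont=0.0001,
                fitsmask='badpix.fits',   # FITS bad-pixel mask, overrides regmask
                detphot=True, rphot=True)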
def findsources(image, cube, check=False, output='./', spectra=False,
                helio=0, nsig=2., minarea=10., regmask=None, clean=True,
                outspec='Spectra'):
    """
    Take a detection image (collapse of a cube), or the median of an RGB, or
    whatever you want (but aligned to the cube) and run a source extractor.
    Uses the SEP utilities http://sep.readthedocs.org/en/stable/

    image   -> fits file of the image to process
    check   -> if true, write a bunch of check images
    output  -> where to dump the output
    cube    -> the cube used to extract spectra
    spectra -> if True, extract spectra in VACUUM wave!!
    helio   -> pass an additional heliocentric correction
    nsig    -> number of sky rms used for source identification
    minarea -> minimum area for extraction
    regmask -> ds9 region file (image) of regions to be masked before
               extraction [e.g. edges]
    clean   -> clean sources
    outspec -> where to store output spectra
    """
    import sep
    from astropy.io import fits
    import numpy as np
    import os
    from mypython.ifu import muse_utils as utl
    from mypython.fits import pyregmask as msk

    #open image
    img = fits.open(image)
    header = img[0].header
    try:
        #this is ok for narrow band images
        data = img[1].data
    except:
        #white cubex images
        data = img[0].data
        data = data.byteswap(True).newbyteorder()
    #grab effective dimension
    nex, ney = data.shape
    #close fits
    img.close()

    #create bad pixel mask
    if (regmask):
        Mask = msk.PyMask(ney, nex, regmask)
        for ii in range(Mask.nreg):
            Mask.fillmask(ii)
            if (ii == 0):
                badmask = Mask.mask
            else:
                badmask += Mask.mask
        badmask = 1. * badmask
    else:
        badmask = np.zeros((nex, ney))

    if (check):
        print('Dumping badmask')
        hdumain = fits.PrimaryHDU(badmask, header=header)
        hdulist = fits.HDUList([hdumain])
        hdulist.writeto(output + "/badmask.fits", overwrite=True)

    #check the background level, but do not subtract it
    #(prints converted to Python 3 syntax; clobber renamed to overwrite)
    print('Checking background levels')
    bkg = sep.Background(data, mask=badmask)
    print('Residual background level ', bkg.globalback)
    print('Residual background rms ', bkg.globalrms)

    if (check):
        print('Dumping sky...')
        #dump sky properties
        back = bkg.back()
        rms = bkg.rms()
        hdumain = fits.PrimaryHDU(back, header=header)
        hdubk = fits.ImageHDU(back)
        hdurms = fits.ImageHDU(rms)
        hdulist = fits.HDUList([hdumain, hdubk, hdurms])
        hdulist.writeto(output + "/skyprop.fits", overwrite=True)

    #extract sources at nsigma
    thresh = nsig * bkg.globalrms
    objects, segmap = sep.extract(data, thresh, segmentation_map=True,
                                  minarea=minarea, clean=clean, mask=badmask)
    print("Extracted {} objects... ".format(len(objects)))

    if (spectra):
        if not os.path.exists(outspec):
            os.makedirs(outspec)

    if ((check) | (spectra)):
        #create a detection mask a la cubex
        srcmask = np.zeros((1, data.shape[0], data.shape[1]))
        nbj = 1
        print('Generating spectra...')
        #loop over detections
        for obj in objects:
            #init masks (np.bool is deprecated; use the builtin bool)
            tmpmask = np.zeros((data.shape[0], data.shape[1]), dtype=bool)
            tmpmask3d = np.zeros((1, data.shape[0], data.shape[1]), dtype=bool)
            #fill this mask
            sep.mask_ellipse(tmpmask, obj['x'], obj['y'], obj['a'], obj['b'],
                             obj['theta'], r=2)
            tmpmask3d[0, :, :] = tmpmask[:, :]
            srcmask = srcmask + tmpmask3d * nbj
            if (spectra):
                savename = "{}/id{}.fits".format(outspec, nbj)
                utl.cube2spec(cube, obj['x'], obj['y'], None, write=savename,
                              shape='mask', helio=helio, mask=tmpmask3d,
                              tovac=True)
            #go to the next object
            nbj = nbj + 1

    if (check):
        print('Dumping source mask...')
        hdumain = fits.PrimaryHDU(srcmask, header=header)
        hdubk = fits.ImageHDU(srcmask)
        hdulist = fits.HDUList([hdumain, hdubk])
        hdulist.writeto(output + "/source.fits", overwrite=True)
        print('Dumping segmentation map')
        hdumain = fits.PrimaryHDU(segmap, header=header)
        hdubk = fits.ImageHDU(segmap)
        hdulist = fits.HDUList([hdumain, hdubk])
        hdulist.writeto(output + "/segmap.fits", overwrite=True)

    #write the source catalogue
    print('Writing catalogue..')
    cols = fits.ColDefs(objects)
    tbhdu = fits.BinTableHDU.from_columns(cols)
    tbhdu.writeto(output + '/catalogue.fits', overwrite=True)

    print('All done')
    return objects
def findsources(image,cube,check=False,output='./',spectra=False,helio=0,nsig=2.,
                minarea=10.,regmask=None,clean=True,outspec='Spectra'):
    """
    Take a detection image (collapse of a cube), or the median of an RGB, or
    whatever you want (but aligned to the cube) and run a source extractor.
    Uses the SEP utilities http://sep.readthedocs.org/en/stable/

    image   -> fits file of the image to process
    check   -> if true, write a bunch of check images
    output  -> where to dump the output
    cube    -> the cube used to extract spectra
    spectra -> if True, extract spectra in VACUUM wave!!
    helio   -> pass an additional heliocentric correction
    nsig    -> number of sky rms used for source identification
    minarea -> minimum area for extraction
    regmask -> ds9 region file (image) of regions to be masked before extraction [e.g. edges]
    clean   -> clean sources
    outspec -> where to store output spectra
    """
    import sep
    from astropy.io import fits
    import numpy as np
    import os
    from mypython.ifu import muse_utils as utl
    from mypython.fits import pyregmask as msk

    #open image
    img=fits.open(image)
    header=img[0].header
    try:
        #this is ok for narrow band images
        data=img[1].data
    except:
        #white cubex images
        data=img[0].data
        data=data.byteswap(True).newbyteorder()
    #close fits
    img.close()

    #create bad pixel mask
    if(regmask):
        Mask=msk.PyMask(header["NAXIS1"],header["NAXIS2"],regmask)
        for ii in range(Mask.nreg):
            Mask.fillmask(ii)
            if(ii == 0):
                badmask=Mask.mask
            else:
                badmask+=Mask.mask
        badmask=1.*badmask
    else:
        badmask=np.zeros((header["NAXIS1"],header["NAXIS2"]))

    if(check):
        print('Dumping badmask')
        hdumain = fits.PrimaryHDU(badmask,header=header)
        hdulist = fits.HDUList([hdumain])
        hdulist.writeto(output+"/badmask.fits",overwrite=True)

    #check the background level, but do not subtract it
    #(prints converted to Python 3 syntax; clobber renamed to overwrite)
    print('Checking background levels')
    bkg = sep.Background(data,mask=badmask)
    print('Residual background level ', bkg.globalback)
    print('Residual background rms ', bkg.globalrms)

    if(check):
        print('Dumping sky...')
        #dump sky properties
        back = bkg.back()
        rms = bkg.rms()
        hdumain = fits.PrimaryHDU(back,header=header)
        hdubk = fits.ImageHDU(back)
        hdurms = fits.ImageHDU(rms)
        hdulist = fits.HDUList([hdumain,hdubk,hdurms])
        hdulist.writeto(output+"/skyprop.fits",overwrite=True)

    #extract sources at nsigma
    thresh = nsig * bkg.globalrms
    objects,segmap=sep.extract(data,thresh,segmentation_map=True,
                               minarea=minarea,clean=clean,mask=badmask)
    print("Extracted {} objects... ".format(len(objects)))

    if(spectra):
        if not os.path.exists(outspec):
            os.makedirs(outspec)

    if((check) | (spectra)):
        #create a detection mask a la cubex
        srcmask=np.zeros((1,data.shape[0],data.shape[1]))
        nbj=1
        print('Generating spectra...')
        #loop over detections
        for obj in objects:
            #init masks (np.bool is deprecated; use the builtin bool)
            tmpmask=np.zeros((data.shape[0],data.shape[1]),dtype=bool)
            tmpmask3d=np.zeros((1,data.shape[0],data.shape[1]),dtype=bool)
            #fill this mask
            sep.mask_ellipse(tmpmask,obj['x'],obj['y'],obj['a'],obj['b'],obj['theta'],r=2)
            tmpmask3d[0,:,:]=tmpmask[:,:]
            srcmask=srcmask+tmpmask3d*nbj
            if(spectra):
                savename="{}/id{}.fits".format(outspec,nbj)
                utl.cube2spec(cube,obj['x'],obj['y'],None,write=savename,
                              shape='mask',helio=helio,mask=tmpmask3d,tovac=True)
            #go to the next object
            nbj=nbj+1

    if(check):
        print('Dumping source mask...')
        hdumain = fits.PrimaryHDU(srcmask,header=header)
        hdubk = fits.ImageHDU(srcmask)
        hdulist = fits.HDUList([hdumain,hdubk])
        hdulist.writeto(output+"/source.fits",overwrite=True)
        print('Dumping segmentation map')
        hdumain = fits.PrimaryHDU(segmap,header=header)
        hdubk = fits.ImageHDU(segmap)
        hdulist = fits.HDUList([hdumain,hdubk])
        hdulist.writeto(output+"/segmap.fits",overwrite=True)

    #write the source catalogue
    print('Writing catalogue..')
    cols = fits.ColDefs(objects)
    tbhdu = fits.BinTableHDU.from_columns(cols)
    tbhdu.writeto(output+'/catalogue.fits',overwrite=True)

    print('All done')
    return objects
def findsources(image,cube,check=False,output='./',spectra=False,helio=0.0):
    """
    Take a detection image (collapse of a cube), or the median of an RGB, or
    whatever you want (but aligned to the cube) and run a source extractor.
    Uses the SEP utilities http://sep.readthedocs.org/en/v0.4.x/

    image   -> fits file of the image to process
    check   -> if true, write a bunch of check images
    output  -> where to dump the output
    cube    -> the cube used to extract spectra
    spectra -> if True, extract spectra
    helio   -> pass an additional heliocentric correction
    """
    import sep
    from astropy.io import fits
    import numpy as np
    import os
    from mypython.ifu import muse_utils as utl

    #open image
    img=fits.open(image)
    header=img[0].header
    data=img[1].data
    data=data.byteswap(True).newbyteorder()
    #close fits
    img.close()

    #check the background level, but do not subtract it
    #(prints converted to Python 3 syntax; clobber renamed to overwrite)
    print('Checking background levels')
    bkg = sep.Background(data)
    print('Residual background level ', bkg.globalback)
    print('Residual background rms ', bkg.globalrms)

    if(check):
        print('Dumping sky...')
        #dump sky properties
        back = bkg.back()
        rms = bkg.rms()
        hdumain = fits.PrimaryHDU(back,header=header)
        hdubk = fits.ImageHDU(back)
        hdurms = fits.ImageHDU(rms)
        hdulist = fits.HDUList([hdumain,hdubk,hdurms])
        hdulist.writeto(output+"/skyprop.fits",overwrite=True)

    #extract sources at 2 sigma
    thresh = 2. * bkg.globalrms
    objects = sep.extract(data,thresh,minarea=10,clean=0.0)
    print("Extracted {} objects... ".format(len(objects)))

    if(spectra):
        if not os.path.exists("Spectra"):
            os.makedirs("Spectra")

    if((check) | (spectra)):
        #create a detection mask
        srcmask=np.zeros((data.shape[0],data.shape[1]))
        nbj=1
        #loop over detections
        for obj in objects:
            #init mask (np.bool is deprecated; use the builtin bool)
            tmpmask=np.zeros((data.shape[0],data.shape[1]),dtype=bool)
            #fill this mask
            sep.mask_ellipse(tmpmask,obj['x'],obj['y'],obj['a'],obj['b'],obj['theta'],r=2)
            srcmask=srcmask+tmpmask*nbj
            if(spectra):
                savename="Spectra/id{}.fits".format(nbj)
                utl.cube2spec(cube,obj['x'],obj['y'],None,write=savename,
                              shape='mask',helio=helio,mask=tmpmask)
            #go to the next object
            nbj=nbj+1

    if(check):
        print('Dumping source mask...')
        hdumain = fits.PrimaryHDU(srcmask,header=header)
        hdubk = fits.ImageHDU(srcmask)
        hdulist = fits.HDUList([hdumain,hdubk])
        hdulist.writeto(output+"/source.fits",overwrite=True)

    #write the source catalogue
    print('Writing catalogue..')
    cols = fits.ColDefs(objects)
    tbhdu = fits.BinTableHDU.from_columns(cols)
    tbhdu.writeto(output+'/catalogue.fits',overwrite=True)

    print('All done')
    return objects
def finalcatalogue(fcube, fcube_var, catname, target_z=None, rest_line=None,
                   vel_cut=None, cov_poly=None, working_dir='./',
                   output_dir='./', fcube_median=None, fcube_odd=None,
                   fcube_even=None, fcube_median_var=None, fcube_odd_var=None,
                   fcube_even_var=None, fcube_orig=None, fsource_img=None,
                   marzred=None, SNcut=[7, 5], DeltaEOSNcut=[0.5, 0.5],
                   SNEOcut=[3, 3], fracnpix=None, derived=True, checkimg=True,
                   mask=None, startind=0):
    """
    Process the cubex catalogue to:

    1. compute the actual SN including the covariance term
    2. compute velocity offsets relative to the target redshift
    3. apply cuts on SN, continuum sources, and the fraction of pixels in the segmap
    4. generate inspection images and full spectra

    fcube            -> cube used for cubex extraction, assumed to be in working_dir
    fcube_var        -> variance cube for cubex extraction
    catname          -> catalogue produced by cubex
    target_z         -> [optional] if set together with rest_line, compute the velocity
                        offset from the target z assuming every detection is at rest_line
    rest_line        -> [optional] see target_z
    vel_cut          -> [optional] if set, and target_z and rest_line are set as well,
                        trim the catalogue to detections with abs(veloffset) <= vel_cut
    cov_poly         -> [optional] third-order polynomial model for the noise, set as an
                        array (N2, N1, N0). If set, covariance is modelled
    working_dir      -> where the cubex output is generated
    output_dir       -> where the output of this function is generated
    fcube_median     -> [optional] median cube for extraction of QA images
    fcube_odd        -> [optional] odd cube for QA images and the even/odd SN cut
    fcube_even       -> [optional] even cube for QA images and the even/odd SN cut
    fcube_median_var -> [optional] associated variance
    fcube_odd_var    -> [optional] associated variance
    fcube_even_var   -> [optional] associated variance
    fcube_orig       -> [optional] if set to the full cube, used for spectral
                        extraction; an absolute path can be used
    fsource_img      -> [optional] if set to an aperture image (and marzred is set),
                        exclude emitters projected against continuum sources of
                        known redshift
    marzred          -> [optional] redshift file for continuum sources, a la marz
                        (see above)
    SNcut            -> array of SN cuts on the main detection that defines the classes.
                        NB: classes are assigned in order of the first condition met.
    DeltaEOSNcut     -> array of fractional changes in even/odd SN allowed
                        (if even/odd cubes are provided)
    SNEOcut          -> array of SN cuts for the even/odd exposures
    fracnpix         -> [optional] if set, cut objects with less than fracnpix of their
                        segmap pixels within a 5x5x5 region
    derived          -> if True, compute derived quantities (SN etc.)
    checkimg         -> if True, generate image cutouts
    mask             -> [optional] if a mask is provided (1=bad, 0=ok), sources with
                        their centre on a masked pixel are rejected
    startind         -> [optional] index in the selected catalogue from which to
                        (re)start the cutout extraction
    """

    #Load cubex catalog
    catalog = read_cubex_catalog(working_dir + catname)

    #create space for a bunch of keywords [some may not be used]
    ksig = Column(np.zeros(len(catalog), dtype=float), name='SNR')
    ksig_odd = Column(np.zeros(len(catalog), dtype=float), name='SNR_odd')
    ksig_even = Column(np.zeros(len(catalog), dtype=float), name='SNR_even')
    ksig_med = Column(np.zeros(len(catalog), dtype=float), name='SNR_med')
    kcov_fac = Column(np.zeros(len(catalog), dtype=float), name='covfac')
    kconfidence = Column(np.zeros(len(catalog), dtype=float), name='confidence')
    kveloffset = Column(np.zeros(len(catalog), dtype=float), name='veloffset')
    kdeltasn = Column(np.zeros(len(catalog), dtype=float), name='EODeltaSN')
    kfraction = Column(np.zeros(len(catalog), dtype=float), name='BoxFraction')
    kcontinuum = Column(np.zeros(len(catalog), dtype=bool), name='OverContinuum')
    catalog.add_columns([ksig, ksig_odd, ksig_even, ksig_med, kcov_fac,
                         kconfidence, kveloffset, kdeltasn, kfraction,
                         kcontinuum])

    #Calculate covariance
    if cov_poly is None:
        covariance = np.ones(len(catalog), dtype=float)
    elif cov_poly.ndim == 1:
        size = np.sqrt(catalog['area_isoproj']) * 0.2
        covariance = np.polyval(cov_poly, size)
    elif cov_poly.ndim == 2:
        #wavelength-dependent model: pick the row whose wavelength floor
        #(column 0) sits just below the source wavelength
        size = np.sqrt(catalog['area_isoproj']) * 0.2
        covariance = np.zeros(len(catalog), dtype=float)
        for ii in range(len(catalog)):
            try:
                okind = np.where(catalog['lambda_geow'][ii] > cov_poly[:, 0])[0][-1]
            except:
                okind = 0
            covariance[ii] = np.polyval(cov_poly[okind, 2:], size[ii])

    #Open the cubes, taking the header from the data extension of the average
    #cube. We assume all the other products share the same WCS!!!
    print("Reading cubes")
    cubehdu = fits.open(working_dir + fcube)
    try:
        cube = cubehdu[1].data
        cubehdr = cubehdu[1].header
    except:
        cube = cubehdu[0].data
        cubehdr = cubehdu[0].header
    try:
        cube_var = fits.open(working_dir + fcube_var)[1].data
    except:
        cube_var = fits.open(working_dir + fcube_var)[0].data

    #reconstruct the name of the cubex segmentation cube and open it
    fsegmap = os.path.basename(fcube).split('.fits')[0] + ".Objects_Id.fits"
    segmap = fits.open(working_dir + fsegmap)[0].data

    if (fcube_odd):
        try:
            try:
                cube_odd = fits.open(working_dir + fcube_odd)[1].data
            except:
                cube_odd = fits.open(working_dir + fcube_odd)[0].data
            try:
                cube_odd_var = fits.open(working_dir + fcube_odd_var)[1].data
            except:
                cube_odd_var = fits.open(working_dir + fcube_odd_var)[0].data
        except:
            print('Have you set the right odd cube/variance?')
            exit()
    else:
        cube_odd = None
        cube_odd_var = None

    if (fcube_even):
        try:
            try:
                cube_even = fits.open(working_dir + fcube_even)[1].data
            except:
                cube_even = fits.open(working_dir + fcube_even)[0].data
            try:
                cube_even_var = fits.open(working_dir + fcube_even_var)[1].data
            except:
                cube_even_var = fits.open(working_dir + fcube_even_var)[0].data
        except:
            print('Have you set the right even cube/variance?')
            exit()
    else:
        cube_even = None
        cube_even_var = None

    if (fcube_median):
        try:
            try:
                cube_median = fits.open(working_dir + fcube_median)[1].data
            except:
                cube_median = fits.open(working_dir + fcube_median)[0].data
            try:
                cube_median_var = fits.open(working_dir + fcube_median_var)[1].data
            except:
                cube_median_var = fits.open(working_dir + fcube_median_var)[0].data
        except:
            print('Have you set the right median cube/variance?')
            exit()
    else:
        cube_median = None
        cube_median_var = None

    #open the source image
    if (fsource_img) and (marzred):
        apermap = fits.open(fsource_img)[0].data
        try:
            contzcat = ascii.read(marzred, format='csv', header_start=2)
        except:
            print('Have you set the marz redshift file?')
            exit()
    else:
        apermap = None
        contzcat = None

    #Compute and add the velocity offset
    if (target_z is not None):
        veloff = velocityoffset(catalog['lambda_fluxw'], target_z, rest_line)
        catalog['veloffset'] = veloff

    #Trim in velocity if requested
    if (vel_cut is not None):
        select = np.abs(catalog['veloffset']) <= vel_cut
        catalog = catalog[select]

    #now loop over sources and update the catalogue info
    if (derived):
        print("Calculating independent SNR and additional metrics for {} sources".format(len(catalog)))
        catalog = independent_SNR_fast(catalog, covariance, segmap, cube,
                                       cube_var, cube_med=cube_median,
                                       cube_odd=cube_odd, cube_even=cube_even,
                                       var_med=cube_median_var,
                                       var_odd=cube_odd_var,
                                       var_even=cube_even_var,
                                       apermap=apermap, contzcat=contzcat)

    #Compute EOSN
    if (fcube_even is not None):
        rel_diff_halves = np.abs(catalog['SNR_even'] - catalog['SNR_odd']) / \
            np.minimum(catalog['SNR_even'], catalog['SNR_odd'])
        catalog['EODeltaSN'] = rel_diff_halves

    #make space for the output
    if not os.path.isdir(output_dir):
        os.mkdir(output_dir)

    #Write the full catalogue with SNR and derived quantities
    print("Writing full catalog to disk")
    catalog.write(output_dir + catname.split(".cat")[0] + "_all_SNR.fits",
                  format="fits", overwrite=True)

    #make the simplest cut to the catalogue to reject unwanted sources
    select = catalog['SNR'] >= np.amin(SNcut)
    catalog = catalog[select]

    #If a mask is provided, apply it
    if (mask):
        hdu = fits.open(mask)
        try:
            msk = hdu[0].data
        except:
            msk = hdu[1].data
        masked = msk[np.array(catalog['y_geow'], dtype=int),
                     np.array(catalog['x_geow'], dtype=int)]
        catalog = catalog[masked == 0]

    #loop over classes and assign
    print("Assigning classes")
    for iclass, iSN in enumerate(SNcut):
        #case of SN, DeltaSN, fracnpix, continuum
        if ((fcube_even is not None) & (fracnpix is not None) & (fsource_img is not None)):
            thisclass = ((catalog['SNR'] >= iSN) &
                         (catalog['SNR_odd'] >= SNEOcut[iclass]) &
                         (catalog['SNR_even'] >= SNEOcut[iclass]) &
                         (catalog['EODeltaSN'] <= DeltaEOSNcut[iclass]) &
                         (catalog['OverContinuum'] == False) &
                         (catalog['BoxFraction'] >= fracnpix) &
                         (catalog['confidence'] == 0))
            catalog['confidence'][thisclass] = iclass + 1
        #case of SN, DeltaSN, fracnpix
        elif ((fcube_even is not None) & (fracnpix is not None)):
            thisclass = ((catalog['SNR'] > iSN) &
                         (catalog['SNR_odd'] > SNEOcut[iclass]) &
                         (catalog['SNR_even'] > SNEOcut[iclass]) &
                         (catalog['EODeltaSN'] < DeltaEOSNcut[iclass]) &
                         (catalog['BoxFraction'] > fracnpix) &
                         (catalog['confidence'] == 0))
            catalog['confidence'][thisclass] = iclass + 1
        #case of SN, DeltaSN
        elif (fcube_even is not None):
            thisclass = ((catalog['SNR'] > iSN) &
                         (catalog['SNR_odd'] > SNEOcut[iclass]) &
                         (catalog['SNR_even'] > SNEOcut[iclass]) &
                         (catalog['EODeltaSN'] < DeltaEOSNcut[iclass]) &
                         (catalog['confidence'] == 0))
            catalog['confidence'][thisclass] = iclass + 1
        #remaining cases
        else:
            thisclass = ((catalog['SNR'] > iSN) & (catalog['confidence'] == 0))
            catalog['confidence'][thisclass] = iclass + 1

    #Write the selected catalogue with SNR and derived quantities
    catalog.write(output_dir + catname.split(".cat")[0] + "_select_SNR.fits",
                  format="fits", overwrite=True)

    #make space for checks
    if not os.path.isdir(output_dir + "/objs"):
        os.mkdir(output_dir + "/objs")

    if (checkimg):
        print("Extracting images for {} sources".format(len(catalog)))
        #loop over detections
        for ii in range(startind, len(catalog)):
            #folder for this object
            objid = catalog['id'][ii]
            objdir = output_dir + "/objs/id{}/".format(objid)
            if not os.path.isdir(objdir):
                os.mkdir(objdir)

            #make images
            hdulist = [cube, cube_median, cube_odd, cube_even]
            outnamelist = ['_mean', '_median', '_half1', '_half2']
            make_images_fast(hdulist, segmap, cubehdr, catalog[ii], objid,
                             objdir, outnamelist, padding=50)

            #Uncomment to use cubex for the images instead (SLOW!)
            #hcubelist=[fsegmap,fcube,fcube_median,fcube_odd,fcube_even]
            #namelist=[working_dir+thc for thc in hcubelist]
            #taglist=['Image_id{}_det','Image_id{}_mean', 'Image_id{}_median', 'Image_id{}_half1', 'Image_id{}_half2']
            #make_cubex_images(namelist, namelist[0], objid, objdir,taglist, padding=-1)
            #taglist=['Pstamp_id{}_det','Pstamp_id{}_mean', 'Pstamp_id{}_median', 'Pstamp_id{}_half1', 'Pstamp_id{}_half2']
            #make_cubex_images(namelist, namelist[0], objid, objdir,taglist, padding=50)

            #Extract the spectrum
            if (fcube_orig is not None):
                savename = objdir + "/spectrum.fits"
                utl.cube2spec(fcube_orig, 0.0, 0.0, 0.0, shape='mask', helio=0,
                              mask=segmap, twod=True, tovac=True,
                              write=savename, idsource=objid)
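
# --- Usage sketch -----------------------------------------------------------
# A hypothetical wavelength-dependent covariance model for the finalcatalogue
# version above. The row layout (lambda_min, lambda_max, N2, N1, N0) is
# inferred from the cov_poly.ndim == 2 branch, which compares column 0 against
# lambda_geow and evaluates columns 2: as polynomial coefficients; all numbers
# and file names are made up for illustration.
def _example_finalcatalogue_covpoly():
    cov_poly = np.array([[4750., 6000., 0.02, 0.30, 1.10],
                         [6000., 7500., 0.03, 0.35, 1.15],
                         [7500., 9350., 0.04, 0.40, 1.20]])
    finalcatalogue('cube_mean.fits', 'cube_mean_var.fits', 'candidates.cat',
                   target_z=3.09, rest_line=1215.67, vel_cut=1000.,
                   cov_poly=cov_poly, working_dir='./cubex_run/',
                   output_dir='./catalogue_out/')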