Example #1
    def redux_basic(self,path='./',nproc=12):
        
        """ 

        This is a basic engine that performs redux of MUSE data using the eso pipeline in a basic 
        form, that is apply all the basic calibs but stop before sky subtraction and coaddition.
        This can be done in a later step, after some post-processing of the cube for enhanced 
        data quality

        path - the top level folder where data reduction has to be performed.
               This includes a folder Raw where the data have been downloaded using the 
               eso shell script. It assumes that basic calibrations are also included, as 
               provided by eso archive. 
        
        nproc - the number of processors to use during the reduction 

        This code is designed to handle a single OB or groups of OBs that share the same sets of calibrations
 
        """
        
        import muse_redux_basic as rdx
        import os

        print 'Starting reduction...'
        
        #First, make sure the various folders exist as needed 
        if not os.path.exists(path+"Raw"):
            print "Cannot find Raw data..."
            exit()
        if not os.path.exists(path+"Script"):
            os.makedirs(path+"Script")
        if not os.path.exists(path+"Proc"):
            os.makedirs(path+"Proc")

        #parse the xml file(s) 
        xml_info=rdx.parse_xml(path=path,nproc=nproc)
        
        #now start reduction. Enter the proc folder
        currdir=os.getcwd()
        os.chdir(path+'Proc')
        print 'Changing dir to proc...'
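
        #Each calibration product below is created only if it is not already present in Proc,
        #so an interrupted reduction can be resumed by simply rerunning this method.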
        
        #First handle the bias
        if not os.path.isfile("MASTER_BIAS.fits"):
            print 'Creating bias...'
            rdx.make_bias(xml_info,nproc=nproc)
            print 'All done with the bias...'
        else:
            print 'Bias already exists'
            
        #Next handle the dark
        if not os.path.isfile("MASTER_DARK.fits"):
            print 'Creating dark...'
            rdx.make_dark(xml_info,nproc=nproc)
            print 'All done with the dark...'
        else:
            print 'Dark already exists'
            
        #Next handle the flats
        if not os.path.isfile("MASTER_FLAT.fits"):
            print 'Creating flat...'
            rdx.make_flat(xml_info,nproc=nproc)
            print 'All done with flat...'
        else:
            print 'Flat already exists'
  
        #Next handle the arcs
        if not os.path.isfile("WAVECAL_RESIDUALS.fits"):
            print 'Processing the arcs...'
            rdx.make_arcs(xml_info,nproc=nproc)
            print 'All done with arcs...'
        else:
            print 'Arcs already processed'
            
        #Next handle the twilight flat
        if not os.path.isfile("DATACUBE_SKYFLAT.fits"):
            print 'Processing the twiflat...'
            rdx.make_twiflat(xml_info,nproc=nproc)
            print 'All done with twiflat...'
        else:
            print 'Twiflat already processed'

        #Next calibrate standard star
        if not os.path.isfile("STD_RED_0001.fits"):
            print 'Processing the standard star...'
            rdx.make_stdstar(xml_info,nproc=nproc)
            print 'All done with standard star...'
        else:
            print 'Standard star already processed'
                
        #Next generate flux table
        if not os.path.isfile("STD_FLUXES_0001.fits"):
            print 'Processing the flux table...'
            rdx.make_stdflux(xml_info,nproc=nproc)
            print 'All done with flux table...'
        else:
            print 'Flux table already processed'
      
        #Next calibrate objects
        if not os.path.isfile("OBJECT_RED_0001.fits"):
            print 'Processing the objects...'
            rdx.make_objects(xml_info,nproc=nproc)
            print 'All done with objects...'
        else:
            print 'Objects already processed'

        #Finally, process science
        print 'Preparing intermediate data cubes...'
        rdx.make_cubes(xml_info,nproc=nproc)
        
        #Done - back to original directory!
        print 'All done with basic redux...'
        os.chdir(currdir)
        
        return xml_info
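
A minimal usage sketch for this method (not part of the original listing): the instance name `redux` is a placeholder for an object of whatever class defines redux_basic, and the path is purely illustrative. Note that the path must end with a trailing slash, since the code appends the Raw/Script/Proc folder names directly.

    # Hypothetical usage sketch: `redux` stands for an instance of the (unnamed)
    # class that defines redux_basic; the OB folder below is illustrative.
    # The trailing '/' matters because subfolder names are concatenated directly.
    xml_info = redux.redux_basic(path='/data/myob/', nproc=24)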
Example #2
    def redux_basic(self, path='./', nproc=12, pipecal=False):
        """ 

        This is a basic engine that performs redux of MUSE data using the eso pipeline in a basic 
        form, that is apply all the basic calibs but stop before sky subtraction and coaddition.
        This can be done in a later step, after some post-processing of the cube for enhanced 
        data quality

        path - the top level folder where data reduction has to be performed.
               This includes a folder Raw where the data have been downloaded using the 
               eso shell script. It assumes that basic calibrations are also included, as 
               provided by eso archive. 
        
        nproc - the number of processors to use during the reduction 

        pipecal - if set to true, static calibrations provided with the pipeline
                  will be used. This pplies to ALL static calibrations

        This code is designed to handle a single OB or groups of OBs that share the same sets of calibrations
 
        """

        import muse_redux_basic as rdx
        import os

        print 'Starting reduction...'

        #First, make sure the various folders exist as needed
        if not os.path.exists(path + "Raw"):
            print "Cannot find Raw data..."
            exit()
        if not os.path.exists(path + "Script"):
            os.makedirs(path + "Script")
        if not os.path.exists(path + "Proc"):
            os.makedirs(path + "Proc")

        #parse the xml file(s)
        xml_info = rdx.parse_xml(path=path, nproc=nproc, pipecal=pipecal)

        #now start reduction. Enter the proc folder
        currdir = os.getcwd()
        os.chdir(path + 'Proc')
        print 'Changing dir to proc...'

        #First handle the bias
        if not os.path.isfile("MASTER_BIAS.fits"):
            print 'Creating bias...'
            rdx.make_bias(xml_info, nproc=nproc)
            print 'All done with the bias...'
        else:
            print 'Bias already exists'

        #Next handle the dark
        if not os.path.isfile("MASTER_DARK.fits"):
            print 'Creating dark...'
            rdx.make_dark(xml_info, nproc=nproc)
            print 'All done with the dark...'
        else:
            print 'Dark already exists'

        #Next handle the flats
        if not os.path.isfile("MASTER_FLAT.fits"):
            print 'Creating flat...'
            rdx.make_flat(xml_info, nproc=nproc)
            print 'All done with flat...'
        else:
            print 'Flat already exists'

        #Next handle the arcs
        if not os.path.isfile("WAVECAL_RESIDUALS.fits"):
            print 'Processing the arcs...'
            rdx.make_arcs(xml_info, nproc=nproc)
            print 'All done with arcs...'
        else:
            print 'Arcs already processed'

        #Next handle the twilight flat
        if not os.path.isfile("DATACUBE_SKYFLAT.fits"):
            print 'Processing the twiflat...'
            rdx.make_twiflat(xml_info, nproc=nproc)
            print 'All done with twiflat...'
        else:
            print 'Twiflat already processed'

        #Next calibrate standard star
        if not os.path.isfile("STD_RED_0001.fits"):
            print 'Processing the standard star...'
            rdx.make_stdstar(xml_info, nproc=nproc)
            print 'All done with standard star...'
        else:
            print 'Standard star already processed'

        #Next generate flux table
        if not os.path.isfile("STD_FLUXES_0001.fits"):
            print 'Processing the flux table...'
            rdx.make_stdflux(xml_info, nproc=nproc)
            print 'All done with flux table...'
        else:
            print 'Flux table already processed'

        #Next calibrate objects
        if not os.path.isfile("OBJECT_RED_0001.fits"):
            print 'Processing the objects...'
            rdx.make_objects(xml_info, nproc=nproc)
            print 'All done with objects...'
        else:
            print 'Objects already processed'

        #Finally, process science
        print('Preparing intermediate data cubes...')
        rdx.make_cubes(xml_info, nproc=nproc)

        #In the end, handle sky offsets if present
        print('Checking if sky offsets are present and preparing sky model')
        rdx.make_skymodel(xml_info, nproc=nproc)

        #Done - back to original directory!
        print('All done with basic redux...')
        os.chdir(currdir)

        return xml_info
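
As above, a hedged usage sketch rather than part of the original listing; `redux` and the path are placeholders. Setting pipecal=True is simply forwarded to parse_xml so that the static calibrations shipped with the pipeline are used throughout.

    # Hypothetical usage sketch: same call as before, but rely on the static
    # calibrations distributed with the ESO pipeline (placeholder instance/path).
    xml_info = redux.redux_basic(path='/data/myob/', nproc=24, pipecal=True)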
Example #3
    def cubex_process(self,catalogue,xml_info,path='./',refcube=None,highsn=None):
        
        """ 
        
        Take individual cubes and process them with CubEx to tweak the illumination and perform sky subtraction
        Needs a catalogue of sources in the field, (ra, dec) one per line, so as to realign the WCS to 
        absolute references
        
        CubEx is a privite code by S. Cantalupo and cannot be redistributed. Contact the author directly. 

        path - the top level folder where data reduction has to be performed.
               This includes a folder Proc with the processed datacube  
         
        catalogue is catalogue used for ra dec alignement
        xml_info  is xml_info used in reduction
        refcube  a reference cube used to resample WCS
        highsn  a combined, sky subtracted, high signal to noise cube used for the last pass 
                of sky subtraction 

        """

        #modules needed for this step
        import os
        import glob
        import subprocess
        import muse_redux_cubex as cx
        import muse_utils as utl
        import muse_redux_basic as rdx
        import multiprocessing
        import numpy as np

        #now start reduction. Enter the proc folder
        currdir=os.getcwd()
        os.chdir(path+'Proc')
        print 'Changing dir to proc...'

        ########################################
        # do a first loop of cubex fix and sky #
        ########################################

        #Search how many exposures are there
        scils=glob.glob("OBJECT_RED_0*.fits*")
        nsci=len(scils)

        #do it in parallel on exposures
        workers=[]
        for dd in range(nsci):
            #reconstruct the name 
            pixtab="PIXTABLE_REDUCED_EXP{0:d}.fits".format(dd+1)
            cube="DATACUBE_FINAL_EXP{0:d}.fits".format(dd+1)
            #now launch the task
            p = multiprocessing.Process(target=cx.fixandsky_firstpass,args=(cube,pixtab,True,))
            workers.append(p)
            p.start()
   
        #wait for completion of all of them 
        for p in workers:
            if(p.is_alive()):
                p.join()

        
        ########################################
        # compute offsets and re-project cubes #
        ########################################
       
        #space
        ra_off=[]
        dec_off=[]
        
        if not os.path.isfile('radecoffsets.txt'):
            print 'Compute offsets to reference...'

            for dd in range(nsci):
                #reconstruct the name 
                cube="DATACUBE_FINAL_EXP{0:d}_skysub.fits".format(dd+1)
                #Compute the offsets
                offra,offdec=utl.aligntocat(cube,currdir+'/'+catalogue)
                ra_off.append(offra)
                dec_off.append(offdec)
                
            #write to disk offsets
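            #format: two rows, RA offsets on the first line and Dec offsets on the
            #second, one column per exposure (matches the reader in the else branch)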
            np.savetxt('radecoffsets.txt',(ra_off,dec_off))

        else:
            print 'Offsets file exists.. loading'
            off=open('radecoffsets.txt','r')
            ra_off=[float(ii) for ii in off.readline().strip().split()]
            dec_off=[float(ii) for ii in off.readline().strip().split()]
            off.close()
            
        #now reproject cubes with offsets
        if(refcube):
            print 'Using reference cube for wcs...'
            rdx.make_cubes(xml_info,nproc=12,wcsoff=[ra_off,dec_off],refcube=currdir+'/'+refcube) 
        else:
            print 'Computing reference WCS on data themselves...'
            rdx.make_cubes(xml_info,nproc=12,wcsoff=[ra_off,dec_off])  
            
        ######################################################
        # run a second iter of cubex fix and sky             #
        # with realigned images  mostly to find sources for  #
        # masking                                            #
        ###################################################### 
        
        #do it in parallel on exposures
        workers=[]
        for dd in range(nsci):
            #reconstruct the name 
            pixtab="PIXTABLE_REDUCED_EXP{0:d}_off.fits".format(dd+1)
            cube="DATACUBE_FINAL_EXP{0:d}_off.fits".format(dd+1)
            #now launch the task
            p = multiprocessing.Process(target=cx.fixandsky_firstpass,args=(cube,pixtab,True,))
            workers.append(p)
            p.start()
   
        #wait for completion of all of them 
        for p in workers:
            if(p.is_alive()):
                p.join()

        ##########################################
        # run a third iter of cubex fix and sky  #
        # with better masking on sources         #
        ##########################################
        if(highsn):
            #run the loop
            print 'Do third pass of cubex..'
            #do it in parallel on exposures
            workers=[]
            for dd in range(nsci):
                #reconstruct the name 
                pixtab="PIXTABLE_REDUCED_EXP{0:d}_off.fits".format(dd+1)
                cube="DATACUBE_FINAL_EXP{0:d}_off.fits".format(dd+1)
                #now launch the task
                p = multiprocessing.Process(target=cx.fixandsky_secondpass,args=(cube,pixtab,True,currdir+'/'+highsn))
                workers.append(p)
                p.start()
   
            #wait for completion of all of them 
            for p in workers:
                if(p.is_alive()):
                    p.join()  
        else:
            print 'High SN cube not provided.. skip step 3'
      
        #Done - back to original directory!
        print 'All done with fancy redux... Ready to coadd'
        os.chdir(currdir)
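
To close, a hedged end-to-end sketch of how cubex_process might follow the basic reduction above. The instance name, catalogue file and cube names are placeholders, not part of the original code; the catalogue and highsn arguments are resolved relative to the directory the call is made from.

    # Hypothetical usage sketch (placeholder instance and file names):
    xml_info = redux.redux_basic(path='/data/myob/', nproc=24)
    # first run: illumination fix, WCS alignment and sky subtraction, no high-S/N cube yet
    redux.cubex_process('align_sources.cat', xml_info, path='/data/myob/')
    # after coadding the exposures, rerun so the third pass can use the combined cube
    redux.cubex_process('align_sources.cat', xml_info, path='/data/myob/',
                        highsn='COMBINED_CUBE.fits')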