Example 1
def combine_cubes(listcubes, listmasks, regions=True):
    """
    Combine cubes with both mean and median statistics.
    Apply masks as desired.

    listcubes -> a text file listing the cubes to use in the combine
    listmasks -> a text file listing the goodpix masks from the pipeline

    regions   -> if True, the code searches the same path for ds9 region files
                 (.reg) named after the pipeline masks, flagging additional areas
                 to clip
    """

    from astropy.io import fits
    import numpy as np
    import scipy
    import os
    import matplotlib.pyplot as plt
    from mypython.fits import pyregmask as msk

    if (os.path.isfile("COMBINED_CUBE_MED.fits")
            & os.path.isfile("COMBINED_CUBE.fits")):
        print("Coadded cubes already exists!")
        return

    #continue with implicit else if the checks are passed

    if (regions):
        print("Updating the masks following ds9 regions")

        #loads list
        clistmask = np.loadtxt(listmasks, dtype=np.dtype('a'))

        #redefine new mask
        mask_new = "new_" + listmasks
        llms = open(mask_new, "w")

        #loop over and update with regions
        #if scalar, make it 1 element list
        if (clistmask.shape == ()):
            clistmask = [clistmask]

        for i, cmask in enumerate(clistmask):

            #create region name
            regname_line = (cmask.split(".fits")[0]) + ".reg"
            #reconstruct cubex region name
            rnpath = (cmask.split("MASK")[0])
            rnexp = (cmask.split("_")[1])
            regname_cubex = rnpath + "DATACUBE_FINAL_LINEWCS_" + rnexp + "_fix2_SliceEdgeMask.reg"

            #search if file exist
            if (os.path.isfile(regname_line)):
                regname = regname_line
            elif (os.path.isfile(regname_cubex)):
                regname = regname_cubex
            else:
                regname = None

            if (regname):
                #update the mask
                print("Updating mask using {}".format(regname))

                #open fits
                cfits = fits.open(cmask)

                #init reg mask
                Mask = msk.PyMask(cfits[1].header["NAXIS1"],
                                  cfits[1].header["NAXIS2"], regname)
                for ii in range(Mask.nreg):
                    Mask.fillmask(ii)
                    if (ii == 0):
                        totmask = Mask.mask
                    else:
                        totmask += Mask.mask

                #update the mask
                cfits[1].data = cfits[1].data * 1 * np.logical_not(totmask)
                savename = cmask.split(".fits")[0] + '_wreg.fits'
                cfits.writeto(savename, clobber=True)
                llms.write(savename + '\n')

            else:
                #keep current mask
                llms.write(cmask + '\n')

        #done with new masks
        llms.close()

    else:
        print('Using original masks...')
        mask_new = listmasks

    print("Combining cubes with mean and median")

    #load the relevant lists
    cblis = open(listcubes)
    mklis = open(mask_new)

    allcubes = []
    allmasks = []

    for cc in cblis:
        allcubes.append(fits.open(cc.strip()))

    for mm in mklis:
        allmasks.append(fits.open(mm.strip()))

    cblis.close()
    mklis.close()

    #generate list of cubes
    nexp = len(allcubes)
    print('Coadding {} exposures...'.format(nexp))

    #make space for final grid
    finalcube_mean = np.copy((allcubes[1])[1].data)
    finalvar = np.copy((allcubes[1])[2].data)
    finalcube_median = np.copy((allcubes[1])[1].data)

    #grab info on pixels
    nx = (allcubes[1])[1].header["NAXIS1"]
    ny = (allcubes[1])[1].header["NAXIS2"]
    nw = (allcubes[1])[1].header["NAXIS3"]

    #giant for loop over wave,pix
    print('Working on {} slices...'.format(nw))
    piximage = np.zeros((nexp, ny, nx))
    varimage = np.zeros((nexp, ny, nx))
    mskimage = np.zeros((nexp, ny, nx))
    masknans = np.zeros((ny, nx))

    for ww in range(nw):
        #print (' {} '.format(ww+1),end='')
        #now loop over exposure
        for ee in range(nexp):
            piximage[ee, :] = (allcubes[ee])[1].data[ww, :]
            varimage[ee, :] = (allcubes[ee])[2].data[ww, :]
            #clean nan
            masknans = masknans * 0
            notnans = np.where(np.isfinite(piximage[ee, :]))
            masknans[notnans] = 1
            #1 good pixels at first, then 1 bad pixels
            mskimage[ee, :] = np.logical_not(
                ((allmasks[ee])[1].data) * masknans)

        #construct masked arrays
        pixmasked = np.ma.array(piximage, mask=mskimage)
        varmasked = np.ma.array(varimage, mask=mskimage)

        #make coadds with masking
        finalcube_median[ww, :] = np.ma.median(pixmasked, axis=0)
        finalcube_mean[ww, :] = np.ma.mean(pixmasked, axis=0)
        countmap = np.ma.count(varmasked, axis=0)
        finalvar[ww, :] = np.ma.sum(varmasked, axis=0) / countmap / countmap

    #write
    hdu1 = fits.PrimaryHDU([])
    hdu2 = fits.ImageHDU(finalcube_mean)
    hdu3 = fits.ImageHDU(finalvar)
    hdu2.header = (allcubes[0])[1].header
    hdu3.header = (allcubes[0])[2].header
    hdulist = fits.HDUList([hdu1, hdu2, hdu3])
    hdulist.writeto("COMBINED_CUBE.fits", clobber=True)

    #write
    hdu1 = fits.PrimaryHDU([])
    hdu2 = fits.ImageHDU(finalcube_median)
    hdu3 = fits.ImageHDU(finalvar)
    hdu2.header = (allcubes[0])[1].header
    hdu3.header = (allcubes[0])[2].header
    hdulist = fits.HDUList([hdu1, hdu2, hdu3])
    hdulist.writeto("COMBINED_CUBE_MED.fits", clobber=True)

    #make white images
    print('Creating final white images')
    white_mean = np.zeros((ny, nx))
    white_med = np.zeros((ny, nx))

    for xx in range(nx):
        for yy in range(ny):
            white_mean[yy, xx] = np.sum(finalcube_mean[:, yy, xx]) / nw
            white_med[yy, xx] = np.sum(finalcube_median[:, yy, xx]) / nw

    #save projected image
    hdu1 = fits.PrimaryHDU([])
    hdu2 = fits.ImageHDU(white_mean)
    hdu2.header = (allcubes[0])[1].header
    hdulist = fits.HDUList([hdu1, hdu2])
    hdulist.writeto("COMBINED_IMAGE.fits", clobber=True)

    #save projected image
    hdu1 = fits.PrimaryHDU([])
    hdu2 = fits.ImageHDU(white_med)
    hdu2.header = (allcubes[0])[1].header
    hdulist = fits.HDUList([hdu1, hdu2])
    hdulist.writeto("COMBINED_IMAGE_MED.fits", clobber=True)
Example 2
def internalskysub(listob, skymask, deepwhite=None):
    """

    Perform sky-subtraction using pixels within the cube

    listob  -> OBs to loop on
    skymask -> if set to a ds9 region file (image coordinates),
               compute the sky in these regions (excluding sources);
               otherwise mask the sources and use all the pixels in the field.

    """

    import os
    import glob
    from astropy.io import fits
    import numpy as np
    import zap
    import matplotlib.pyplot as plt
    import sep

    #grab top dir
    topdir = os.getcwd()
    #now loop over each folder and make the final illcorrected cubes
    for ob in listob:

        #change dir
        os.chdir(ob + '/Proc/Line/')
        print('Processing {} for sky subtraction correction'.format(ob))

        #Search how many exposures are there
        scils = glob.glob("../Basic/OBJECT_RED_0*.fits*")
        nsci = len(scils)

        #loop on exposures and reduce frame with zeroth order sky subtraction + ZAP
        for exp in range(nsci):

            #do pass on IFUs
            print('Internal sky subtraction of exposure {}'.format(exp + 1))

            #define names
            oldcube = "DATACUBE_FINAL_LINEWCS_EXP{0:d}_ILLCORR_stack.fits".format(
                exp + 1)
            oldimage = "IMAGE_FOV_LINEWCS_EXP{0:d}_ILLCORR_stack.fits".format(
                exp + 1)
            newcube = "DATACUBE_FINAL_LINEWCS_EXP{0:d}_lineskysub.fits".format(
                exp + 1)
            newimage = "IMAGE_FOV_LINEWCS_EXP{0:d}_lineskysub.fits".format(
                exp + 1)
            ifumask_iname = "IMAGE_IFUMASK_LINEWCS_EXP{0:d}.fits".format(exp +
                                                                         1)
            source_mask = "IMAGE_SOURCEMASK_LINEWCS_EXP{0:d}.fits".format(exp +
                                                                          1)
            zapcube = "DATACUBE_FINAL_LINEWCS_EXP{0:d}_zapsky.fits".format(
                exp + 1)
            zapimage = "IMAGE_FOV_LINEWCS_EXP{0:d}_zapsky.fits".format(exp + 1)
            zapsvdout = "ZAPSVDOUT_EXP{0:d}.fits".format(exp + 1)

            if not os.path.isfile(zapcube):

                #open the cube
                cube = fits.open(oldcube)
                #open mask ifu
                ifumask = fits.open(ifumask_iname)

                #if white image provided load it
                if (deepwhite):
                    print("Use source mask image {}".format(deepwhite))
                    whsrc = fits.open(topdir + '/' + deepwhite)
                    whitesource = whsrc[0].data.byteswap().newbyteorder()
                else:
                    #create from cube
                    print("Create source mask image from cube")
                    whitesource = np.nanmedian(cube[1].data, axis=0)

                #now create a source mask
                print('Create a source mask')
                header = cube[1].header
                bkg = sep.Background(whitesource)
                bkg_subtraced_data = whitesource - bkg.back()
                thresh = 3. * bkg.globalrms
                minarea = 20.
                clean = True
                segmap = np.zeros((header["NAXIS2"], header["NAXIS1"]))

                #extract objects
                objects, segmap = sep.extract(bkg_subtraced_data,
                                              thresh,
                                              segmentation_map=True,
                                              minarea=minarea,
                                              clean=clean)

                #plt.imshow(segmap,origin='low')
                #plt.show()

                #plt.imshow(whitesource,origin='low')
                #plt.show()

                #define geometry
                nwave = cube[1].header["NAXIS3"]
                nx = cube[1].header["NAXIS1"]
                ny = cube[1].header["NAXIS2"]

                #make sure pixels are sky sub once and only once
                countsub = np.copy(ifumask[1].data) * 0.

                #if mask is set do a coarse median sky subtraction
                if (skymask):
                    print('Constructing sky mask')
                    #for zap, sky region should be 0, and sources >1
                    skybox = np.zeros((ny, nx)) + 1
                    #construct the sky region mask
                    from mypython.fits import pyregmask as pmk
                    mysky = pmk.PyMask(nx,
                                       ny,
                                       "../../../" + skymask,
                                       header=cube[1].header)
                    for ii in range(mysky.nreg):
                        mysky.fillmask(ii)
                        usepix = np.where(mysky.mask > 0)
                        skybox[usepix] = 0

                    #plt.imshow(skybox,origin='low')
                    #plt.show()
                    #plt.imshow(segmap,origin='low')
                    #plt.show()
                    #plt.imshow(ifumask[1].data,origin='low')
                    #plt.show()
                    #exit()

                    #now do median sky subtraction
                    #loop over wavelength
                    for ww in range(nwave):
                        #extract sky slice
                        skyimg = cube[1].data[ww, :, :]
                        #grab pixels with no source and in mask region
                        #avoid edges not flagged by IFU mask
                        pixels = np.where((skybox < 1) & (segmap < 1)
                                          & (ifumask[1].data > 0))
                        #compute sky in good regions
                        medsky = np.nanmedian(skyimg[pixels])
                        #subtract from all  pixels
                        cube[1].data[ww, :, :] = skyimg - medsky

                else:
                    #otherwise do coarse sky IFU by IFU
                    #loop over ifu
                    for iff in range(24):
                        thisifu = (iff + 1) * 100.
                        nextifu = (iff + 2) * 100. + 1
                        #grab pixels in ifu without sources
                        pixels=np.where((ifumask[1].data >= thisifu) & \
                                            (ifumask[1].data < nextifu)\
                                            & (segmap < 1) )
                        pixels_ifu=np.where((ifumask[1].data >= thisifu) \
                                                & (ifumask[1].data < nextifu)\
                                                & (countsub < 1))
                        #update used pixels
                        countsub[pixels_ifu] = 1

                        #loop over wavelength
                        for ww in range(nwave):
                            skyimg = cube[1].data[ww, :, :]
                            #compute sky in good regions
                            medsky = np.nanmedian(skyimg[pixels])
                            #subtract from all IFU pixels
                            skyimg[pixels_ifu] = skyimg[pixels_ifu] - medsky
                            cube[1].data[ww, :, :] = skyimg

                #write final cube
                cube.writeto(newcube, clobber=True)

                #create white image
                print('Creating final white image')
                white_new = np.zeros((ny, nx))
                for xx in range(nx):
                    for yy in range(ny):
                        white_new[yy, xx] = np.nansum(cube[1].data[:, yy,
                                                                   xx]) / nwave

                #save projected image
                hdu1 = fits.PrimaryHDU([])
                hdu2 = fits.ImageHDU(white_new)
                hdu2.header = cube[1].header
                hdulist = fits.HDUList([hdu1, hdu2])
                hdulist.writeto(newimage, clobber=True)

                #save segmap
                #make it redundant to be sure ZAP reads the right extension
                hdu1 = fits.PrimaryHDU(segmap)
                #hdu1.header=header
                hdu2 = fits.ImageHDU(segmap)
                #hdu2.header=header
                hdulist = fits.HDUList([hdu1, hdu2])
                hdulist.writeto(source_mask, clobber=True)

                print('Running ZAP on exposure {}'.format(exp + 1))

                #deal with masks
                if (skymask):
                    #combine sky mask with source mask
                    #make it redundant to be sure ZAP reads the right extension
                    tmpzapmask = segmap + skybox
                    hdu1 = fits.PrimaryHDU(tmpzapmask)
                    #hdu1.header=header
                    hdu2 = fits.ImageHDU(tmpzapmask)
                    #hdu2.header=header
                    hdulist = fits.HDUList([hdu1, hdu2])
                    hdulist.writeto("ZAP_" + source_mask, clobber=True)
                    zapmask = "ZAP_" + source_mask
                else:
                    zapmask = source_mask

                #clean old if exists
                try:
                    os.remove(zapsvdout)
                except:
                    pass
                #run new - handle change in keywords from v1 to v2
                try:
                    zap.process(newcube,
                                outcubefits=zapcube,
                                clean=True,
                                svdoutputfits=zapsvdout,
                                mask=zapmask)
                except:
                    zap.process(newcube,
                                outcubefits=zapcube,
                                clean=True,
                                mask=zapmask)

                #create white image from zap cube
                cube = fits.open(zapcube)
                print('Creating final white image from ZAP')
                white_new = np.zeros((ny, nx))
                for xx in range(nx):
                    for yy in range(ny):
                        white_new[yy, xx] = np.nansum(cube[1].data[:, yy,
                                                                   xx]) / nwave

                #save projected image
                hdu1 = fits.PrimaryHDU([])
                hdu2 = fits.ImageHDU(white_new)
                hdu2.header = cube[1].header
                hdulist = fits.HDUList([hdu1, hdu2])
                hdulist.writeto(zapimage, clobber=True)

            else:
                print("ZAP cube exist alread for exposure {}... skip!".format(
                    exp + 1))

        #back to top for next OB
        os.chdir(topdir)
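
A hedged usage sketch for internalskysub; the OB names, sky region file, and white image path are placeholders, and the call assumes the OB/Proc/Line/ directory layout and file naming produced by the earlier reduction steps, plus the zap and sep packages.

#hypothetical call, run from the directory that contains the OB folders
internalskysub(['OB1', 'OB2'], 'sky.reg', deepwhite='esocombine/IMAGE_FOV_0001.fits')
#with skymask=None the sky is instead estimated IFU by IFU from source-free pixels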
Example 3
def findsources(image,
                cube,
                varima=None,
                check=False,
                output='./',
                spectra=False,
                helio=0,
                nsig=2.,
                minarea=10.,
                deblend_cont=0.0001,
                regmask=None,
                invregmask=False,
                fitsmask=None,
                clean=True,
                outspec='Spectra',
                marz=False,
                rphot=False,
                detphot=False,
                sname='MUSE'):
    """      

    Take a detection image (a collapse of the cube, a median
    of an RGB, or anything else, as long as it is aligned to the cube)
    and run the source extractor 

   
    Use SEP utilities http://sep.readthedocs.org/en/stable/

    image   -> fits file of image to process
    cube    -> the cube used to extract spectra
    varima  -> the noise image corresponding to the science image (std), optional
    check   -> if True, write a set of check images
    output  -> where to dump the output
    spectra -> if True, extract spectra in VACUUM wave!!
    helio   -> pass additional heliocentric correction
    nsig    -> number of skyrms used for source id 
    minarea -> minimum area for extraction 
    regmask -> ds9 region file (image) of regions to be masked before extraction [e.g. edges]
    invregmask -> if True invert the mask (region defines good area)
    fitsmask -> Fits file with good mask, overrides regmask
    clean   -> clean sources 
    outspec -> where to store output spectra 
    marz    -> also write spectra in marz format (requires spectra=True).
               If set to a numerical value, this is used as an r-band magnitude limit.
    detphot -> perform aperture photometry on the detection image and add magnitudes to the catalogue
    rphot   -> perform r-band aperture photometry and add r-band magnitudes to the catalogue
    sname   -> prefix for the source names. Default = MUSE

    """

    import sep
    from astropy.io import fits
    from astropy import wcs
    from astropy import coordinates
    from astropy import units as u
    from astropy import table
    import numpy as np
    import os
    from mypython.ifu import muse_utils as utl
    from mypython.fits import pyregmask as msk
    from shutil import copyfile
    import glob

    #open image
    img = fits.open(image)
    header = img[0].header
    imgwcs = wcs.WCS(header)
    try:
        #this is ok for narrow band images
        data = img[1].data
    except:
        #white cubex images
        data = img[0].data

    data = data.byteswap(True).newbyteorder()
    #grab effective dimension
    nex, ney = data.shape
    #close fits
    img.close()

    if (varima):
        var = fits.open(varima)
        try:
            datavar = var[1].data
        except:
            datavar = var[0].data

        datavar = datavar.byteswap(True).newbyteorder()
        #grab effective dimension
        stdx, stdy = datavar.shape
        #close fits
        var.close()

        if (stdx != nex) or (stdy != ney):
            print(
                "The noise image does not have the same dimensions as the science image"
            )
            return -1

    #create bad pixel mask
    if (fitsmask):
        print("Using FITS image for badmask")
        hdumsk = fits.open(fitsmask)
        try:
            badmask = hdumsk[1].data
        except:
            badmask = hdumsk[0].data
        badmask = badmask.byteswap(True).newbyteorder()
    elif (regmask):
        print("Using region file for badmask")
        Mask = msk.PyMask(ney, nex, regmask, header=img[0].header)
        for ii in range(Mask.nreg):
            Mask.fillmask(ii)
            if (ii == 0):
                badmask = Mask.mask
            else:
                badmask += Mask.mask
            badmask = 1. * badmask
    else:
        badmask = np.zeros((nex, ney))

    if (regmask) and (invregmask) and not (fitsmask):
        badmask = 1 - badmask

    if (check):
        print('Dumping badmask')
        hdumain = fits.PrimaryHDU(badmask, header=header)
        hdulist = fits.HDUList([hdumain])
        hdulist.writeto(output + "/badmask.fits", overwrite=True)

    #check background level, but do not subtract it
    print('Checking background levels')
    bkg = sep.Background(data, mask=badmask)
    print('Residual background level ', bkg.globalback)
    print('Residual background rms ', bkg.globalrms)

    if (check):
        print('Dumping sky...')
        #dump sky properties
        back = bkg.back()
        rms = bkg.rms()
        hdumain = fits.PrimaryHDU(back, header=header)
        hdubk = fits.ImageHDU(back)
        hdurms = fits.ImageHDU(rms)
        hdulist = fits.HDUList([hdumain, hdubk, hdurms])
        hdulist.writeto(output + "/skyprop.fits", overwrite=True)

    if (varima):
        #Use nsigma threshold and a pixel by pixel effective treshold based on variance map
        thresh = nsig
        objects, segmap = sep.extract(data,
                                      thresh,
                                      var=datavar,
                                      segmentation_map=True,
                                      minarea=minarea,
                                      clean=clean,
                                      mask=badmask,
                                      deblend_cont=deblend_cont,
                                      deblend_nthresh=32)
    else:
        #extracting sources at nsigma, use constant threshold
        thresh = nsig * bkg.globalrms
        objects, segmap = sep.extract(data,
                                      thresh,
                                      segmentation_map=True,
                                      minarea=minarea,
                                      clean=clean,
                                      mask=badmask,
                                      deblend_cont=deblend_cont,
                                      deblend_nthresh=32)

    print("Extracted {} objects... ".format(len(objects)))
    ids = np.arange(len(objects)) + 1

    if (spectra):
        if not os.path.exists(outspec):
            os.makedirs(outspec)

    if ((check) | (spectra)):
        #create a detection mask a'la cubex
        srcmask = np.zeros((data.shape[0], data.shape[1]))
        print('Generating spectra...')
        #loop over detections
        for nbj in ids:
            obj = objects[nbj - 1]
            #init mask
            tmpmask = np.zeros((data.shape[0], data.shape[1]), dtype=np.bool)
            #fill this mask
            sep.mask_ellipse(tmpmask,
                             obj['x'],
                             obj['y'],
                             obj['a'],
                             obj['b'],
                             obj['theta'],
                             r=2)
            #add in global mask
            srcmask = srcmask + tmpmask * nbj
            #verify conflicts, resolve using segmentation map
            if np.nanmax(srcmask) > nbj:
                blended = (srcmask > nbj)
                srcmask[blended] = segmap[blended]

        #Now loop again and extract spectra if required
        if (spectra):
            #Verify that the source mask has the same number of objects as the object list
            if not len(np.unique(srcmask[srcmask > 0])) == len(objects):
                print(
                    "Mismatch between number of objects and number of spectra to extract."
                )
            for nbj in ids:
                savename = "{}/id{}.fits".format(outspec, nbj)
                tmpmask3d = np.zeros((1, data.shape[0], data.shape[1]))
                tmpmask3d[0, :, :] = srcmask[:, :]
                tmpmask3d[tmpmask3d != nbj] = 0
                tmpmask3d[tmpmask3d > 0] = 1
                tmpmask3d = np.array(tmpmask3d, dtype=np.bool)
                utl.cube2spec(cube,
                              None,
                              None,
                              None,
                              write=savename,
                              shape='mask',
                              helio=helio,
                              mask=tmpmask3d,
                              tovac=True)

    if (check):
        print('Dumping source mask...')
        hdumain = fits.PrimaryHDU(srcmask, header=header)
        hdubk = fits.ImageHDU(srcmask)
        hdulist = fits.HDUList([hdumain, hdubk])
        hdulist.writeto(output + "/source.fits", overwrite=True)

        print('Dumping segmentation map')
        hdumain = fits.PrimaryHDU(segmap, header=header)
        hdubk = fits.ImageHDU(segmap)
        hdulist = fits.HDUList([hdumain, hdubk])
        hdulist.writeto(output + "/segmap.fits", overwrite=True)

    #Generate source names using coordinates and name prefix
    ra, dec = imgwcs.wcs_pix2world(objects['x'], objects['y'], 0)
    coord = coordinates.FK5(ra * u.degree, dec * u.degree)
    rastr = coord.ra.to_string(u.hour, precision=2, sep='', pad=True)
    decstr = coord.dec.to_string(u.degree,
                                 precision=1,
                                 sep='',
                                 alwayssign=True,
                                 pad=True)
    name = [
        sname + 'J{0}{1}'.format(rastr[k], decstr[k])
        for k in range(len(rastr))
    ]

    #Generate a column to be used to flag the sources to be used in the analysis
    #True for all sources at this point
    use_source = np.ones_like(name, dtype=bool)

    #write source catalogue
    print('Writing catalogue..')
    tab = table.Table(objects)
    tab.add_column(table.Column(dec), 0, name='DEC')
    tab.add_column(table.Column(ra), 0, name='RA')
    tab.add_column(table.Column(name), 0, name='name')
    tab.add_column(table.Column(ids), 0, name='ID')
    tab.add_column(table.Column(use_source), name='use_source')
    tab.write(output + '/catalogue.fits', overwrite=True)

    if (detphot):
        #Run source photometry on the extraction image
        whiteimg, whitevar, whitewcsimg = utl.cube2img(cube,
                                                       write=output +
                                                       '/Image_white.fits')
        phot_det = sourcephot(output + '/catalogue.fits',
                              output + '/Image_white.fits',
                              output + '/segmap.fits',
                              image,
                              zpab=28.35665)
        phot_det.add_column(table.Column(name), 1, name='name')
        tbhdu = fits.open(output + '/catalogue.fits')
        tbhdu.append(fits.BinTableHDU(phot_det))
        tbhdu[-1].header['PHOTBAND'] = 'Detection'
        tbhdu.writeto(output + '/catalogue.fits', overwrite=True)

    #rband photometry
    if (rphot):
        rimg, rvar, rwcsimg = utl.cube2img(cube,
                                           filt=129,
                                           write=output + '/Image_R.fits')
        phot_r = sourcephot(output + '/catalogue.fits',
                            output + '/Image_R.fits', output + '/segmap.fits',
                            image)
        phot_r.add_column(table.Column(name), 1, name='name')

        tbhdu = fits.open(output + '/catalogue.fits')
        tbhdu.append(fits.BinTableHDU(phot_r))
        tbhdu[-1].header['PHOTBAND'] = 'SDSS_r'
        tbhdu.writeto(output + '/catalogue.fits', overwrite=True)

    if ((marz) & (spectra)):
        #if a numerical magnitude limit is set (and r-band photometry is available), flag sources
        #fainter than the limit; otherwise create a marz file for the whole catalogue
        if marz > 10 and (rphot):
            #Requires testing
            hdu = fits.open(output + '/catalogue.fits')
            hdu[1].data['use_source'][hdu[2].data['MAGAP'] > marz] = False
            hdu.writeto(output + '/catalogue.fits', overwrite=True)

            marz_file(output + '/catalogue.fits', outspec, output, r_lim=marz)
        else:
            marz_file(output + '/catalogue.fits', outspec, output)

    print('All done')
    return objects
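
An illustrative call of findsources; the image, cube, and region-file names are placeholders and the detection image is assumed to be aligned to the cube.

#hypothetical call (paths are placeholders)
objs = findsources('Image_white.fits', 'COMBINED_CUBE.fits',
                   nsig=3., minarea=12., spectra=True, regmask='edges.reg',
                   outspec='Spectra', rphot=True, marz=True)
#writes catalogue.fits (plus badmask/skyprop/source/segmap check images when check=True)
#and one spectrum per detection under outspec/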
Example 4
def findsources(image, cube, check=False, output='.', spectra=False, helio=0, nsig=2.,
                minarea=10., regmask=None, clean=True, outspec='Spectra', marz=False,
                rphot=False, sname='MUSE'):

    """      

    Take a detection image (a collapse of the cube, a median
    of an RGB, or anything else, as long as it is aligned to the cube)
    and run the source extractor 

   
    Use SEP utilities http://sep.readthedocs.org/en/stable/

    image   -> fits file of image to process
    check   -> if True, write a set of check images
    output  -> where to dump the output
    cube    -> the cube used to extract spectra
    spectra -> if True, extract spectra in VACUUM wave!!
    helio   -> pass additional heliocentric correction
    nsig    -> number of skyrms used for source id 
    minarea -> minimum area for extraction 
    regmask -> ds9 region file (image) of regions to be masked before extraction [e.g. edges]
    clean   -> clean sources 
    outspec -> where to store output spectra 
    marz    -> also write spectra in marz format (requires spectra=True).
               If set to a numerical value, this is used as an r-band magnitude limit.
    rphot   -> perform r-band aperture photometry and add r-band magnitudes to the catalogue
    sname   -> prefix for the source names. Default = MUSE

    """

    import sep
    from astropy.io import fits
    from astropy import wcs
    from astropy import coordinates
    from astropy import units as u
    from astropy import table
    import numpy as np
    import os
    try:
        from mypython.ifu import muse_utils as utl
        from mypython.fits import pyregmask as msk
    except ImportError:
        from mypython import ifu
        from ifu import muse_utils as utl
        from mypython import fits
        from fits import pyregmask as msk
        from astropy.io import fits
    from shutil import copyfile
    import glob

    #open image
    img=fits.open(image)
    try:
        header=img[1].header
    except:
        header= img[0].header
    imgwcs = wcs.WCS(header)
    try:
        #this is ok for narrow band images 
        data=img[1].data
    except:
        #white cubex images
        data=img[0].data
    data=data.byteswap(True).newbyteorder()
    #grab effective dimension
    nex,ney=data.shape
    #close fits
    img.close()

    #create bad pixel mask
    if(regmask):
        Mask=msk.PyMask(ney,nex,regmask,header=img[0].header)
        for ii in range(Mask.nreg):
            Mask.fillmask(ii)
            if(ii == 0):
                badmask=Mask.mask
            else:
                badmask+=Mask.mask
            badmask=1.*badmask
    else:
        badmask=np.zeros((nex,ney))

    if(check):
        print('Dumping badmask')
        hdumain  = fits.PrimaryHDU(badmask,header=header)
        hdulist = fits.HDUList([hdumain])
        hdulist.writeto(output+"/badmask.fits",overwrite=True)
    

    #check background level, but do not subtract it
    print('Checking background levels')
    bkg = sep.Background(data,mask=badmask)    
    print('Residual background level ', bkg.globalback)
    print('Residual background rms ', bkg.globalrms)

    if(check):
        print('Dumping sky...')
        #dump sky properties
        back = bkg.back() 
        rms = bkg.rms()  
        hdumain  = fits.PrimaryHDU(back,header=header)
        hdubk  = fits.ImageHDU(back)
        hdurms  = fits.ImageHDU(rms)
        hdulist = fits.HDUList([hdumain,hdubk,hdurms])
        hdulist.writeto(output+"/skyprop.fits",overwrite=True)

    #extracting sources at nsigma
    thresh = nsig * bkg.globalrms
    # segmap = np.zeros((header["NAXIS1"],header["NAXIS2"]))
    objects, segmap=sep.extract(data,thresh,segmentation_map=True,
                               minarea=minarea,clean=clean,mask=badmask,deblend_cont=0.0001)
    print("Extracted {} objects... ".format(len(objects)))
    
    
    if(spectra):
        if not os.path.exists(outspec):
            os.makedirs(outspec)

    if((check) | (spectra)):
        #create a detection mask alla cubex
        srcmask=np.zeros((1,data.shape[0],data.shape[1]))
        nbj=1
        print('Generating spectra...')
        #loop over detections
        for obj in objects:
            #init mask
            tmpmask=np.zeros((data.shape[0],data.shape[1]),dtype=np.bool)
            tmpmask3d=np.zeros((1,data.shape[0],data.shape[1]),dtype=np.bool)
            #fill this mask
            sep.mask_ellipse(tmpmask,obj['x'],obj['y'],obj['a'],obj['b'],obj['theta'],r=2)
            tmpmask3d[0,:,:]=tmpmask[:,:]
            srcmask=srcmask+tmpmask3d*nbj
            if(spectra):
                savename="{}/id{}.fits".format(outspec,nbj)
                if not os.path.exists(savename):
                    utl.cube2spec(cube,obj['x'],obj['y'],None,write=savename,
                                shape='mask',helio=helio,mask=tmpmask3d,tovac=True)
                else:
                    print("{} already exists. Skipping it...".format(savename))
            #go to next
            nbj=nbj+1

    if(check):
        print('Dumping source mask...')
        hdumain  = fits.PrimaryHDU(srcmask,header=header)
        hdubk  = fits.ImageHDU(srcmask)
        hdulist = fits.HDUList([hdumain,hdubk])
        hdulist.writeto(output+"/source.fits",overwrite=True)
        
        print('Dumping segmentation map')
        hdumain  = fits.PrimaryHDU(segmap,header=header)
        hdubk  = fits.ImageHDU(segmap)
        hdulist = fits.HDUList([hdumain,hdubk])
        hdulist.writeto(output+"/segmap.fits",overwrite=True)
    
    #Generate source names using coordinates and name prefix
    ra, dec = imgwcs.wcs_pix2world(objects['x'], objects['y'],0)
    coord = coordinates.FK5(ra*u.degree, dec*u.degree)
    rastr  = coord.ra.to_string(u.hour, precision=2, sep='')
    decstr = coord.dec.to_string(u.degree, precision=1, sep='', alwayssign=True)
    name = [sname+'J{0}{1}'.format(rastr[k], decstr[k]) for k in range(len(rastr))]
    ids  = np.arange(len(name))

    #write source catalogue
    print('Writing catalogue..')
    tab = table.Table(objects)
    tab.add_column(table.Column(name),0,name='name')
    tab.add_column(table.Column(ids),0,name='ID')
    tab.write(output+'/catalogue.fits',overwrite=True)
    
    #cols = fits.ColDefs(objects)
    #cols.add_col(fits.Column(name, format='A'))
    #tbhdu = fits.BinTableHDU.from_columns(cols)
    #tbhdu.writeto(output+'/catalogue.fits',clobber=True)
    
    #rband photometry
    if (rphot):
        if not os.path.exists(output+'/Image_R.fits'):
            rimg, rvar, rwcsimg = utl.cube2img(cube, filt=129, write=output+'/Image_R.fits')
        phot_r = sourcephot(output+'/catalogue.fits', output+'/Image_R.fits', output+'/segmap.fits', image)
        phot_r.add_column(table.Column(name),1,name='name')

        tbhdu = fits.open(output+'/catalogue.fits')[1]
        tbhdu2 = fits.BinTableHDU(phot_r)
        hdulist = fits.HDUList([fits.PrimaryHDU(), tbhdu, tbhdu2])
        hdulist.writeto(output+'/catalogue.fits',overwrite=True)

    if((marz) & (spectra)):
        #if marz is True but no magnitude limit set, create marz file for whole catalogue
        if marz==True:
            marz_file(image, output+'/catalogue.fits', outspec, output)
        else:
            #create folder and catalogue with just sources brighter than mag limit
            if os.path.exists(output + '/spectra_r' + str(marz)):
                files = glob.glob(output + '/spectra_r' + str(marz) + '/*')
                for f in files:
                    os.remove(f)
            else:
                os.mkdir(output + '/spectra_r' + str(marz))
            
            mag = phot_r['MAGSEG']

            #add in x y pixels from original catalogue
            x, y = tbhdu.data['x'], tbhdu.data['y']
            phot_r['x'], phot_r['y'] = x, y

            #add in ra,dec 
            img = fits.open(image)
            mywcs = wcs.WCS(img[0].header)
            ra, dec = mywcs.all_pix2world(x,y,0)
            phot_r['RA'] = ra
            phot_r['dec'] = dec

            for i in range(len(mag)):
                if mag[i] < marz:
                    copyfile(output + '/spectra/id' + str(i + 1) + '.fits',
                             output + '/spectra_r' + str(marz) + '/id' + str(i + 1) + '.fits')

            #Write photometry catalog with objects below magnitude limit excluded
            phot_r.remove_rows(phot_r['MAGSEG'] > marz)
            catalogue_lim_name = (output + '/catalogue_r' + 
                                  str(marz) +'.fits')
            if os.path.exists(catalogue_lim_name):
                os.remove(catalogue_lim_name)
            phot_r.write(catalogue_lim_name)

            outspec = output + '/spectra_r' + str(marz)
            marz_file(image, output+'/catalogue_r' + str(marz) +'.fits', outspec, output, r_lim=marz)

    
    print('All done')
    return objects
Example 5
def combine_cubes(cubes, masks, regions=True, final=False):
    """
    Combine a bunch of cubes using masks with CubeCombine
        
    cubes    -> a list of cubes to use in the combine
    masks    -> a list of goodpix masks from the pipeline
    regions  -> if True, the code searches the same path for ds9 region files (.reg)
                named after the pipeline masks, flagging additional areas to clip
    final    -> if True, append the final tag to the names and prepare median cubes

    """
    import subprocess
    import os
    import numpy as np
    from astropy.io import fits
    from mypython.fits import pyregmask as msk

    #define some names for the cubes
    if (final):
        cname = "COMBINED_CUBE_FINAL.fits"
        iname = "COMBINED_IMAGE_FINAL.fits"
        cmed = "COMBINED_CUBE_MED_FINAL.fits"
        imed = "COMBINED_IMAGE_MED_FINAL.fits"
    else:
        cname = "COMBINED_CUBE.fits"
        iname = "COMBINED_IMAGE.fits"
        cmed = "COMBINED_CUBE_MED.fits"
        imed = "COMBINED_IMAGE_MED.fits"

    if (os.path.isfile(cname)):
        print('Cube {} already exists... skip!'.format(cname))
    else:
        print('Creating combined cube {}'.format(cname))

        if (regions):
            print "Updating the masks"

            #loads list
            listmask = np.loadtxt(masks, dtype=np.dtype('a'))

            #redefine new mask
            mask_new = "new_" + masks
            llms = open(mask_new, "w")

            #loop over and update with regions
            for i, cmask in enumerate(listmask):

                #create region name
                regname = (cmask.split(".fits")[0]) + ".reg"

                #search if file exist
                if (os.path.isfile(regname)):

                    #update the mask
                    print("Updating mask for {}".format(regname))

                    #open fits
                    cfits = fits.open(cmask)

                    #init reg mask
                    Mask = msk.PyMask(cfits[0].header["NAXIS1"],
                                      cfits[0].header["NAXIS2"], regname)
                    for ii in range(Mask.nreg):
                        Mask.fillmask(ii)
                        if (ii == 0):
                            totmask = Mask.mask
                        else:
                            totmask += Mask.mask

                    #update the mask
                    cfits[0].data = cfits[0].data * 1 * np.logical_not(totmask)
                    savename = cmask.split(".fits")[0] + '_wreg.fits'
                    cfits.writeto(savename, clobber=True)
                    llms.write(savename + '\n')

                else:
                    #keep current mask
                    llms.write(cmask + '\n')

            #done with new masks
            llms.close()

        else:
            print('Using original masks')
            mask_new = masks

        #now run combine
        print('Combine the cube...')

        #make mean cube - write this as a script that can be run independently
        if (final):
            scriptname = 'runcombine_final.sh'
        else:
            scriptname = 'runcombine.sh'

        scr = open(scriptname, 'w')
        scr.write("export OMP_NUM_THREADS=1\n")
        scr.write("CubeCombine -list " + cubes + " -out " + cname +
                  " -masklist " + mask_new + "\n")
        scr.write("Cube2Im -cube " + cname + " -out " + iname + "\n")
        scr.write("CubeCombine -list " + cubes + " -out " + cmed +
                  " -masklist " + mask_new + " -comb median\n")
        scr.write("Cube2Im -cube " + cmed + " -out " + imed)
        scr.close()
        subprocess.call(["sh", scriptname])
Example 6
def fixandsky_secondpass(cube, pixtab, noclobber, highsn=None, skymask=None):
    """ 
 
    Similar to first pass, but operates on cubes that have been realigned and uses masks 
    as appropriate

    If a highsn cube is provided, use it to mask the sources and for the sky subtraction
 
    """

    import os
    import subprocess
    import numpy as np
    from astropy.io import fits
    from mypython.fits import pyregmask as pmk

    if (highsn):
        #prepare final names
        fixed = cube.split('.fits')[0] + "_fixhsn.fits"
        skysub = cube.split('.fits')[0] + "_skysubhsn.fits"
        white = cube.split('.fits')[0] + "_whitehsn.fits"
        sharpmsk = cube.split('.fits')[0] + "_sharpmasksn.fits"
    else:
        #prepare intermediate names
        fixed = cube.split('.fits')[0] + "_fix2.fits"
        skysub = cube.split('.fits')[0] + "_skysub2.fits"
        white = cube.split('.fits')[0] + "_white2.fits"
        sharpmsk = cube.split('.fits')[0] + "_sharpmask2.fits"

    #assign names for source mask
    mask_source = cube.split('.fits')[0] + "_white.Objects_Id.fits"
    white_source = cube.split('.fits')[0] + "_white.fits"

    #now fix the cube using masks
    if ((os.path.isfile(fixed)) & (noclobber)):
        print "Cube {0} already fixed".format(cube)
    else:

        print('Create source mask ', white_source)
        #if high cube provide, overwrite white image
        if (highsn):
            print('Using high SN cube...')
            subprocess.call(["Cube2Im", "-cube", highsn, "-out", white_source])
            subprocess.call([
                "CubEx", white_source, '-MultiExt', '.false.', '-SN_Threshold',
                '3', '-RescaleVar', '.true.'
            ])
        else:
            print('Using white image from previous loop')
            #create source mask
            subprocess.call([
                "CubEx", white_source, '-MultiExt', '.false.', '-SN_Threshold',
                '5', '-RescaleVar', '.true.'
            ])

        print('Cubefix ', cube)
        subprocess.call([
            "CubeFix", "-cube", cube, "-pixtable", pixtab, "-out", fixed,
            "-sourcemask", mask_source
        ])

        #At this step, check out cubeAdd2Mask if want to fix edges or weird ifus/slices

    #if told to mask sky do it.. otherwise leave image empty
    cb = fits.open(cube)
    nx = cb[1].header['NAXIS1']
    ny = cb[1].header['NAXIS2']
    sharpmask = np.zeros((ny, nx))
    if (skymask):
        #construct the sky region mask
        mysky = pmk.PyMask(nx, ny, "../../" + skymask, header=cb[1].header)
        for ii in range(mysky.nreg):
            mysky.fillmask(ii)
            sharpmask = sharpmask + mysky.mask
    cb.close()

    #inject src mask in sharpmask
    srcmk = fits.open(mask_source)
    sharpmask = sharpmask + srcmk[0].data

    #write mask
    hdu = fits.PrimaryHDU(sharpmask)
    hdulist = fits.HDUList([hdu])
    hdulist.writeto(sharpmsk, clobber=True)

    #now run cube skysub
    if ((os.path.isfile(skysub)) & (noclobber)):
        print "Cube {0} already skysub".format(fixed)
    else:
        print('Sky sub ', fixed)
        if (highsn):
            #now few more options to control sky sub
            subprocess.call([
                "CubeSharp", "-cube", fixed, "-out", skysub, "-sourcemask",
                sharpmsk, "-hsncube", highsn, "-lcheck", ".false."
            ])
        else:
            subprocess.call([
                "CubeSharp", "-cube", fixed, "-out", skysub, "-sourcemask",
                sharpmsk
            ])

    #create a white image
    if ((os.path.isfile(white)) & (noclobber)):
        print "White image for cube {0} already exists".format(skysub)
    else:
        print('Create white image for ', skysub)
        subprocess.call(["Cube2Im", "-cube", skysub, "-out", white])
Example 7
def findsources(image,
                cube,
                check=False,
                output='./',
                spectra=False,
                helio=0,
                nsig=2.,
                minarea=10.,
                regmask=None,
                clean=True,
                outspec='Spectra'):
    """      

    Take a detection image (a collapse of the cube, a median
    of an RGB, or anything else, as long as it is aligned to the cube)
    and run the source extractor 

   
    Use SEP utilities http://sep.readthedocs.org/en/stable/

    image   -> fits file of image to process
    check   -> if True, write a set of check images
    output  -> where to dump the output
    cube    -> the cube used to extract spectra
    spectra -> if True, extract spectra in VACUUM wave!!
    helio   -> pass additional heliocentric correction
    nsig    -> number of skyrms used for source id 
    minarea -> minimum area for extraction 
    regmask -> ds9 region file (image) of regions to be masked before extraction [e.g. edges]
    clean   -> clean sources 
    outspec -> where to store output spectra 

    """

    import sep
    from astropy.io import fits
    import numpy as np
    import os
    from mypython.ifu import muse_utils as utl
    from mypython.fits import pyregmask as msk

    #open image
    img = fits.open(image)
    header = img[0].header
    try:
        #this is ok for narrow band images
        data = img[1].data
    except:
        #white cubex images
        data = img[0].data
    data = data.byteswap(True).newbyteorder()
    #grab effective dimension
    nex, ney = data.shape
    #close fits
    img.close()

    #create bad pixel mask
    if (regmask):
        Mask = msk.PyMask(ney, nex, regmask)
        for ii in range(Mask.nreg):
            Mask.fillmask(ii)
            if (ii == 0):
                badmask = Mask.mask
            else:
                badmask += Mask.mask
            badmask = 1. * badmask
    else:
        badmask = np.zeros((nex, ney))

    if (check):
        print('Dumping badmask')
        hdumain = fits.PrimaryHDU(badmask, header=header)
        hdulist = fits.HDUList([hdumain])
        hdulist.writeto(output + "/badmask.fits", clobber=True)

    #check background level, but do not subtract it
    print('Checking background levels')
    bkg = sep.Background(data, mask=badmask)
    print('Residual background level ', bkg.globalback)
    print('Residual background rms ', bkg.globalrms)

    if (check):
        print('Dumping sky...')
        #dump sky properties
        back = bkg.back()
        rms = bkg.rms()
        hdumain = fits.PrimaryHDU(back, header=header)
        hdubk = fits.ImageHDU(back)
        hdurms = fits.ImageHDU(rms)
        hdulist = fits.HDUList([hdumain, hdubk, hdurms])
        hdulist.writeto(output + "/skyprop.fits", clobber=True)

    #extracting sources at nsigma
    thresh = nsig * bkg.globalrms
    segmap = np.zeros((header["NAXIS1"], header["NAXIS2"]))
    objects, segmap = sep.extract(data,
                                  thresh,
                                  segmentation_map=True,
                                  minarea=minarea,
                                  clean=clean,
                                  mask=badmask)
    print "Extracted {} objects... ".format(len(objects))

    if (spectra):
        if not os.path.exists(outspec):
            os.makedirs(outspec)

    if ((check) | (spectra)):
        #create a detection mask alla cubex
        srcmask = np.zeros((1, data.shape[0], data.shape[1]))
        nbj = 1
        print('Generating spectra...')
        #loop over detections
        for obj in objects:
            #init mask
            tmpmask = np.zeros((data.shape[0], data.shape[1]), dtype=np.bool)
            tmpmask3d = np.zeros((1, data.shape[0], data.shape[1]),
                                 dtype=np.bool)
            #fill this mask
            sep.mask_ellipse(tmpmask,
                             obj['x'],
                             obj['y'],
                             obj['a'],
                             obj['b'],
                             obj['theta'],
                             r=2)
            tmpmask3d[0, :, :] = tmpmask[:, :]
            srcmask = srcmask + tmpmask3d * nbj
            if (spectra):
                savename = "{}/id{}.fits".format(outspec, nbj)
                utl.cube2spec(cube,
                              obj['x'],
                              obj['y'],
                              None,
                              write=savename,
                              shape='mask',
                              helio=helio,
                              mask=tmpmask3d,
                              tovac=True)
            #go to next
            nbj = nbj + 1

    if (check):
        print('Dumping source mask...')
        hdumain = fits.PrimaryHDU(srcmask, header=header)
        hdubk = fits.ImageHDU(srcmask)
        hdulist = fits.HDUList([hdumain, hdubk])
        hdulist.writeto(output + "/source.fits", clobber=True)

        print('Dumping segmentation map')
        hdumain = fits.PrimaryHDU(segmap, header=header)
        hdubk = fits.ImageHDU(segmap)
        hdulist = fits.HDUList([hdumain, hdubk])
        hdulist.writeto(output + "/segmap.fits", clobber=True)

    #write source catalogue
    print('Writing catalogue..')
    cols = fits.ColDefs(objects)
    tbhdu = fits.BinTableHDU.from_columns(cols)
    tbhdu.writeto(output + '/catalogue.fits', clobber=True)

    print('All done')
    return objects
Example 8
def fixandsky_firstpass(cube, pixtab, noclobber, skymask=None):
    """ 
    Take a cube and pixel table and fix the cube, then skysub and produce white image,
    using CubEx utils

    If skymask is set, mask regions when computing the normalisation 

    """

    import os
    import subprocess
    import numpy as np
    from astropy.io import fits
    from mypython.fits import pyregmask as pmk
    import matplotlib.pyplot as plt

    #make some intermediate names
    fixed = cube.split('.fits')[0] + "_fix.fits"
    skysub = cube.split('.fits')[0] + "_skysub.fits"
    white = cube.split('.fits')[0] + "_white.fits"
    sharpmsk = cube.split('.fits')[0] + "_sharpmask.fits"

    #if told to mask sky do it.. otherwise leave image empty
    cb = fits.open(cube)
    nx = cb[1].header['NAXIS1']
    ny = cb[1].header['NAXIS2']
    sharpmask = np.zeros((ny, nx))
    if (skymask):
        #construct the sky region mask
        mysky = pmk.PyMask(nx, ny, "../../" + skymask, header=cb[1].header)
        for ii in range(mysky.nreg):
            mysky.fillmask(ii)
            sharpmask = sharpmask + mysky.mask
    #write mask
    hdu = fits.PrimaryHDU(sharpmask)
    hdulist = fits.HDUList([hdu])
    hdulist.writeto(sharpmsk, clobber=True)
    cb.close()

    #now fix the cube
    if ((os.path.isfile(fixed)) & (noclobber)):
        print "Cube {0} already fixed".format(cube)
    else:
        print('Cubefix ', cube)
        subprocess.call(
            ["CubeFix", "-cube", cube, "-pixtable", pixtab, "-out", fixed])

    #now run cube skysub
    if ((os.path.isfile(skysub)) & (noclobber)):
        print "Cube {0} already skysub".format(fixed)
    else:
        print('Sky sub ', fixed)
        subprocess.call([
            "CubeSharp", "-cube", fixed, "-out", skysub, "-sourcemask",
            sharpmsk, "-lcheck", ".false."
        ])

    #create a white image
    if ((os.path.isfile(white)) & (noclobber)):
        print "White image for cube {0} already exists".format(skysub)
    else:
        print('Create white image for ', skysub)
        subprocess.call(["Cube2Im", "-cube", skysub, "-out", white])
Example 9
def fixandsky_secondpass(cube,pixtab,noclobber,highsn=None,skymask=None,exthsnmask=None,version='1.6'):
        
    """ 
 
    Similar to first pass, but operates on cubes that have been realigned and uses masks 
    as appropriate

    If a highsn cube is provided, use it to mask the sources and for the sky subtraction
 
    """
      
    import os 
    import subprocess
    import numpy as np
    from astropy.io import fits
    from mypython.fits import pyregmask as pmk


    #define the step to catch error
    if('1.8' in version):
        errorstep='4'
    else:
        errorstep='3'


    if(highsn):
        #prepare final names
        fixed=cube.split('.fits')[0]+"_fixhsn.fits"
        skysub=cube.split('.fits')[0]+"_skysubhsn.fits"
        white=cube.split('.fits')[0]+"_whitehsn.fits"
        sharpmsk=cube.split('.fits')[0]+"_sharpmaskhsn.fits"
    else:
        #prepare intermediate names
        fixed=cube.split('.fits')[0]+"_fix2.fits"
        skysub=cube.split('.fits')[0]+"_skysub2.fits"
        white=cube.split('.fits')[0]+"_white2.fits"
        sharpmsk=cube.split('.fits')[0]+"_sharpmask2.fits"

    #now fix the cube using masks
    if ((os.path.isfile(fixed)) & (noclobber)):
        print("Cube {0} already fixed".format(cube))
    else:
        print('Create source mask...')
        if(highsn):
            #The final mask will always have the same name and will overwrite existing file
            mask_source=cube.split('.fits')[0]+"_whitedeep.Objects_Id.fits"
            if exthsnmask:
              print('Using external HSN mask...')
              subprocess.call(["cp", exthsnmask, mask_source])
            else:
              print('Using high SN cube...')
              #create source mask from deep exposure 
              white_source=cube.split('.fits')[0]+"_whitedeep.fits"
              subprocess.call(["Cube2Im","-cube",highsn,"-out",white_source])
              subprocess.call(["CubEx",white_source,'-MultiExt','.false.','-ApplyFilter','.true.','-ApplyFilterVar','.true.','-FilterXYRad','1','-SN_Threshold','7','-MinNSpax','5'])
        else:
            print('Using white image from previous loop')
            #create source mask from previous step
            mask_source=cube.split('.fits')[0]+"_white.Objects_Id.fits"
            white_source=cube.split('.fits')[0]+"_white.fits"
            subprocess.call(["CubEx",white_source,'-MultiExt','.false.','-SN_Threshold','4.5','-RescaleVar','.true.'])
            
        print('Cubefix ', cube)
        subprocess.call(["CubeFix","-cube", cube,"-pixtable", pixtab,"-out", fixed,"-sourcemask",mask_source])         
        #catch weird cases in which cubefix crashes
        if not os.path.isfile(fixed):
            #try again with last step only
            print('Redo fix with last step only')
            subprocess.call(["CubeFix","-cube", cube,"-pixtable", pixtab,"-out", fixed,"-sourcemask",mask_source,"-step",errorstep]) 


        #At this step, check out cubeAdd2Mask if want to fix edges or weird ifus/slices 

        #if told to mask a particular sky region do it.. otherwise leave image empty
        cb=fits.open(cube)
        nx=cb[1].header['NAXIS1']
        ny=cb[1].header['NAXIS2']
        sharpmask=np.zeros((ny,nx))
        if(skymask):
            #construct the sky region mask
            mysky=pmk.PyMask(nx,ny,"../../"+skymask,header=cb[1].header)
            for ii in range(mysky.nreg):
                mysky.fillmask(ii)
                sharpmask=sharpmask+mysky.mask
        cb.close()

        #inject the source mask into the sharpmask
        srcmk=fits.open(mask_source)
        sharpmask=sharpmask+srcmk[0].data
        srcmk.close()

        #write mask
        hdu = fits.PrimaryHDU(sharpmask)
        hdulist = fits.HDUList([hdu])
        hdulist.writeto(sharpmsk,overwrite=True)

    #now run cube skysub
    if ((os.path.isfile(skysub)) & (noclobber)):
        print("Cube {0} already skysub".format(fixed))
    else:
        print('Sky sub ', fixed)
        if(highsn):
            #now few more options to control sky sub 
            subprocess.call(["CubeSharp","-cube",fixed,"-out",skysub,"-sourcemask",sharpmsk,"-hsncube",highsn,"-lsig","5"])
        else:
            subprocess.call(["CubeSharp","-cube",fixed,"-out",skysub,"-sourcemask",sharpmsk])
                               
    #create a white image
    if ((os.path.isfile(white)) & (noclobber)):
        print("White image for cube {0} already exists".format(skysub))
    else:
        print('Create white image for ', skysub)
        subprocess.call(["Cube2Im","-cube",skysub,"-out",white])
Example No. 10
def selfcalibrate(listob,
                  deepwhite,
                  refpath='esocombine',
                  nproc=24,
                  extmask=None,
                  extmaskonly=False):
    """

    Loop over each OB and perform self-calibration after masking sources.

    listob -> OBs to process
    deepwhite -> the best white image available to mask sources
    refpath -> where to find a white image to be used as reference wcs grid
    nproc -> number of processors
    extmask -> ds9 region file in image coordinates defining the regions to be masked
    extmaskonly -> if True use only the externally supplied mask; if False merge the source mask and the external mask

    """
    import os
    import glob
    import subprocess
    import shutil
    from astropy.io import fits
    import muse_utils as mut
    import numpy as np
    import sep
    from mpdaf.drs import PixTable
    from mypython.fits import pyregmask as pmk

    #grab top dir
    topdir = os.getcwd()

    #now loop over each folder and make the final sky-subtracted cubes
    for ob in listob:

        #change dir
        os.chdir(ob + '/Proc/MPDAF')

        print("Processing {} for self-calibration".format(ob))

        #make source mask
        srcmask = 'selfcalib_mask.fits'

        if (os.path.isfile(srcmask)):
            print("Source mask already exists!")
        else:
            print("Create source mask for selfcalibration")

            #open the ifu mask to create a good mask
            deepdata = fits.open("../../../" + deepwhite)

            #now flag the sources (allow cubex/eso images)
            try:
                image = deepdata[0].data.byteswap().newbyteorder()
                datheader = deepdata[0].header
            except Exception:
                image = deepdata[1].data.byteswap().newbyteorder()
                datheader = deepdata[1].header

            bkg = sep.Background(image)
            bkg.subfrom(image)
            obj, segmap = sep.extract(image,
                                      5. * bkg.globalrms,
                                      minarea=6,
                                      segmentation_map=True)
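            #binarise the segmentation map: every pixel belonging to a detected source becomes 1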
            segmap[np.where(segmap > 0)] = 1

            #write source mask to disk
            hdu = fits.PrimaryHDU(segmap, header=datheader)
            hdu.writeto(srcmask, overwrite=True)

        if (extmask):
            extfitsmask = 'ext_mask.fits'

            #Read geometry from automatic mask
            srchdu = fits.open(srcmask)
            srchead = srchdu[0].header
            srcmsk = srchdu[0].data
            extmsk = np.zeros_like(srcmsk)
            nx = srchead['NAXIS1']
            ny = srchead['NAXIS2']

            #construct the sky region mask
            mysky = pmk.PyMask(nx,
                               ny,
                               "../../../" + extmask,
                               header=srchdu[0].header)
            for ii in range(mysky.nreg):
                mysky.fillmask(ii)
                extmsk = extmsk + mysky.mask

            outhdu = fits.PrimaryHDU(extmsk, header=srchead)
            outhdu.writeto(extfitsmask, overwrite=True)

            srchdu.close()

        if (extmask and not extmaskonly):
            namecombmask = 'extsrc_mask.fits'

            finalmsk = extmsk + srcmsk
            finalmsk[finalmsk > 0] = 1
            outhdu = fits.PrimaryHDU(finalmsk, header=srchead)
            outhdu.writeto(namecombmask, overwrite=True)
            finalmask = namecombmask
        elif (extmask and extmaskonly):
            finalmask = extfitsmask
        else:
            finalmask = srcmask

        #now loop over exposures and apply self calibration
        scils = glob.glob("../Basic/OBJECT_RED_0*.fits*")
        nsci = len(scils)

        #loop on exposures and apply self calibration
        for exp in range(nsci):

            pixselfcal = "PIXTABLE_REDUCED_RESAMPLED_EXP{0:d}_fix.fits".format(
                exp + 1)
            if not os.path.isfile(pixselfcal):
                print("Apply self-calibration to {}".format(pixselfcal))

                #open reduced pixel table
                pix = PixTable(
                    "PIXTABLE_REDUCED_RESAMPLED_EXP{0:d}.fits".format(exp + 1))

                #create mask
                maskpix = pix.mask_column(finalmask)
                maskpix.write(
                    "PIXTABLE_REDUCED_RESAMPLED_EXP{0:d}_mask.fits".format(
                        exp + 1))
                #selfcalibrate
                autocalib = pix.selfcalibrate(pixmask=maskpix)

                #write to disk
                autocalib.write(
                    "PIXTABLE_REDUCED_RESAMPLED_EXP{0:d}_autocalib.fits".
                    format(exp + 1))
                pix.write(pixselfcal)

            else:
                print("Self-calibration for {} already done! Skip...".format(
                    pixselfcal))

            cubeselfcal = "DATACUBE_RESAMPLED_EXP{0:d}_fix.fits".format(exp +
                                                                        1)
            imageselfcal = "IMAGE_RESAMPLED_EXP{0:d}_fix.fits".format(exp + 1)
            if not os.path.isfile(cubeselfcal):
                print('Reconstruct cube {}'.format(cubeselfcal))

                #now reconstruct cube and white image on right reference frame
                #handle sof file
                sof_name = "../../Script/scipost_mpdaf_self{0:d}.sof".format(
                    exp + 1)
                sofedit = open(sof_name, 'w')
                sofedit.write(
                    '../../../{}/DATACUBE_FINAL.fits OUTPUT_WCS\n'.format(
                        refpath))
                sofedit.write(
                    "PIXTABLE_REDUCED_RESAMPLED_EXP{0:d}_fix.fits PIXTABLE_OBJECT\n"
                    .format(exp + 1))
                sofedit.close()

                #now run script
                scr = open(
                    "../../Script/make_scipost_mpdaf_self{0:d}.sh".format(exp +
                                                                          1),
                    "w")
                scr.write("OMP_NUM_THREADS={0:d}\n".format(nproc))
                scr.write(
                    'esorex --log-file=scipost_mpdaf_self{0:d}.log muse_scipost_make_cube  ../../Script/scipost_mpdaf_self{0:d}.sof'
                    .format(exp + 1))
                scr.close()

                #Run pipeline
                subprocess.call([
                    "sh",
                    "../../Script/make_scipost_mpdaf_self{0:d}.sh".format(exp +
                                                                          1)
                ])
                subprocess.call(["mv", "DATACUBE_FINAL.fits", cubeselfcal])
                subprocess.call(["mv", "IMAGE_FOV_0001.fits", imageselfcal])

            else:
                print('Cube {} already exists! Skip...'.format(cubeselfcal))

        #back to top
        os.chdir(topdir)
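
A hedged example of how selfcalibrate above might be invoked from the top-level reduction directory; the OB folder names, the deep white image, and the region file are placeholders.

#illustrative call (OB names, white image, and region file are placeholders)
selfcalibrate(['OB1', 'OB2'],
              'COMBINED_IMAGE_FINAL.fits',
              refpath='esocombine',
              nproc=12,
              extmask='extra_sky.reg',
              extmaskonly=False)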
Example No. 11
def zapskysub(listob, extmask=None, extmaskonly=False):
    """

    Loop over each OB and perform ZAP sky subtraction

    listob -> OBs to process
    extmask -> ds9 region file in image coordinates defining additional regions to be masked
    extmaskonly -> if True use only the externally supplied mask; if False merge the source mask and the external mask
 
    """

    import os
    import glob
    import subprocess
    import shutil
    from astropy.io import fits
    import muse_utils as mut
    import numpy as np
    import sep
    import zap
    from mypython.fits import pyregmask as pmk

    #grab top dir
    topdir = os.getcwd()

    #now loop over each folder and make the final sky-subtracted cubes
    for ob in listob:

        #change dir
        os.chdir(ob + '/Proc/MPDAF')

        #use source mask already available (should we assume it is always available?)
        srcmask = 'selfcalib_mask.fits'

        if (extmask):
            extfitsmask = 'ext_mask.fits'

            srchdu = fits.open(srcmask)
            srcmsk = srchdu[0].data
            extmsk = np.zeros_like(srcmsk)
            nx = srchdu[0].header['NAXIS1']
            ny = srchdu[0].header['NAXIS2']

            #construct the sky region mask
            mysky = pmk.PyMask(nx,
                               ny,
                               "../../../" + extmask,
                               header=srchdu[0].header)
            for ii in range(mysky.nreg):
                mysky.fillmask(ii)
                extmsk = extmsk + mysky.mask

            outhdu = fits.PrimaryHDU(extmsk)
            outhdu.writeto(extfitsmask, overwrite=True)

            srchdu.close()

        if (extmask and not extmaskonly):
            #merge the external mask with the source mask
            namecombmask = 'extsrc_mask.fits'

            finalmsk = extmsk + srcmsk
            finalmsk[finalmsk > 0] = 1
            outhdu = fits.PrimaryHDU(finalmsk)
            outhdu.writeto(namecombmask, overwrite=True)
            finalmask = namecombmask
        elif (extmask and extmaskonly):
            #use only the externally supplied mask
            finalmask = extfitsmask
        else:
            finalmask = srcmask

        #collect the science exposures
        scils = glob.glob("../Basic/OBJECT_RED_0*.fits*")
        nsci = len(scils)

        print("Processing {} with ZAP".format(ob))

        #loop on exposures and apply ZAP sky subtraction
        for exp in range(nsci):

            #these are the self-calibrated data
            cubeselfcal = "DATACUBE_RESAMPLED_EXP{0:d}_fix.fits".format(exp +
                                                                        1)
            imageselfcal = "IMAGE_RESAMPLED_EXP{0:d}_fix.fits".format(exp + 1)

            #these are the sky subtracted products
            cubezap = "DATACUBE_RESAMPLED_EXP{0:d}_zap.fits".format(exp + 1)
            imagezap = "IMAGE_RESAMPLED_EXP{0:d}_zap.fits".format(exp + 1)

            if not os.path.isfile(cubezap):

                print('Reconstruct cube {} with ZAP'.format(cubezap))
                #check if the first 50 slices are all NaN (typically from mixing extended- and nominal-mode exposures)
                currcube = fits.open(cubeselfcal)
                slice50 = currcube[1].data[0:50, :, :]
                goodpix = np.count_nonzero(np.isfinite(slice50))

                if (goodpix < 1):
                    #scan layers to find first with value
                    endslice = 1
                    while (goodpix < 1):
                        slice50 = currcube[1].data[0:endslice, :, :]
                        goodpix = np.count_nonzero(np.isfinite(slice50))
                        endslice = endslice + 1

                    #find start index for good data
                    endslice = endslice - 2

                    #write tmp trimmed cube
                    hdu1 = fits.PrimaryHDU([], header=currcube[0].header)
                    hdu2 = fits.ImageHDU(currcube[1].data[endslice:, :, :],
                                         header=currcube[1].header)
                    hdu3 = fits.ImageHDU(currcube[2].data[endslice:, :, :],
                                         header=currcube[2].header)

                    #update wave solution
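                    #dropping the first endslice planes shifts the wavelength zero-point by endslice times the channel width CD3_3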
                    hdu2.header['CRVAL3'] = currcube[1].header[
                        'CRVAL3'] + endslice * currcube[1].header['CD3_3']
                    hdu3.header['CRVAL3'] = currcube[2].header[
                        'CRVAL3'] + endslice * currcube[2].header['CD3_3']

                    #write to new file
                    hdul = fits.HDUList([hdu1, hdu2, hdu3])
                    hdul.writeto('trimmed_' + cubeselfcal, overwrite=True)
                    currcube.close()

                    #now run zap
                    zap.process('trimmed_' + cubeselfcal,
                                outcubefits='trimmed_' + cubezap,
                                clean=True,
                                mask=finalmask)

                    #make copy of original cube
                    shutil.copy(cubeselfcal, cubezap)

                    #now update
                    longcube = fits.open(cubezap, mode='update')
                    shortcube = fits.open('trimmed_' + cubezap)

                    longcube[1].data[endslice:, :, :] = shortcube[1].data
                    longcube[2].data[endslice:, :, :] = shortcube[2].data

                    longcube.flush()
                    longcube.close()
                    shortcube.close()

                else:
                    #proceed with current data
                    currcube.close()
                    zap.process(cubeselfcal,
                                outcubefits=cubezap,
                                clean=True,
                                mask=finalmask)

                #create white image from zap cube
                cube = fits.open(cubezap)
                #define geometry
                nwave = cube[1].header["NAXIS3"]
                nx = cube[1].header["NAXIS1"]
                ny = cube[1].header["NAXIS2"]

                print('Creating final white image from ZAP')
                white_new = np.zeros((ny, nx))
                for xx in range(nx):
                    for yy in range(ny):
                        white_new[yy, xx] = np.nansum(cube[1].data[:, yy,
                                                                   xx]) / nwave

                #save projected image
                hdu1 = fits.PrimaryHDU([])
                hdu2 = fits.ImageHDU(white_new)
                hdu2.header = cube[1].header
                hdulist = fits.HDUList([hdu1, hdu2])
                hdulist.writeto(imagezap, overwrite=True)

            else:
                print('Cube {} already exists! Skip...'.format(cubezap))

        #back to top
        os.chdir(topdir)
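
A short usage sketch for the ZAP step above; it assumes selfcalibrate has already been run so that selfcalib_mask.fits and the *_fix.fits products exist in each OB folder, and the OB names and region file are placeholders.

#illustrative call, run after selfcalibrate (names are placeholders)
zapskysub(['OB1', 'OB2'], extmask='extra_sky.reg', extmaskonly=False)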
Example No. 12
def coaddcubes(listob, nclip=2.5):
    """

    Collect the ZAP-processed exposures from all OBs and make the final combined (mean and median) cubes

    listob -> OBs to process
    nclip -> threshold (in sigma) for the iterative sigma clipping

 
    """

    import os
    import glob
    import subprocess
    import shutil
    from astropy.io import fits
    import muse_utils as mut
    import numpy as np
    import matplotlib.pyplot as plt
    from mypython.fits import pyregmask as msk

    #names for median/mean output
    cubemed = "mpdafcombine/COMBINED_CUBE_MED_FINAL.fits"
    imagemed = "mpdafcombine/COMBINED_IMAGE_MED_FINAL.fits"
    cubemean = "mpdafcombine/COMBINED_CUBE_FINAL.fits"
    imagemean = "mpdafcombine/COMBINED_IMAGE_FINAL.fits"

    if ((not os.path.isfile(cubemed)) | (not os.path.isfile(cubemean))):
        print('Compute mean/median coadd {} {}'.format(cubemed, cubemean))

        #first collect all relevant exposures
        allexposures = []
        for ob in listob:
            finalexp = glob.glob(
                "{}/Proc/MPDAF/DATACUBE_RESAMPLED_EXP*_zap.fits".format(ob))
            allexposures += finalexp

        #loop over exposures for  combine
        for i, exp in enumerate(allexposures):
            print('Ingesting data {}'.format(exp))
            #open exposure
            data = fits.open(exp)

            #operations to do only once
            if (i == 0):
                nw, nx, ny = data[1].data.shape
                nexp = len(allexposures)
                datashape = (nexp, nw, nx, ny)
                alldata = np.ma.zeros(datashape)
                alldata.mask = np.full(datashape, False)
                headermain = data[0].header
                headerext = data[1].header
                headervar = data[2].header

            #store data
            alldata.data[i] = data[1].data

            #catch nans in this exposure
            alldata.mask[i] = np.logical_not(np.isfinite(data[1].data))

            data.close()

            #now handle mask for this exposure if available in cubex or mpdaf region
            mpdafmask = exp.split("DATACUBE")[0] + 'IMAGE' + exp.split(
                "DATACUBE")[1]
            mpdafreg = mpdafmask.split('fits')[0] + 'reg'
            cubexmask = exp.split("_RESAMPLED")
            cubexmask = cubexmask[0] + "_FINAL_RESAMPLED" + cubexmask[1]
            cubexmask = cubexmask.replace('MPDAF', 'Cubex')
            cubexmask1 = cubexmask.replace('_zap.fits',
                                           '_fixhsn_SliceEdgeMask_wreg.fits')
            cubexmask2 = cubexmask.replace('_zap.fits',
                                           '_fixhsn_SliceEdgeMask.fits')

            #handle mpdaf first
            if os.path.isfile(mpdafreg):
                #now fill using region
                Mask = msk.PyMask(ny, nx, mpdafreg)
                for ii in range(Mask.nreg):
                    Mask.fillmask(ii)
                    if (ii == 0):
                        slicemask = Mask.mask
                    else:
                        slicemask += Mask.mask
                slicecube = np.tile(slicemask, (nw, 1, 1))
                alldata.mask[i] = slicecube
                cubexmask = None
            #else, fall back to the cubex masks, preferring the _wreg version
            elif os.path.isfile(cubexmask1):
                cubexmask = cubexmask1
            elif os.path.isfile(cubexmask2):
                cubexmask = cubexmask2
            else:
                cubexmask = None

            #fill in cubex mask if available
            if (cubexmask):
                #mask edges
                fitsmask = fits.open(cubexmask)
                slicemask = np.full(fitsmask[0].data.shape, False)
                masked = np.where(fitsmask[0].data < 1)
                slicemask[masked] = True
                #grow cube
                slicecube = np.tile(slicemask, (nw, 1, 1))
                alldata.mask[i] = slicecube
                fitsmask.close()

        #sanitise output
        alldata = np.ma.masked_invalid(alldata)

        #iterative sigmaclipping
        for niter in range(3):
            #now catch significant outliers
            print(
                'Catching outliers with sigma clipping; loop {}'.format(niter +
                                                                        1))
            stdevtmp = alldata.std(axis=0)
            medtmp = np.ma.median(alldata, axis=0)

            #loop over exposures
            for i, exp in enumerate(allexposures):
                print('Masking outliers in {}'.format(exp))
                alldata[i] = np.ma.masked_where(
                    abs(alldata[i] - medtmp) >= nclip * stdevtmp, alldata[i])

        #compute the exposure map (number of contributing exposures per spaxel, averaged over wavelength)
        print('Compute exposure map')
        expcube = alldata.count(axis=0)
        expmap = np.sum(expcube, axis=0) / nw

        #at last perform median combine
        print('Computing median')
        medcube = np.ma.median(alldata, axis=0)

        #at last perform mean combine
        print('Computing mean')
        meancube = alldata.mean(axis=0)

        #now ingest variance
        for i, exp in enumerate(allexposures):
            print('Ingesting variance {}'.format(exp))
            #open exposure
            data = fits.open(exp)
            #store data
            alldata.data[i] = data[2].data
            data.close()

        print('Computing variance')
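        #variance of the mean: sum of the per-exposure variances divided by the square of the per-voxel exposure count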
        varcube = alldata.sum(axis=0) / expcube / expcube

        #save exposure time map
        hdu1 = fits.PrimaryHDU([])
        hdu2 = fits.ImageHDU(expmap)
        hdu2.header = headerext
        hdulist = fits.HDUList([hdu1, hdu2])
        hdulist.writeto("mpdafcombine/FINAL_COADD_EXPOSUREMAP.fits",
                        overwrite=True)

        #save output median
        hdu = fits.PrimaryHDU([])
        hdu1 = fits.ImageHDU(medcube.data)
        hdu2 = fits.ImageHDU(varcube.data)
        hdulist = fits.HDUList([hdu, hdu1, hdu2])
        hdulist[0].header = headermain
        hdulist[1].header = headerext
        hdulist[2].header = headervar
        hdulist.writeto(cubemed, overwrite=True)

        #now make white image
        print('Creating final white image from median cube')
        white_new = np.zeros((nx, ny))
        for xx in range(nx):
            for yy in range(ny):
                #skip a couple of channels at the edges for cosmetics
                white_new[xx, yy] = np.nansum(medcube.data[2:-3, xx,
                                                           yy]) / (nw - 4)

        #save projected image
        hdu1 = fits.PrimaryHDU([])
        hdu2 = fits.ImageHDU(white_new)
        hdu2.header = headerext
        hdulist = fits.HDUList([hdu1, hdu2])
        hdulist.writeto(imagemed, overwrite=True)

        #save output mean
        hdu = fits.PrimaryHDU([])
        hdu1 = fits.ImageHDU(meancube.data)
        hdu2 = fits.ImageHDU(varcube.data)
        hdulist = fits.HDUList([hdu, hdu1, hdu2])
        hdulist[0].header = headermain
        hdulist[1].header = headerext
        hdulist[2].header = headervar
        hdulist.writeto(cubemean, overwrite=True)

        #now make white image
        print('Creating final white image from mean cube')
        white_new = np.zeros((nx, ny))
        for xx in range(nx):
            for yy in range(ny):
                #skip a couple of channels at the edges for cosmetics
                white_new[xx, yy] = np.nansum(meancube.data[2:-3, xx,
                                                            yy]) / (nw - 4)

        #save projected image
        hdu1 = fits.PrimaryHDU([])
        hdu2 = fits.ImageHDU(white_new)
        hdu2.header = headerext
        hdulist = fits.HDUList([hdu1, hdu2])
        hdulist.writeto(imagemean, overwrite=True)

    else:

        print('Median and mean coadds {} and {} already exist!'.format(
            cubemed, cubemean))
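
A minimal sketch of how the coadd above might be triggered from the top-level directory, assuming the mpdafcombine folder exists and the ZAP-processed exposures are in place; the OB names are placeholders.

#illustrative call: combine the ZAP-subtracted exposures of two placeholder OBs
coaddcubes(['OB1', 'OB2'], nclip=2.5)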
Example No. 13
def zapskysub(listob, extmask=None, extmaskonly=False):
    """

    Loop over each OB and perform ZAP sky subtraction

    listob -> OBs to process
    extmask -> ds9 region file in image coordinates defining additional regions to be masked
    extmaskonly -> if True use only the externally supplied mask; if False merge the source mask and the external mask
 
    """

    import os
    import glob
    import subprocess
    import shutil
    from astropy.io import fits
    import muse_utils as mut
    import numpy as np
    import sep
    import zap
    from mypython.fits import pyregmask as pmk

    #grab top dir
    topdir = os.getcwd()

    #now loop over each folder and make the final sky-subtracted cubes
    for ob in listob:

        #change dir
        os.chdir(ob + '/Proc/MPDAF')

        #use source mask already available (should we assume it is always available?)
        srcmask = 'selfcalib_mask.fits'

        if (extmask):
            extfitsmask = 'ext_mask.fits'

            srchdu = fits.open(srcmask)
            srcmsk = srchdu[0].data
            extmsk = np.zeros_like(srcmsk)
            nx = srchdu[0].header['NAXIS1']
            ny = srchdu[0].header['NAXIS2']

            #construct the sky region mask
            mysky = pmk.PyMask(nx,
                               ny,
                               "../../../" + extmask,
                               header=srchdu[0].header)
            for ii in range(mysky.nreg):
                mysky.fillmask(ii)
                extmsk = extmsk + mysky.mask

            outhdu = fits.PrimaryHDU(extmsk)
            outhdu.writeto(extfitsmask, overwrite=True)

            srchdu.close()

        if (extmask and not extmaskonly):
            #merge the external mask with the source mask
            namecombmask = 'extsrc_mask.fits'

            finalmsk = extmsk + srcmsk
            finalmsk[finalmsk > 0] = 1
            outhdu = fits.PrimaryHDU(finalmsk)
            outhdu.writeto(namecombmask, overwrite=True)
            finalmask = namecombmask
        elif (extmask and extmaskonly):
            #use only the externally supplied mask
            finalmask = extfitsmask
        else:
            finalmask = srcmask

        #collect the science exposures
        scils = glob.glob("../Basic/OBJECT_RED_0*.fits*")
        nsci = len(scils)

        print("Processing {} with ZAP".format(ob))

        #loop on exposures and apply ZAP sky subtraction
        for exp in range(nsci):

            #these are the self-calibrated data
            cubeselfcal = "DATACUBE_RESAMPLED_EXP{0:d}_fix.fits".format(exp +
                                                                        1)
            imageselfcal = "IMAGE_RESAMPLED_EXP{0:d}_fix.fits".format(exp + 1)

            #these are the sky subtracted products
            cubezap = "DATACUBE_RESAMPLED_EXP{0:d}_zap.fits".format(exp + 1)
            imagezap = "IMAGE_RESAMPLED_EXP{0:d}_zap.fits".format(exp + 1)

            if not os.path.isfile(cubezap):
                print('Reconstruct cube {} with ZAP'.format(cubezap))
                zap.process(cubeselfcal,
                            outcubefits=cubezap,
                            clean=True,
                            mask=finalmask)

                #create white image from zap cube
                cube = fits.open(cubezap)
                #define geometry
                nwave = cube[1].header["NAXIS3"]
                nx = cube[1].header["NAXIS1"]
                ny = cube[1].header["NAXIS2"]

                print('Creating final white image from ZAP')
                white_new = np.zeros((ny, nx))
                for xx in range(nx):
                    for yy in range(ny):
                        white_new[yy, xx] = np.nansum(cube[1].data[:, yy,
                                                                   xx]) / nwave

                #save projected image
                hdu1 = fits.PrimaryHDU([])
                hdu2 = fits.ImageHDU(white_new)
                hdu2.header = cube[1].header
                hdulist = fits.HDUList([hdu1, hdu2])
                hdulist.writeto(imagezap, overwrite=True)

            else:
                print('Cube {} already exists! Skip...'.format(cubezap))

        #back to top
        os.chdir(topdir)