Code example #1
File: correct_files.py  Project: ilkiewicz/polsalt
def correct_files(hdu,tilt=0):
    """For a given input file, apply corrections for wavelength, 
       distortion, and bad pixels

    Parameters
    ----------
    hdu: astropy.io.fits.HDUList

    tilt: float
        change in row from col = 0 to col = cols
    """
    
    cbin, rbin = [int(x) for x in hdu[0].header['CCDSUM'].split(" ")]
    beams, rows, cols = hdu[1].data.shape
    
    # temporary kludge
    thdu = fits.HDUList([fits.PrimaryHDU(), fits.ImageHDU(hdu[1].data[0])])
    thdu[0].header = hdu[0].header
    thdu[1].name = 'SCI'
    rpix_oc = read_wollaston(thdu, wollaston_file=datadir+"wollaston.txt")
    drow_oc = (rpix_oc-rpix_oc[:,cols/2][:,None])/rbin
    drow_oc += -tilt*(np.arange(cols) - cols/2)/cols
 
    for i in range(1, len(hdu)):
       for o in range(beams):

          if hdu[i].name == 'BPM' :
                tdata = hdu[i].data[o].astype('float')                          
          else:                     
                tdata = hdu[i].data[o]
          tdata = correct_wollaston(tdata, -drow_oc[o])
          if hdu[i].name == 'BPM' : 
                hdu[i].data[o] = (tdata > 0.1).astype('uint')
          else:                     
                hdu[i].data[o] = tdata 
        
    return hdu
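
A minimal usage sketch for the function above. Everything in it is illustrative: the import path, the input filename, and the tilt value are assumptions, not taken from the source.

# hypothetical usage; import path, filename, and tilt value are assumptions
from astropy.io import fits
from correct_files import correct_files   # assumes correct_files.py is on the path

hdul = fits.open('wmxgbpP201403230021.fits')   # hypothetical split polsalt file
hdul = correct_files(hdul, tilt=0.5)           # apply the Wollaston/tilt row corrections in place
hdul.writeto('cwmxgbpP201403230021.fits', overwrite=True)
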
Code example #2
def correct_files(hdu, tilt=0):
    """For a given input file, apply corrections for wavelength, 
       distortion, and bad pixels

    Parameters
    ----------
    hdu: astropy.io.fits.HDUList

    tilt: float
        change in row from col = 0 to col = cols
    """

    cbin, rbin = [int(x) for x in hdu[0].header['CCDSUM'].split(" ")]
    beams, rows, cols = hdu[1].data.shape

    # temporary kludge
    thdu = fits.HDUList([fits.PrimaryHDU(), fits.ImageHDU(hdu[1].data[0])])
    thdu[0].header = hdu[0].header
    thdu[1].name = 'SCI'
    rpix_oc = read_wollaston(thdu, wollaston_file=datadir + "wollaston.txt")
    drow_oc = (rpix_oc - rpix_oc[:, cols / 2][:, None]) / rbin
    drow_oc += -tilt * (np.arange(cols) - cols / 2) / cols

    for i in range(1, len(hdu)):
        for o in range(beams):

            if hdu[i].name == 'BPM':
                tdata = hdu[i].data[o].astype('float')
            else:
                tdata = hdu[i].data[o]
            tdata = correct_wollaston(tdata, -drow_oc[o])
            if hdu[i].name == 'BPM':
                hdu[i].data[o] = (tdata > 0.1).astype('uint')
            else:
                hdu[i].data[o] = tdata

    return hdu
Code example #3
def specpolwavmap(infilelist,
                  linelistlib="",
                  automethod='Matchlines',
                  function='legendre',
                  order=3,
                  debug=False,
                  logfile='salt.log'):
    obsdate = os.path.basename(infilelist[0])[7:15]

    with logging(logfile, debug) as log:
        log.message('Pysalt Version: ' + pysalt.verno, with_header=False)
        log.message('specpolwavmap version: 20180804', with_header=False)
        # group the files together
        config_dict = list_configurations(infilelist, log)
        usesaltlinelist = (len(linelistlib) > 0)

        for config in config_dict:
            if len(config_dict[config]['arc']) == 0:
                log.message('No Arc for this configuration:',
                            with_header=False)
                continue
            isdualarc = len(config_dict[config]['arc']) > 1

            iarc = config_dict[config]['arc'][0]
            hduarc = pyfits.open(iarc)
            image_id = str(image_number(iarc))
            rows, cols = hduarc[1].data.shape
            grating = hduarc[0].header['GRATING'].strip()
            grang = hduarc[0].header['GR-ANGLE']
            artic = hduarc[0].header['CAMANG']
            filter = hduarc[0].header['FILTER'].strip()
            lamp = hduarc[0].header['LAMPID'].strip().replace(' ', '')
            if lamp == 'NONE': lamp = 'CuAr'
            cbin, rbin = [
                int(x) for x in hduarc[0].header['CCDSUM'].split(" ")
            ]

            if isdualarc:
                iarc2 = config_dict[config]['arc'][1]
                hduarc2 = pyfits.open(iarc2)
                ratio21 = np.percentile(hduarc2[1].data, 99.9) / np.percentile(
                    hduarc[1].data, 99.9)
                image_id = image_id + '_' + str(image_number(iarc2))
                hduarc[1].data += hduarc2[1].data / ratio21
                lamp2 = hduarc2[0].header['LAMPID'].strip().replace(' ', '')
                log.message(('\nDual Arcs: ' + lamp + ' + ' + lamp2 +
                             '/ %8.4f' % ratio21),
                            with_header=False)

            # need this for the distortion correction
            rpix_oc = read_wollaston(hduarc,
                                     wollaston_file=datadir + "wollaston.txt")

            #split the arc into the two beams
            hduarc, splitrow = specpolsplit(hduarc,
                                            splitrow=None,
                                            wollaston_file=datadir +
                                            "wollaston.txt")
            rows = 2 * hduarc['SCI'].data.shape[
                1]  # allow for odd number of input rows

            if usesaltlinelist:  # if linelistlib specified, use salt-supplied
                with open(linelistlib) as fd:
                    linelistdict = dict(line.strip().split(None, 1)
                                        for line in fd)
                lampfile = iraf.osfn("pysalt$data/linelists/" +
                                     linelistdict[lamp])
                if isdualarc:
                    lamp2file = iraf.osfn("pysalt$data/linelists/" +
                                          linelistdict[lamp2])
            else:  # else, use line lists in polarimetry area for 300l
                if grating == "PG0300":
                    linelistlib = datadir + "linelistlib_300.txt"
                    lib_lf = list(
                        np.loadtxt(linelistlib, dtype=str,
                                   usecols=(0, 1, 2)))  # lamp,filter,file
                    linelistdict = defaultdict(dict)
                    for ll in range(len(lib_lf)):
                        linelistdict[lib_lf[ll][0]][int(
                            lib_lf[ll][1])] = lib_lf[ll][2]
                    filter_l = np.sort(np.array(linelistdict[lamp].keys()))
                    usefilter = filter_l[np.where(
                        int(filter[-5:-1]) < filter_l)[0][0]]
                    lampfile = datadir + linelistdict[lamp][usefilter]
                    if isdualarc:
                        lamp2file = datadir + linelistdict[lamp2][usefilter]
                else:
                    linelistlib = datadir + "linelistlib.txt"
                    with open(linelistlib) as fd:
                        linelistdict = dict(line.strip().split(None, 1)
                                            for line in fd)
                    lampfile = iraf.osfn("pysalt$data/linelists/" +
                                         linelistdict[lamp])
                    if isdualarc:
                        lamp2file = iraf.osfn("pysalt$data/linelists/" +
                                              linelistdict[lamp2])
            if isdualarc:
                lamp_dl = np.loadtxt(lampfile, usecols=(0, 1), unpack=True)
                lamp2_dl = np.loadtxt(lamp2file, usecols=(0, 1), unpack=True)
                duallamp_dl = np.sort(np.hstack((lamp_dl, lamp2_dl)))
                np.savetxt('duallamp.txt', duallamp_dl.T, fmt='%10.3f %8i')
                lampfile = 'duallamp.txt'
                lamp = lamp + ',' + lamp2

            # some housekeeping for bad keywords
            # for now, MOS treated as a single, short 1 arcsec longslit
            if hduarc[0].header['MASKTYP'].strip() == 'MOS':
                hduarc[0].header['MASKTYP'] = 'LONGSLIT'
                hduarc[0].header['MASKID'] = 'P001000P99'
            del hduarc['VAR']
            del hduarc['BPM']

            # log the information about the arc
            log.message('\nARC: image '+image_id+' GRATING '+grating\
                        +' GRANG '+("%8.3f" % grang)+' ARTIC '+("%8.3f" % artic)+' LAMP '+lamp, with_header=False)
            log.message('  Split Row: ' + ("%4i " % splitrow),
                        with_header=False)

            # set up the correction for the beam splitter
            drow_oc = (rpix_oc - rpix_oc[:, cols / 2][:, None]) / rbin

            wavmap_orc = pol_wave_map(hduarc,
                                      image_id,
                                      drow_oc,
                                      rows,
                                      cols,
                                      lampfile=lampfile,
                                      function=function,
                                      order=order,
                                      automethod=automethod,
                                      log=log,
                                      logfile=logfile)

            # if image not already cleaned,
            # use upper outlier quartile fence of 3 column subarray across normalized configuration
            #     or 10-sigma spike to cull cosmic rays.  Normalize by rows
            images = len(config_dict[config]['object'])
            historylist = list(
                pyfits.open(
                    config_dict[config]['object'][0])[0].header['HISTORY'])
            cleanhistory = next((x for x in historylist if x[:7] == "CRCLEAN"),
                                "None")
            iscr_irc = np.zeros((images, rows, cols), dtype='bool')

            if cleanhistory == 'CRCLEAN: None':
                historyidx = historylist.index(cleanhistory)
                upperfence = 4.0
                lowerfence = 1.5
                sigmaveto = 2.0
                sci_irc = np.zeros((images, rows, cols))
                var_irc = np.zeros((images, rows, cols))

                for (i, image) in enumerate(config_dict[config]['object']):
                    hdulist = pyfits.open(image)
                    okbin_rc = (hdulist['BPM'].data[:rows, :] == 0)
                    sci_irc[i][okbin_rc] = hdulist['SCI'].data[:rows, :][
                        okbin_rc]
                    var_irc[i][okbin_rc] = hdulist['VAR'].data[:rows, :][
                        okbin_rc]
                    okrow_r = okbin_rc.any(axis=1)
                    for r in np.where(okrow_r)[0]:
                        rowmean = sci_irc[i, r][okbin_rc[r]].mean()
                        sci_irc[i, r] /= rowmean
                        var_irc[i, r] /= rowmean**2

                sci_ijrc = np.zeros((images, 3, rows, cols))
                for j in range(3):
                    sci_ijrc[:, j, :, 1:-1] = sci_irc[:, :, j:cols + j - 2]
                sci_Irc = sci_ijrc.reshape((-1, rows, cols))
                sci_Irc.sort(axis=0)
                firstmthird_rc = sci_Irc[-1] - sci_Irc[-3]
                q1_rc, q3_rc = np.percentile(sci_Irc, (25, 75),
                                             axis=0,
                                             overwrite_input=True)
                dq31_rc = q3_rc - q1_rc
                okq_rc = (dq31_rc > 0.)
                oksig_rc = (var_irc.sum(axis=0) > 0.)
                sigma_rc = np.zeros_like(q1_rc)
                sigma_rc[oksig_rc] = np.sqrt(
                    var_irc.sum(axis=0)[oksig_rc] /
                    ((var_irc > 0).sum(axis=0)[oksig_rc]))
                dq31_rc = np.maximum(
                    dq31_rc, 1.35 *
                    sigma_rc)  # avoid impossibly low dq from fluctuations

                iscr1_irc = np.zeros((images, rows, cols), dtype=bool)
                iscr2_irc = np.zeros((images, rows, cols), dtype=bool)
                iscr1_irc[:, okq_rc] = (sci_irc[:, okq_rc] >
                                        (q3_rc + upperfence * dq31_rc)[okq_rc]
                                        )  # above upper outlier fence
                iscr2_irc[:,okbin_rc] = ((sci_irc[:,okbin_rc]==sci_irc[:,okbin_rc].max(axis=0)) &   \
                    (firstmthird_rc[okbin_rc] > 10*sigma_rc[okbin_rc]))                 # or a 10-sigma spike
                iscr_irc = (iscr1_irc | iscr2_irc)
                notcr3_irc = (iscr_irc & (iscr_irc.sum(axis=0) > 2)
                              )  # but >2 CR's in one place are bogus
                notcr4_irc = (iscr_irc & (firstmthird_rc < sigmaveto * dq31_rc)
                              )  # seeing/guiding errors, not CR
                iscr_irc &= (np.logical_not(notcr3_irc | notcr4_irc))

                isnearcr_irc = np.zeros((images, rows + 2, cols + 2),
                                        dtype=bool)
                for dr, dc in np.ndindex(3, 3):  # lower fence on neighbors
                    isnearcr_irc[:, dr:rows + dr, dc:cols + dc] |= iscr_irc
                isnearcr_irc = isnearcr_irc[:, 1:-1, 1:-1]
                iscr_irc[isnearcr_irc] |= (
                    okq_rc & (sci_irc >
                              (q3_rc + lowerfence * dq31_rc)))[isnearcr_irc]

                log.message('CR culling with upper quartile fence\n',
                            with_header=False)

            elif cleanhistory == 'None':
                log.message(
                    'CR clean history unknown, none applied (suggest rerunning imred)',
                    with_header=False)
            else:
                log.message('CR cleaning already done: ' + cleanhistory,
                            with_header=False)

            # for images using this arc,save split data along third fits axis,
            # add wavmap extension, save as 'w' file
            hduwav = pyfits.ImageHDU(data=wavmap_orc.astype('float32'),
                                     header=hduarc['SCI'].header,
                                     name='WAV')

            for (i, image) in enumerate(config_dict[config]['object']):
                hdu = pyfits.open(image)
                if cleanhistory == 'CRCLEAN: None':
                    hdu['BPM'].data[:rows, :][iscr_irc[i]] = 1
                    hdu[0].header['HISTORY'][historyidx] = \
                        ('CRCLEAN: upper= %3.1f, lower= %3.1f, sigmaveto= %3.1f' % (upperfence,lowerfence,sigmaveto))
                hdu, splitrow = specpolsplit(hdu, splitrow=splitrow)
                hdu['BPM'].data[:rows, :][wavmap_orc == 0.] = 1
                hdu.append(hduwav)
                for f in ('SCI', 'VAR', 'BPM', 'WAV'):
                    hdu[f].header['CTYPE3'] = 'O,E'
                hdu.writeto('w' + image, overwrite=True)
                log.message('Output file ' + 'w' + image + '  crs: ' +
                            str(iscr_irc[i].sum()),
                            with_header=False)

    return
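
The cosmic-ray culling block above compares each pixel across the stack of row-normalized images against an upper quartile fence. A simplified, self-contained NumPy sketch of that fence test on synthetic data follows; the fence value matches `upperfence` above, but the array sizes, noise level, and fixed sigma floor are illustrative assumptions.

import numpy as np

rng = np.random.default_rng(0)
images, rows, cols = 8, 64, 64
sigma = 0.02

# synthetic stack of row-normalized frames with two injected cosmic-ray spikes
sci_irc = rng.normal(1.0, sigma, size=(images, rows, cols))
sci_irc[0, 10, 20] = 5.0
sci_irc[2, 40, 33] = 7.0

# quartiles across the stack at each pixel, with a 1.35*sigma floor on the
# interquartile range (as in the code above) to avoid impossibly low fences
q1_rc, q3_rc = np.percentile(sci_irc, (25, 75), axis=0)
dq31_rc = np.maximum(q3_rc - q1_rc, 1.35 * sigma)

upperfence = 4.0                                       # same value as above
iscr_irc = sci_irc > (q3_rc + upperfence * dq31_rc)    # flag upper-fence outliers
print(np.argwhere(iscr_irc))                           # should list the two injected spikes
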
Code example #4
File: specpolwavmap.py  Project: dgroenewald/polsalt
def specpolwavmap(infilelist, linelistlib="", automethod='Matchlines', 
                  function='legendre', order=3, logfile='salt.log'):
    obsdate=os.path.basename(infilelist[0])[7:15]

    with logging(logfile, debug) as log:
        log.message('Pysalt Version: '+pysalt.verno, with_header=False)
      
        # group the files together
        config_dict = list_configurations(infilelist, log)

        for config in config_dict:
            if len(config_dict[config]['arc']) == 0:
                log.message('No Arc for this configuration:', with_header=False)
                continue
            # set up some information needed later
            iarc = config_dict[config]['arc'][0]
            hduarc = pyfits.open(iarc)
            image_no = image_number(iarc)
            rows, cols = hduarc[1].data.shape
            grating = hduarc[0].header['GRATING'].strip()
            grang = hduarc[0].header['GR-ANGLE']
            artic = hduarc[0].header['CAMANG']

            cbin, rbin = [int(x) for x in hduarc[0].header['CCDSUM'].split(" ")]

            # need this for the distortion correction 
            rpix_oc = read_wollaston(hduarc, wollaston_file=datadir+"wollaston.txt")

            #split the arc into the two beams
            hduarc, splitrow = specpolsplit(hduarc, splitrow=None, wollaston_file=datadir+"wollaston.txt")
            
            # set up the linelamp to be used
            if len(linelistlib) ==0: 
                linelistlib=datadir+"linelistlib.txt"   
                if grating=="PG0300": 
                    linelistlib=datadir+"linelistlib_300.txt"
            with open(linelistlib) as fd:
                linelistdict = dict(line.strip().split(None, 1) for line in fd)  

    
            #set up the lamp to be used
            lamp=hduarc[0].header['LAMPID'].strip().replace(' ', '')
            if lamp == 'NONE': lamp='CuAr'
            lampfile=iraf.osfn("pysalt$data/linelists/"+linelistdict[lamp])    

            # some housekeeping for bad keywords
            if hduarc[0].header['MASKTYP'].strip() == 'MOS':   # for now, MOS treated as single, short 1 arcsec longslit
                hduarc[0].header.update('MASKTYP','LONGSLIT')
                hduarc[0].header.update('MASKID','P001000P99')
            del hduarc['VAR']
            del hduarc['BPM']
    
            # log the information about the arc
            log.message('\nARC: image '+str(image_no)+' GRATING '+grating\
                        +' GRANG '+("%8.3f" % grang)+' ARTIC '+("%8.3f" % artic)+' LAMP '+lamp, with_header=False)
            log.message('  Split Row: '+("%4i " % splitrow), with_header=False)

            # set up the correction for the beam splitter
            drow_oc = (rpix_oc-rpix_oc[:,cols/2][:,None])/rbin

            wavmap_orc = pol_wave_map(hduarc, image_no, drow_oc, rows, cols,
                                      lampfile=lampfile, function=function, order=order,
                                      automethod=automethod, log=log, logfile=logfile)


            # for images using this arc,save split data along third fits axis, 
            # add wavmap extension, save as 'w' file
            hduwav = pyfits.ImageHDU(data=wavmap_orc.astype('float32'), header=hduarc['SCI'].header, name='WAV')                 
            for image in config_dict[config]['object']:
                hdu = pyfits.open(image)
                hdu, splitrow = specpolsplit(hdu, splitrow=splitrow)
                hdu['BPM'].data[wavmap_orc==0.] = 1 
                hdu.append(hduwav)
                for f in ('SCI','VAR','BPM','WAV'): hdu[f].header.update('CTYPE3','O,E')
                hdu.writeto('w'+image, clobber=True)
                log.message('Output file '+'w'+image, with_header=False)

    return
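
A minimal driver sketch for the function above. The filename pattern is a guess at the prepared-image naming used by polsalt and is not prescribed by the source.

# hypothetical driver; the mxgbpP*.fits pattern is an assumption
import glob

infilelist = sorted(glob.glob('mxgbpP*.fits'))   # reduced object frames plus the arc(s)
specpolwavmap(infilelist, logfile='salt.log')    # writes 'w'-prefixed files with a WAV extension
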
Code example #5
File: specpolwavmap.py  Project: ilkiewicz/polsalt
def specpolwavmap(infilelist, linelistlib="", automethod='Matchlines', 
                  function='legendre', order=3, debug=False, logfile='salt.log'):
    obsdate=os.path.basename(infilelist[0])[7:15]

    with logging(logfile, debug) as log:
        log.message('Pysalt Version: '+pysalt.verno, with_header=False)
        log.message('specpolwavmap version: 20171226', with_header=False)         
        # group the files together
        config_dict = list_configurations(infilelist, log)
        usesaltlinelist = (len(linelistlib)>0)
        
        for config in config_dict:
            if len(config_dict[config]['arc']) == 0:
                log.message('No Arc for this configuration:', with_header=False)
                continue
            # set up some information needed later
            iarc = config_dict[config]['arc'][0]
            hduarc = pyfits.open(iarc)
            image_no = image_number(iarc)
            rows, cols = hduarc[1].data.shape
            grating = hduarc[0].header['GRATING'].strip()
            grang = hduarc[0].header['GR-ANGLE']
            artic = hduarc[0].header['CAMANG']
            filter = hduarc[0].header['FILTER'].strip()

            cbin, rbin = [int(x) for x in hduarc[0].header['CCDSUM'].split(" ")]

            # need this for the distortion correction 
            rpix_oc = read_wollaston(hduarc, wollaston_file=datadir+"wollaston.txt")

            #split the arc into the two beams
            hduarc, splitrow = specpolsplit(hduarc, splitrow=None, wollaston_file=datadir+"wollaston.txt")

            #set up the lamp to be used
            lamp=hduarc[0].header['LAMPID'].strip().replace(' ', '')
            if lamp == 'NONE': lamp='CuAr'
            
            # set up the linelist to be used
            if usesaltlinelist:                # if linelistlib specified, use salt-supplied
                with open(linelistlib) as fd:
                    linelistdict = dict(line.strip().split(None, 1) for line in fd)
                lampfile=iraf.osfn("pysalt$data/linelists/"+linelistdict[lamp]) 
            else:                               # else, use line lists in polarimetry area for 300l
                if grating=="PG0300": 
                    linelistlib=datadir+"linelistlib_300.txt"
                    lib_lf = list(np.loadtxt(linelistlib,dtype=str,usecols=(0,1,2)))    # lamp,filter,file
                    linelistdict = defaultdict(dict)
                    for ll in range(len(lib_lf)):
                        linelistdict[lib_lf[ll][0]][int(lib_lf[ll][1])] = lib_lf[ll][2] 
                    filter_l = np.sort(np.array(linelistdict[lamp].keys()))
                    usefilter = filter_l[np.where(int(filter[-5:-1]) < filter_l)[0][0]]
                    lampfile = datadir+linelistdict[lamp][usefilter]
                else:
                    linelistlib=datadir+"linelistlib.txt"
                    with open(linelistlib) as fd:
                        linelistdict = dict(line.strip().split(None, 1) for line in fd)   
                    lampfile=iraf.osfn("pysalt$data/linelists/"+linelistdict[lamp])  

            # some housekeeping for bad keywords
            if hduarc[0].header['MASKTYP'].strip() == 'MOS':   # for now, MOS treated as single, short 1 arcsec longslit
                hduarc[0].header['MASKTYP'] = 'LONGSLIT'
                hduarc[0].header['MASKID'] = 'P001000P99'
            del hduarc['VAR']
            del hduarc['BPM']
    
            # log the information about the arc
            log.message('\nARC: image '+str(image_no)+' GRATING '+grating\
                        +' GRANG '+("%8.3f" % grang)+' ARTIC '+("%8.3f" % artic)+' LAMP '+lamp, with_header=False)
            log.message('  Split Row: '+("%4i " % splitrow), with_header=False)

            # set up the correction for the beam splitter
            drow_oc = (rpix_oc-rpix_oc[:,cols/2][:,None])/rbin

            wavmap_orc = pol_wave_map(hduarc, image_no, drow_oc, rows, cols,
                                      lampfile=lampfile, function=function, order=order,
                                      automethod=automethod, log=log, logfile=logfile)

            # if image not already cleaned,
            # use upper outlier quartile fence of 3-column subarray across normalized configuration
            #     or 10-sigma spike to cull cosmic rays.  Normalize by rows
            images = len(config_dict[config]['object'])
            historylist = list(pyfits.open(config_dict[config]['object'][0])[0].header['HISTORY'])
            cleanhistory = next((x for x in historylist if x[:7]=="CRCLEAN"),"None")
            iscr_irc = np.zeros((images,rows,cols),dtype='bool')

            if cleanhistory == 'CRCLEAN: None':
                historyidx = historylist.index(cleanhistory)
                upperfence = 4.0
                lowerfence = 1.5
                sigmaveto = 2.0
                sci_irc = np.zeros((images,rows,cols))
                var_irc = np.zeros((images,rows,cols))

                for (i,image) in enumerate(config_dict[config]['object']):
                    hdulist = pyfits.open(image)
                    okbin_rc = (hdulist['BPM'].data == 0)
                    sci_irc[i][okbin_rc] = hdulist['SCI'].data[okbin_rc]
                    var_irc[i][okbin_rc] = hdulist['VAR'].data[okbin_rc]
                    okrow_r = okbin_rc.any(axis=1)
                    for r in np.where(okrow_r)[0]:
                        rowmean = sci_irc[i,r][okbin_rc[r]].mean()
                        sci_irc[i,r] /= rowmean
                        var_irc[i,r] /= rowmean**2
                    
                sci_ijrc = np.zeros((images,3,rows,cols))
                for j in range(3):
                    sci_ijrc[:,j,:,1:-1] = sci_irc[:,:,j:cols+j-2]
                sci_Irc = sci_ijrc.reshape((-1,rows,cols))
                sci_Irc.sort(axis=0)
                firstmthird_rc = sci_Irc[-1] - sci_Irc[-3] 
                q1_rc,q3_rc = np.percentile(sci_Irc,(25,75),axis=0,overwrite_input=True)
                dq31_rc = q3_rc - q1_rc
                okq_rc = (dq31_rc > 0.)
                oksig_rc = (var_irc.sum(axis=0) > 0.)
                sigma_rc = np.zeros_like(q1_rc)
                sigma_rc[oksig_rc] = np.sqrt(var_irc.sum(axis=0)[oksig_rc]/((var_irc > 0).sum(axis=0)[oksig_rc]))
                dq31_rc = np.maximum(dq31_rc,1.35*sigma_rc)                     # avoid impossibly low dq from fluctuations

                iscr1_irc = np.zeros((images,rows,cols),dtype=bool)  
                iscr2_irc = np.zeros((images,rows,cols),dtype=bool)  
                iscr1_irc[:,okq_rc] = (sci_irc[:,okq_rc] > (q3_rc + upperfence*dq31_rc)[okq_rc])    # above upper outlier fence
                iscr2_irc[:,okbin_rc] = ((sci_irc[:,okbin_rc]==sci_irc[:,okbin_rc].max(axis=0)) &   \
                    (firstmthird_rc[okbin_rc] > 10*sigma_rc[okbin_rc]))                 # or a 10-sigma spike          
                iscr_irc = (iscr1_irc | iscr2_irc)
                notcr3_irc =(iscr_irc & (iscr_irc.sum(axis=0)>2))                      # but >2 CR's in one place are bogus
                notcr4_irc =(iscr_irc & (firstmthird_rc < sigmaveto*dq31_rc))          # seeing/guiding errors, not CR 
                iscr_irc &= (np.logical_not(notcr3_irc | notcr4_irc))

                isnearcr_irc = np.zeros((images,rows+2,cols+2),dtype=bool)
                for dr,dc in np.ndindex(3,3):                                       # lower fence on neighbors
                    isnearcr_irc[:,dr:rows+dr,dc:cols+dc] |= iscr_irc
                isnearcr_irc = isnearcr_irc[:,1:-1,1:-1]
                iscr_irc[isnearcr_irc] |= (okq_rc & (sci_irc > (q3_rc + lowerfence*dq31_rc)))[isnearcr_irc]   

                log.message('CR culling with upper quartile fence\n', with_header=False)

            elif cleanhistory == 'None':
                log.message('CR clean history unknown, none applied (suggest rerunning imred)',with_header=False)
            else:
                log.message('CR cleaning already done: '+cleanhistory,with_header=False)

            # for images using this arc,save split data along third fits axis, 
            # add wavmap extension, save as 'w' file
            hduwav = pyfits.ImageHDU(data=wavmap_orc.astype('float32'), header=hduarc['SCI'].header, name='WAV') 
              
            for (i,image) in enumerate(config_dict[config]['object']):
                hdu = pyfits.open(image)
                if cleanhistory == 'CRCLEAN: None':                
                    hdu['BPM'].data[iscr_irc[i]] = 1
                    hdu[0].header['HISTORY'][historyidx] = \
                        ('CRCLEAN: upper= %3.1f, lower= %3.1f, sigmaveto= %3.1f' % (upperfence,lowerfence,sigmaveto))
                hdu, splitrow = specpolsplit(hdu, splitrow=splitrow)
                hdu['BPM'].data[wavmap_orc==0.] = 1 
                hdu.append(hduwav)
                for f in ('SCI','VAR','BPM','WAV'): hdu[f].header['CTYPE3'] = 'O,E'
                hdu.writeto('w'+image, overwrite=True)
                log.message('Output file '+'w'+image+'  crs: '+str(iscr_irc[i].sum()), with_header=False)

    return
Code example #6
File: specpolsplit.py  Project: ilkiewicz/polsalt
def specpolsplit(hdu, splitrow=None, wollaston_file=None):
    """ Split the O and E beams  

    Parameters
    ----------
    hdu: fits.HDUList
       Polarimetric observations data

    splitrow: None or int
       Row to split the image.  If None, the value will be calculated

    wollaston_file: None or str
       File containing the central position of the split O and E beams
 
    Returns
    -------
    whdu: fits.HDUList
       HDUList with each extension split between the O and E beams

    splitrow: float
       Row at which to split the images


    """

    rows, cols = hdu[1].data.shape

    #determine the row to split the file from the estimated center
    if splitrow is None and wollaston_file:
        # use arc to make first-guess wavecal from model
        # locate beamsplitter split point based on the center of the chips
        # given in the wollaston file
        cbin, rbin = [int(x) for x in hdu[0].header['CCDSUM'].split(" ")]
        woll_pix = read_wollaston(hdu, wollaston_file)
        #        print woll_pix[:, cols/2]
        axisrow_o = ((2052 + woll_pix[:, cols / 2]) / rbin).astype(int)

        data_y = hdu[1].data.sum(axis=1)
        top = axisrow_o[1] + np.argmax(
            data_y[axisrow_o[1]:] < 0.5 * data_y[axisrow_o[1]])
        bot = axisrow_o[0] - np.argmax(
            data_y[axisrow_o[0]::-1] < 0.5 * data_y[axisrow_o[0]])
        splitrow = 0.5 * (bot + top)
    elif splitrow is None and wollaston_file is None:
        splitrow = rows / 2.0

    offset = int(splitrow -
                 rows / 2)  # how far split is from center of detector

    # split arc into o/e images
    padbins = (np.indices((rows, cols))[0] < offset) | (np.indices(
        (rows, cols))[0] > rows + offset)

    image_rc = np.roll(hdu['SCI'].data, -offset, axis=0)
    image_rc[padbins] = 0.
    hdu['SCI'].data = image_rc.reshape((2, rows / 2, cols))
    var_rc = np.roll(hdu['VAR'].data, -offset, axis=0)
    var_rc[padbins] = 0.
    hdu['VAR'].data = var_rc.reshape((2, rows / 2, cols))
    bpm_rc = np.roll(hdu['BPM'].data, -offset, axis=0)
    bpm_rc[padbins] = 1
    bpm_orc = bpm_rc.reshape((2, rows / 2, cols))
    hdu['BPM'].data = bpm_orc

    return hdu, splitrow
Code example #7
File: specpolsplit.py  Project: ilkiewicz/polsalt
def specpolsplit(hdu, splitrow=None, wollaston_file=None):
    """ Split the O and E beams  

    Parameters
    ----------
    hdu: fits.HDUList
       Polarimetric observations data

    splitrow: None or int
       Row to split the image.  If None, the value will be calculated

    wollaston_file: None or str
       File containing the central position of the split O and E beams
 
    Returns
    -------
    whdu: fits.HDUList
       HDUList with each extension split between the O and E beams

    splitrow: float
       Row at which to split the images


    """

    rows,cols = hdu[1].data.shape

    #determine the row to split the file from the estimated center
    if splitrow is None and wollaston_file:
        # use arc to make first-guess wavecal from model
        # locate beamsplitter split point based on the center of the chips 
        # given in the wollaston file
        cbin, rbin = [int(x) for x in hdu[0].header['CCDSUM'].split(" ")]
        woll_pix = read_wollaston(hdu, wollaston_file)
#        print woll_pix[:, cols/2]
        axisrow_o = ((2052 + woll_pix[:,cols/2])/rbin).astype(int)

        data_y = hdu[1].data.sum(axis=1)
        top = axisrow_o[1] + np.argmax(data_y[axisrow_o[1]:] <  0.5*data_y[axisrow_o[1]])
        bot = axisrow_o[0] - np.argmax(data_y[axisrow_o[0]::-1] <  0.5*data_y[axisrow_o[0]])
        splitrow = 0.5*(bot + top)
    elif splitrow is None and wollaston_file is None:
        splitrow = rows/2.0


    offset = int(splitrow - rows/2)                 # how far split is from center of detector

    # split arc into o/e images
    padbins = (np.indices((rows,cols))[0]<offset) | (np.indices((rows,cols))[0]>rows+offset)

    image_rc = np.roll(hdu['SCI'].data,-offset,axis=0)
    image_rc[padbins] = 0.
    hdu['SCI'].data = image_rc.reshape((2,rows/2,cols))
    var_rc = np.roll(hdu['VAR'].data,-offset,axis=0)
    var_rc[padbins] = 0.
    hdu['VAR'].data = var_rc.reshape((2,rows/2,cols))
    bpm_rc = np.roll(hdu['BPM'].data,-offset,axis=0)
    bpm_rc[padbins] = 1
    bpm_orc = bpm_rc.reshape((2,rows/2,cols))
    hdu['BPM'].data = bpm_orc

    return hdu, splitrow
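
The heart of specpolsplit is the roll/pad/reshape step: the frame is shifted so the beam boundary lands on the array midline, the pad rows are blanked, and the result is reshaped into a (2, rows/2, cols) O/E stack. Below is a small self-contained sketch of just that step on a synthetic array; the shapes and the splitrow value are illustrative, and the masking mirrors the construction used above.

import numpy as np

rows, cols = 8, 4
image_rc = np.arange(rows * cols, dtype=float).reshape(rows, cols)

splitrow = 5                        # pretend the beam boundary was found here
offset = int(splitrow - rows / 2)   # distance of the split from the detector centre

# shift the split to the midline and blank the pad rows, as in the code above
rolled = np.roll(image_rc, -offset, axis=0)
padbins = (np.indices((rows, cols))[0] < offset) | (np.indices((rows, cols))[0] > rows + offset)
rolled[padbins] = 0.

# stack the two halves along a new leading axis: indices 0 and 1 hold the O and E beams
image_orc = rolled.reshape((2, rows // 2, cols))
print(image_orc.shape)              # (2, 4, 4)
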