Example #1
def flatspectrum(xarr, yarr, mode='mean', thresh=3, iter=5, order=3):
    """Remove the continuum from a spectrum either by masking it or fitting
       and subtracting it.

       xarr= input x-values (pixels or wavelength)
       yarr= flux or counts for the spectrum
       mode=None--no subtraction
            mean--subtract off the mean
            poly--subtract off a fit
            mask--return a spectrum with the continuum set to zero
    """
    if mode == 'mean':
        # subtract off the mean value
        sarr = yarr - clipstats(yarr, thresh, iter)[0]
    elif mode == 'poly':
        # fit a polynomial to the continuum and subtract the fit
        it = interfit(xarr, yarr, function='poly', order=order)
        it.interfit()
        sarr = yarr - it(xarr)
    elif mode == 'mask':
        # mask the values
        mean, std = clipstats(yarr, thresh, iter)
        mask = (yarr < mean + thresh * std)
        sarr = yarr.copy()
        sarr[mask] = 0
    else:
        sarr = yarr.copy()
    return sarr
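A minimal usage sketch for the flatspectrum above (the synthetic spectrum is purely illustrative; clipstats and interfit must still be available from the SALT fitting libraries at runtime):

import numpy as np

# synthetic spectrum: flat continuum plus a single emission line
xarr = np.arange(1000, dtype=float)
yarr = 100.0 + 50.0 * np.exp(-0.5 * ((xarr - 500.0) / 3.0) ** 2)

# subtract the sigma-clipped mean continuum level
sarr = flatspectrum(xarr, yarr, mode='mean', thresh=3, iter=5)

# or zero out everything below the continuum-plus-threshold level
line_only = flatspectrum(xarr, yarr, mode='mask', thresh=3, iter=5)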
Example #2
def flatspectrum(xarr, yarr, mode='mean', thresh=3, iter=5, order=3):
    """Remove the continuum from a spectrum either by masking it or fitting
       and subtracting it.

       xarr= input x-values (pixels or wavelength)
       yarr= flux or counts for the spectrum
       mode=None--no subtraction
            mean--subtract off the mean
            poly--subtract off a fit
            mask--return a spectrum with the continuum set to zero
    """
    if mode == 'mean':
        # subtract off the sigma-clipped mean value
        sarr = yarr - clipstats(yarr, thresh, iter)[0]
    elif mode == 'poly':
        # fit a polynomial to the continuum and subtract the fit
        it = interfit(xarr, yarr, function='poly', order=order)
        it.interfit()
        sarr = yarr - it(xarr)
    elif mode == 'mask':
        # zero out values below the continuum-plus-threshold level
        mean, std = clipstats(yarr, thresh, iter)
        mask = (yarr < mean + thresh * std)
        sarr = yarr.copy()
        sarr[mask] = 0
    else:
        sarr = yarr.copy()
    return sarr
Example #3
def set_func(self):
    if self.function in [
            'poly', 'polynomial', 'spline', 'legendre', 'chebyshev']:
        self.func = interfit(self.x_arr, self.w_arr,
                             function=self.function,
                             order=self.order, niter=self.niter,
                             thresh=self.thresh)
    if self.function == 'model':
        self.func = ModelSolution(self.x_arr, self.w_arr,
                                  sgraph=self.sgraph, xlen=self.xlen,
                                  yval=self.yval, order=self.order)
Example #4
def set_func(self):
    if self.function in [
            'poly', 'polynomial', 'spline', 'legendre', 'chebyshev'
    ]:
        self.func = interfit(self.x_arr,
                             self.w_arr,
                             function=self.function,
                             order=self.order,
                             niter=self.niter,
                             thresh=self.thresh)
    if self.function == 'model':
        self.func = ModelSolution(self.x_arr,
                                  self.w_arr,
                                  sgraph=self.sgraph,
                                  xlen=self.xlen,
                                  yval=self.yval,
                                  order=self.order)
Example #5
def bias(struct,subover=True,trim=True, subbias=False, bstruct=None, 
         median=False, function='polynomial',order=3,rej_lo=3,rej_hi=3,niter=10,
         plotover=False, log=None, verbose=True):
   """Bias subtracts the bias levels from a frame.  It will fit and subtract the overscan
      region, trim the images, and subtract a master bias if required.

      struct--image structure 
      subover--subtract the overscan region
      trim--trim the image
      subbias--subtract master bias
      bstruct--master bias image structure
      median--use the median instead of mean in image statistics
      function--form to fit to the overscan region
      order--order for the function
      rej_lo--sigma  of low points to reject in the fit
      rej_hi--sigma of high points to reject in the fit
      niter--number of iterations
      log--saltio log for recording information
      verbose--whether to print to stdout 
   """
   infile=saltkey.getimagename(struct[0])

   # how many extensions?
   nsciext = saltkey.get('NSCIEXT',struct[0])
   nextend = saltkey.get('NEXTEND',struct[0])
   nccd = saltkey.get('NCCDS',struct[0])

   # how many amplifiers?--this is hard wired
   amplifiers = 2 * nccd


   #log the process
   if subover and log:
        message = '%28s %7s %5s %4s %6s' % \
            ('HDU','Overscan','Order','RMS','Niter')
        log.message('\n     --------------------------------------------------', 
                   with_header=False, with_stdout=verbose)
        log.message(message, with_header=False, with_stdout=verbose)
        log.message('     --------------------------------------------------', 
                   with_header=False, with_stdout=verbose)

   if (plotover): 
       plt.figure(1)
       plt.axes([0.1,0.1,0.8,0.8])
       plt.xlabel('CCD Column')
       plt.ylabel('Pixel Counts (e-)')
       plt.ion()


   #loop through the extensions and subtract the bias
   for i in range(1,nsciext+1):
     if struct[i].name=='SCI':

       #get the bias section
       biassec = saltkey.get('BIASSEC',struct[i])
       y1,y2,x1,x2 = saltio.getSection(biassec, iraf_format=True)
       #get the data section
       datasec = saltkey.get('DATASEC',struct[i])
       dy1,dy2, dx1, dx2 = saltio.getSection(datasec, iraf_format=True)
  
       #setup the overscan region
       if subover:
           yarr=np.arange(y1,y2, dtype=float)
           data=struct[i].data
           odata=struct[i].data[y1:y2,x1:x2]
           if median:
              odata=np.median((struct[i].data[y1:y2,x1:x2]),axis=1)
              olevel=np.median((struct[i].data[y1:y2,x1:x2]))
              saltkey.new('OVERSCAN','%f' % (olevel),'Overscan median value', struct[i])
           else:
              odata=np.mean((struct[i].data[y1:y2,x1:x2]),axis=1)
              olevel=np.mean((struct[i].data[y1:y2,x1:x2]))
              saltkey.new('OVERSCAN','%f' % (olevel),'Overscan mean value', struct[i])

           #fit the overscan region
           ifit=saltfit.interfit(yarr, odata, function=function, \
                                 order=order, thresh=rej_hi, niter=niter)
           try:
               ifit.interfit()
               coeffs=ifit.coef
               ofit=ifit(yarr)
               omean, omed, osigma=saltstat.iterstat((odata-ofit), sig=3, niter=5)
           except ValueError:
               #catch the error if it is a zero array
               ofit=np.array(yarr)*0.0
               osigma=0.0
           except TypeError:
               #catch the error if it is a zero array
               ofit=np.array(yarr)*0.0
               osigma=0.0

           #if it hasn't been already, convert image to
           #double format
           struct[i].data = 1.0 * struct[i].data
           try:
               struct[i].header.remove('BZERO')
               struct[i].header.remove('BSCALE')
           except:
               pass


           #subtract the overscan region
           for j in range(len(struct[i].data[0])):
               struct[i].data[y1:y2,j] -= ofit

           #report the information 
           if log:
                message = '%25s[%1d] %8.2f %3d %7.2f %3d' % \
                    (infile, i, olevel, order, osigma, niter)
                log.message(message, with_stdout=verbose, with_header=False)

           #add the statistics to the image header
           saltkey.new('OVERRMS','%f' % (osigma),'Overscan RMS value', struct[i])

           #update the variance frame
           if saltkey.found('VAREXT', struct[i]):
               vhdu=saltkey.get('VAREXT', struct[i])
               try:
                   vdata=struct[vhdu].data
                   #The bias level should not be included in the noise from the signal
                   for j in range(len(struct[i].data[0])):
                        vdata[y1:y2,j] -= ofit
                   #add a bit to make sure that the minimum error is the rednoise
                   rdnoise= saltkey.get('RDNOISE',struct[i])
                   vdata[vdata<rdnoise**2]=rdnoise**2
                   struct[vhdu].data=vdata+osigma**2

                except Exception as e:
                    msg='Cannot update the variance frame in %s[%i] because %s' % (infile, vhdu, e)
                    raise SaltError(msg)


           #plot the overscan region
           if plotover:  
              plt.plot(yarr, odata)
              plt.plot(yarr, ofit)

       #trim the data and update the headers
       if trim:
           struct[i].data=struct[i].data[dy1:dy2,dx1:dx2]
           datasec = '[1:'+str(dx2-dx1)+',1:'+str(dy2-dy1)+']'
           saltkey.put('DATASEC',datasec,struct[i])

           #update the variance frame
           if saltkey.found('VAREXT', struct[i]):
               vhdu=saltkey.get('VAREXT', struct[i])
               struct[vhdu].data=struct[vhdu].data[dy1:dy2,dx1:dx2]
               datasec = '[1:'+str(dx2-dx1)+',1:'+str(dy2-dy1)+']'
               saltkey.put('DATASEC',datasec,struct[vhdu])
           #update the BPM frame
           if saltkey.found('BPMEXT', struct[i]):
               bhdu=saltkey.get('BPMEXT', struct[i])
               struct[bhdu].data=struct[bhdu].data[dy1:dy2,dx1:dx2]
               datasec = '[1:'+str(dx2-dx1)+',1:'+str(dy2-dy1)+']'
               saltkey.put('DATASEC',datasec,struct[bhdu])

       #subtract the master bias if necessary
       if subbias and bstruct:
           struct[i].data -= bstruct[i].data

           #update the variance frame
           if saltkey.found('VAREXT', struct[i]):
               vhdu=saltkey.get('VAREXT', struct[i])
               try:
                   vdata=struct[vhdu].data
                   struct[vhdu].data=vdata+bstruct[vhdu].data
                except Exception as e:
                    msg='Cannot update the variance frame in %s[%i] because %s' % (infile, vhdu, e)
                    raise SaltError(msg)
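A minimal usage sketch for bias (the file names are illustrative; opening the frames with astropy.io.fits is an assumption, since the SALT modules used above historically worked with pyfits HDU lists carrying the NSCIEXT/NCCDS/BIASSEC/DATASEC keywords):

from astropy.io import fits

# open a raw frame and a matching master bias as HDU lists
struct = fits.open('raw_frame.fits')
bstruct = fits.open('master_bias.fits')

# fit and subtract the overscan, trim the data section, and remove the
# master bias; the frame is modified in place
bias(struct, subover=True, trim=True, subbias=True, bstruct=bstruct,
     median=False, function='polynomial', order=3, rej_lo=3, rej_hi=3,
     niter=10, plotover=False, log=None, verbose=True)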
Example #6
def bias(struct,
         subover=True,
         trim=True,
         subbias=False,
         bstruct=None,
         median=False,
         function='polynomial',
         order=3,
         rej_lo=3,
         rej_hi=3,
         niter=10,
         plotover=False,
         log=None,
         verbose=True):
    """Bias subtracts the bias levels from a frame.  It will fit and subtract the overscan
      region, trim the images, and subtract a master bias if required.

      struct--image structure 
      subover--subtract the overscan region
      trim--trim the image
      subbias--subtract master bias
      bstruct--master bias image structure
      median--use the median instead of mean in image statistics
      function--form to fit to the overscan region
      order--order for the function
      rej_lo--sigma  of low points to reject in the fit
      rej_hi--sigma of high points to reject in the fit
      niter--number of iterations
      log--saltio log for recording information
      verbose--whether to print to stdout 
   """
    infile = saltkey.getimagename(struct[0])

    # how many extensions?
    nsciext = saltkey.get('NSCIEXT', struct[0])
    nextend = saltkey.get('NEXTEND', struct[0])
    nccd = saltkey.get('NCCDS', struct[0])

    # how many amplifiers?--this is hard wired
    amplifiers = 2 * nccd

    #log the process
    if subover and log:
        message = '%28s %7s %5s %4s %6s' % \
            ('HDU','Overscan','Order','RMS','Niter')
        log.message(
            '\n     --------------------------------------------------',
            with_header=False,
            with_stdout=verbose)
        log.message(message, with_header=False, with_stdout=verbose)
        log.message('     --------------------------------------------------',
                    with_header=False,
                    with_stdout=verbose)

    if (plotover):
        plt.figure(1)
        plt.axes([0.1, 0.1, 0.8, 0.8])
        plt.xlabel('CCD Column')
        plt.ylabel('Pixel Counts (e-)')
        plt.ion()

    #loop through the extensions and subtract the bias
    for i in range(1, nsciext + 1):
        if struct[i].name == 'SCI':

            #get the bias section
            biassec = saltkey.get('BIASSEC', struct[i])
            y1, y2, x1, x2 = saltio.getSection(biassec, iraf_format=True)
            #get the data section
            datasec = saltkey.get('DATASEC', struct[i])
            dy1, dy2, dx1, dx2 = saltio.getSection(datasec, iraf_format=True)

            #setup the overscan region
            if subover:
                yarr = np.arange(y1, y2, dtype=float)
                data = struct[i].data
                odata = struct[i].data[y1:y2, x1:x2]
                if median:
                    odata = np.median((struct[i].data[y1:y2, x1:x2]), axis=1)
                    olevel = np.median((struct[i].data[y1:y2, x1:x2]))
                    saltkey.new('OVERSCAN', '%f' % (olevel),
                                'Overscan median value', struct[i])
                else:
                    odata = np.mean((struct[i].data[y1:y2, x1:x2]), axis=1)
                    olevel = np.mean((struct[i].data[y1:y2, x1:x2]))
                    saltkey.new('OVERSCAN', '%f' % (olevel),
                                'Overscan mean value', struct[i])

                #fit the overscan region
                ifit=saltfit.interfit(yarr, odata, function=function, \
                                      order=order, thresh=rej_hi, niter=niter)
                try:
                    ifit.interfit()
                    coeffs = ifit.coef
                    ofit = ifit(yarr)
                    omean, omed, osigma = saltstat.iterstat((odata - ofit),
                                                            sig=3,
                                                            niter=5)
                except ValueError:
                    #catch the error if it is a zero array
                    ofit = np.array(yarr) * 0.0
                    osigma = 0.0
                except TypeError:
                    #catch the error if it is a zero array
                    ofit = np.array(yarr) * 0.0
                    osigma = 0.0

                #if it hasn't been already, convert image to
                #double format
                struct[i].data = 1.0 * struct[i].data
                try:
                    struct[i].header.remove('BZERO')
                    struct[i].header.remove('BSCALE')
                except:
                    pass

                #subtract the overscan region
                for j in range(len(struct[i].data[0])):
                    struct[i].data[y1:y2, j] -= ofit

                #report the information
                if log:
                    message = '%25s[%1d] %8.2f %3d %7.2f %3d' % \
                        (infile, i, olevel, order, osigma, niter)
                    log.message(message,
                                with_stdout=verbose,
                                with_header=False)

                #add the statistics to the image header
                saltkey.new('OVERRMS', '%f' % (osigma), 'Overscan RMS value',
                            struct[i])

                #update the variance frame
                if saltkey.found('VAREXT', struct[i]):
                    vhdu = saltkey.get('VAREXT', struct[i])
                    try:
                        vdata = struct[vhdu].data
                        #The bias level should not be included in the noise from the signal
                        for j in range(len(struct[i].data[0])):
                            vdata[y1:y2, j] -= ofit
                        #add a bit to make sure that the minimum error is the rednoise
                        rdnoise = saltkey.get('RDNOISE', struct[i])
                        vdata[vdata < rdnoise**2] = rdnoise**2
                        struct[vhdu].data = vdata + osigma**2

                    except Exception as e:
                        msg = 'Cannot update the variance frame in %s[%i] because %s' % (
                            infile, vhdu, e)
                        raise SaltError(msg)

                #plot the overscan region
                if plotover:
                    plt.plot(yarr, odata)
                    plt.plot(yarr, ofit)

            #trim the data and update the headers
            if trim:
                struct[i].data = struct[i].data[dy1:dy2, dx1:dx2]
                datasec = '[1:' + str(dx2 - dx1) + ',1:' + str(dy2 - dy1) + ']'
                saltkey.put('DATASEC', datasec, struct[i])

                #update the variance frame
                if saltkey.found('VAREXT', struct[i]):
                    vhdu = saltkey.get('VAREXT', struct[i])
                    struct[vhdu].data = struct[vhdu].data[dy1:dy2, dx1:dx2]
                    datasec = '[1:' + str(dx2 - dx1) + ',1:' + str(dy2 -
                                                                   dy1) + ']'
                    saltkey.put('DATASEC', datasec, struct[vhdu])
                #update the BPM frame
                if saltkey.found('BPMEXT', struct[i]):
                    bhdu = saltkey.get('BPMEXT', struct[i])
                    struct[bhdu].data = struct[bhdu].data[dy1:dy2, dx1:dx2]
                    datasec = '[1:' + str(dx2 - dx1) + ',1:' + str(dy2 -
                                                                   dy1) + ']'
                    saltkey.put('DATASEC', datasec, struct[bhdu])

            #subtract the master bias if necessary
            if subbias and bstruct:
                struct[i].data -= bstruct[i].data

                #update the variance frame
                if saltkey.found('VAREXT', struct[i]):
                    vhdu = saltkey.get('VAREXT', struct[i])
                    try:
                        vdata = struct[vhdu].data
                        struct[vhdu].data = vdata + bstruct[vhdu].data
                    except Exception as e:
                        msg = 'Cannot update the variance frame in %s[%i] because %s' % (
                            infile, vhdu, e)
                        raise SaltError(msg)
Example #7
def fitradius(radius, data):
    """Fit a line to the data"""
    it=interfit(radius, data, function='polynomial', order=3)
    it.interfit()
    d=it(radius)
    return radius[d.argmax()]
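A minimal usage sketch for fitradius (the radial profile is synthetic; interfit must be available from the SALT fitting module):

import numpy as np

# radial brightness profile peaking near r = 25
radius = np.arange(100, dtype=float)
data = np.exp(-0.5 * ((radius - 25.0) / 10.0) ** 2)

# radius at the maximum of the fitted cubic polynomial
r_peak = fitradius(radius, data)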
Example #8
def specsens(specfile, outfile, stdfile, extfile, airmass=None, exptime=None,
             stdzp=3.68e-20, function='polynomial', order=3, thresh=3, niter=5,
             fitter='gaussian', clobber=True, logfile='salt.log', verbose=True):

    with logging(logfile, debug) as log:

        # read in the specfile and create a spectrum object
        obs_spectra = st.readspectrum(specfile.strip(), error=True, ftype='ascii')

        # smooth the observed spectrum
        # read in the std file and convert from magnitudes to fnu
        # then convert it to fwave (ergs/s/cm2/A)
        std_spectra = st.readspectrum(stdfile.strip(), error=False, ftype='ascii')
        std_spectra.flux = Spectrum.magtoflux(std_spectra.flux, stdzp)
        std_spectra.flux = Spectrum.fnutofwave(
            std_spectra.wavelength, std_spectra.flux)

        # Get the typical bandpass of the standard star,
        std_bandpass = np.diff(std_spectra.wavelength).mean()
        # Smooth the observed spectrum to that bandpass
        obs_spectra.flux = st.boxcar_smooth(obs_spectra, std_bandpass)
        # read in the extinction file (leave in magnitudes)
        ext_spectra = st.readspectrum(extfile.strip(), error=False, ftype='ascii')

        # determine the airmass if not specified
        if saltio.checkfornone(airmass) is None:
            message = 'Airmass was not supplied'
            raise SALTSpecError(message)

        # determine the exptime if not specified
        if saltio.checkfornone(exptime) is None:
            message = 'Exposure Time was not supplied'
            raise SALTSpecError(message)

        # calculate the calibrated spectra
        log.message('Calculating the calibration curve for %s' % specfile)
        cal_spectra = sensfunc(
            obs_spectra, std_spectra, ext_spectra, airmass, exptime)

        # plot(cal_spectra.wavelength, cal_spectra.flux * std_spectra.flux)
        # fit the spectra--first take a first cut of the spectra
        # using the median absolute deviation to throw away bad points
        cmed = np.median(cal_spectra.flux)
        cmad = saltstat.mad(cal_spectra.flux)
        mask = (abs(cal_spectra.flux - cmed) < thresh * cmad)
        mask = np.logical_and(mask, (cal_spectra.flux > 0))

        # now fit the data
        # Fit using a gaussian process.
        if fitter == 'gaussian':
            from sklearn.gaussian_process import GaussianProcess
            # Instantiate a Gaussian Process model

            dy = obs_spectra.var[mask] ** 0.5
            dy /= obs_spectra.flux[mask] / cal_spectra.flux[mask]
            y = cal_spectra.flux[mask]
            gp = GaussianProcess(corr='squared_exponential', theta0=1e-2,
                                 thetaL=1e-4, thetaU=0.1, nugget=(dy / y) ** 2.0)
            X = np.atleast_2d(cal_spectra.wavelength[mask]).T
            # Fit to data using Maximum Likelihood Estimation of the parameters
            gp.fit(X, y)
    
            x = np.atleast_2d(cal_spectra.wavelength).T
            # Make the prediction on the meshed x-axis (ask for MSE as well)
            y_pred = gp.predict(x)

            cal_spectra.flux = y_pred

        else:
            fit = interfit(cal_spectra.wavelength[mask],
                           cal_spectra.flux[mask],
                           function=function, order=order,
                           thresh=thresh, niter=niter)
            fit.interfit()
            cal_spectra.flux = fit(cal_spectra.wavelength)

        # write the spectra out
        st.writespectrum(cal_spectra, outfile, ftype='ascii')
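A minimal usage sketch for specsens (the file names are illustrative; the observed, standard-star, and extinction spectra are assumed to be ASCII tables readable by the SALT spectral tools):

# derive a sensitivity curve from an observed standard-star spectrum;
# airmass and exptime are required, and any fitter other than 'gaussian'
# falls through to the iterative polynomial fit
specsens('obs_standard.txt', 'sens_curve.txt', 'std_reference.txt',
         'extinction.txt', airmass=1.2, exptime=300.0,
         function='polynomial', order=3, thresh=3, niter=5,
         fitter='polynomial', clobber=True, logfile='salt.log',
         verbose=True)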
Example #9
def specsens(specfile,
             outfile,
             stdfile,
             extfile,
             airmass=None,
             exptime=None,
             stdzp=3.68e-20,
             function='polynomial',
             order=3,
             thresh=3,
             niter=5,
             fitter='gaussian',
             clobber=True,
             logfile='salt.log',
             verbose=True):

    with logging(logfile, debug) as log:

        # read in the specfile and create a spectrum object
        obs_spectra = st.readspectrum(specfile.strip(),
                                      error=True,
                                      ftype='ascii')

        # smooth the observed spectrum
        # read in the std file and convert from magnitudes to fnu
        # then convert it to fwave (ergs/s/cm2/A)
        std_spectra = st.readspectrum(stdfile.strip(),
                                      error=False,
                                      ftype='ascii')
        std_spectra.flux = Spectrum.magtoflux(std_spectra.flux, stdzp)
        std_spectra.flux = Spectrum.fnutofwave(std_spectra.wavelength,
                                               std_spectra.flux)

        # Get the typical bandpass of the standard star,
        std_bandpass = np.diff(std_spectra.wavelength).mean()
        # Smooth the observed spectrum to that bandpass
        obs_spectra.flux = st.boxcar_smooth(obs_spectra, std_bandpass)
        # read in the extinction file (leave in magnitudes)
        ext_spectra = st.readspectrum(extfile.strip(),
                                      error=False,
                                      ftype='ascii')

        # determine the airmass if not specified
        if saltio.checkfornone(airmass) is None:
            message = 'Airmass was not supplied'
            raise SALTSpecError(message)

        # determine the exptime if not specified
        if saltio.checkfornone(exptime) is None:
            message = 'Exposure Time was not supplied'
            raise SALTSpecError(message)

        # calculate the calibrated spectra
        log.message('Calculating the calibration curve for %s' % specfile)
        cal_spectra = sensfunc(obs_spectra, std_spectra, ext_spectra, airmass,
                               exptime)

        # plot(cal_spectra.wavelength, cal_spectra.flux * std_spectra.flux)
        # fit the spectra--first take a first cut of the spectra
        # using the median absolute deviation to throw away bad points
        cmed = np.median(cal_spectra.flux)
        cmad = saltstat.mad(cal_spectra.flux)
        mask = (abs(cal_spectra.flux - cmed) < thresh * cmad)
        mask = np.logical_and(mask, (cal_spectra.flux > 0))

        # now fit the data
        # Fit using a gaussian process.
        if fitter == 'gaussian':
            from sklearn.gaussian_process import GaussianProcess
            # Instantiate a Gaussian Process model

            dy = obs_spectra.var[mask]**0.5
            dy /= obs_spectra.flux[mask] / cal_spectra.flux[mask]
            y = cal_spectra.flux[mask]
            gp = GaussianProcess(corr='squared_exponential',
                                 theta0=1e-2,
                                 thetaL=1e-4,
                                 thetaU=0.1,
                                 nugget=(dy / y)**2.0)
            X = np.atleast_2d(cal_spectra.wavelength[mask]).T
            # Fit to data using Maximum Likelihood Estimation of the parameters
            gp.fit(X, y)

            x = np.atleast_2d(cal_spectra.wavelength).T
            # Make the prediction on the meshed x-axis (ask for MSE as well)
            y_pred = gp.predict(x)

            cal_spectra.flux = y_pred

        else:
            fit = interfit(cal_spectra.wavelength[mask],
                           cal_spectra.flux[mask],
                           function=function,
                           order=order,
                           thresh=thresh,
                           niter=niter)
            fit.interfit()
            cal_spectra.flux = fit(cal_spectra.wavelength)

        # write the spectra out
        st.writespectrum(cal_spectra, outfile, ftype='ascii')
Example #10
def fitradius(radius, data):
    """Fit a line to the data"""
    it = interfit(radius, data, function='polynomial', order=3)
    it.interfit()
    d = it(radius)
    return radius[d.argmax()]