Example #1
def wavelength_array(spec):
    """ Produces array for wavelenght of a given array. """
    w0 = pf.getval(spec, "CRVAL1")
    deltaw = pf.getval(spec, "CDELT1")
    pix0 = pf.getval(spec, "CRPIX1")
    npix = pf.getval(spec, "NAXIS1")
    return w0 + deltaw * (np.arange(npix) + 1 - pix0)
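The helper above implements the standard linear FITS dispersion relation, wavelength = CRVAL1 + CDELT1 * (p - CRPIX1) for 1-based pixel number p. A minimal, self-contained sketch of the same arithmetic with made-up header values (no FITS file needed):

import numpy as np

# hypothetical header values, for illustration only
w0, deltaw, pix0, npix = 4000.0, 1.25, 1.0, 5
wave = w0 + deltaw * (np.arange(npix) + 1 - pix0)
print(wave)  # 4000.0 to 4005.0 in steps of 1.25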
Example #2
def stdsensfunc(fs=None):
    iraf.cd('work')
    if fs is None:
        fs = glob('x1d/sci*x1d*c?.fits')
    if len(fs) == 0:
        print "WARNING: No extracted spectra to create sensfuncs from."
        iraf.cd('..')
        return

    if not os.path.exists('std'):
        os.mkdir('std')
    for f in fs:
        # Put the file in the std directory, swapping 'x1d' for 'sens' and 'sci' for 'std'
        outfile = 'std/' + f.split('/')[1]
        outfile = outfile.replace('x1d', 'sens').replace('sci', 'std')
        outfile = outfile.replace('.fits', '.dat')
        # if the object name is in the list of standard stars from pysalt
        if isstdstar(f):
            # We use pysalt here because standard requires a
            # dispersion correction which was already taken care of above
            # Write out an ascii file that pysalt.specsens can read
            asciispec = 'std/std.ascii.dat'
            spectoascii(f, asciispec)
            # run specsens
            stdfile = pysaltpath + '/data/standards/spectroscopic/m%s.dat' % pyfits.getval(f, 'OBJECT').lower().replace('-','_')
            extfile = pysaltpath + '/data/site/suth_extinct.dat'
            iraf.unlearn(iraf.specsens)
            iraf.specsens(asciispec, outfile, stdfile, extfile,
                          airmass=pyfits.getval(f, 'AIRMASS'),
                          exptime=pyfits.getval(f, 'EXPTIME'), function='poly',
                          order=11, clobber=True, mode='h', thresh=1e10)
            # delete the ascii file
            os.remove(asciispec)
    iraf.cd('..')
Example #3
def wavelength_array(spec, axis=1, extension=0):
    """ Produces array for wavelenght of a given array. """
    w0 = pf.getval(spec, "CRVAL{0}".format(axis), extension)
    deltaw = pf.getval(spec, "CD{0}_{0}".format(axis), extension)
    pix0 = pf.getval(spec, "CRPIX{0}".format(axis), extension)
    npix = pf.getval(spec, "NAXIS{0}".format(axis), extension)
    return w0 + deltaw * (np.arange(npix) + 1 - pix0)
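This variant reads the dispersion step from the CD-matrix keyword CD1_1 (for axis=1), whereas Example #1 reads CDELT1; real headers may carry either. A hedged sketch of a fallback that tries the CD-matrix keyword first, assuming pf is pyfits or astropy.io.fits as in the examples above:

def get_dispersion(spec, axis=1, extension=0):
    """ Return the wavelength step per pixel, trying CDn_n and falling back to CDELTn. """
    try:
        return pf.getval(spec, "CD{0}_{0}".format(axis), extension)
    except KeyError:
        return pf.getval(spec, "CDELT{0}".format(axis), extension)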
Example #4
def mark_boundaries(flist, slit_size, combined_img, rot_img, ax1, img_angle, ref_angle, fudge_factor, target_font_size):
    flist_r = flist[::-1]

    angle =  (ref_angle - img_angle) - 90
    angle_rad = angle/180.0 * math.pi
    ydim, xdim = np.shape(combined_img)
    x = np.arange( (ydim) * math.cos(angle_rad))
    
    slope = -math.tan(angle_rad)
    for indx, i in enumerate(np.arange(xdim/int(slit_size)+1)):                             
        intercept = (ydim) * math.sin(angle_rad) + ((i)*slit_size/math.cos(angle_rad))
        xnew = x+((i)*math.sin(angle_rad)*slit_size)
        y = slope*xnew+intercept
        ax1.plot(xnew, y, 'k')
        try:
            if np.mod(i, 2) == 1:
                ax1.text(math.ceil(xnew[-1]+1), math.ceil(y[-1]), '%s' %(pyfits.getval(flist_r[indx].replace('collapse_flt', 'combined_img'), 'targname', 0)) , fontsize = target_font_size, ha = 'left', va = 'bottom')
                ax1.plot(np.arange(50) +  math.ceil(xnew[-1]), np.ones(50) * math.ceil(y[-1]) - 1, 'k')
                ax1.plot((-np.arange(50)) +  math.ceil(xnew[0]), np.ones(50) * math.ceil(y[0]) - 1, 'k')
            else:
                ax1.text(math.ceil(xnew[0]-1), math.ceil(y[0]), '%s' %(pyfits.getval(flist_r[indx].replace('collapse_flt', 'combined_img'), 'targname', 0)) , fontsize = target_font_size, ha = 'right', va = 'bottom')
                ax1.plot((-np.arange(50)) +  math.ceil(xnew[0]) - 1, np.ones(50) * math.ceil(y[0]) - 1, 'k')
                ax1.plot(np.arange(50) +  math.ceil(xnew[-1]) - 1, np.ones(50) * math.ceil(y[-1]) - 1, 'k')
        except:
            pass
    ax1 = mark_pix_loc(xdim, ydim, angle_rad, ax1)
    #pdb.set_trace()
    return ax1
Example #5
def update_wcs_from_pointing(files,pixscale):
   for f in files:
      if f[-5:]!=".fits":
         continue
      print "%s ....Updating values" % f
      fullfits = pyfits.open(f,mode='update')
      sci = fullfits[0]
      shape = sci.data.shape
      ra = wcs.ra2deg(sci.header['RA'].strip())
      dec = wcs.dec2deg(sci.header['DEC'].strip())
      sci.header.update('CRPIX1',shape[1]/2.)
      sci.header.update('CRVAL1',ra)
      sci.header.update('CRPIX2',shape[0]/2.)
      sci.header.update('CRVAL2',dec)
      sci.header.update('CD1_1',-pixscale/3600.)
      sci.header.update('CD1_2',0.)
      sci.header.update('CD2_1',0.)
      sci.header.update('CD2_2',pixscale/3600.)
      sci.header.update('CTYPE1','RA---TAN')
      sci.header.update('CTYPE2','DEC--TAN')
      sci.header.update('EQUINOX',2000.0)
      sci.header.update('RADESYS','FK5')
      del sci.header['CDELT1']
      del sci.header['CDELT2']
      fullfits.flush()
      print "  n   CTYPEn  CRPIXn    CRVALn       CDn_1        CDn_2"
      print " --- -------- ------- ---------- ------------- -------------"
      print "  1  %s %7.2f %10.6f %13.6e %13.6e" \
       % (getval(f,'CTYPE1'),getval(f,'CRPIX1'),getval(f,'CRVAL1'), \
       getval(f,'CD1_1'),getval(f,'CD1_2'))
      print "  2  %s %7.2f %10.6f %13.6e %13.6e" \
       % (getval(f,'CTYPE2'),getval(f,'CRPIX2'),getval(f,'CRVAL2'), \
       getval(f,'CD2_1'),getval(f,'CD2_2'))
def combine_medium(file_list , num=70):
    '''(str, int) -> ndarray, asciidata

    Puts a list of fits files into the correct order for use with the
    median function. Adds to the fits header history.
    '''
    #does the median; assumes all files have the same fits header
    #high ram but quick
    if len(file_list) < num:
        shape = (fits.getval(file_list[0],'NAXIS1'),
                 fits.getval(file_list[0],'NAXIS2'),
                 len(file_list))
        temp = np.zeros(shape)
        for i,j in enumerate(file_list):
            temp[:,:,i],hdr = util.fromfits(j,verbose=False)
        out = np.median(temp,2)
    else:
        #low ram but takes longer
        shape = (fits.getval(file_list[0],'NAXIS1'),
                 fits.getval(file_list[0],'NAXIS2'))
        out = np.zeros(shape)
        temp = np.zeros((shape[0],len(file_list)))
        for i in xrange(shape[1]):
            for j,k in enumerate(file_list):
                temp[:,j] = util.fromfits(k, verbose=False)[0][:,i]
            out[:,i] = np.median(temp,1)
        temp,hdr = util.fromfits(k, verbose=False)
    hdr.add_history('Medium combine')
    return out, hdr
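For small stacks the same idea can be written more compactly; a minimal sketch assuming astropy.io.fits is available and all files share one shape (it keeps the first file's header instead of the util.fromfits header used above):

import numpy as np
from astropy.io import fits as apfits

def simple_median_combine(file_list):
    """ Median-combine a small list of equally shaped fits images. """
    stack = np.array([apfits.getdata(f) for f in file_list])  # shape (nfiles, ny, nx)
    hdr = apfits.getheader(file_list[0])
    hdr.add_history('Median combine')
    return np.median(stack, axis=0), hdr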
Example #7
    def inputs(cls, gainmap):
        """ return the pulse height image name from the input gainmap

        """

        gainmap_path, gainmap_name = os.path.split(gainmap)
        segment = pyfits.getval(gainmap, 'SEGMENT')
        dethv = int(pyfits.getval(gainmap, 'DETHV'))

        both_inputs = [gainmap]

        if segment == 'FUVA':
            other_root = gainmap.replace(FUVA_string, FUVB_string).replace('_{}_'.format(dethv), '_???_')
        elif segment == 'FUVB':
            other_root = gainmap.replace(FUVB_string, FUVA_string).replace('_{}_'.format(dethv), '_???_')

        other_gainmap = glob.glob(other_root)
        if len(other_gainmap) != 1:
            raise IOError("too many gainmaps found {}".format(other_gainmap))
        else:
            other_gainmap = other_gainmap[0] 

        both_inputs.append(other_gainmap)
        both_inputs.sort()

        return tuple(both_inputs)
Example #8
def stdsensfunc(fs=None):
    iraf.cd('work')
    if fs is None:
        fs = glob('x1d/sci*x1d*c?.fits')
    if len(fs) == 0:
        print "WARNING: No extracted spectra to create sensfuncs from."
        iraf.cd('..')
        return

    if not os.path.exists('std'):
        os.mkdir('std')
    for f in fs:
        # Put the file in the std directory, swapping 'x1d' for 'sens' and 'sci' for 'std'
        outfile = 'std/' + f.split('/')[1]
        outfile = outfile.replace('x1d', 'sens').replace('sci', 'std')
        outfile = outfile.replace('.fits', '.dat')
        # if the object name is in the list of standard stars from pysalt
        if isstdstar(f):
            # We use pysalt here because standard requires a
            # dispersion correction which was already taken care of above
            # Write out an ascii file that pysalt.specsens can read
            asciispec = 'std/std.ascii.dat'
            spectoascii(f, asciispec)
            # run specsens
            stdfile = pysaltpath + '/data/standards/spectroscopic/m%s.dat' % pyfits.getval(f, 'OBJECT').lower().replace('-','_')
            extfile = pysaltpath + '/data/site/suth_extinct.dat'
            iraf.unlearn(iraf.specsens)
            iraf.specsens(asciispec, outfile, stdfile, extfile,
                          airmass=pyfits.getval(f, 'AIRMASS'),
                          exptime=pyfits.getval(f, 'EXPTIME'), function='poly',
                          order=11, clobber=True, mode='h', thresh=1e10)
            # delete the ascii file
            os.remove(asciispec)
    iraf.cd('..')
Example #9
def update_psfdat():
    import os
    from astropy.io import ascii
    from astropy.table import Column
    import pyfits
    # read in the psf mags computed by Dan
    psfdat = ascii.read( 'psfphot.dat')

    # get exposure time and number of exposures from the header
    etimelist, nexplist, etcSNRlist, etcOSNRlist, psfSNRlist = [], [], [], [], []
    for row in psfdat :
        imdir = '/store/snreproc/%s.090mas/'%(row['sn'])
        suffix = '-e00_sub_masked' if row['image']=='sub' else '_reg_drz_sci'
        imfile = os.path.join( imdir,
            '%s_%s_%s%s.fits'%(
                row['sn'], row['band'], row['epoch'], suffix ) )
        etime = pyfits.getval( imfile, 'EXPTIME' )
        nexp = pyfits.getval( imfile, 'NCOMBINE' )
        etimelist.append( etime )
        nexplist.append( nexp )
        etcSNR, etcSNRopt = synphotSNR( row['band'], row['stack'], etime=etime, nexp=nexp)
        etcSNRlist.append( etcSNR )
        etcOSNRlist.append( etcSNRopt )
        psfSNR = 1.08574 / row['err']
        psfSNRlist.append( psfSNR )

    nexpCol = Column( nexplist, 'nexp', dtype=int, format='%i')
    etimeCol = Column( etimelist, 'etime', dtype=float, format='%8.2f' )
    etcSNRCol = Column( etcSNRlist, 'SNRetc', dtype=float, format='%6.2f' )
    etcOSNRCol = Column( etcOSNRlist, 'optSNRetc', dtype=float, format='%6.2f' )
    psfSNRCol = Column( psfSNRlist, 'SNRpsf', dtype=float, format='%6.2f' )

    psfdat.add_columns( [ nexpCol, etimeCol, etcSNRCol, etcOSNRCol, psfSNRCol], indexes=[4,4,9,9,9])
    psfdat.write( 'psfphot2.dat', format='ascii.commented_header' )
    psfdat.write( 'psfphot3.dat', format='ascii.fixed_width' )
Example #10
def wavelength_array(spec):
    """ Produces array for wavelenght of a given array. """
    w0 = pf.getval(spec, "CRVAL1")
    deltaw = pf.getval(spec, "CD1_1")
    pix0 = pf.getval(spec, "CRPIX1")
    npix = pf.getval(spec, "NAXIS1")
    return w0 + deltaw * (np.arange(npix) + 1 - pix0)
Example #11
    def __process_fits_object(self, fits_string):
        '''
        Process FITS file object and extract info.
        http://stackoverflow.com/questions/11892623/python-stringio-and-compatibility-with-with-statement-context-manager
        Returns the temporary file name and DataStream object.
        '''
        test = ''
        with self.__tempinput(fits_string) as tempfilename:
            test = tempfilename
            fitsdata = pyfits.getdata(tempfilename)
            bjd_trunci = float(pyfits.getval(tempfilename, 'bjdrefi', ext=1))
            bjd_truncf = float(pyfits.getval(tempfilename, 'bjdreff', ext=1))

            # Note: Times are updated to be in proper reduced barycentric Julian date,
            # RBJD = BJD - 2400000.0
            time = fitsdata['TIME'] + bjd_trunci + bjd_truncf - 2400000.
            pdcflux = fitsdata['PDCSAP_FLUX']
            pdcerror = fitsdata['PDCSAP_FLUX_ERR']
            errorstat = fitsdata['SAP_QUALITY']

            ndx = np.where(errorstat == 0)
            retval = DataStream(arrays=(time[ndx], pdcflux[ndx],
                                        pdcerror[ndx]))

            # Fix for windows, returns the filename into main so that os.unlink can be called there
            return test, retval
Example #12
def create_reference_file(input_dir, input_flist):
    '''
    This function will point Justin's refstis scripts to a folder containing darks or biases
    and have it create a superdark or superbias. The script will automatically try to create darks
    and biases from all files in a directory. Biases and darks should be kept in separate
    folders. This script automatically divides observations into individual weeks.
    '''
    date = []
    for ifile in input_flist:
        date.append(
            pyfits.getval(os.path.join(input_dir, ifile), 'texpstrt', 0))
        date.append(pyfits.getval(os.path.join(input_dir, ifile), 'texpend',
                                  0))
    filetype = pyfits.getval(os.path.join(input_dir, ifile), 'targname', 0)
    date = np.array(date)
    month_begin = np.min(date)
    month_end = np.max(date)
    if filetype == 'DARK':
        REFSTIS_wrapper.separate_obs(input_dir,
                                     month_begin,
                                     month_end,
                                     input_filetype='flc',
                                     move_files=False)
    else:
        REFSTIS_wrapper.separate_obs(input_dir,
                                     month_begin,
                                     month_end,
                                     move_files=False)
    make_reffiles(input_dir, filetype)

    #Copy reference file from each week folder to the reffiles folder
    os.path.walk(input_dir, copy_reference_files_to_reffiles, '')
Example #13
    def __process_fits_object(self, fits_string):
        '''
        Process FITS file object and extract info.
        http://stackoverflow.com/questions/11892623/python-stringio-and-compatibility-with-with-statement-context-manager
        Returns the temporary file name and DataStream object.
        '''
        test = ''
        with self.__tempinput(fits_string) as tempfilename:
            test = tempfilename
            fitsdata = pyfits.getdata(tempfilename)
            bjd_trunci = float(pyfits.getval(tempfilename, 'bjdrefi', ext=1))
            bjd_truncf = float(pyfits.getval(tempfilename, 'bjdreff', ext=1))

            # Note: Times are updated to be in proper reduced barycentric Julian date,
            # RBJD = BJD - 2400000.0
            time = fitsdata['TIME'] + bjd_trunci + bjd_truncf - 2400000.
            pdcflux = fitsdata['PDCSAP_FLUX']
            pdcerror = fitsdata['PDCSAP_FLUX_ERR']
            errorstat = fitsdata['SAP_QUALITY']

            ndx = np.where(errorstat == 0)
            retval = DataStream(arrays=(time[ndx], pdcflux[ndx], pdcerror[ndx]))

            # Fix for windows, returns the filename into main so that os.unlink can be called there
            return test, retval
def combine_sigmaclip(file_list, num=70):
    '''(str, int) -> ndarray, asciidata

    Puts a list of fits files into the correct order for use with the
    sigma clipping function. Adds to the fits header history.
    '''
    #fast but uses lots of ram
    if len(file_list) < num:
        shape = (fits.getval(file_list[0],'NAXIS1'),
                 fits.getval(file_list[0],'NAXIS2'),
                 len(file_list))
        temp = np.zeros(shape)
        for i,j in enumerate(file_list):
            temp[:,:,i],hdr = util.fromfits(j,verbose=False)
        out = Sigmaclip(temp,axis=2)
    else:
        #low ram but takes longer
        shape = (fits.getval(file_list[0],'NAXIS1'),
                 fits.getval(file_list[0],'NAXIS2'))
        out = np.zeros(shape)
        temp = np.zeros((shape[0],len(file_list)))
        for i in xrange(shape[1]):
            for j,k in enumerate(file_list):
                temp[:,j] = util.fromfits(k,verbose=False)[0][:,i]
            out[:,i] = Sigmaclip(temp,axis=1)
        temp, hdr = util.fromfits(k,verbose=False)
    hdr.add_history('Sigmaclip combine')
    return out, hdr
Example #15
def wavelength_array(spec, axis=1, extension=0):
    """ Produces array for wavelenght of a given array. """
    w0 = pf.getval(spec, "CRVAL{0}".format(axis), extension)
    deltaw = pf.getval(spec, "CD{0}_{0}".format(axis), extension)
    pix0 = pf.getval(spec, "CRPIX{0}".format(axis), extension)
    npix = pf.getval(spec, "NAXIS{0}".format(axis), extension)
    return w0 + deltaw * (np.arange(npix) + 1 - pix0)
Example #16
def identify2d(fs=None):
    iraf.cd('work')
    if fs is None:
        fs = glob('mos/arc*mos*.fits')
    if len(fs) == 0:
        print "WARNING: No mosaiced (2D) specidentify."
        # Change directories to fail gracefully
        iraf.cd('..')
        return
    arcfs, arcgas = get_ims(fs, 'arc')
    if not os.path.exists('id2'):
        os.mkdir('id2')

    lampfiles = {
        'Th Ar': 'ThAr.salt',
        'Xe': 'Xe.salt',
        'Ne': 'NeAr.salt',
        'Cu Ar': 'CuAr.salt',
        'Ar': 'Argon_hires.salt',
        'Hg Ar': 'HgAr.salt'
    }
    for i, f in enumerate(arcfs):
        ga = arcgas[i]

        # find lamp and corresponding linelist
        lamp = pyfits.getval(f, 'LAMPID')
        lampfn = lampfiles[lamp]
        if pyfits.getval(f, 'GRATING') == 'PG0300' and lamp == 'Ar':
            lampfn = 'Argon_lores.swj'

        ccdsum = int(pyfits.getval(f, 'CCDSUM').split()[1])

        # linelistpath is a global variable defined in beginning, path to
        # where the line lists are.
        lamplines = pysaltpath + '/data/linelists/' + lampfn
        print(lamplines)

        # img num should be right before the .fits
        imgnum = f[-9:-5]
        # run pysalt specidentify
        idfile = 'id2/arc%05.2fid2%04i' % (float(ga), int(imgnum)) + '.db'
        iraf.unlearn(iraf.specidentify)
        iraf.flpr()
        iraf.specidentify(
            images=f,
            linelist=lamplines,
            outfile=idfile,
            guesstype='rss',
            inter=True,  # automethod='FitXcor',
            rstep=600 / ccdsum,
            rstart=200 / ccdsum,
            startext=1,
            clobber='yes',
            #startext=1, clobber='yes',
            verbose='no',
            mode='hl',
            logfile='salt.log',
            mdiff=2,
            function='legendre')
    iraf.cd('..')
Example #17
def fluxcal(stdsfolder='./', fs=None):
    iraf.cd('work')
    if fs is None:
        fs = glob('x1d/sci*x1d*c*.fits')
    if len(fs) == 0:
        print "WARNING: No science chip spectra to flux calibrate."
        iraf.cd('..')
        return

    if not os.path.exists('flx'):
        os.mkdir('flx')
    extfile = pysaltpath + '/data/site/suth_extinct.dat'
    stdfiles = glob(stdsfolder + '/std/*sens*c?.dat')
    print(stdfiles)
    for f in fs:
        outfile = f.replace('x1d', 'flx')
        chip = outfile[-6]
        hdu = pyfits.open(f)
        ga = f.split('/')[1][3:8]
        # Get the standard sensfunc with the same grating angle
        stdfile = None
        for stdf in stdfiles:
            if np.isclose(float(ga),
                          float(stdf.split('/')[stdf.count('/')][3:8]),
                          rtol=1e-2):
                # Get the right chip number
                if chip == stdf[-5]:
                    stdfile = stdf
                    break
        if stdfile is None:
            print('No standard star with grating-angle %s' % ga)
            continue
        # for each extracted aperture
        for i in range(hdu[0].data.shape[1]):
            # create an ascii file that pysalt can read
            asciiname = 'flx/sciflx.dat'
            outtmpname = 'flx/scical.dat'
            spectoascii(f, asciiname, i)
            # Run pysalt.speccal
            iraf.unlearn(iraf.speccal)
            iraf.flpr()
            iraf.speccal(asciiname,
                         outtmpname,
                         stdfile,
                         extfile,
                         airmass=pyfits.getval(f, 'AIRMASS'),
                         exptime=pyfits.getval(f, 'EXPTIME'),
                         clobber=True,
                         mode='h')
            # read in the flux calibrated ascii file and copy its
            # contents into a fits file
            flxcal = np.genfromtxt(outtmpname).transpose()
            hdu[0].data[0, i] = flxcal[1]
            hdu[0].data[2, i] = flxcal[2]
            # delete the ascii file
            os.remove(asciiname)
            os.remove(outtmpname)
        hdu.writeto(outfile, clobber=True)
    iraf.cd('..')
Example #18
def read_PHOENIX(chosen_path):
    mu = fits.getdata(chosen_path, 'MU')
    data = fits.getdata(chosen_path)
    CDELT1 = fits.getval(chosen_path, 'CDELT1')
    CRVAL1 = fits.getval(chosen_path, 'CRVAL1')
    wavelengths = np.arange(data.shape[1]) * CDELT1 + CRVAL1
    I = data.transpose()
    return wavelengths, I, mu
Example #19
def get_ims(fs, imtype):
    imtypekeys = {'sci': 'OBJECT', 'arc': 'ARC', 'flat': 'FLAT'}
    ims = []
    grangles = []
    for f in fs:
        if pyfits.getval(f, 'OBSTYPE') == imtypekeys[imtype]:
            ims.append(f)
            grangles.append(pyfits.getval(f, 'GR-ANGLE'))
    return np.array(ims), np.array(grangles)
Example #20
def get_ims(fs, imtype):
    imtypekeys = {'sci': 'OBJECT', 'arc': 'ARC', 'flat': 'FLAT'}
    ims = []
    grangles = []
    for f in fs:
        if pyfits.getval(f, 'OBSTYPE') == imtypekeys[imtype]:
            ims.append(f)
            grangles.append(pyfits.getval(f, 'GR-ANGLE'))
    return np.array(ims), np.array(grangles)
Example #21
def make_counts_image(image, outfile='default'):
    '''FUNCTION TO CONVERT CNTS/SEC IMAGE TO COUNTS (IF NECESSARY)'''

    # -- parse output filename & save a copy to file (NOTE: if outfile == input image, data is overwritten).
    if (image != outfile):
        if outfile == 'default': outfile = image.split('.fits')[0] + '_cnts.fits'
        shutil.copy(image,outfile)
    else: print 'OVERWRITING DATA FOR IMAGE: '+image+'.'


    # -- determine if image is flt/flc, crclean, or drz/drc
    prihdr = pyfits.getheader(outfile,ext=0)
    pscale = prihdr.get('D001SCAL',default='NA')
    if pscale != 'NA': imtype = 'drz'
    elif len(image.split('crclean.fits')) > 1: imtype = 'crclean'
    else: imtype = 'flt'


    # -- initialize a few required parameters
    detector = prihdr['DETECTOR']
    exptime = prihdr['EXPTIME']


    # -- multiply by exposure time (only if image is already in cnts/sec)
    #      [notes] -- IR crcleans are actually in cnts, but "BUNIT" still says per second (can't trust bunit for now).
    #              -- We assume drz are cnts/sec, but this does not have to be true.
    #
    if imtype == 'drz':
        # -- save background & pixel scale info
        if prihdr['extend'] == True: back = pyfits.getval(outfile,'MDRIZSKY',ext=1)  
        else: back = pyfits.getval(outfile,'MDRIZSKY',ext=0)
        pscale_nat = pyfits.getval(outfile,'D001ISCL',ext=0)
        pscale_img = pyfits.getval(outfile,'D001SCAL',ext=0)

        # -- assign the number of chips associated with this image
        if (prihdr['detector'] == 'IR'): nchips = 1.0                                          # IR
        elif (prihdr['subarray'] == True) and (len(prihdr['CCDAMP']) == 1): nchips = 1.0      # UVIS sub-array
        elif (prihdr['detector'] == 'UVIS') and (prihdr['subarray'] == False): nchips = 2.0   # UVIS full-frame
        else: raise Exception('Image type is not defined.')

        # -- add background and correct for different pixel scale (original backgrd is measured in raw images)
        fdata = pyfits.getdata(outfile,ext=0)
        fdata_cnts = np.copy(fdata) * exptime + np.sum(back)/nchips * (pscale_img/pscale_nat)**2
        hdulist = pyfits.open(outfile,mode='update')
        hdulist[0].data = fdata_cnts
        hdulist.close()

    elif ((detector == 'IR') & (imtype == 'flt')):
        hdulist = pyfits.open(outfile,mode='update')
        for ff in xrange(len(hdulist)):
            if hdulist[ff].name == 'SCI': hdulist[ff].data = hdulist[ff].data * exptime
        hdulist.close()

    else: print 'IMAGE SHOULD ALREADY BE IN UNITS OF COUNTS. RETURNING...'

    return outfile
Example #22
def info_license(fitsfile):
    """Print license information stored in FITS"""
    try:
        license = pyfits.getval(fitsfile, "LICENSE")
    except KeyError:
        print("License information not found.")
    else:
        licver = pyfits.getval(fitsfile, "LICVER")
        licurl = pyfits.getval(fitsfile, "LICURL")
        print("{lic} {ver} ({url})".format(lic=license, ver=licver, url=licurl))
def get_darks(path=None, combine_type='median', outdir=None,
              Filter=('SET-TEMP','EXPTIME')):
    '''(str,str,str,tuple(str) or str) -> dict(ndarray), dict(asciidata)

    Opens the dark directory, combines all *.fits or *.fit files in the
    directory, sorts by Filter options (can have multiple), and outputs
    fits files to outdir as well as the ndarray of combined fits
    files and a fits header with modified history.  Combine types can
    include: "sigmaclip", "median" and "sum".
    Known Issues:
    Median and sigmaclip combine give artifacts when used.'''

    comm = comb_Type(combine_type)
    #gui select directory
    if path is None:
        path = gui_getdir(title='Please Select Dark Directory')
        if not path:
            raise ValueError('Must specify directory where files are.')
    if not path.endswith('/'):
        path += '/'
    if outdir is None:
        outdir = gui_getdir(title='Please Select Save Directory')
    #load paths to fits
    fits_path = sorted(glob(path+'*'))
    fits_path = util.get_fits_type(fits_path,'dark')
    #sort by time and temp
    filters = {}
    for i in fits_path:
        if type(Filter) is tuple:
            filt = ''
            for j in Filter:
                filt += str(fits.getval(i,j)) + '_'
        else:
            filt = fits.getval(i,Filter)
        if not filt in filters.keys():
            filters[filt] = [i]
        else:
            filters[filt].append(i)

    out,hdr = {},{}
    for i in filters.keys():
        out[i],hdr[i] = comm(filters[i])
    #save as fits?
    if outdir is None:
        outdir = path
    else:
        # make dir if it doesn't exist
        if  not os.path.exists(outdir):
            os.mkdir(outdir)
    for i in out.keys():
        basename = os.path.split(filters[i][0])[-1]
        basename = os.path.splitext(basename)[0]
        util.tofits(outdir+basename+'_%s.fits'%combine_type.lower(), out[i],
               hdr[i],verbose=False)
    return out,hdr
def determine_correct_reference_files(input_dir, input_list, filetype):
    '''
    This function creates a nested dictionary which assigns a reference file name (from 
    the reffiles folder) to each gain, binaxis1, binaxis2, useafter combination
    Inputs:
        input_dir: directory of files whose headers you want to update
        input_list: names of files whose headers you want to update
        filetype: either drk or bia - the reference file whose name you want to update
    Output:
        mode_dict: a nested dictionary which assigns a reference file name (from
    the reffiles folder) to each gain, binaxis1, binaxis2, useafter combination

    This code assumes that you have set the environment variable myref to point to the 
    reffiles folder
    '''
    dates = []
    for ifile in input_list:
        filename = os.path.join(input_dir, ifile)
        dates.append(pyfits.getval(filename, 'texpstrt', 0))
        dates.append(pyfits.getval(filename, 'texpend', 0))
    data_start = min(dates)
    data_end = max(dates)
    anneal_weeks_4 = divide_anneal_month(data_start, data_end, '/Users/bostroem/science/cte/', 4)
    anneal_weeks_2 = divide_anneal_month(data_start, data_end, '/Users/bostroem/science/cte/', 2)
    #nested dictionary: gain, binaxis1, binaxis2, week
                #gain
    mode_dict = {1:
                    #binaxis1
                    {1:
                        #binaxis2
                        #1x1 gain = 1
                        {1:{round(anneal_weeks_4[0][0], 4):'', round(anneal_weeks_4[1][0], 4):'', round(anneal_weeks_4[2][0], 4):'', round(anneal_weeks_4[3][0], 4):''},   
                        # 1x2 gain = 1
                        2:{round(anneal_weeks_2[0][0], 4):'', round(anneal_weeks_2[1][0], 4):''}},    
                    2: 
                        #2x1 gain = 1
                        {1:{round(anneal_weeks_2[0][0], 4):'', round(anneal_weeks_2[1][0], 4):''},  
                        #2x2 gain = 1
                        2:{round(anneal_weeks_2[0][0], 4):'', round(anneal_weeks_2[1][0], 4):''}}}, 
                4:  
                    #binaxis1
                    {1:
                        #binaxis2
                        #1x1 gain = 4
                        {1:{round(anneal_weeks_2[0][0], 4):'', round(anneal_weeks_2[1][0], 4):''}}}}   
    reffile_list = glob.glob(os.path.join(os.environ['myref'], '*_%s*.fits' %(filetype)))
    for reffile in reffile_list:
        hdr0 = pyfits.getheader(reffile, 0)
        gain = hdr0['ccdgain']
        binaxis1 = hdr0['binaxis1']
        binaxis2 = hdr0['binaxis2']
        useafter = convert_useafter_to_mjd(hdr0['useafter'])
        mode_dict[gain][binaxis1][binaxis2][round(useafter, 4)] = 'myref$%s' %(reffile.split('/')[-1])
                       
    return mode_dict
def run(image, aper="4.", sky=8., width=3., plots=False):
    """
    Call this to perform all the following functions
    
    Parameters
        image:  string, name of the image
        aper:   string, size of the photometry aperture(s)
        sky:    float, where to start the sky aperture
        width:  float, how wide should the sky annulus be
        plots:  bool, save plots of the results for quicklook
        
        
    example:
    
    aperphot.run('myimage.fits',aper="2.")
    
    """

    message("Setting up photometry for %s" % (image))

    #make sure that we are using data we are prepared for, you can write other functions
    #to deal with this or help set alternate parameters. An ideal setup might involve a class structure
    #but we'll keep it more simple here
    instrument = pyfits.getval(image, "INSTRUME")
    if "NICMOS" not in instrument:
        raise IOError("Program only valid for NICMOS images, check inputs")

    #calculate the zeropoint from the header, these are set for NICMOS
    #you could make functions for different instruments here
    photfnu = pyfits.getval(image, "PHOTFNU")
    abzpt = -2.5 * np.log10(photfnu * 1.0 * 1e-23) - 48.6

    print "zeropoint: %f" % (abzpt)

    #calculate the effective gain for the image in NICMOS
    exp = float(pyfits.getval(image, 'EXPTIME'))
    hgain = float(pyfits.getval(image, 'ADCGAIN'))
    epadu = hgain * exp  # to get the errors better, nicmos is actually dn/s

    print "Setting effective gain = %f " % (
        epadu)  #to make sure errors and chi are computed as best as possible

    set_daopars(epadu)

    coord_list = find_objects(image)

    photometry = do_phot(image,
                         coord_list,
                         aper=aper,
                         sky_annulus=sky,
                         width_sky=width,
                         zeropoint=abzpt)

    if plots: plotphot(photometry)
Example #26
 def getcrpix(self):
     '''
     Get the cr pixel information from the single_sci.fits file.
     '''
     fitsfile = master_images_query = session.query(MasterImages.fits_file).filter(\
         MasterImages.name == os.path.basename(self.filename)).one()[0]
     png_path = os.path.split(self.filename)[0]
     png_path = os.path.split(png_path)[0]
     fitsfile = os.path.join(png_path, fitsfile)
     self.crpix1 = pyfits.getval(fitsfile, 'CRPIX1', 0)
     self.crpix2 = pyfits.getval(fitsfile, 'CRPIX2', 0)
Example #27
def get_incl(name, ritter_data = "ritter_v7.19.fits"):

	from math import fabs

	ra = py.getval(name,'RAOBJ',0)
	de = py.getval(name,'DECOBJ',0)

	#print "Finding", ra, de

	array = py.getdata(ritter_data, 0)

	ra2000 = array["_RAJ2000"]
	de2000 = array["_DEJ2000"]

	bool_ra = ( np.fabs(ra2000 -ra) < POSITION_DELTA )
	bool_dec = ( np.fabs(de2000 - de) < POSITION_DELTA)

	#print ra2000

	#print np.sum(bool_ra*bool_dec)

	Found = False

	#Found = (np.sum(bool_ra*bool_dec) != 1)

	for i in range(len(ra2000)):
		if fabs(ra2000[i] - ra) < POSITION_DELTA:
			if fabs(de2000[i] - de) < POSITION_DELTA:
				Found = True
				ipt = i

	if Found:

		inc = array["Incl"]

		types = array["Type1"]

		inclin = inc[ipt]

		cv_type = types[ipt]

		#if inclin == "":
		#	print "Found match,", name, ra, de, "None"
		if inclin < 90.0 and inclin > 0.0:
			print "Found match, and inclination!", name, ra, de, inclin 
		#else:
		#	print "Found match,", name, ra, de, "--" 

		return inclin, cv_type


	else:
		print "no RA/dec match", name, ra, de
		return 0
Example #28
def make_fits(spec, outfile):
    hdu = pf.PrimaryHDU(spec)
    miles = [x for x in os.listdir(".") if x.startswith("Mun") and x.endswith(".fits")][0]
    w0 = pf.getval(miles, "CRVAL1")
    deltaw = pf.getval(miles, "CDELT1")
    pix0 = pf.getval(miles, "CRPIX1")
    hdu.header["CRVAL1"] = w0
    hdu.header["CDELT1"] = deltaw
    hdu.header["CRPIX1"] = pix0
    pf.writeto(outfile, hdu.data, hdu.header, clobber=True)
    return
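A hypothetical round trip, assuming a MILES template (a file starting with "Mun" and ending in ".fits") sits in the working directory so make_fits can copy its dispersion keywords, and that the wavelength_array helper from Example #1 is also defined:

spec = np.ones(1000)                           # placeholder flux array
make_fits(spec, "flat_spectrum.fits")          # CRVAL1/CDELT1/CRPIX1 copied from the template
wave = wavelength_array("flat_spectrum.fits")  # recover the wavelength grid written above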
Example #29
def getobstypes(fs):
    # get the type of observation for each file
    obstypes = []
    obsclasses = []
    for f in fs: 
        obstypes.append(pyfits.getval(f, 'OBSTYPE', ext=0))
        obsclasses.append(pyfits.getval(f, 'OBSCLASS', ext=0))
        
    obstypes = np.array(obstypes)
    obsclasses = np.array(obsclasses)
    return obstypes, obsclasses
Example #30
def fluxcal(stdsfolder='./', fs=None):
    iraf.cd('work')
    if fs is None:
        fs = glob('x1d/sci*x1d*c*.fits')
    if len(fs) == 0:
        print "WARNING: No science chip spectra to flux calibrate."
        iraf.cd('..')
        return

    if not os.path.exists('flx'):
        os.mkdir('flx')
    extfile = pysaltpath + '/data/site/suth_extinct.dat'
    stdfiles = glob(stdsfolder + '/std/*sens*c?.dat')
    print(stdfiles)
    for f in fs:
        outfile = f.replace('x1d', 'flx')
        chip = outfile[-6]
        hdu = pyfits.open(f)
        ga = f.split('/')[1][3:8]
        # Get the standard sensfunc with the same grating angle
        stdfile = None
        for stdf in stdfiles:
            if ga in stdf:
                # Get the right chip number
                if chip == stdf[-5]:
                    stdfile = stdf
                    break
        if stdfile is None:
            print('No standard star with grating-angle %s' % ga)
            continue
        # for each extracted aperture
        for i in range(hdu[0].data.shape[1]):
            # create an ascii file that pysalt can read
            asciiname = 'flx/sciflx.dat'
            outtmpname = 'flx/scical.dat'
            spectoascii(f, asciiname, i)
            # Run pysalt.speccal
            iraf.unlearn(iraf.speccal)
            iraf.flpr()
            iraf.speccal(asciiname, outtmpname, stdfile, extfile,
                         airmass=pyfits.getval(f, 'AIRMASS'),
                         exptime=pyfits.getval(f, 'EXPTIME'),
                         clobber=True, mode='h')
            # read in the flux calibrated ascii file and copy its
            # contents into a fits file
            flxcal = np.genfromtxt(outtmpname).transpose()
            hdu[0].data[0, i] = flxcal[1]
            hdu[0].data[2, i] = flxcal[2]
            # delete the ascii file
            os.remove(asciiname)
            os.remove(outtmpname)
        hdu.writeto(outfile, clobber=True)
    iraf.cd('..')
Example #31
def make_fits(spec, outfile):
    hdu = pf.PrimaryHDU(spec)
    miles = [x for x in os.listdir(".") if x.startswith("Mun") and
             x.endswith(".fits")][0]
    w0 = pf.getval(miles, "CRVAL1")
    deltaw = pf.getval(miles, "CDELT1")
    pix0 = pf.getval(miles, "CRPIX1")
    hdu.header["CRVAL1"] = w0
    hdu.header["CDELT1"] = deltaw
    hdu.header["CRPIX1"] = pix0
    pf.writeto(outfile, hdu.data, hdu.header, clobber=True)
    return
Example #32
def plot_cat(fitsfile, catfile, xcol=0, ycol=1, marksize=20., markcolor='g',
             inhdu=0, cmap='gray', siglow=1.0, sighigh=10.0):
   """
   Plots a catalog (e.g., one generated by SExtractor), on top of a fits
   image.

   Inputs:
      fitsfile  - input fits data file containing the image
      catfile   - input file containing the object catalog
      xcol      - column in the input file with the object x coordinates
                  (remember that the first column corresponds to xcol=0)
                  default value: 0
      ycol      - column in the input file with the object y coordinates
                  (remember that the second column corresponds to ycol=1)
                  default value: 1
      marksize  - size of circles marking the objects on the image, in points
                  default value: 20.0
      markcolor - color of circles marking the objects
                  default value: 'g'
      inhdu     - header-data unit containing the image data in the input fits
                  image.  The default value of 0 is appropriate for all simple
                  fits images (i.e., those without multiple extensions).
                  default value: 0
      cmap      - color map used to present the image data
                  default value: 'gray'
      siglow    - sets display range for input image
                  default value: 1.0 (1-sigma below the clipped mean)
      sighigh   - sets display range for input image
                  default value: 10.0 (10-sigma above the clipped mean)
   """

   """ Plot the image """
   try:
      display_image(fitsfile,inhdu=inhdu,cmap=cmap,siglow=siglow,sighigh=sighigh)
   except:
      print ""
      print "Image display failed when called from plot_cat."
      print ""
      return
   nx = pf.getval(fitsfile,'naxis1')
   ny = pf.getval(fitsfile,'naxis2')

   """ Read in the catalog and extract the x and y coordinates """
   data = n.loadtxt(catfile)
   x = data[:,xcol]
   y = data[:,ycol]

   """ Mark the catalog objects """
   plt.plot(x,y,'o',ms=marksize,mec=markcolor,mfc="none")
   plt.xlim(0,nx-1)
   plt.ylim(0,ny-1)
Example #33
def plot_cat(fitsfile, catfile, xcol=0, ycol=1, marksize=20., markcolor='g',
             inhdu=0, cmap='gray', siglow=1.0, sighigh=10.0):
   """
   Plots a catalog (e.g., one generated by SExtractor), on top of a fits
   image.

   Inputs:
      fitsfile  - input fits data file containing the image
      catfile   - input file containing the object catalog
      xcol      - column in the input file with the object x coordinates
                  (remember that the first column corresponds to xcol=0)
                  default value: 0
      ycol      - column in the input file with the object y coordinates
                  (remember that the second column corresponds to ycol=1)
                  default value: 1
      marksize  - size of circles marking the objects on the image, in points
                  default value: 20.0
      markcolor - color of circles marking the objects
                  default value: 'g'
      inhdu     - header-data unit containing the image data in the input fits
                  image.  The default value of 0 is appropriate for all simple
                  fits images (i.e., those without multiple extensions).
                  default value: 0
      cmap      - color map used to present the image data
                  default value: 'gray'
      siglow    - sets display range for input image
                  default value: 1.0 (1-sigma below the clipped mean)
      sighigh   - sets display range for input image
                  default value: 10.0 (10-sigma above the clipped mean)
   """

   """ Plot the image """
   try:
      display_image(fitsfile,inhdu=inhdu,cmap=cmap,siglow=siglow,sighigh=sighigh)
   except:
      print ""
      print "Image display failed when called from plot_cat."
      print ""
      return
   nx = pf.getval(fitsfile,'naxis1')
   ny = pf.getval(fitsfile,'naxis2')

   """ Read in the catalog and extract the x and y coordinates """
   data = n.loadtxt(catfile)
   x = data[:,xcol]
   y = data[:,ycol]

   """ Mark the catalog objects """
   plt.plot(x,y,'o',ms=marksize,mec=markcolor,mfc="none")
   plt.xlim(0,nx-1)
   plt.ylim(0,ny-1)
def calc_shift(refFile, otherFile):
    """
    Runs sextractor to find compact objects, then finds the shift between the
    compact object catalogs
    """
    refCatFile = refFile.replace('.fits', '.cat')
    otherCatFile = otherFile.replace('.fits', '.cat')
    sex_conf = os.path.join('..', '..', SEX_COMPACT_CONF)

    ## Find weight files
    refWeight = refFile.replace('sci', 'weight')
    otherWeight = otherFile.replace('sci', 'weight')
    if not os.path.exists(refWeight):
        refWeight = refFile.replace('sci', 'wht')
    if not os.path.exists(otherWeight):
        otherWeight = otherFile.replace('sci', 'wht')

    message('Calculating shift for file {} using reference {}'.format(
        otherFile, refFile))
    for f in (refFile, otherFile, sex_conf):
        if not os.path.exists(f):
            raise OSError('Could not find file {}. '.format(f) +
                          'While working in {}. '.format(os.getcwd()) +
                          'Unable to calculate shift.')

    ## Run sextractor for reference file
    if not os.path.exists(refCatFile):
        cmd = [SEX_COMMAND, refFile, '-c', sex_conf, '-weight_image',
               refWeight, '-catalog_name', refCatFile]
        subprocess.call(cmd)

    ## Run sextractor for file to shift
    if not os.path.exists(otherCatFile):
        cmd = [SEX_COMMAND, otherFile, '-c', sex_conf, '-weight_image',
               otherWeight, '-catalog_name', otherCatFile]
        subprocess.call(cmd)

    refCat = SexTools.read_catalog(refCatFile,
                                   keepCols=['X_IMAGE', 'Y_IMAGE'])
    refCat = refCat.view(float).reshape(-1, 2)
    otherCat = SexTools.read_catalog(otherCatFile,
                                     keepCols=['X_IMAGE', 'Y_IMAGE'])
    otherCat = otherCat.view(float).reshape(-1, 2)

    ## Returns a 2-tuple of dx, dy
    center = (pyfits.getval(otherFile, 'CRPIX1'),
              pyfits.getval(otherFile, 'CRPIX2'))
    offset, offerr = CatTools.findOffsetMCMC(refCat, otherCat,
                                             rotOrigin=center,
                                             **SHIFT_MCMC_PARAMS)
    return offset
Example #35
	def Read_spectra(self,type='xshooter',output='input_nir',median_filter=False,resample=False,min_f=0,max_f=24):
		'''
		Reads spectra directly from fits file. Calculates Phase for every image.

		-----------

		output:  name for text file and *.npy file for easy retrieval after reading

		median_filter :  Applies a median filter to each spectra of N pixels.

		resample: 			Rebin all spectra to first linear dispersion encountered.

		'''
		self.data = {'wave': [], 'flux': [], 'err': [],'phase': []}

		if type == 'text':
			self.files['name'],self.files['phase']=n.loadtxt(Parameters.base_dir+'/'+self.list,dtype={'names': ('files', 'phase'),'formats': ('S12', 'f4')},unpack=True)
			for i in self.files['name']:
				print 'Loading: ', i.split('/')[-1][:-5]
				w,f=n.loadtxt(Parameters.base_dir+'/'+i,unpack=True)
				self.data['wave'].append(w),self.data['flux'].append(f)


		if type == 'xshooter':
			arm='NIR'
			files=glob.glob('/Users/juan/astro/SDSS1433/spectroscopy/'+arm+'/*'+arm+'.fits')

			
			wave,flux,name,mjd,ra,decl,phase,delphase,files1,hjd=[],[],[],[],[],[],[],[],[],[]
			wt,ft=cv.read_xshooter(files[0])
			f=open(output+'.txt','w')
			for i in files[min_f:max_f]:
			    wav,flu=cv.read_xshooter(i)

			    if resample:
			        flu=res.rebin2(wav,flu,wt)
			        flu[n.isnan(flu)] = 0.0
			    t1,t2,t3,t4,t5=getval(i,'OBJECT'),getval(i,'MJD-OBS'),getval(i,'RA'),getval(i,'DEC'),getval(i,'EXPTIME')
			    if median_filter:
			    	wave.append(wav),flux.append(cv.medfilt1(flu,17)),name.append(t1),mjd.append(t2),ra.append(t3),decl.append(t4),delphase.append(t5/3600/24/self.Parameters['porb'])
			    else:
			    	wave.append(wav),flux.append(flu),name.append(t1),mjd.append(t2),ra.append(t3),decl.append(t4),delphase.append(t5/3600/24/self.Parameters['porb'])

			    hjd.append(pyasl.helio_jd(t2+0.5,218.324772,10.19017))
			    phase.append(cv.phase(hjd[-1],self.Parameters['hjd0'],self.Parameters['porb']))
			    files1.append(i)
			    print 'Loading: '+i.split('/')[-1][:-5],' ' , t2,' ' ,hjd[-1],' ' ,phase[-1]
			    print >>f, i.split('/')[-1][:-5]+'.txt' ,phase[-1]
			    self.data['wave'].append(wav*10.0),self.data['flux'].append(flu),self.data['phase'].append(phase[-1])
			f.close()
			n.save(output,[wave,flux,name,hjd,phase,delphase,files1])
Example #36
def getfltlist( drzfile ) :
    """get names of contributing _flt files from the _drz.fits header"""
    import os
    import pyfits
    import exceptions
    Nflt = pyfits.getval( drzfile, 'NDRIZIM', ext=0 )
    fltfilelist = []
    for i in range( Nflt ):
        fltfile = pyfits.getval( drzfile, 'D%03iDATA'%(i+1), ext=0 )
        fltfile = fltfile[:fltfile.find('[')]
        if not os.path.isfile( fltfile ):
            raise exceptions.RuntimeError( "%s (contributing to %s) is missing"%(fltfile,drzfile) )
        fltfilelist.append( fltfile )
    return( fltfilelist )
def run_calstis_part2(input_dir, input_flist):
    '''
    This function finishes running calstis on the CTE corrected data. Note: CTECORR should be
    set to OMIT or COMPLETE
    '''
    #biacorr, blevcorr, and dqicorr are set to complete after part1 is called. I can call calstis
    log = open('calstis_log.txt', 'a')
    log.write('calstis_part2 - calstis')
    log.close()
    for ifile in input_flist:
        filename = os.path.join(input_dir, ifile)
        assert pyfits.getval(filename.replace('raw', 'flc'), 'ctecorr', 0).upper() != 'PERFORM', 'CTECORR = PERFORM in %s' %(filename)
        wavecal = os.path.join(input_dir, pyfits.getval(filename, 'wavecal', 0))
        stistools.calstis.calstis(filename.replace('raw', 'flc'), trailer = 'calstis_log.txt', wavecal = wavecal)
Example #38
def getStd(f1, f2):
    # check 'NFRAMES', should be 60
    dat1 = pyfits.getdata(f1, 0) / pyfits.getval(f1, 'NFRAMES')
    dat2 = pyfits.getdata(f2, 0) / pyfits.getval(f2, 'NFRAMES')
    diff = dat2 - dat1
    vlist = list()
    y = 1024
    for i in range(3):
        for j in range(4):
            x = i * 2048 + j * 512 + 256
            sub_diff = diff[(y - 50):(y + 50), (x - 50):(x + 50)]
            std = numpy.std(sub_diff)
            vlist.append(std)
    return vlist
Example #39
def run(image,aper="4.",sky=8., width=3., plots=False):
    """
    Call this to perform all the following functions
    
    Parameters
        image:  string, name of the image
        aper:   string, size of the photometry aperture(s)
        sky:    float, where to start the sky aperture
        width:  float, how wide should the sky annulus be
        plots:  bool, save plots of the results for quicklook
        
        
    example:
    
    aperphot.run('myimage.fits',aper="2.")
    
    """
    
    message("Setting up photometry for %s"%(image))
    
    #make sure that we are using data we are prepared for, you can write other functions
    #to deal with this or help set alternate parameters. An ideal setup might involve a class structure
    #but we'll keep it more simple here
    instrument=pyfits.getval(image,"INSTRUME")
    if "NICMOS" not in instrument:
        raise IOError("Program only valid for NICMOS images, check inputs") 
     
        
    #calculate the zeropoint from the header, these are set for NICMOS
    #you could make functions for different instruments here
    photfnu=pyfits.getval(image,"PHOTFNU")
    abzpt=-2.5* np.log10(photfnu*1.0*1e-23) -48.6
    
    print "zeropoint: %f"%(abzpt)

	#calculate the effective gain for the image in NICMOS
    exp=float(pyfits.getval(image,'EXPTIME'))
    hgain=float(pyfits.getval(image,'ADCGAIN'))
    epadu=hgain*exp # to get the errors better, nicmos is actually dn/s

    print "Setting effective gain = %f "%(epadu) #to make sure errors and chi are computed as best as possible


    set_daopars(epadu)
    
    coord_list=find_objects(image)
    
    photometry = do_phot(image, coord_list, aper=aper, sky_annulus=sky, width_sky=width, zeropoint=abzpt)
    
    if plots: plotphot(photometry)
Example #40
def fix_wcs_crval(files,delta_ra,delta_dec):
   for f in files:
      if f[-5:]!=".fits":
         continue
      print "%s ....Updating CRVAL values" % f
      fullfits = pyfits.open(f,mode='update')
      sci = fullfits[0]
      ra  = getval(f,'CRVAL1')
      dec = getval(f,'CRVAL2')
      ranew  = ra - delta_ra/(3600.0*cos(dec*pi/180.0))
      decnew = dec - delta_dec/3600.0
      sci.header.update('CRVAL1',ranew)
      sci.header.update('CRVAL2',decnew)
      print " %f %f --> %f %f" % (ra,dec,ranew,decnew)
      fullfits.flush()
Example #41
def getfltlist(drzfile):
    """get names of contributing _flt files from the _drz.fits header"""
    import os
    import pyfits
    import exceptions

    Nflt = pyfits.getval(drzfile, "NDRIZIM", ext=0)
    fltfilelist = []
    for i in range(Nflt):
        fltfile = pyfits.getval(drzfile, "D%03iDATA" % (i + 1), ext=0)
        fltfile = fltfile[: fltfile.find("[")]
        if not os.path.isfile(fltfile):
            raise exceptions.RuntimeError("%s (contributing to %s) is missing" % (fltfile, drzfile))
        fltfilelist.append(fltfile)
    return fltfilelist
Example #42
def get_header_info(filename):
    '''
    Gets the header info from the FITS file. Checks to ensure that the 
    target name, after string parsing, matches a known planet name.
    '''
    assert os.path.splitext(filename)[1] == '.fits', \
        'Expected .fits got ' + filename
    output = {}
    output['targname'] = pyfits.getval(filename, 'targname').lower().split('-')[0]
    output['date_obs'] = pyfits.getval(filename, 'date-obs')
    output['time_obs'] = pyfits.getval(filename, 'time-obs')
    planet_list = ['mars', 'jupiter', 'saturn', 'uranus', 'neptune', 'pluto']
    assert output['targname'] in planet_list, \
        'Header TARGNAME not in planet_list'
    return output
def make_declination_dict(flist):
    '''
    Create a dictionary with declination as keys and filenames as lists to each key
    This is to match 2 dithered images together

    Called from: Main
    Calls to: Nothing

    Input:
        flist: list of files to make dictionary out of
    Outputs:
        Nothing
    Returns:
        dec_dict: a dictionary with declination as keys and filenames as lists to each key
    '''
    dec_dict = {}
    for ifile in flist:
        targ_dec = pyfits.getval(ifile, 'dec_targ', 0)
        if dec_dict.has_key(targ_dec):
            dec_dict[targ_dec].append(ifile)
        else:
            dec_dict[targ_dec] = [ifile]
    return dec_dict
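The same group-by-header-keyword pattern can be written without the has_key branching; a minimal sketch using collections.defaultdict, assuming pyfits is importable and flist holds fits file names:

from collections import defaultdict

def make_declination_dict_v2(flist):
    """ Group files by their DEC_TARG header value (hypothetical variant of the above). """
    dec_dict = defaultdict(list)
    for ifile in flist:
        dec_dict[pyfits.getval(ifile, 'dec_targ', 0)].append(ifile)
    return dict(dec_dict)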
Example #44
def check_ccd_type(path=None):
	'''This method checks if we are dealing with spectral or photometry 
	data and splits them accordingly'''
	
	if path == None:
  		path = reduice.gui_getdir(title='Please Select raw fits Directory')
	
	if not os.path.exists(path+'Spectral/'):
  		os.makedirs(path+ '/Spectral' )
	if not os.path.exists(path+'Photometry/'):
  		os.makedirs(path+ '/Photometry' )
	
	#getting all the files in the main dir
 	files= get_filelist(path)
	#iterating through each file
	for fit in files:
		if not rem.fitype(fit): #Checking if fit is a fits file
			continue
		ccdtype = pyfits.getval(fit,'INSTRUME') #instrument used to make fit
		#moving to appropriate dir
		if ccdtype == 'SBIG ST-7/7E/7XE':
			shutil.move( fit,path+'/Spectral')
		else:
			shutil.move( fit,path+'/Photometry')
	
	clean(path)
Example #45
def sort_filters(path=None):
	'''Sorts fits files in a directory into folders according to their filter ''' 

	if path == None:
  		path = reduice.gui_getdir(title='Please Select raw fits Directory')
	#getting all the files in the path dir
	files = get_filelist(path)
	#iterating through these files
	for fit in files:
		#removing all the non fits eg logfiles 
		if not rem.fitype(fit):
			print fit+'  is not a fits'
			continue #move on if not fit
		#get the filter of the fit
		filt = pyfits.getval(fit,'FILTER')
		tempath = path + '/'+filt+'/'
		#if no filter dir, create one
    		if not os.path.exists(tempath):
     			os.makedirs(tempath)

		try: #move the fit to its filter folder
    			shutil.move( fit,tempath)
		except shutil.Error as e:
   			if e.message == "Destination path '%s' already exists"%fit:
   				print "A file with the same name has already been"
Example #46
def match_catalog(im):
    sex = SExtractor()
    sexcat = sex.read(im.replace(".fits", ".cat"))
    band = pf.getval(im, "filter")
    exptime = pf.getval(im, "exptime")
    airmass = pf.getval(im, "airmass")
    ra = pf.getval(im, "ra")
    dec = pf.getval(im, "dec")
    center = SkyCoord(ra=ra, dec=dec, unit=(u.hourangle, u.deg))
    c = SkyCoord(ra=sexcat["ALPHA_J2000"], dec=sexcat["DELTA_J2000"])
    ##########################################################################
    # Reading catalog from file
    stcat = os.path.join(home, "stellar_catalog/Stripe82Stars_Ivezick.fit")
    hdulist = pf.open(stcat)
    catalog = hdulist[1].data
    cradec = SkyCoord(ra=catalog["RAJ2000"] * u.degree,
                      dec=catalog["DEJ2000"] * u.degree)
    # Trimming catalog considering only regions around the observed field
    idx = np.where(cradec.separation(center) < 1.6 * u.deg)[0]
    trimcat = catalog[idx]
    cradec = SkyCoord(ra=trimcat["RAJ2000"] * u.degree,
                      dec=trimcat["DEJ2000"] * u.degree)
    ##########################################################################
    idx, d2d, d3d = c.match_to_catalog_sky(cradec)
    data = np.column_stack(
        (trimcat[idx]["RAJ2000"], trimcat[idx]["DEJ2000"],
         trimcat[idx]["umag"], trimcat[idx]["e_umag"], trimcat[idx]["gmag"],
         np.zeros(len(idx)), trimcat[idx]["rmag"], trimcat[idx]["e_rmag"],
         trimcat[idx]["imag"], trimcat[idx]["e_imag"], trimcat[idx]["zmag"],
         trimcat[idx]["e_zmag"], sexcat["MAG_AUTO"] + 2.5 * np.log10(exptime),
         sexcat["MAGERR_AUTO"], np.ones_like(idx) * airmass))
    i = np.where(d2d < 1 * u.arcsec)[0]
    data = data[i]
    ###########################################################################
    # Cleaning data from photometry with large uncertainties
    data = data[data[:, 13] < .1]
    ##########################################################################
    h = [
        "RAJ2000", "DEJ2000", "umag", "e_umag", "gmag", "e_gmag", "rmag",
        "e_rmag", "imag", "e_imag", "zmag", "e_zmag", "mag_inst", "e_mag_inst",
        "airmass"
    ]
    newcat = "{}_{}".format(band, im.split("-")[2]).replace(".fits", ".cat")
    with open(newcat, "w") as f:
        f.write("#" + " ".join(h) + "\n")
        np.savetxt(f, data)
    return
Example #47
def create_all_tile_files(path_name, flist, extract_filename = None, overwrite = True):
    '''
    ########################################################################################################################
    #This is the main function to call to write all fits files of the slit images
    #Input:
    #    path_name: path where original data (and output data) live
    #    overwrite: should the fits files be overwritten? default is to overwrite
    #Output:
    #    writes fits files for all of the slit images
    #    returns the floor of the slit width in pixels
    #Calls to:
    #    collapse_longslit
    #     write_fits_tile
    #Called from:
    #    __main__
    ########################################################################################################################
    '''
    os.chdir(path_name)
    opt_elem = pyfits.getval(flist[0], 'opt_elem', 0)
    cenwave = pyfits.getval(flist[0], 'cenwave', 0)
    if extract_filename:
        extract_dict = make_extraction_dictionary(extract_filename)
    for ifile in flist:
        print ifile
        spec = pyfits.getdata(ifile, 0)
        if extract_filename:
            extract_loc_list = extract_dict[ifile[0:8]]
        if opt_elem == 'G140L':
            if extract_filename:
                spec_tile, slit_size = collapse_longslit(spec, extract_loc_list = extract_loc_list, plate_scale = 0.0246, slit_width = 0.2)
            else:
                spec_tile, slit_size = collapse_longslit(spec, plate_scale = 0.0246, slit_width = 0.2)
        elif opt_elem == 'G430M':
            spec_tile, slit_size = collapse_longslit(spec, plate_scale = 0.05078, slit_width = 0.2)
        else:
            print 'Grating %s is not recognized' %(opt_elem)
            sys.exit()
        if not os.path.exists(path_name+'/'+ifile[0:8]+'_collapse_flt.fits'):
            write_fits_tile(spec_tile, ifile[0:8]+'_collapse_flt')
        else:
            if overwrite == True:
                #os.remove(path_name+'/'+ ifile[0:8]+'_collapse_flt.fits')
                write_fits_tile(spec_tile, ifile[0:8]+'_%i_collapse_flt' %(cenwave))
            else:
                print 'file already exists, not overwriting'
    return math.floor(slit_size)
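If more gratings ever need to be supported, the G140L/G430M branch above could be made table-driven. This is only a sketch: the plate-scale values repeat those used in the example, and the helper name is an invention for illustration.
# Hypothetical helper mapping gratings to plate scales (arcsec per pixel).
PLATE_SCALES = {"G140L": 0.0246, "G430M": 0.05078}

def plate_scale_for(opt_elem):
    """Return the plate scale for a supported grating, or raise a clear error."""
    try:
        return PLATE_SCALES[opt_elem]
    except KeyError:
        raise ValueError("Grating %s is not recognized" % opt_elem)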
Beispiel #48
0
def imaverage(imagelist, outfile, clobber=False, verbose=False):
    """
     construct a simple average of all images in the list.
     Assumes all input images have identical dimensions
     Returns name of outfile
    """
    import os
    import pyfits
    from numpy import where, ones, zeros, array, ndarray, nan_to_num, float32
    import exceptions

    if os.path.exists(outfile):
        if clobber:
            os.unlink(outfile)
        else:
            print("%s exists. Not clobbering." % outfile)
            return (outfile)

    # make empty arrays for the running sum and the per-pixel contribution count
    naxis1 = pyfits.getval(imagelist[0], 'NAXIS1')
    naxis2 = pyfits.getval(imagelist[0], 'NAXIS2')
    sumarray = zeros([naxis2, naxis1], dtype=float32)
    ncombinearray = zeros([naxis2, naxis1], dtype=float32)

    # accumulate the sum, count contributing (non-zero) pixels,
    # and record the source images in the output header
    outhdr = pyfits.getheader(imagelist[0])
    i = 1
    for imfile in imagelist:
        imdat = pyfits.getdata(imfile)
        sumarray += imdat
        ncombinearray += where(imdat != 0, ones(imdat.shape),
                               zeros(imdat.shape))
        outhdr.update("SRCIM%02i" % i, imfile,
                      "source image %i, used in average " % i)
        i += 1
    outscidat = where(ncombinearray > 0, sumarray / ncombinearray,
                      zeros(sumarray.shape))

    outdir = os.path.dirname(outfile)
    if outdir:
        if not os.path.isdir(outdir):
            os.makedirs(outdir)
    pyfits.writeto(outfile, outscidat, header=outhdr)

    return (outfile)
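The per-pixel logic of imaverage (average only over images whose pixel is non-zero) can also be expressed directly with numpy. The sketch below assumes the input images have already been read into a single 3-D array; it is an illustration of the same arithmetic, not a drop-in replacement for the function above.
import numpy as np

def average_nonzero(stack):
    """Average along axis 0, counting only pixels that are non-zero."""
    nonzero = stack != 0
    ncombine = nonzero.sum(axis=0)                       # contributions per pixel
    total = np.where(nonzero, stack, 0.0).sum(axis=0)    # sum of non-zero values
    return np.where(ncombine > 0, total / np.maximum(ncombine, 1), 0.0)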
Beispiel #49
0
def ifLamp(f, lamp):   # lamp keyword name, e.g. 'LAMPUNE' or 'LAMPTHAR'
    fexp=f[33:41]
    fReduct="/data/apogee/quickred/?????/ap2D-a-%s.fits.fz" % (fexp)  
    fileNames = glob.glob(fReduct)
    if len(fileNames) > 1:  sys.exit(" -- error: several reduced arc files found -- ")
    if len(fileNames) < 1:  sys.exit(" -- error: no reduced arc file found -- ")
    lampval = pyfits.getval(fileNames[0], lamp, 0)
    return lampval == 1
Beispiel #50
0
def headerRenameKey2(self, key, newkey):
    for filename in self._matchingFiles:
        self._logger.notice('rename %s: %s= %s' % (filename, key, newkey))
        try:
            value = pyfits.getval(filename, key)
            pyfits.setval(filename, newkey, value)
            pyfits.delval(filename, key)
        except Exception:
            # skip files that do not contain the keyword
            pass
Beispiel #51
0
def run_calstis_part2(input_dir, input_flist):
    '''
    This function finishes running calstis on the CTE corrected data. Note: CTECORR should be
    set to OMIT or COMPLETE
    '''
    #biascorr, blevcorr, and dqicorr are set to COMPLETE after part1 is called, so calstis can be run here
    log = open('calstis_log.txt', 'a')
    log.write('calstis_part2 - calstis')
    log.close()
    for ifile in input_flist:
        filename = os.path.join(input_dir, ifile)
        assert pyfits.getval(
            filename.replace('raw', 'flc'), 'ctecorr',
            0).upper() != 'PERFORM', 'CTECORR = PERFORM in %s' % (filename)
        wavecal = os.path.join(input_dir,
                               pyfits.getval(filename, 'wavecal', 0))
        stistools.calstis.calstis(filename.replace('raw', 'flc'),
                                  trailer='calstis_log.txt',
                                  wavecal=wavecal)
Beispiel #52
0
def get_extn_nums(ifile):
    '''
    Get the numbers of the science extensions in a given file
    Inputs:
        ifile: name of input file to query for the number of extensions
    '''
    nextend = pyfits.getval(ifile, 'nextend', 0)
    all_ext = np.arange(nextend) + 1
    sci_ext = all_ext[::3]   # assumes repeating SCI, ERR, DQ extension triples
    return sci_ext
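The slicing all_ext[::3] relies on the usual SCI/ERR/DQ ordering of STIS extensions. Where that ordering is uncertain, the SCI extensions can be located by name instead; a minimal sketch using astropy.io.fits (an assumption on my part, since the example itself uses pyfits):
from astropy.io import fits

def get_sci_extensions(ifile):
    """Return the extension numbers whose EXTNAME is 'SCI'."""
    with fits.open(ifile) as hdulist:
        return [i for i, hdu in enumerate(hdulist) if hdu.name == 'SCI']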
Beispiel #53
0
def isstdstar(f):
    # get the list of standard stars
    stdslist = glob(pysaltpath + '/data/standards/spectroscopic/*')
    objname = pyfits.getval(f, 'OBJECT').lower().replace('-','_')
    for std in stdslist:
        if objname in std:
            return True

    # Otherwise not in the list so return false
    return False
Beispiel #54
0
def plot_all():
    fname = 'zoo_wvt_bins'
    fd = wvt_path + fname + '.fits'
    nz = pyfits.getval(fd, 'NAXIS3')
    print nz
    for fname in ['zoo_wvt_bins', 'zoo_wvt_counts_all']:
        for iz in range(nz):
            plot(iz, fname)
    for iz in range(nz):
        plot(iz, fname='zoo_wvt_density_all', logplot=True)
Beispiel #55
0
def update_psfdat():
    import os
    from astropy.io import ascii
    from astropy.table import Column
    import pyfits
    # read in the psf mags computed by Dan
    psfdat = ascii.read('psfphot.dat')

    # get exposure time and number of exposures from the header
    etimelist, nexplist, etcSNRlist, etcOSNRlist, psfSNRlist = [], [], [], [], []
    for row in psfdat:
        imdir = '/store/snreproc/%s.090mas/' % (row['sn'])
        suffix = '-e00_sub_masked' if row['image'] == 'sub' else '_reg_drz_sci'
        imfile = os.path.join(
            imdir,
            '%s_%s_%s%s.fits' % (row['sn'], row['band'], row['epoch'], suffix))
        etime = pyfits.getval(imfile, 'EXPTIME')
        nexp = pyfits.getval(imfile, 'NCOMBINE')
        etimelist.append(etime)
        nexplist.append(nexp)
        etcSNR, etcSNRopt = synphotSNR(row['band'],
                                       row['stack'],
                                       etime=etime,
                                       nexp=nexp)
        etcSNRlist.append(etcSNR)
        etcOSNRlist.append(etcSNRopt)
        psfSNR = 1.08574 / row['err']
        psfSNRlist.append(psfSNR)

    nexpCol = Column(nexplist, 'nexp', dtype=int, format='%i')
    etimeCol = Column(etimelist, 'etime', dtype=float, format='%8.2f')
    etcSNRCol = Column(etcSNRlist, 'SNRetc', dtype=float, format='%6.2f')
    etcOSNRCol = Column(etcOSNRlist, 'optSNRetc', dtype=float, format='%6.2f')
    psfSNRCol = Column(psfSNRlist, 'SNRpsf', dtype=float, format='%6.2f')

    psfdat.add_columns([nexpCol, etimeCol, etcSNRCol, etcOSNRCol, psfSNRCol],
                       indexes=[4, 4, 9, 9, 9])
    psfdat.write('psfphot2.dat', format='ascii.commented_header')
    psfdat.write('psfphot3.dat', format='ascii.fixed_width')
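The constant 1.08574 used for SNRpsf above is 2.5 / ln(10): for small errors a magnitude uncertainty relates to the fractional flux uncertainty through sigma_m ≈ (2.5 / ln 10) * sigma_F / F, so SNR ≈ 1.08574 / sigma_m. A quick numerical check (the 0.05 mag error is an assumed example value):
import numpy as np
print(2.5 / np.log(10))              # -> 1.0857362...
sigma_m = 0.05                       # example magnitude error
print(2.5 / np.log(10) / sigma_m)    # approximate SNR, about 21.7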
Beispiel #56
0
def getfltlist(drzfile, mustexist=False):
    """ 
    extract a list of contributing flt files from the header of 
    the given drizzled image file. 
    """
    import os
    import pyfits
    import exceptions

    Nflt = pyfits.getval(drzfile, 'NDRIZIM', ext=0)
    fltfilelist = []
    for i in range(Nflt):
        fltfile = pyfits.getval(drzfile, 'D%03iDATA' % (i + 1), ext=0)
        fltfile = fltfile[:fltfile.find('[')]

        if mustexist and not os.path.isfile(fltfile):
            raise exceptions.RuntimeError(
                "%s (contributing to %s) is missing" % (fltfile, drzfile))

        if fltfile not in fltfilelist:
            fltfilelist.append(fltfile)
    return (fltfilelist)
Beispiel #57
0
def main(argv):

    if len(argv) < 3:
        print "Usage : gethead <filename> <keyw1> [<keyw2 keyw3 keywn>] <#extension>"
        sys.exit(1)

    try:
        file = argv[0]
        ext = int(argv[-1])
        for key in argv[1:-1]:
            v = getval(file, key, ext)
            print v,
    except Exception, e:
        print "Error : " + str(e)
Beispiel #58
0
def make_counts_image(image, outfile='default'):
    '''FUNCTION TO CONVERT CNTS/SEC IMAGE TO COUNTS (IF NECESSARY)'''

    # -- parse output filename & save a copy to file (NOTE: if outfile == input image, data is overwritten).
    if (image != outfile):
        if outfile == 'default': outfile = image.split('.fits')[0] + '_cnts.fits'
        shutil.copy(image,outfile)
    else: print 'OVERWRITING DATA FOR IMAGE: '+image+'.'

    # -- determine if image is flt/flc or crclean
    if len(image.split('crclean.fits')) > 1: imtype = 'crclean'
    else: imtype = 'flt'
    detector = pyfits.getval(outfile,'DETECTOR',ext=0)
    exptime = pyfits.getval(outfile,'EXPTIME',ext=0)

    if ((detector == 'IR') & (imtype == 'flt')):
        hdulist = pyfits.open(outfile,mode='update')
        # -- Cycle through each SCI extension
        for ff in xrange(len(hdulist)):
            if hdulist[ff].name == 'SCI': hdulist[ff].data = hdulist[ff].data * exptime
        hdulist.close()
    else: print 'IMAGE SHOULD ALREADY BE IN UNITS OF COUNTS. RETURNING...'

    return outfile
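For context, the IR counts conversion above simply scales each SCI extension by the exposure time. A compact sketch of that single step with astropy.io.fits (the file name is made up for illustration, and the real function only applies this to IR flt images):
from astropy.io import fits
with fits.open('ibxy01a1q_flt.fits', mode='update') as hdulist:  # illustrative name
    exptime = hdulist[0].header['EXPTIME']
    for hdu in hdulist:
        if hdu.name == 'SCI':
            hdu.data = hdu.data * exptime   # counts/s -> counts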
Beispiel #59
0
def plot_all_wvt(sname='final', coadd=False):
    if coadd:
        fname = 'gz2_coadd_wvt_bins'
    else:
        fname = 'gz2_wvt_bins'
    fd = wvt_path + fname + '.fits'
    nz = pyfits.getval(fd, 'NAXIS3')
    print 'nz:', nz
    if coadd:
        fnames = ['gz2_coadd_wvt_bins', 'gz2_coadd_wvt_counts_all']
    else:
        fnames = ['gz2_wvt_bins', 'gz2_wvt_counts_all']
    for fname in fnames:
        for iz in range(nz):
            plot_wvt(iz, fname, sname)
    for iz in range(nz):
        if coadd:
            fname = 'gz2_coadd_wvt_density_all'
        else:
            fname = 'gz2_wvt_density_all'
        plot_wvt(iz, fname, sname, logplot=True)