Example #1
def extract(fitsfile):
    outfile = pyfits.HDUList()

    # Make empty HDU; no image
    outfile.append(pyfits.PrimaryHDU())

    for i, hdu in enumerate(fitsfile):
        if (i == 0 and hdu.data is not None) or isinstance(hdu, pyfits.ImageHDU):
            print(hdu.data.shape + (i, ))
            if i == 0:
                print('Image: Primary HDU (number 0) %sx%s' % hdu.data.shape)
            else:
                print('Image: Extension HDU (number %s) %sx%s' % tuple(
                    (i, ) + hdu.data.shape))

            tbhdu = source_extract(image_data)

            outfile.append(tbhdu)

    return x, y, flux, sigma, outfile
Example #2
    def test_byteswap(self):
        p = fits.PrimaryHDU()
        l = fits.HDUList()

        n = np.zeros(3, dtype='i2')
        n[0] = 1
        n[1] = 60000
        n[2] = 2

        c = fits.Column(name='foo', format='i2', bscale=1, bzero=32768,
                        array=n)
        t = fits.BinTableHDU.from_columns([c])

        l.append(p)
        l.append(t)

        l.writeto(self.temp('test.fits'), clobber=True)

        with fits.open(self.temp('test.fits')) as p:
            assert p[1].data[1]['foo'] == 60000.0
Example #3
def cut_frame_with_target(myfile, ra, dec, h=4000, w=2000, debug=False):
    
    hdulist = pf.open(myfile)[0]

    img = hdulist.data * 1.
    img = img.T
    
    nx, ny = img.shape
    if (ra * dec != 0):

        # Get pixel coordinates of SN
        wcs = pywcs.WCS(hdulist.header)
        try:
            target_pix = wcs.wcs_sky2pix([(np.array([ra,dec], np.float_))], 1)[0]
        except:
            print "ERROR when converting sky to wcs. Is astrometry in place? Default coordinates assigned."
            target_pix = [+nx/2., ny/2.]

        if debug: print(target_pix)
    else:
        target_pix = [+nx/2., ny/2.]
    
    #If contained in the frame
    if (target_pix[0] > 0 and target_pix[0]<nx) and (target_pix[1] > 0 and target_pix[1]<ny):
        xmin = np.maximum(0, target_pix[0]-w/2.)
        xmax = np.minimum(nx, target_pix[0]+w/2.)
        ymin = np.maximum(0, target_pix[1]-h/2.)
        ymax = np.minimum(ny, target_pix[1]+h/2.)
        
        print "Target", target_pix, xmin, xmax, ymin, ymax
        
        newhdu = pf.PrimaryHDU()
        newhdu.header = hdulist.header
        newhdu.data = hdulist.data[int(ymin):int(ymax), int(xmin):int(xmax)]
        
        newname = os.path.join(os.path.dirname(myfile),"out.fits")
        newhdu.writeto(newname, output_verify="fix", clobber=False)
        print "Extracted region around target and stored to ",newname

    else:
        print "Target not in the frame!"
Example #4
def new_spt():
    fn = sorted(glob.glob('*_flt.fits'))  # stands in for the Python 2 commands.getoutput('ls *_flt.fits') call
    dx = []
    km2AU = 1.0 / 1.49598e8

    Xi, Yi, Zi, minJD, maxJD =  get_ephem( )

    for k in fn:
        #keys = commands.getoutput('gethead %s POSTNSTX POSTNSTY POSTNSTZ PSTRTIME'%(k)).split()
        #keys2 = commands.getoutput('gethead %s_flt.fits EXPSTART'%(k.split('_')[0])).split()
        #keys2 = commands.getoutput('gethead %s_flt.fits EXPSTART EXPEND'%(k.split('_')[0])).split()

        h0 = pyfits.open('%s_flt.fits'%(k.split('_')[0]))
        header0 = h0[0].header
        h0.close()

        keys2 = [ float(header0['EXPSTART']), float(header0['EXPEND']) ]
        
        date_midexp = 0.5* ( float(keys2[0]) + float(keys2[1]) ) + 2400000.5 ##-- JD

        if not (date_midexp > minJD)*(date_midexp<maxJD):
            print(k, date_midexp, (date_midexp > minJD)*(date_midexp < maxJD))
            continue
        
        dx, dy, dz = float(Xi(date_midexp)), float(Yi(date_midexp)), float(Zi(date_midexp))
        #print dx, dy, dz

        #d = date2jd( keys[3] )
        #dx.append( [ d, float(keys[0])*km2AU, float(keys[1])*km2AU, float(keys[2])*km2AU, k.split('_')[0] ] )

        prihdr = pyfits.Header()
        prihdr['MIDMJD']      = date_midexp - 2400000.5
        prihdr['MIDJD']      = date_midexp #- 2400000.5
        prihdr['POSTNSTX']    = dx
        prihdr['POSTNSTY']    = dy
        prihdr['POSTNSTZ']    = dz
        
        prihdr['COMMENT'] = 'Generated by update_spt.py on %s'%(time.ctime())
        prihdu = pyfits.PrimaryHDU(header=prihdr)

        prihdu.writeto('/net/frakir/raid/aparker/FullSurvey/%s_sdt.fits'%(k.split('_')[0]))
Example #5
def make_image(pixels, name, type, directory, invert=False, linearize=False):
    '''Create a grayscale image from an array of values. The
    type flag is used to set the type of images generated.
    The invert flag is used to invert the grayscale. For
    example, inverted .PNG and .FITS images:

    make_image(array, 'filename', 'fp', directory, invert=True)'''
    # invert
    if invert == 1: pixels = pixels.max() - pixels

    # make fits
    if 'f' in type:
        if name + '.fits' in os.listdir(directory):
            os.unlink(os.path.join(directory, name + '.fits'))
        hdu = pf.PrimaryHDU(np.flipud(pixels))
        hdulist = pf.HDUList([hdu])

        hdulist.writeto(os.path.join(directory, name + '.fits'), clobber=True)

    # scale to 0-255
    pixels -= pixels.min()
    m = pixels.max()
    if m != 0:
        pixels *= 255 / m

    # render in linear space
    if linearize == 1:
        pixels /= 255
        pixels = 1 - pixels
        pixels = np.power(pixels, 1 / 2.2)
        pixels = 1 - pixels
        pixels *= 255

    # make rest
    image = im.fromarray(np.uint8(pixels))
    if 'g' in type:
        image.save(os.path.join(directory, name + '.gif'))
    if 'p' in type:
        image.save(os.path.join(directory, name + '.png'))
    if 'e' in type:
        image.save(os.path.join(directory, name + '.eps'))
Example #6
def writeFITS(data, output, overwrite=True, int=True):
    """
    Write out a FITS file using PyFITS. Will remove an existing file if overwrite=True.

    :param data: data to write to a FITS file
    :type data: ndarray
    :param output: name of the output file
    :type output: string
    :param overwrite: removes an existing file if present before writing a new one
    :type overwrite: bool
    :param int: whether or not to save the data scaled to 16bit unsigned integer values
    :type int: bool

    :return: None
    """
    if overwrite and os.path.isfile(output):
        os.remove(output)

    #create a new FITS file, using HDUList instance
    ofd = pf.HDUList(pf.PrimaryHDU())

    #new image HDU
    hdu = pf.ImageHDU(data=data)

    if int:
        hdu.scale('int16', '', bzero=32768)
        hdu.header.add_history('Scaled to unsigned 16bit integer!')

    #update and verify the header
    hdu.header.add_history(
        'If questions, please contact Sami-Matias Niemi (smn2 at mssl.ucl.ac.uk).'
    )
    hdu.header.add_history(
        'This file has been created with the VISsim Python Package at %s' %
        datetime.datetime.isoformat(datetime.datetime.now()))
    hdu.verify('fix')

    ofd.append(hdu)

    #write the actual file
    ofd.writeto(output)
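For reference, a minimal, self-contained sketch of the same scaled-integer write pattern, written against astropy.io.fits (assumed here as a drop-in for the pf alias above); the array values and output name are illustrative only.

import os
import numpy as np
from astropy.io import fits as pf

data = np.random.rand(64, 64) * 1000.0     # hypothetical image data
output = 'scaled_example.fits'             # hypothetical output name

if os.path.isfile(output):                 # mimic overwrite=True above
    os.remove(output)

ofd = pf.HDUList(pf.PrimaryHDU())          # empty primary HDU, as above
hdu = pf.ImageHDU(data=data)
hdu.scale('int16', bscale=1, bzero=32768)  # store as 16-bit ints with an offset
hdu.header.add_history('Scaled to unsigned 16bit integer!')
ofd.append(hdu)
ofd.writeto(output)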
Example #7
    def createSigMap(self,run=True, method=2, generateDistribution=False):

        """Generates a significance map."""

        if(self.plotConf['binfactor']):
            suffix = "_rebin.fits"
        else:
            suffix = ".fits"

        try:
            checkForFiles(self.logger,
	    		 [self.commonConf['base']+"_CMAP"+suffix,
			  self.commonConf['base']+"_modelMap"+suffix])
        except(FileNotFound):
            self.logger.critical("One or more needed files do not exist")
            return

        onImage  = pyfits.open(self.commonConf['base']+"_CMAP"+suffix)
        onData   = onImage[0].data.copy()
        onHeader = onImage[0].header
        offImage = pyfits.open(self.commonConf['base']+"_modelMap"+suffix)
        offData  = offImage[0].data.copy()
        sigData  = offImage[0].data.copy()

        for x, row in enumerate(sigData):
            for y in range(len(row)):
                if method == 1:
                    sigData[x, y] = ((onData[x, y] - offData[x, y]) ** 2) / sqrt(offData[x, y])
                elif method == 2:
                    sigData[x, y] = (onData[x, y] - offData[x, y]) / sqrt(offData[x, y])

        newImage = pyfits.PrimaryHDU(sigData)
        newImage.header = onHeader
        newImage.update_header()

        hdulist = pyfits.HDUList([newImage])
        hdulist.writeto(self.commonConf['base']+"_sigMap"+suffix,clobber=True)

        self.logger.info("Created a significance map from "
                         +self.commonConf['base']+"_CMAP"+suffix+" and "
                         +self.commonConf['base']+"_modelMap"+suffix+".") 
Example #8
def convertfits(inname, stretchfactor=4, shrinkfactor=5):
    outname = inname.rpartition('.')
    outname = outname[0] + '_resize' + outname[1] + outname[2]
    if os.path.exists(outname):
        print outname + " exists and will not be remade."
        return ''
    infits = pyfits.open(inname)
    x0 = infits[1].header['NAXIS1']
    x1 = infits[1].header['NAXIS2']
    if x0 % shrinkfactor:
        print("Error: convertfits cannot divide axis 0 (" + str(
            x0) + ") by " + str(shrinkfactor))
        return 1
    if x1 % shrinkfactor:
        print("Error: convertfits cannot divide axis 1 (" + str(
            x1) + ") by " + str(shrinkfactor))
        return 1
    inarr = infits[1].data
    [x0, x1] = inarr.shape
    if x0 % shrinkfactor:
        print("Error: convertfits cannot divide axis 0 (" + str(
            x0) + ") by " + str(shrinkfactor))
        return 1
    if x1 % shrinkfactor:
        print("Error: convertfits cannot divide axis 1 (" + str(
            x1) + ") by " + str(shrinkfactor))
        return 1
    infits[1].header['NAXIS1'] = x0 * stretchfactor // shrinkfactor
    infits[1].header['NAXIS2'] = x1 * stretchfactor // shrinkfactor
    infits[1].header['CDELT1'] = infits[1].header['CDELT1'] * (
        1.0 * shrinkfactor / stretchfactor)
    infits[1].header['CDELT2'] = infits[1].header['CDELT2'] * (
        1.0 * shrinkfactor / stretchfactor)
    outname = inname.rpartition('.')
    outname = outname[0] + '_resize' + outname[1] + outname[2]
    infits[1].data = shrink(stretch(inarr, stretchfactor), shrinkfactor)
    outfits = pyfits.HDUList([
        pyfits.PrimaryHDU(infits[0].data, infits[0].header),
        pyfits.ImageHDU(infits[1].data, infits[1].header)
    ])
    outfits.writeto(outname)
Example #9
    def _writeImpl(self, filename):
        if not pyfits:
            raise RuntimeError("I failed to import pyfits, so cannot write to disk")

        fits = pyfits.HDUList()

        hdr = pyfits.Header()
        hdr['RA'] = (self.raBoresight, "Telescope boresight RA, degrees")
        hdr['DEC'] = (self.decBoresight, "Telescope boresight Dec, degrees")
        hdu = pyfits.PrimaryHDU(header=hdr)
        hdr.update()
        fits.append(hdu)

        maxLength = max(len(pp) for pp in self.patch)
        columns = []
        for name in self._fields:
            format = self._fields[name]
            if format == "A":
                maxLength = max(len(ss) for ss in getattr(self, name))
                format = "A%d" % maxLength
            columns.append(pyfits.Column(name=name, format=format, array=getattr(self, name)))
        fits.append(pyfits.BinTableHDU.from_columns(columns, hdr, name=self._hduName))

        numRows = sum(len(mag) for mag in self.fiberMag)
        fiberId = np.array(sum(([ii]*len(mags) for ii, mags in zip(self.fiberId, self.fiberMag)), []))
        fiberMag = np.array(sum((mag.tolist() for mag in self.fiberMag), []))
        filterNames = sum(self.filterNames, [])
        assert(len(fiberId) == numRows)
        assert(len(fiberMag) == numRows)
        assert(len(filterNames) == numRows)
        maxLength = max(len(ff) for ff in filterNames) if filterNames else 1

        fits.append(pyfits.BinTableHDU.from_columns([
            pyfits.Column(name='fiberId', format='J', array=fiberId),
            pyfits.Column(name='fiberMag', format='E', array=fiberMag),
            pyfits.Column(name='filterName', format='A%d' % maxLength, array=filterNames),
        ], hdr, name='PHOTOMETRY'))

        # clobber=True in writeto prints a message, so use open instead
        with open(filename, "wb") as fd:
            fits.writeto(fd)
Example #10
def fits_logrebin(infile,outfile):
	try:
		import pyfits
	except:
		from astropy.io import fits as pyfits
	f = pyfits.open(infile)
	header = f[0].header
	data = f[0].data.copy()
	crval1 = header['crval1']
	crpix = header['crpix1']
	cd = header['cd1_1']
	outwave,outspec = rebin_log(data,crval1,crpix,cd)
	out = pyfits.PrimaryHDU(outspec)
	outwv = scipy.log10(outwave)
	start = outwv[0]
	delt = outwv[1]-outwv[0]
	out.header['CRVAL1'] = start
	out.header['CRPIX1'] = 1
	out.header['CD1_1'] = delt
	out.header['DC-FLAG'] = 1
	out.writeto(outfile)
Example #11
    def __init__(self):
        """ Constructor: Initialize data objects and variables
        """
        # call superclass constructor (calls setup)
        super(StepDark, self).__init__()
        # list of data and darks
        self.datalist = []  # used in run() for every new input data file
        # dark values
        self.darkloaded = 0  # indicates if dark has been loaded
        self.darks = []  # list containing arrays with dark values
        self.goodpixmap = numpy.zeros(
            [1, 1], dtype=numpy.int16)  # array with good pixels
        # dark file info and header keywords to fit
        self.darkfile = ''  # name of selected dark file
        self.darkhead = pyfits.PrimaryHDU(
            numpy.array(1))  # header of dark file
        self.fitkeys = []  # FITS keywords that have to fit
        self.keyvalues = [
        ]  # values of the keywords (from the first data file)
        # set configuration
        self.log.debug('Init: done')
Example #12
def distortion_interp_frame(frame, y, x, storeall=True, output_dir="."):

    frame_ds = re.sub(".fits", "_ds.fits", frame)
    flux = pyf.open(frame_ds)[-1].data
    flux_dw = scipy.ndimage.map_coordinates(flux, [y, x], order=3)

    header = pyf.open(frame_ds)[0].header
    flux_hdu = pyf.PrimaryHDU(flux_dw, header)
    fluxout = pyf.HDUList()
    fluxout.append(flux_hdu)

    try:
        outname = re.sub(".fits", "_dw.fits", frame)
        outname = re.sub(".*/", output_dir + "/", outname)
        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            fluxout.writeto(outname, clobber=True)
            fluxout.close()
    except IOError as err:
        print(err)
        sys.exit(1)
Example #13
def write_fits_tile(spec_tile, filename):
    '''
    ########################################################################################################################
    #This program writes the spec_tile output from collapse_longslit to a fits file
    #Input:
    #    spec_tile: 2D image to write
    #    filename: name of file to write to (without extension)
    #Output:
    #    writes a fits file
    #Called from:
    #    create_all_tile_files
    #    display_rotated_image
    ########################################################################################################################
    '''
    print(filename)
    
    hdu = pyfits.PrimaryHDU(spec_tile)
    hdulist = pyfits.HDUList([hdu])
    #if os.path.exists('/user/bostroem/science/12465_otfr20120425/mama/%s.fits' %(filename)):
    #    os.remove('/user/bostroem/science/12465_otfr20120425/mama/%s.fits' %(filename))
    hdulist.writeto(filename+'.fits', clobber = True)
Example #14
    def test_updated_file_permissions(self):
        """
        Regression test for https://trac.assembla.com/pyfits/ticket/79

        Tests that when a FITS file is modified in update mode, the file
        permissions are preserved.
        """

        filename = self.temp('test.fits')
        hdul = [fits.PrimaryHDU(), fits.ImageHDU()]
        hdul = fits.HDUList(hdul)
        hdul.writeto(filename)

        old_mode = os.stat(filename).st_mode

        hdul = fits.open(filename, mode='update')
        hdul.insert(1, fits.ImageHDU())
        hdul.flush()
        hdul.close()

        assert old_mode == os.stat(filename).st_mode
Example #15
def mask_from_weight(infile, outfile, value=0):
    import pyfits

    (data, hdr) = rfits(infile)

    newdata = numpy.where(data > 0, 1, 0)

    newfits = pyfits.HDUList()
    hdu = pyfits.PrimaryHDU()
    #hdu.data   = newdata.astype("Int16") # Just as integer
    hdu.data = newdata  #.astype("UInt8") # Just as ushort integer
    hdu.header = hdr

    # Remove old version of file before
    if os.path.isfile(outfile):
        os.remove(outfile)

    newfits.append(hdu)
    newfits.writeto(outfile)
    newfits.close()
    return
Example #16
    def test_writeto_append_mode_gzip(self):
        """Regression test for
        https://github.com/spacetelescope/PyFITS/issues/33

        Check that a new GzipFile opened in append mode can be used to write
        out a new FITS file.
        """

        # Note: when opening a GzipFile the 'b+' is superfluous, but this was
        # still how the original test case looked
        # Note: with statement not supported on GzipFile in older Python
        # versions
        fileobj = gzip.GzipFile(self.temp('test.fits.gz'), 'ab+')
        h = fits.PrimaryHDU()
        try:
            h.writeto(fileobj)
        finally:
            fileobj.close()

        with fits.open(self.temp('test.fits.gz')) as hdul:
            assert hdul[0].header == h.header
Example #17
    def test_insert_groupshdu_to_non_empty_list(self):
        """Tests inserting a Simple GroupsHDU to an empty HDUList."""

        hdul = fits.HDUList()
        hdu = fits.PrimaryHDU(np.arange(100, dtype=np.int32))
        hdul.insert(0, hdu)
        hdu = fits.GroupsHDU()

        assert_raises(ValueError, hdul.insert, 1, hdu)

        info = [(0, 'PRIMARY', 'GroupsHDU', 8, (), '',
                 '1 Groups  0 Parameters'),
                (1, '', 'ImageHDU', 6, (100,), 'int32', '')]

        hdul.insert(0, hdu)

        assert hdul.info(output=False) == info

        hdul.writeto(self.temp('test-insert.fits'))

        assert fits.info(self.temp('test-insert.fits'), output=False) == info
Example #18
def extract_field_from_moscaic(mydir, myfilter, nfields=None, origname=False):

    if np.isscalar(nfields):
        nfields = np.array([nfields])
    for i, f in enumerate(glob.glob(mydir + "/" + myfilter)):
        hdulist = pf.open(f)
        if nfields is None:
            nfields = np.arange(len(hdulist)-1)+1
        for n in nfields:
            hdu = hdulist[n]
            hdu.header = hdulist[0].header + hdulist[n].header
            hduheader = pf.PrimaryHDU()
            hduheader.header = hdu.header
            hduheader.data = hdu.data
            hdulist1 = pf.HDUList([hduheader, hdu])
            if origname:
                name = os.path.basename(f)
                name = name.replace(".fits", "_%d.fits")
                hdulist1.writeto(name%(n), output_verify="fix", clobber=True)
            else:
                hdulist1.writeto("out%d_%d.fits"%(i,n), output_verify="fix", clobber=True)
Example #19
def main(args=sys.argv):

    parser = OptionParser(usage=usage)
    parser.add_option('-o',
                      '--outfile',
                      help='output catalog name',
                      dest='outfile')
    parser.add_option('-s',
                      '--saturation',
                      help='Saturation Pixel Level',
                      dest='saturation')
    parser.add_option('-i', '--instrum', help='Instrument tag', dest='instrum')
    parser.add_option('-m',
                      '--mastercat',
                      help='Master catalog to pull non Flux columns from',
                      dest='mastercat')

    options, catfiles = parser.parse_args()

    if options.outfile is None or \
            options.saturation is None:
        parser.error('Must specify outfile and saturation level')

    catlist = [ldac.openObjectFile(catfile) for catfile in catfiles]
    mastercat = None
    if options.mastercat:
        mastercat = ldac.openObjectFile(options.mastercat)

    combinedcat = combineCats(catlist,
                              options.saturation,
                              instrum=options.instrum,
                              mastercat=mastercat)

    hdus = [pyfits.PrimaryHDU(), combinedcat.hdu]
    if mastercat:
        hdus.extend(_transferOtherHDUs(options.mastercat))
    else:
        hdus.extend(_transferOtherHDUs(catfiles[0]))
    hdulist = pyfits.HDUList(hdus)
    hdulist.writeto(options.outfile, clobber=True)
Example #20
def mergecat_fits(outcat, dirs, root='run*m_udf_z8', bands=['H','B','V','I','Z','Y','J'],
                  inputband='H', inputcols_dic=def_inputcols, inputcols_int=['gtype'], paramfile=None):
    # collects the results from all the simulation catalogs and write to a *FITS* table
    # collect the results
    if len(glob.glob(outcat)) > 0:
        raise ValueError, "%s already exists." % outcat
    md, allcolumns, allformats = mergeoutput(dirs,
                                             root=root,
                                             bands=bands,
                                             inputband=inputband,
                                             inputcols_dic=inputcols_dic,
                                             inputcols_int=inputcols_int,
                                             paramfile=paramfile)
    # now build the FITS table
    fitscols = []
    for i in range(len(allcolumns)):
        colname = allcolumns[i]
        format = allformats[i]
        if type(md[colname]) == type(array([])):
            fitscols += [
                pyfits.Column(name=colname, format=format, array=md[colname])
            ]
        else:
            for b in bands:
                bcolname = b + '_' + colname
                fitscols += [
                    pyfits.Column(name=bcolname,
                                  format=format,
                                  array=md[colname][b])
                ]
    fitscols += [
        pyfits.Column(name='number',
                      format='K',
                      array=arange(0, len(md['detect'])))
    ]
    cols = pyfits.ColDefs(fitscols)
    tbhdu = pyfits.new_table(cols)
    hdu = pyfits.PrimaryHDU(array([]))
    thdulist = pyfits.HDUList([hdu, tbhdu])
    thdulist.writeto(outcat)
Example #21
def join_cats(cs, outputfile):
    import pyfits
    tables = {}
    i = 0
    cols = []
    seqnr = 0
    for c in cs:
        print(c)
        if len(c) > 1:
            TAB = c[1]
            c = c[0]
        else:
            TAB = 'STDTAB'
        i += 1
        print(c)
        tables[str(i)] = pyfits.open(c)
        for column in tables[str(i)][TAB].columns:
            if column.name == 'SeqNr':
                if not seqnr:
                    seqnr += 1
                else:
                    column.name = column.name + '_' + str(seqnr)
                    seqnr += 1

            cols.append(column)

    print(cols)
    print(len(cols))
    hdu = pyfits.PrimaryHDU()
    hduSTDTAB = pyfits.new_table(cols)
    hdulist = pyfits.HDUList([hdu])
    hduFIELDS = pyfits.open(cs[1][0])['FIELDS']
    hdulist.append(hduFIELDS)
    hdulist.append(hduSTDTAB)
    hdulist[1].header.update('EXTNAME', 'FIELDS')
    hdulist[2].header.update('EXTNAME', 'STDTAB')
    import os
    os.system('rm ' + outputfile)
    print(outputfile)
    hdulist.writeto(outputfile)
Example #22
def calculateCM(SynthObj, solarWl, rebinnedSpectrum, nominalWave,
                nominalSpectrum):
    IM = numpy.zeros((SynthObj.lineList.numLines + 2, len(solarWl)))
    for i in range(SynthObj.lineList.numLines):
        SynthObj.lineList.perturbLine(i, 0.3)
        wave, flux = SynthObj.run()
        plus = SpectralTools.binSpectrum(flux, wave, solarWl)
        SynthObj.lineList.perturbLine(i, -0.3)
        wave, flux = SynthObj.run()
        minus = SpectralTools.binSpectrum(flux, wave, solarWl)
        IM[i, :] = (plus - minus) / 0.6

    #Continuum Level
    plus = rebinnedSpectrum.copy() + 0.005
    minus = rebinnedSpectrum.copy() - 0.005
    IM[-1, :] = (plus - minus) / 0.01

    plus = SpectralTools.binSpectrum(nominalSpectrum, nominalWave + 0.1,
                                     solarWl)
    minus = SpectralTools.binSpectrum(nominalSpectrum, nominalWave - 0.1,
                                      solarWl)
    edges = (plus != 0) & (minus != 0)
    IM[-2, edges] = (plus[edges] - minus[edges]) / (0.2)

    nFilt = SynthObj.lineList.numLines - 10
    dims = IM.shape
    U, S, V = scipy.linalg.svd(IM)
    D = 1.0 / (S[0:-nFilt])
    S[-nFilt:] = 0.0
    newS = numpy.zeros((dims[0], dims[1]))
    I = [i for i in range(dims[1])]
    for i in range(len(D)):
        newS[i][i] = D[i]

    S = newS.copy()
    CM = numpy.array(scipy.matrix(V.T.dot(S.T.dot(U.T)),
                                  dtype=numpy.float32)).T

    hdu = pyfits.PrimaryHDU(CM)
    hdu.writeto('CommandMatrix_new.fits', clobber=True)
Example #23
def writefits2d(a, fitsfile):
    """ writefits2d -- write out an array to a fits file
        a -- array (numarray or Numeric)
        fitsfile -- output filename
    """
    fitsobj = pyfits.HDUList()
    hdu = pyfits.PrimaryHDU()
    hdr = hdu.header
    hdr.update('crval1', 0.)
    hdr.update('crval2', 0.)
    hdr.update('crpix1', a.shape[0] / 2.)
    hdr.update('crpix2', a.shape[1] / 2.)

    # Check if we already have a numarray array, if not convert
    x = N.arange(2)
    print(type(a))
    if type(a) != type(x):
        hdu.data = N.array(a.tolist())
    else:
        hdu.data = a.copy()
    fitsobj.append(hdu)
    fitsobj.writeto(fitsfile)
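hdr.update('crval1', 0.) is the old positional pyfits Header API; in astropy.io.fits keywords are assigned directly. A minimal sketch of the same header-plus-data write, assuming astropy and using an illustrative array and file name:

import numpy as np
from astropy.io import fits

a = np.zeros((64, 64))                      # illustrative 2-D array

hdu = fits.PrimaryHDU(a)
hdu.header['CRVAL1'] = 0.
hdu.header['CRVAL2'] = 0.
hdu.header['CRPIX1'] = a.shape[0] / 2.
hdu.header['CRPIX2'] = a.shape[1] / 2.
fits.HDUList([hdu]).writeto('example_2d.fits', overwrite=True)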
Example #24
    def test_nonstandard_hdu(self):
        """
        Regression test for https://trac.assembla.com/pyfits/ticket/157

        Tests that "Nonstandard" HDUs with SIMPLE = F are read and written
        without prepending a superfluous and unwanted standard primary HDU.
        """

        data = np.arange(100, dtype=np.uint8)
        hdu = fits.PrimaryHDU(data=data)
        hdu.header['SIMPLE'] = False
        hdu.writeto(self.temp('test.fits'))

        info = [(0, '', 'NonstandardHDU', 5, (), '', '')]
        with fits.open(self.temp('test.fits')) as hdul:
            assert hdul.info(output=False) == info
            # NonstandardHDUs just treat the data as an unspecified array of
            # bytes.  The first 100 bytes should match the original data we
            # passed in...the rest should be zeros padding out the rest of the
            # FITS block
            assert (hdul[0].data[:100] == data).all()
            assert (hdul[0].data[100:] == 0).all()
Example #25
def fitsout(s3d, plane, smoothx=0, smoothy=0, name='', unit=''):
    """ Write the given plane into a fits file. Uses header of the original
    data. Returns nothing, but write a fits file.

    Parameters
    ----------
    plane : np.array
        data to store in fits file
    smoothx : int
        possible Gaussian smoothing length in x-coordinate (default 0)
    smoothy : int
        possible Gaussian smoothing length in y-coordinate (default 0)
    name : str
        Name to use in fits file name
    """

    planeout = '%s_%s_%s.fits' % (s3d.inst, s3d.output, name)

    if smoothx > 0:
        plane = blur_image(plane, smoothx, smoothy)

    if os.path.isfile(planeout):
        os.remove(planeout)
    hdu = pyfits.HDUList()
    headimg = s3d.head.copy()
    headimgprim = s3d.headprim.copy()

    headimg['NAXIS'] = 2
    for delhead in [
            'NAXIS3', 'CD3_3', 'CD1_3', 'CD2_3', 'CD3_1', 'CD3_2', 'CRPIX3',
            'CRVAL3', 'CTYPE3', 'CUNIT3'
    ]:
        del headimg[delhead]
    if unit != '':
        headimg['BUNIT'] = unit
        headimgprim['BUNIT'] = unit
    hdu.append(pyfits.PrimaryHDU(header=headimgprim))
    hdu.append(pyfits.ImageHDU(data=plane, header=headimg))
    hdu.writeto(planeout)
Example #26
def write_slits(spectra, crpix, crval, scale, out_prefix, slitnum):
    num = 1
    for spec in spectra:
        for item in spec:
            if item.size == 4:
                hdu = pyfits.PrimaryHDU()
                hdu.header.update('CENTER', item[2])
                hdu.header.update('WIDTH', item[3])
                hdulist = pyfits.HDUList([hdu])
            else:
                thdu = pyfits.ImageHDU(item)
                thdu.header.update('CRVAL1', crval)
                thdu.header.update('CD1_1', scale)
                thdu.header.update('CRPIX1', crpix)
                thdu.header.update('CRVAL2', 1)
                thdu.header.update('CD2_2', 1)
                thdu.header.update('CRPIX2', 1)
                thdu.header.update('CTYPE1', 'LINEAR')
                hdulist.append(thdu)
        outname = out_prefix + "_spec_%02d_%02d.fits" % (slitnum, num)
        hdulist.writeto(outname)
        num += 1
Example #27
def Data_2_Fits(FileFolder,
                FitsName,
                Header,
                Wavelength,
                Intensity,
                NewKeyWord=None):

    Primary_HDU = pyfits.PrimaryHDU(header=Header)

    if NewKeyWord is not None:
        Primary_HDU.header.update(NewKeyWord[0], NewKeyWord[1])

    Column1 = pyfits.Column(name='Wave', format='E', array=Wavelength)
    Column2 = pyfits.Column(name='Int', format='E', array=Intensity)
    Columns = pyfits.ColDefs([Column1, Column2])
    Table_HDU = pyfits.new_table(Columns)

    HDU_list = pyfits.HDUList([Primary_HDU, Table_HDU])

    HDU_list.writeto(FileFolder + FitsName, clobber=True)

    return
Example #28
def write_fitsfile(objname,field,Lobj,Lobjerr,Lfieldlim,Lfieldlimerr,Llimsig,
                   outputname='./balff_data/objects_info.fits',verbose=True):
    """
    Create a (minimal) fits file containg data array of objects found in the fields
    described in ./balff_data/fields_info.txt on the format expected by balff_mpd.py

    --- INPUT ---
    objname          Name of objects (list of strings)
    field            Field the object was found in (list of strings)
    Lobj             Absolute luminoisity of object. Can be obtained from observed apparent magnitude with
                     balff_utilities.Mabs2L( balff_utilities.magapp2abs(magapp,zobj,Av=Avval,band=magband) )
    Lobjerr          Uncertainty on absolute Lobj
    Lfieldlim        Absolute luminosity corresponding to the limiting magnitude for the filed provided in
                     ./balff_data/fields_info.txt
    Lfieldlimerr     Uncertainty on Lfieldlim
    Llimsig          Number of sigmas the limitng luminoisity Lfieldlim corresponds to

    --- EXAMPLE OF USE ---
    see bcda.test_write_fitsfile_onBoRG() below

    """
    if verbose: print(' - Setting up output file ')
    col1     = pyfits.Column(name='OBJNAME'      , format='A30', array=objname)
    col2     = pyfits.Column(name='FIELD'        , format='A30', array=field)
    col3     = pyfits.Column(name='LOBJ'         , format='D'  , array=Lobj)
    col4     = pyfits.Column(name='LOBJERR'      , format='D'  , array=Lobjerr)
    col5     = pyfits.Column(name='LFIELDLIM'    , format='D'  , array=Lfieldlim)
    col6     = pyfits.Column(name='LFIELDLIMERR' , format='D'  , array=Lfieldlimerr)
    cols     = pyfits.ColDefs([col1 ,col2 ,col3 ,col4 ,col5 ,col6])
    tbhdu    = pyfits.new_table(cols)          # creating table header

    # writing hdrkeys:   '---KEY--',                  '----------------MAX LENGTH COMMENT-------------'
    tbhdu.header.append(('LLIMSIG ',Llimsig           ,'Sigmas field limiting lum. corresponds to'),end=True)

    if verbose: print(' - Writing simulated data to fits table '+outputname)
    hdu      = pyfits.PrimaryHDU()             # creating primary (minimal) header
    thdulist = pyfits.HDUList([hdu, tbhdu])    # combine primary and table header to hdulist
    thdulist.writeto(outputname,clobber=True)  # write fits file (clobber=True overwrites excisting file)
    return outputname
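pyfits.new_table is an old pyfits call that no longer exists in astropy.io.fits; the modern equivalent is BinTableHDU.from_columns. A hedged, standalone sketch of the same minimal table, with invented object names and luminosities standing in for real inputs:

import numpy as np
from astropy.io import fits

objname      = np.array(['obj1', 'obj2'])        # invented example values
field        = np.array(['fieldA', 'fieldA'])
Lobj         = np.array([1.2e10, 3.4e9])
Lobjerr      = np.array([1.0e9, 4.0e8])
Lfieldlim    = np.array([5.0e8, 5.0e8])
Lfieldlimerr = np.array([5.0e7, 5.0e7])

cols = fits.ColDefs([
    fits.Column(name='OBJNAME',      format='A30', array=objname),
    fits.Column(name='FIELD',        format='A30', array=field),
    fits.Column(name='LOBJ',         format='D',   array=Lobj),
    fits.Column(name='LOBJERR',      format='D',   array=Lobjerr),
    fits.Column(name='LFIELDLIM',    format='D',   array=Lfieldlim),
    fits.Column(name='LFIELDLIMERR', format='D',   array=Lfieldlimerr),
])
tbhdu = fits.BinTableHDU.from_columns(cols)
tbhdu.header['LLIMSIG'] = (5, 'Sigmas field limiting lum. corresponds to')
fits.HDUList([fits.PrimaryHDU(), tbhdu]).writeto('objects_info_example.fits',
                                                 overwrite=True)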
Example #29
    def test_update_with_truncated_header(self):
        """
        Regression test for https://trac.assembla.com/pyfits/ticket/148

        Test that saving an update where the header is shorter than the
        original header doesn't leave a stump from the old header in the file.
        """

        data = np.arange(100)
        hdu = fits.PrimaryHDU(data=data)
        idx = 1
        while len(hdu.header) < 34:
            hdu.header['TEST%d' % idx] = idx
            idx += 1
        hdu.writeto(self.temp('temp.fits'), checksum=True)

        with fits.open(self.temp('temp.fits'), mode='update') as hdul:
            # Modify the header, forcing it to be rewritten
            hdul[0].header['TEST1'] = 2

        with fits.open(self.temp('temp.fits')) as hdul:
            assert (hdul[0].data == data).all()
Example #30
def Math():
    global Operand_1
    Operator = OP.get()
    SOP = Second_Operand()
    for f in Operand_1:
        print("Operand 1")
        try:
            Data, Header = (read_file(f, 1, 1))
        except IOError:
            print("Can't open file:", f)
        try:
            if (Operator == 1):
                Data = Data + SOP
                Name = "Add"
            elif (Operator == 2):
                Data = Data - SOP
                Name = "Subtract"
            elif (Operator == 3):
                Data = Data * SOP
                Name = "Multiply"
            elif (Operator == 4):
                Data = Data / SOP
                Name = "Divide"
        except:
            print("Math error")
        print("Result of " + Name)
        Get_Stat(Data)
        if Show_frame.get(): Draw_pic(Data, Name, 3)

        Data = np.float32(Data)

        new_name, extension = os.path.splitext(f)
        new_name = new_name + '_' + Name + '.fits'
        Header['HISTORY'] = Name
        hdu = pyfits.PrimaryHDU(Data, Header)
        hdulist = pyfits.HDUList([hdu])
        hdulist.writeto(new_name, clobber=True, output_verify='ignore')
        print("Data saved to " + new_name)
        print('\r')