Example 1
def makeEmptyCEATemplate(raSizeDeg, decSizeDeg,meanRa = 180., meanDec = 0.,\
                      pixScaleXarcmin = 0.5, pixScaleYarcmin=0.5):
    assert meanDec == 0.,'mean dec other than zero not implemented yet'

    
    cdelt1 = -pixScaleXarcmin/60.
    cdelt2 = pixScaleYarcmin/60.
    naxis1 = int(raSizeDeg/pixScaleXarcmin*60.+0.5)
    naxis2 = int(decSizeDeg/pixScaleYarcmin*60.+0.5)
    refPix1 = naxis1/2.
    refPix2 = naxis2/2.
    pv2_1 = 1.0
    cardList = pyfits.Header()  # a Header object replaces the old pyfits.CardList
    cardList.append(pyfits.Card('NAXIS', 2))
    cardList.append(pyfits.Card('NAXIS1', naxis1))
    cardList.append(pyfits.Card('NAXIS2', naxis2))
    cardList.append(pyfits.Card('CTYPE1', 'RA---CEA'))
    cardList.append(pyfits.Card('CTYPE2', 'DEC--CEA'))
    cardList.append(pyfits.Card('CRVAL1', meanRa))
    cardList.append(pyfits.Card('CRVAL2', meanDec))
    cardList.append(pyfits.Card('CRPIX1', refPix1+1))
    cardList.append(pyfits.Card('CRPIX2', refPix2+1))
    cardList.append(pyfits.Card('CDELT1', cdelt1))
    cardList.append(pyfits.Card('CDELT2', cdelt2))
    cardList.append(pyfits.Card('CUNIT1', 'DEG'))
    cardList.append(pyfits.Card('CUNIT2', 'DEG'))
    hh = pyfits.Header(cards=cardList)
    wcs = astLib.astWCS.WCS(hh, mode='pyfits')
    data = np.zeros([naxis2,naxis1])
    ltMap = liteMapFromDataAndWCS(data,wcs)
    
    return ltMap
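For reference, a minimal standalone sketch of the same CEA header-building pattern, assuming astropy.io.fits and astropy.wcs in place of the standalone pyfits and astLib used above; the geometry values are illustrative and liteMapFromDataAndWCS is not involved.

import numpy as np
from astropy.io import fits
from astropy.wcs import WCS

naxis1, naxis2 = 720, 360                          # 6 deg x 3 deg at 0.5-arcmin pixels
hdr = fits.Header()
hdr['NAXIS'] = 2
hdr['NAXIS1'] = naxis1
hdr['NAXIS2'] = naxis2
hdr['CTYPE1'], hdr['CTYPE2'] = 'RA---CEA', 'DEC--CEA'
hdr['CRVAL1'], hdr['CRVAL2'] = 180.0, 0.0
hdr['CRPIX1'], hdr['CRPIX2'] = naxis1 / 2. + 1, naxis2 / 2. + 1
hdr['CDELT1'], hdr['CDELT2'] = -0.5 / 60., 0.5 / 60.
hdr['CUNIT1'], hdr['CUNIT2'] = 'deg', 'deg'

wcs = WCS(hdr)                                     # parse the WCS from the header
data = np.zeros((naxis2, naxis1))                  # empty map with that geometry
print(wcs.wcs_pix2world([[0, 0]], 0))              # sky position of pixel (0, 0)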
Example 2
    def write(self, dirName=".", fileName=None):
        if not pyfits:
            raise RuntimeError(
                "I failed to import pyfits, so cannot write to disk")

        for name in ["fiberId", "ra", "dec"]:
            if getattr(self, name, None) is None:
                if name == "fiberId" or self.pfsConfigId != 0x0:
                    raise RuntimeError(
                        "I cannot write a pfsConfig file unless %s is provided"
                        % name)

                setattr(self, name,
                        np.zeros_like(self.fiberId, dtype=np.float32))

        # even if set in __init__ it might be invalid by now
        _pfsConfigId = calculate_pfsConfigId(self.fiberId, self.ra, self.dec)

        if self.pfsConfigId is None:
            self.pfsConfigId = _pfsConfigId
        else:
            if self.pfsConfigId != _pfsConfigId:
                raise RuntimeError(
                    "Mismatch between pfsConfigId == 0x%016x and fiberId/ra/dec -> 0x%016x"
                    % (self.pfsConfigId, _pfsConfigId))

        hdus = pyfits.HDUList()

        hdr = pyfits.Header()
        hdu = pyfits.PrimaryHDU(header=hdr)
        hdr.update()
        hdus.append(hdu)

        # catId, objId, ra, dec, fiber flux, MPS centroid
        hdr = pyfits.Header()
        for i, b in enumerate(self.filterNames):
            hdr["FILTER%d" % i] = b
        hdr.update(INHERIT=True)

        hdu = pyfits.BinTableHDU.from_columns([
            pyfits.Column(name='fiberId', format='J', array=self.fiberId),
            pyfits.Column(name='catId', format='J', array=self.catId),
            pyfits.Column(name='tract', format='J', array=self.tract),
            pyfits.Column(name='patch', format='A3', array=self.patch),
            pyfits.Column(name='objId', format='K', array=self.objId),
            pyfits.Column(name='ra', format='E', array=self.ra),
            pyfits.Column(name='dec', format='E', array=self.dec),
            pyfits.Column(name='fiberMag',
                          format='%dE' % len(self.filterNames),
                          array=self.fiberMag),
            pyfits.Column(name='MPS centroid', format='2E', array=self.mpsCen)
        ], hdr)
        hdu.name = 'CONFIG'
        hdus.append(hdu)

        # clobber=True in writeto prints a message, so use open instead
        if fileName is None:
            fileName = self.fileNameFormat % (self.pfsConfigId)
        with open(os.path.join(dirName, fileName), "wb") as fd:
            hdus.writeto(fd)
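A small self-contained sketch of the BinTableHDU.from_columns pattern used above, assuming astropy.io.fits; the column names and data are illustrative, and the format codes mirror the snippet ('J' 32-bit integer, 'K' 64-bit integer, 'E' 32-bit float, 'A3' 3-character string, '2E' pair of floats).

import numpy as np
from astropy.io import fits

fiberId = np.arange(3, dtype=np.int32)
objId = np.arange(3, dtype=np.int64)
ra = np.array([150.11, 150.12, 150.13], dtype=np.float32)
dec = np.array([2.21, 2.22, 2.23], dtype=np.float32)
patch = np.array(['1,1', '1,2', '2,1'])
centroid = np.zeros((3, 2), dtype=np.float32)

hdu = fits.BinTableHDU.from_columns([
    fits.Column(name='fiberId', format='J', array=fiberId),      # 32-bit int
    fits.Column(name='objId', format='K', array=objId),          # 64-bit int
    fits.Column(name='patch', format='A3', array=patch),         # 3-char string
    fits.Column(name='ra', format='E', array=ra),                # 32-bit float
    fits.Column(name='dec', format='E', array=dec),
    fits.Column(name='centroid', format='2E', array=centroid),   # float pair
])
hdu.name = 'CONFIG'
fits.HDUList([fits.PrimaryHDU(), hdu]).writeto('config.fits', overwrite=True)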
Example 3
def makeEmptyCEATemplateAdvanced(ra0, dec0, \
                                 ra1, dec1,\
                                 pixScaleXarcmin = 0.5, \
                                 pixScaleYarcmin= 0.5):

    """
    ALL RA DEC IN DEGREES
    """
    assert ra0<ra1
    assert dec0<dec1
    refDec = (dec0+dec1)/2.
    cosRefDec =  np.cos(refDec/180.*np.pi)
    raSizeDeg  = (ra1 - ra0)*cosRefDec
    decSizeDeg = (dec1-dec0)
    
    cdelt1 = -pixScaleXarcmin/(60.*cosRefDec)
    cdelt2 = pixScaleYarcmin/(60.*cosRefDec)
    naxis1 = int(raSizeDeg/pixScaleXarcmin*60.+0.5)
    naxis2 = int(decSizeDeg/pixScaleYarcmin*60.+0.5)
    refPix1 = int(-ra1/cdelt1+0.5)
    refPix2 = int(np.sin(-dec0*np.pi/180.)\
                        *180./np.pi/cdelt2/cosRefDec**2+0.5)
    pv2_1 = cosRefDec**2
    cardList = pyfits.Header()  # a Header object replaces the old pyfits.CardList
    cardList.append(pyfits.Card('NAXIS', 2))
    cardList.append(pyfits.Card('NAXIS1', naxis1))
    cardList.append(pyfits.Card('NAXIS2', naxis2))
    cardList.append(pyfits.Card('EXTEND', True))
    cardList.append(pyfits.Card('CTYPE1', 'RA---CEA'))
    cardList.append(pyfits.Card('CTYPE2', 'DEC--CEA'))
    cardList.append(pyfits.Card('CRVAL1', 0))
    cardList.append(pyfits.Card('CRVAL2', 0))
    cardList.append(pyfits.Card('CRPIX1', refPix1+1))
    cardList.append(pyfits.Card('CRPIX2', refPix2+1))
    cardList.append(pyfits.Card('CDELT1', cdelt1))
    cardList.append(pyfits.Card('CDELT2', cdelt2))
    cardList.append(pyfits.Card('CUNIT1', 'DEG'))
    cardList.append(pyfits.Card('CUNIT2', 'DEG'))
    cardList.append(pyfits.Card('PV2_1', pv2_1))
    cardList.append(pyfits.Card('EQUINOX',2000))
    cardList.append(pyfits.Card('PC1_1',1))
    cardList.append(pyfits.Card('PC1_2',0))
    cardList.append(pyfits.Card('PC2_1',0))
    cardList.append(pyfits.Card('PC2_2',1))
    
    hh = pyfits.Header(cards=cardList)
    wcs = astLib.astWCS.WCS(hh, mode='pyfits')
    data = np.zeros([naxis2,naxis1])
    ltMap = liteMapFromDataAndWCS(data,wcs)
    
    return ltMap
Example 4
def HiResHDU(model):
    '''
    Construct the HDU containing the hi res image of the target.

    '''

    # Get mission cards
    cards = model._mission.HDUCards(model.meta, hdu=5)

    # Add EVEREST info
    cards.append(('COMMENT', '************************'))
    cards.append(('COMMENT', '*     EVEREST INFO     *'))
    cards.append(('COMMENT', '************************'))
    cards.append(('MISSION', model.mission, 'Mission name'))
    cards.append(('VERSION', EVEREST_MAJOR_MINOR, 'EVEREST pipeline version'))
    cards.append(('SUBVER', EVEREST_VERSION, 'EVEREST pipeline subversion'))
    cards.append(('DATE', strftime('%Y-%m-%d'),
                  'EVEREST file creation date (YYYY-MM-DD)'))

    # Create the HDU
    header = pyfits.Header(cards=cards)
    if model.hires is not None:
        hdu = pyfits.ImageHDU(
            data=model.hires, header=header, name='HI RES IMAGE')
    else:
        hdu = pyfits.ImageHDU(data=np.empty(
            (0, 0), dtype=float), header=header, name='HI RES IMAGE')
    return hdu
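The EVEREST helpers in these examples all follow the same pattern: collect (keyword, value, comment) tuples and hand them to Header(cards=...). A minimal sketch of that pattern, assuming astropy.io.fits and using placeholder values instead of the EVEREST constants and model object.

import numpy as np
from astropy.io import fits
from time import strftime

cards = [
    ('COMMENT', '************************'),
    ('COMMENT', '*     EVEREST INFO     *'),
    ('COMMENT', '************************'),
    ('MISSION', 'k2', 'Mission name'),                                # placeholder value
    ('DATE', strftime('%Y-%m-%d'), 'File creation date (YYYY-MM-DD)'),
]
header = fits.Header(cards=cards)
hdu = fits.ImageHDU(data=np.zeros((10, 10)), header=header, name='HI RES IMAGE')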
Example 5
def ImagesHDU(model):
    '''
    Construct the HDU containing sample postage stamp images of the target.

    '''

    # Get mission cards
    cards = model._mission.HDUCards(model.meta, hdu=4)

    # Add EVEREST info
    cards.append(('COMMENT', '************************'))
    cards.append(('COMMENT', '*     EVEREST INFO     *'))
    cards.append(('COMMENT', '************************'))
    cards.append(('MISSION', model.mission, 'Mission name'))
    cards.append(('VERSION', EVEREST_MAJOR_MINOR, 'EVEREST pipeline version'))
    cards.append(('SUBVER', EVEREST_VERSION, 'EVEREST pipeline subversion'))
    cards.append(('DATE', strftime('%Y-%m-%d'),
                  'EVEREST file creation date (YYYY-MM-DD)'))

    # The images
    format = '%dD' % model.pixel_images[0].shape[1]
    arrays = [pyfits.Column(name='STAMP1', format=format,
                            array=model.pixel_images[0]),
              pyfits.Column(name='STAMP2', format=format,
                            array=model.pixel_images[1]),
              pyfits.Column(name='STAMP3', format=format,
                            array=model.pixel_images[2])]

    # Create the HDU
    header = pyfits.Header(cards=cards)
    cols = pyfits.ColDefs(arrays)
    hdu = pyfits.BinTableHDU.from_columns(
        cols, header=header, name='POSTAGE STAMPS')

    return hdu
Example 6
def ApertureHDU(model):
    '''
    Construct the HDU containing the aperture used to de-trend.

    '''

    # Get mission cards
    cards = model._mission.HDUCards(model.meta, hdu=3)

    # Add EVEREST info
    cards.append(('COMMENT', '************************'))
    cards.append(('COMMENT', '*     EVEREST INFO     *'))
    cards.append(('COMMENT', '************************'))
    cards.append(('MISSION', model.mission, 'Mission name'))
    cards.append(('VERSION', EVEREST_MAJOR_MINOR, 'EVEREST pipeline version'))
    cards.append(('SUBVER', EVEREST_VERSION, 'EVEREST pipeline subversion'))
    cards.append(('DATE', strftime('%Y-%m-%d'),
                  'EVEREST file creation date (YYYY-MM-DD)'))

    # Create the HDU
    header = pyfits.Header(cards=cards)
    hdu = pyfits.ImageHDU(data=model.aperture,
                          header=header, name='APERTURE MASK')

    return hdu
Example 7
    def fits_create(self, nspec, coords, system='ga'):
        """
        Create a primary HDU whose header records the
        observation attributes.

        Inputs:

        - ``nspec``: Number of spectra to collect.
        - ``coords``: Coordinates of the target of observation. \
                Format: (lon/ra, lat/dec)
        - ``system``: Coordinate system of ``coords`` (eq, ga).

        Return:

        - This function returns a primary HDU with a header containing \
                the attributes of the observation.
        """
        # Check that the user isn't a dingus
        if system != 'ga' and system != 'eq':
            raise ValueError('Invalid coordinate system: ' + system)

        # Create a header and write spectral info to it.
        obs_attr = _fits.Header()
        obs_attr['NSPEC'] = (nspec, 'Number of spectra recorded')
        obs_attr['BOFFILE'] = (self.boffile, 'FPGA binary code')
        obs_attr['MODE'] = (self.mode, 'Spectrometer mode')
        obs_attr['FPGA'] = (self.clock_rate, 'FPGA clock speed (Hz)')
        obs_attr['IADC'] = (self.iadc_rate, 'iADC clock speed (Hz)')
        obs_attr['DOWNSAMP'] = (self.downsample, 'ADC downsampling period.')
        obs_attr['SAMPRATE'] = (self.samp_rate, 'Downsampled clock speed (Hz)')
        obs_attr['BW'] = (self.bandwidth, 'Bandwidth of spectra (Hz)')
        obs_attr['NCHAN'] = (self.nchan, 'Number of frequency channels')
        obs_attr['RES'] = (self.resolution, 'Frequency resolution (Hz)')
        obs_attr['FFTSHIFT'] = (self.fft_shift, 'FFT Shifting instructions')
        obs_attr['ACCLEN'] = (self.acc_len, 'Number of clock cycles')
        obs_attr['INTTIME'] = (self.int_time, 'Integration time of spectra')
        obs_attr['SCALE'] = (self.scale, 'Average instead of sum on ROACH')

        # Set the coordinates. Both RA/Dec and galactic will be stored.
        obs_start_seconds = _time.time()
        obs_start = get_epoch(obs_start_seconds)
        obs_start_jd = julian_date(obs_start_seconds)
        if system == 'ga':
            lon, lat = coords_deg2rad(coords)
            galactic = _ephem.Galactic(lon, lat, epoch=obs_start)
            equatorial = _ephem.Equatorial(galactic)
        else:
            ra, dec = coords_deg2rad(coords)
            equatorial = _ephem.Equatorial(ra, dec, epoch=obs_start)
            galactic = _ephem.Galactic(equatorial)

        # Pack the coordinates into the FITS header.
        obs_attr['L'] = (ephem2deg(galactic.lon), 'Galactic longitude')
        obs_attr['B'] = (ephem2deg(galactic.lat), 'Galactic latitude')
        obs_attr['RA'] = (ephem2deg(equatorial.ra), 'Right Ascension')
        obs_attr['DEC'] = (ephem2deg(equatorial.dec), 'Declination')
        obs_attr['JD'] = (obs_start_jd, 'Julian date of start time')
        obs_attr['UTC'] = (obs_start, 'Starting date of accumulation')
        obs_attr['TIME'] = (obs_start_seconds, 'Seconds since epoch')
        return _fits.PrimaryHDU(header=obs_attr)
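A short sketch of the (value, comment) tuple assignment used throughout obs_attr above, assuming astropy.io.fits; the keyword values are illustrative.

from astropy.io import fits

hdr = fits.Header()
hdr['NSPEC'] = (8, 'Number of spectra recorded')        # value plus card comment
hdr['BW'] = (250e6, 'Bandwidth of spectra (Hz)')
print(hdr['NSPEC'])                                     # -> 8
print(hdr.comments['NSPEC'])                            # -> 'Number of spectra recorded'
hdu = fits.PrimaryHDU(header=hdr)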
Example 8
    def __init__(self, header=None, cardlist=None, origin=None):
        """
            Creates a Header object
            
            Parameters
            --------------
            header : pyfits.Header object, optional
                    Fits header object
            cardlist : pyfits.CardList object, optional
                    Fits header given as a card list;
                    if header is given, the cardlist parameter is ignored
            origin : string, optional
                    Name of the Fits file as the origin for the header,
                    can be the full path of the file
                       
        """
        if header != None:
            # Assign private variable and convert header to card list
            self._cardlist = header.ascardlist()
            self._header = header
        elif cardlist != None and header == None:
            # Assign private variable and convert card list  to header
            self._cardlist = cardlist
            self._header = pyfits.Header(cardlist)
        else:
            # Create empty Header and CardList objects
            self._cardlist = None
            self._header = None

        # Set the Fits file origin of the header if given
        if origin != None:
            self._origin = origin
        else:
            self._origin = None
Example 9
def PrimaryHDU(model):
    '''
    Construct the primary HDU file containing basic header info.

    '''

    # Get mission cards
    cards = model._mission.HDUCards(model.meta, hdu=0)
    if 'KEPMAG' not in [c[0] for c in cards]:
        cards.append(('KEPMAG', model.mag, 'Kepler magnitude'))

    # Add EVEREST info
    cards.append(('COMMENT', '************************'))
    cards.append(('COMMENT', '*     EVEREST INFO     *'))
    cards.append(('COMMENT', '************************'))
    cards.append(('MISSION', model.mission, 'Mission name'))
    cards.append(('VERSION', EVEREST_MAJOR_MINOR, 'EVEREST pipeline version'))
    cards.append(('SUBVER', EVEREST_VERSION, 'EVEREST pipeline subversion'))
    cards.append(('DATE', strftime('%Y-%m-%d'),
                  'EVEREST file creation date (YYYY-MM-DD)'))

    # Create the HDU
    header = pyfits.Header(cards=cards)
    hdu = pyfits.PrimaryHDU(header=header)

    return hdu
Example 10
    def getWcs(self, subId, outFilename=None):
        """Download the WCS and return as a pyfits header object.


        Inputs:
        subId
        outFilename (string) If not None, save the header to this file

        Returns:
        A pyfits.Header() object
        """

        jobId = self.submittedJobs[subId]['jobs'][0]
        url = re.sub("api", "wcs_file/%i" % (jobId), self.apiurl)
        #return url
        f = urllib2.urlopen(url)
        text = f.read()

        hdr = pyfits.Header().fromstring(text)

        if outFilename is not None:
            wf = open(outFilename, "wb")
            wf.write(text)
            wf.close()
        return hdr
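A minimal sketch of the string round-trip behind the call above, assuming astropy.io.fits: tostring() serialises a header into 80-character cards, and Header.fromstring() parses them back.

from astropy.io import fits

hdr = fits.Header()
hdr['CTYPE1'] = 'RA---TAN'
hdr['CTYPE2'] = 'DEC--TAN'
text = hdr.tostring()                    # one long string of 80-character cards
hdr2 = fits.Header.fromstring(text)      # parse it back into a Header
assert hdr2['CTYPE1'] == 'RA---TAN'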
Example 11
def gal_header(center, major_axes, galactic=True):

    values = [
            ["NAXIS",  2,          ],

            ["NAXIS1", IMAGE_SIZE,       ],
            ["NAXIS2", IMAGE_SIZE,       ],

            ["CTYPE1", 'GLON-ZEA' if galactic else 'RA---ZEA' ],
            ["CTYPE2", 'GLAT-ZEA' if galactic else 'DEC--ZEA' ],

            ["CRPIX1", IMAGE_SIZE/2. + 0.5,       ],
            ["CRPIX2", IMAGE_SIZE/2. + 0.5,       ],

            ["CRVAL1", center.l() if galactic else center.ra(),        ],
            ["CRVAL2", center.b() if galactic else center.dec(),       ],

            ["CDELT1", -3.*major_axes/IMAGE_SIZE,       ],
            ["CDELT2", 3.*major_axes/IMAGE_SIZE,        ],
    ]

    if galactic is False:
        values += [
            ['RADECSYS','FK5'],
            ['EQUINOX',2000],
        ]


    cards = [pyfits.Card(*i) for i in values]

    header=pyfits.Header(cards=cards)

    return header
Example 12
def header_create(hdr_cube,
                  in_par_list):  # create an appropriate header for the maps
    hdr_cube['NAXIS'] = 2
    hdr_cube['BITPIX'] = -64
    hdr_cube['EXTEND'] = True
    hdr_dict = hdr_cube.ascard
    gen = [
        'SIMPLE', 'BITPIX', 'NAXIS', 'NAXIS1', 'NAXIS2', 'EXTEND', 'RADESYS',
        'CTYPE1', 'CTYPE2', 'CRVAL1', 'CRVAL2', 'CRPIX1', 'CRPIX2', 'CROTA2',
        'CDELT1', 'CDELT2', 'EQUINOX', 'CD1_1', 'CD1_2', 'CD2_1', 'CD2_2'
    ]
    hdr_map = pf.Header()

    for key in gen:
        if key in hdr_cube:
            hdr_map.append(hdr_dict[key])

    hdr_map.add_comment('')
    hdr_map.add_comment('BUBBLY parameters used to produce this file:')
    hdr_map.add_comment('')

    for line in in_par_list:
        hdr_map.add_comment(line[0:line.find('#')])

    return hdr_map
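A minimal sketch of the same selective-copy idea with the current astropy API, which indexes the header directly instead of going through the removed .ascard attribute; the keywords are illustrative.

from astropy.io import fits

src = fits.Header([('CTYPE1', 'RA---TAN'), ('CTYPE2', 'DEC--TAN'), ('BUNIT', 'Jy')])
keep = ['CTYPE1', 'CTYPE2', 'CRVAL1', 'CRVAL2']
dst = fits.Header()
for key in keep:
    if key in src:
        dst[key] = (src[key], src.comments[key])   # copy value and comment together
dst.add_comment('copied WCS keywords only')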
Example 13
    def test_update_header_card(self):
        """A very basic test for the Header.update method--I'd like to add a
        few more cases to this at some point.
        """

        header = fits.Header()
        comment = 'number of bits per data pixel'
        header['BITPIX'] = (16, comment)
        assert 'BITPIX' in header
        assert header['BITPIX'] == 16
        assert header.ascard['BITPIX'].comment == comment

        # The new API doesn't support savecomment so leave this line here; at
        # any rate good to have testing of the new API mixed with the old API
        header.update('BITPIX', 32, savecomment=True)
        # Make sure the value has been updated, but the comment was preserved
        assert header['BITPIX'] == 32
        assert header.ascard['BITPIX'].comment == comment

        # The comment should still be preserved--savecomment only takes effect if
        # a new comment is also specified
        header['BITPIX'] = 16
        assert header.ascard['BITPIX'].comment == comment
        header.update('BITPIX', 16, 'foobarbaz', savecomment=True)
        assert header.ascard['BITPIX'].comment == comment
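The test above deliberately exercises the old Header.update(key, value, comment)/ascard API. For comparison, a minimal sketch of the equivalent behaviour in the current astropy API, where assigning a bare value to an existing key keeps the card comment.

from astropy.io import fits

header = fits.Header()
header['BITPIX'] = (16, 'number of bits per data pixel')
header['BITPIX'] = 32                     # value changes, existing comment is kept
assert header['BITPIX'] == 32
assert header.comments['BITPIX'] == 'number of bits per data pixel'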
Example 14
def generate_header(header_file):

    # Read in header
    header = pyfits.Header()
    header.fromTxtFile(header_file)

    return header
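fromTxtFile is the old pyfits instance method. A minimal sketch of the same function under the current astropy API, where reading a header from a text file is the classmethod Header.fromtextfile(); this assumes astropy rather than standalone pyfits.

from astropy.io import fits

def generate_header(header_file):
    # Header.fromtextfile() builds a new Header from a card-per-line text file
    return fits.Header.fromtextfile(header_file)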
Example 15
def fits2fits(infile, outfile, verbose=False, fix_idr=False):
    """
    Returns: error string, or None on success.
    """
    if fix_idr:
        from astrometry.util.fix_sdss_idr import fix_sdss_idr

    # Read input file.
    fitsin = pyfits.open(infile)
    # Print out info about input file.
    if verbose:
        fitsin.info()

    for i, hdu in enumerate(fitsin):
        if fix_idr:
            hdu = fitsin[i] = fix_sdss_idr(hdu)
        # verify() fails when a keyword contains invalid characters,
        # so go through the primary header and fix them by converting invalid
        # characters to '_'
        hdr = hdu.header
        logging.info('Header has %i cards' % len(hdr))
        # allowed characters (FITS standard section 5.1.2.1)
        pat = re.compile(r'[^A-Z0-9_\-]')

        newcards = []
        for c in hdr.ascard:
            k = c.keyword
            # new keyword:
            knew = pat.sub('_', k)
            if k != knew:
                logging.debug('Replacing illegal keyword %s by %s' % (k, knew))
                # it seems pyfits is not clever enough to notice this...
                if len(knew) > 8:
                    knew = 'HIERARCH ' + knew
            newcards.append(pyfits.Card(keyword=knew, value=c.value,
                                        comment=c.comment))
        hdu.header = pyfits.Header(newcards)
            
        # Fix input header
        hdu.verify('fix')

        # UGH!  Work around stupid pyfits handling of scaled data...
        # (it fails to round-trip scaled data correctly!)
        bzero = hdr.get('BZERO', None)
        bscale = hdr.get('BSCALE', None)
        if (bzero is not None and bscale is not None
            and (bzero != 0. or bscale != 1.)):
            logging.debug('Scaling to bzero=%g, bscale=%g' % (bzero, bscale))
            hdu.scale('int16', '', bscale, bzero)

    # Describe output file we're about to write...
    if verbose:
        print('Outputting:')
        fitsin.info()

    try:
        pyfits_writeto(fitsin, outfile, output_verify='warn')
    except pyfits.VerifyError as ve:
        return ('Verification of output file failed: your FITS file is probably too broken to automatically fix.' +
                '  Error message is:' + str(ve))
Example 16
 def setHdrValue(self, keyword, value, comment=None):
     if self._header == None:
         self._header = pyfits.Header()
     if comment == None:
         self._header.update(keyword, value)
     else:
         self._header.update(keyword, value, comment)
     self._cardlist = self._header.ascardlist()
Example 17
def header_from_string(str):
    """
    header_from_string(str):
        Convert an input string (which should be the ASCII header from
            a FITS HDU) into an instance of the pyfits 'Header' class.
    """
    cl = cardlist_from_string(str)
    return pyfits.Header(cl)
Example 18
def create_FITS(in_file, out_file, param_file):
    import numpy as np
    import pyfits
    import math

    prihdr = pyfits.Header()  # Create Header
    with open(in_file, 'r') as f:
        first_line = f.readline().rstrip().split()
    #print "Creating fits with Ns, N=",first_line
    N = int(math.floor(float(first_line[1])))
    Ns = int(math.floor(float(first_line[0])))
    prihdr['N'] = (N, 'number of latitude zones')
    prihdr['Ns'] = (Ns, 'number of time steps per orbit')

    lines = [line.rstrip('\n') for line in open(param_file)]
    for line in lines:
        if line:
            words = line.split('!')
            if words[0] == "COMMENT":
                prihdr['COMMENT'] = words[2]
            else:
                if words[3] == "F":
                    prihdr[words[0]] = (float(words[1]), words[2])
                elif words[3] == "I":
                    prihdr[words[0]] = (int(words[1]), words[2])
                else:
                    prihdr[words[0]] = (words[1], words[2])
            if words[0] == "DATE":
                date = words[1]
            if words[0] == "NUMBER":
                number = words[1]

    prihdu = pyfits.PrimaryHDU(header=prihdr)

    #print "Date of creation of map: ",date
    out_file = out_file + date[0:2] + "." + date[2:4] + "." + date[
        4:8] + "-" + number + ".fits"
    #print "Creating ",out_file

    data = np.loadtxt(in_file, skiprows=1)  # Read data from input file

    year, lat, temp = data.T
    year = year - math.floor(
        year[0])  # This subtracts the integer part of "year"

    c1 = pyfits.Column(name='year', format='E', array=year)
    c2 = pyfits.Column(name='lat', format='E', array=lat)
    c3 = pyfits.Column(name='temp', format='E', array=temp)

    cols = pyfits.ColDefs([c1, c2, c3])

    tbhdu = pyfits.BinTableHDU.from_columns(cols)

    thdulist = pyfits.HDUList([prihdu, tbhdu])

    thdulist.writeto(out_file)

    return date
Example 19
def save_PDF(zfine,
             pdfs,
             Pars,
             path='',
             filebase='',
             num=-1,
             oob='no',
             var='',
             multiple='no',
             rank=0):
    """
    Saves photo-z PDFs
    """

    if path == '':
        path = Pars.path_results
    if not os.path.exists(path): os.system('mkdir -p ' + path)
    if filebase == '':
        filebase = Pars.finalfilename
    if num == -1:
        for j in range(100):
            if os.path.exists(path + filebase + '.' + str(j) +
                              '.mlz') and os.path.exists(path + filebase +
                                                         '.' + str(j) +
                                                         '.P.npy'):
                continue
            else:
                fileoutPDF = path + filebase + '.' + str(j) + '.P'
                if oob == 'yes':
                    fileoutPDF = path + filebase + '_oob' + var + '.' + str(
                        j) + '.P'
                break
    else:
        fileoutPDF = path + filebase + '.' + str(num) + '.P'
        if oob == 'yes':
            fileoutPDF = path + filebase + '_oob' + var + '.' + str(num) + '.P'

    if multiple == 'yes': fileoutPDF = fileoutPDF + '_' + str(rank)
    pdfs = concatenate((pdfs, [zfine]))
    if Pars.writefits == 'no':
        save(fileoutPDF, pdfs)
    else:
        head = pf.Header()
        head['N_TOT'] = len(pdfs) - 1
        head['DZ'] = zfine[1] - zfine[0]
        head['NPOINTS'] = len(zfine)
        head['COMMENT'] = 'The last row of the table are the redshift positions'
        head['COMMENT'] = 'This file was created using MLZ'
        head['HISTORY'] = 'Created on ' + datetime.datetime.now().strftime("%Y-%m-%d  %H:%M")
        fmt = '%dE' % len(zfine)
        col0 = pf.Column(name='PDF values', format=fmt, array=pdfs)
        #table0 = pf.new_table(pf.ColDefs([col0]))
        table0 = pf.BinTableHDU.from_columns(pf.ColDefs([col0]))
        prihdu = pf.PrimaryHDU(header=head)
        hdulist = pf.HDUList([prihdu, table0])
        hdulist.writeto(fileoutPDF + '.fits', clobber=True)
Example 20
def mkfitshdr(cards=None, usedefaults=True):
	"""
	Make a FITS file header of all arguments supplied in the dict **cards**.

	If **usedefaults** is set, also add default header items:
	- Program filename and path (from sys.argv[0])
	- Current working dir
	- Program filesize, mtime and ctime
	- Git revision of executable (if available)
	- epoch (time())
	- utctime / localtime
	- hostid

	@params [in] cards Dict containing key=value pairs for the header
	@params [in] usedefaults Also store default parameters in header
	@return pyfits header object
	"""

	clist = pyfits.CardList()

	if (usedefaults):
		clist.append(pyfits.Card(key='progname', 
								value=os.path.basename(sys.argv[0]),
								comment='Program filename') )
		clist.append(pyfits.Card(key='progpath', 
								value=os.path.dirname(sys.argv[0]),
								comment='Program path') )
		grev = git_rev(sys.argv[0])
		if (grev):
			clist.append(pyfits.Card(key='gitrev', 
								value=grev,
								comment='Program git revision') )
		clist.append(pyfits.Card(key='progsize', 
								value=os.path.getsize(sys.argv[0]),
								comment='Program filesize (bytes)') )
		clist.append(pyfits.Card(key='mtime', 
								value=os.path.getmtime(sys.argv[0]),
								comment='Program last modification time') )
		clist.append(pyfits.Card(key='ctime', 
								value=os.path.getctime(sys.argv[0]),
								comment='Program metadata change time' ) )
		clist.append(pyfits.Card(key='curdir', 
								value=os.path.realpath(os.path.curdir),
								comment='Current working dir') )
		clist.append(pyfits.Card(key='epoch', value=time(),
								comment='Current seconds since epoch from time.time()') )
		# No comments for the last two fields because they are too large
		clist.append(pyfits.Card(key='utctime', value=asctime(gmtime(time()))) )
		clist.append(pyfits.Card(key='loctime', value=asctime(localtime(time()))) )
		clist.append(pyfits.Card(key='hostid', value=os.uname()[1],
								comment='Hostname from os.uname()') )

	if (cards):
		for key, val in cards.items():
			clist.append(pyfits.Card(key, val) )

	return pyfits.Header(cards=clist)
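A hedged usage sketch for mkfitshdr, assuming the module's own pyfits import is available; the frame array and card values are illustrative.

import numpy as np

frame = np.zeros((64, 64))                                    # illustrative image
hdr = mkfitshdr(cards={'exptime': 1.5, 'target': 'M31'})
pyfits.writeto('frame.fits', frame, header=hdr, clobber=True)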
Example 21
 def _make_streaming_hdu(self, fileobj):
     hd = fits.Header()
     hd['SIMPLE'] = (True, 'conforms to FITS standard')
     hd['BITPIX'] = (32, 'array data type')
     hd['NAXIS'] = (2, 'number of array dimensions')
     hd['NAXIS1'] = 5
     hd['NAXIS2'] = 5
     hd['EXTEND'] = True
     return fits.StreamingHDU(fileobj, hd)
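A short sketch of how a StreamingHDU built this way can be used to write data incrementally, assuming astropy.io.fits; the file name and array are illustrative.

import numpy as np
from astropy.io import fits

hd = fits.Header()
hd['SIMPLE'] = (True, 'conforms to FITS standard')
hd['BITPIX'] = (32, 'array data type')
hd['NAXIS'] = (2, 'number of array dimensions')
hd['NAXIS1'] = 5
hd['NAXIS2'] = 5

shdu = fits.StreamingHDU('stream.fits', hd)
shdu.write(np.zeros((5, 5), dtype=np.int32))   # data can be appended in chunks
shdu.close()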
Example 22
    def test_unfixable_missing_card(self):
        class TestHDU(fits.hdu.base.NonstandardExtHDU):
            def _verify(self, option='warn'):
                errs = super(TestHDU, self)._verify(option)
                hdu.req_cards('TESTKW', None, None, None, 'fix', errs)
                return errs

        hdu = TestHDU(header=fits.Header())
        assert_raises(fits.VerifyError, hdu.verify, 'fix')
Example 23
 def writeFits(self, file, overWrite=False):
     """
     23-10-2009: added by JB Juin
     02-12-2009: rewrote to include proper WCS keywords (sudeep)
     so that multipoles can be read off in ds9 
     @brief Write a fft2D as a Fits file
     """
     h = pyfits.Header()
     h.update("COMMENT", "flipper.fft2D")
     idx = numpy.where(numpy.fft.fftshift(self.lx == 0))
     idy = numpy.where(numpy.fft.fftshift(self.ly == 0))
     h.update('CTYPE1', 'ANG-FREQ')
     h.update('CTYPE2', 'ANG-FREQ')
     h.update("CRPIX1", idx[0][0] + 1)
     h.update("CRPIX2", idy[0][0] + 1)
     h.update("CRVAL1", 0.0)
     h.update("CRVAL2", 0.0)
     h.update("CDELT1", numpy.abs(self.lx[0] - self.lx[1]))
     h.update("CDELT2", numpy.abs(self.ly[0] - self.ly[1]))
     realFile = file.split('.')[0] + '_real.fits'
     pyfits.writeto(realFile,
                    fftshift(numpy.real(self.kMap)),
                    header=h,
                    clobber=overWrite)
     del h
     h = pyfits.Header()
     h.update("COMMENT", "flipper.fft2D")
     idx = numpy.where(numpy.fft.fftshift(self.lx == 0))
     idy = numpy.where(numpy.fft.fftshift(self.ly == 0))
     h.update('CTYPE1', 'ANG-FREQ')
     h.update('CTYPE2', 'ANG-FREQ')
     h.update("CRPIX1", idx[0][0] + 1)
     h.update("CRPIX2", idy[0][0] + 1)
     h.update("CRVAL1", 0.0)
     h.update("CRVAL2", 0.0)
     h.update("CDELT1", numpy.abs(self.lx[0] - self.lx[1]))
     h.update("CDELT2", numpy.abs(self.ly[0] - self.ly[1]))
     imagFile = file.split('.')[0] + '_imag.fits'
     pyfits.writeto(imagFile,
                    fftshift(numpy.imag(self.kMap)),
                    header=h,
                    clobber=overWrite)
Example 24
def resampleToTanProjection(imageData,
                            imageWCS,
                            outputPixDimensions=[600, 600]):
    """Resamples an image and WCS to a tangent plane projection. Purely for plotting purposes
    (e.g., ensuring RA, dec. coordinate axes perpendicular).
    
    @type imageData: numpy array
    @param imageData: image data array
    @type imageWCS: astWCS.WCS
    @param imageWCS: astWCS.WCS object
    @type outputPixDimensions: list
    @param outputPixDimensions: [width, height] of output image in pixels
    @rtype: dictionary
    @return: image data (numpy array), updated astWCS WCS object for image, in format {'data', 'wcs'}.
    
    """

    RADeg, decDeg = imageWCS.getCentreWCSCoords()
    xPixelScale = imageWCS.getXPixelSizeDeg()
    yPixelScale = imageWCS.getYPixelSizeDeg()
    xSizeDeg, ySizeDeg = imageWCS.getFullSizeSkyDeg()
    xSizePix = int(round(outputPixDimensions[0]))
    ySizePix = int(round(outputPixDimensions[1]))
    xRefPix = xSizePix / 2.0
    yRefPix = ySizePix / 2.0
    xOutPixScale = xSizeDeg / xSizePix
    yOutPixScale = ySizeDeg / ySizePix
    cardList = pyfits.CardList()
    cardList.append(pyfits.Card('NAXIS', 2))
    cardList.append(pyfits.Card('NAXIS1', xSizePix))
    cardList.append(pyfits.Card('NAXIS2', ySizePix))
    cardList.append(pyfits.Card('CTYPE1', 'RA---TAN'))
    cardList.append(pyfits.Card('CTYPE2', 'DEC--TAN'))
    cardList.append(pyfits.Card('CRVAL1', RADeg))
    cardList.append(pyfits.Card('CRVAL2', decDeg))
    cardList.append(pyfits.Card('CRPIX1', xRefPix + 1))
    cardList.append(pyfits.Card('CRPIX2', yRefPix + 1))
    cardList.append(pyfits.Card('CDELT1', -xOutPixScale))
    cardList.append(pyfits.Card(
        'CDELT2', xOutPixScale))  # Makes more sense to use same pix scale
    cardList.append(pyfits.Card('CUNIT1', 'DEG'))
    cardList.append(pyfits.Card('CUNIT2', 'DEG'))
    newHead = pyfits.Header(cards=cardList)
    newWCS = astWCS.WCS(newHead, mode='pyfits')
    newImage = numpy.zeros([ySizePix, xSizePix])

    tanImage = resampleToWCS(newImage,
                             newWCS,
                             imageData,
                             imageWCS,
                             highAccuracy=True,
                             onlyOverlapping=False)

    return tanImage
Example 25
 def createHeader(self, exp, gain):
     prihdr = pyfits.Header()
     prihdr['COMMENT'] = 'MRO Guider Camera'
     prihdr['COMMENT'] = 'Orion Star Shoot Auto Guider'
     prihdr['IMAGTYP'] = None
     prihdr['EXPTIME'] = exp
     prihdr['CCDBIN1'] = 1
     prihdr['CCDBIN2'] = 1
     prihdr['GAIN'] = gain
     prihdr['RN'] = None
     return prihdr
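A hedged usage sketch: 'cam' is a hypothetical stand-in for whatever object provides createHeader, and the image array is illustrative.

import numpy as np

prihdr = cam.createHeader(exp=2.0, gain=1.5)   # 'cam' is hypothetical here
hdu = pyfits.PrimaryHDU(data=np.zeros((480, 640), dtype=np.uint16), header=prihdr)
hdu.writeto('guider.fits', clobber=True)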
Example 26
def create_header(coord, radius, proj='ZEA', npix=30):
    """ Create a header a new image """
    gal = coord.name == 'galactic'
    values = [
        ["NAXIS", 2],
        ["NAXIS1", npix],
        ["NAXIS2", npix],
        ["CTYPE1", 'GLON-%s' % proj if gal else 'RA---%s' % proj],
        ["CTYPE2", 'GLAT-%s' % proj if gal else 'DEC--%s' % proj],
        ["CRPIX1", npix / 2. + 0.5],
        ["CRPIX2", npix / 2. + 0.5],
        ["CRVAL1", coord.l.deg if gal else coord.ra.deg],
        ["CRVAL2", coord.b.deg if gal else coord.dec.deg],
        ["CDELT1", -3. * radius / npix],
        ["CDELT2", 3. * radius / npix],
    ]

    if not gal:
        values += [
            ['RADECSYS', 'FK5'],
            ['EQUINOX', 2000],
        ]

    cards = [pyfits.Card(*i) for i in values]
    header = pyfits.Header(cards=cards)

    return header
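A hedged usage sketch, assuming coord is an astropy Galactic frame instance (which provides the .name, .l and .b attributes the function reads) and that the function and its pyfits import above are available; the pointing and radius are illustrative.

import astropy.units as u
from astropy.coordinates import Galactic

coord = Galactic(l=120.0 * u.deg, b=-30.0 * u.deg)
hdr = create_header(coord, radius=1.0, proj='ZEA', npix=30)
print(hdr['CTYPE1'], hdr['CRVAL1'], hdr['CDELT1'])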
Example 27
    def write(self, dirName=".", fileName=None):
        if not pyfits:
            raise RuntimeError("I failed to import pyfits, so cannot read from disk")

        hdus = pyfits.HDUList()

        hdr = pyfits.Header()

        for k in sorted(self._metadata):
            if len(k) <= 8:
                kk = k
            else:
                kk = "HIERARCH " + k    # avoid warning
            hdr[kk] = self._metadata[k]

        hdr.update()
        hdus.append(pyfits.PrimaryHDU(header=hdr))

        hdu = pyfits.ImageHDU(self.flux)
        hdu.name = "FLUX"
        hdus.append(hdu)

        hdu = pyfits.ImageHDU(self.covar)
        hdu.name = "COVAR"
        hdus.append(hdu)

        hdu = pyfits.ImageHDU(self.mask)
        hdu.name = "MASK"
        hdus.append(hdu)

        hdu = pyfits.ImageHDU(self.lam)
        hdu.name = "WAVELENGTH"
        hdus.append(hdu)

        hdu = pyfits.ImageHDU(self.sky)
        hdu.name = "SKY"
        hdus.append(hdu)

        hdu = pyfits.BinTableHDU.from_columns([
            pyfits.Column(name='pfsConfigId', format='K',
                          array=np.array([self.pfsConfigId], dtype=np.int64)),
            pyfits.Column(name='visit', format='J',
                          array=np.array([self.visit], dtype=np.int32))
        ])

        hdu.name = 'CONFIG'
        hdus.append(hdu)

        # clobber=True in writeto prints a message, so use open instead
        if fileName is None:
            fileName = self.fileNameFormat % (self.visit, self.arm, self.spectrograph)
        with open(os.path.join(dirName, fileName), "wb") as fd:
            hdus.writeto(fd)
Example 28
    def test_writeto(self):
        """
        Simple test for writing a trivial header and some data to a file
        with the `writeto()` convenience function.
        """

        data = np.zeros((100, 100))
        header = fits.Header()
        fits.writeto(self.temp('array.fits'), data, header=header,
                       clobber=True)
        hdul = fits.open(self.temp('array.fits'))
        assert len(hdul) == 1
        assert (data == hdul[0].data).all()
Example 29
def copy_header(header):
    header_dict = dict(header)
    dtype = get_dtype(header)
    cards = list()
    for k in header_dict:
        try:
            cards.append(pyfits.Card(key=k, value=header_dict[k]))
        except (ValueError):
            try:
                cards.append(pyfits.Card(key=k, value=float(header_dict[k])))
            except (ValueError):
                pass
    return pyfits.Header(cards=cards)
Example 30
    def test_new_hdulist_extend_keyword(self):
        """Regression test for https://trac.assembla.com/pyfits/ticket/114

        Tests that adding a PrimaryHDU to a new HDUList object updates the
        EXTEND keyword on that HDU.
        """

        h0 = fits.Header()
        hdu = fits.PrimaryHDU(header=h0)
        sci = fits.ImageHDU(data=np.array(10))
        image = fits.HDUList([hdu, sci])
        image.writeto(self.temp('temp.fits'))
        assert 'EXTEND' in hdu.header
        assert hdu.header['EXTEND'] == True