Пример #1
0
 def _get_ft1_dss(self):
     """ Get existing DSS keywords from FT1 files.
         If there is more than one FT1 file present, the protocol
         is that all DSS entries must agree.

         Sets self.data_pass (from the first FT1 file) and self.dss.
         Raises ValueError if the files' DSS entries disagree, DataError
         if any file has a different pass than the first.
     """
     # Pass version is read from the first file only; agreement of the
     # remaining files is sanity-checked at the bottom of this method.
     self.data_pass = get_pass(self.ft1files[0])
     # Without PROC_VER we cannot apply the P120/P130 kludge below, so
     # accept the first file's DSS entries as-is and return early.
     if 'PROC_VER' not in pyfits.getheader(self.ft1files[0]).keys():
         print 'Warning: PROC_VER not found in %s header' % self.ft1files[0]
         self.dss = dssman.DSSEntries(self.ft1files[0])
         return
     if len(self.ft1files) == 1:
         self.dss = dssman.DSSEntries(self.ft1files[0])
     else:
         all_dss = [dssman.DSSEntries(ft1) for ft1 in self.ft1files]
         #Kludge to handle inconsistencies in DSS keywords between P120 and P130
         # (mixed processing versions: drop the bit-mask entries so the
         # equality check below does not trip on them)
         if (pyfits.getheader(self.ft1files[0])['PROC_VER'] <= 120 and
                 pyfits.getheader(self.ft1files[-1])['PROC_VER'] >= 130):
             for dss in all_dss:
                 for i, d in enumerate(dss):
                     # NOTE(review): deleting while enumerating shifts the
                     # indices of later entries; correct only if at most one
                     # DSSBitMask is present per file -- TODO confirm against
                     # dssman.DSSEntries.delete semantics.
                     if isinstance(d, dssman.DSSBitMask): dss.delete(i)
         if not np.all([all_dss[0] == x for x in all_dss]):
             raise ValueError(
                 'DSS keywords are inconsistent for FT1 files.')
         self.dss = all_dss[0]
         for ft1 in self.ft1files[1:]:
             # sanity check that all files have same pass
             if get_pass(ft1) != self.data_pass:
                 raise DataError('%s is not Pass %d' %
                                 (ft1, self.data_pass))
Пример #2
0
    def add(self, others, output, binfile, ltcube):
        """Combine this DataSpec instance with others and return the result.

        The instances must have consistent definitions (DSS, binning, and
        livetime), and must have non-overlapping Gtis. The binfiles and
        ltcubes will be combined and written to the provided destinations;
        perhaps in the future, I will come up with sensible defaults.

        Parameters
        ----------
        others : DataSpec or iterable of DataSpec
            instance(s) to merge with this one
        output : passed through as the first argument of the DataSpec ctor
        binfile : str
            destination for the merged binned-photon file (env vars expanded)
        ltcube : str
            destination for the merged livetime cube (env vars expanded)

        Raises whatever check_consistency returns for an inconsistent pair,
        and DataError if the Gtis overlap.
        """
        if not hasattr(others, '__iter__'):
            others = [others]
        for other in others:
            exc = self.check_consistency(other)
            if exc is not None:
                raise exc
        binfile = os.path.expandvars(binfile)
        ltcube = os.path.expandvars(ltcube)
        gti = skymaps.Gti(self.gti)
        # Copy the file lists: the original code aliased self.ft1files /
        # self.ft2files and the += below extended them in place, corrupting
        # this instance on every call.
        ft1 = list(self.ft1files)
        ft2 = list(self.ft2files)
        bpd = skymaps.BinnedPhotonData(self.binfile)
        for other in others:
            # intersection() narrows gti in place; any ontime surviving the
            # full intersection means two instances' Gtis overlap.
            gti.intersection(other.gti)
            ft1 += other.ft1files
            ft2 += other.ft2files
            bpd.add(skymaps.BinnedPhotonData(other.binfile))
        if gti.computeOntime() > 0:
            raise DataError("DataSpec instances have overlapping GTIs")
        # the same FT2 file may back several instances; keep one copy of each
        ft2 = sorted(list(set(ft2)))
        bpd.write(binfile)
        fitstools.merge_lt([self.ltcube] + [other.ltcube for other in others],
                           outfile=ltcube,
                           weighted=self.use_weighted_livetime)
        # propagate DSS keywords from this instance's files to the merged
        # outputs (consistency with the others was verified above)
        dssman.DSSEntries(self.binfile, header_key=0).write(binfile,
                                                            header_key=0)
        dssman.DSSEntries(self.ltcube, header_key=0).write(ltcube,
                                                           header_key=0)
        #TODO: move the copying of DSS entries into the merge_bpd and merge_lt functions
        gti_mask = skymaps.Gti(self.gti_mask)
        for other in others:
            gti_mask.combine(other.gti_mask)
        kwargs = dict(ft1=ft1,
                      ft2=ft2,
                      binfile=binfile,
                      ltcube=ltcube,
                      gti_mask=gti_mask,
                      binsperdec=self.binsperdec,
                      mc_src_id=self.mc_src_id,
                      mc_energy=self.mc_energy,
                      use_weighted_livetime=self.use_weighted_livetime,
                      livetime_buffer=self.livetime_buffer,
                      livetime_pixelsize=self.livetime_pixelsize,
                      clobber=False)
        return DataSpec(output, **kwargs)
Пример #3
0
 def _get_ft1_dss(self):
     """ Read the pass version and DSS keywords from the FT1 files.

         When several FT1 files are present the protocol is that all of
         their DSS entries agree; assume that holds and inspect only the
         first file for both PASS and DSS.
     """
     first_ft1 = self.ft1files[0]
     self.data_pass = get_pass(first_ft1)
     self.dss = dssman.DSSEntries(first_ft1)
Пример #4
0
    def _check_ltcube(self):
        """ Verify ltcube exists and is consistent with any existing data cuts.

        Returns True when the existing ltcube can be used, False when it
        must be regenerated. Raises DataManException when the ltcube has no
        DSS info and legacy mode is not enabled.
        """
        if self.clobber or (not os.path.exists(self.ltcube or '')):
            print('checking ltcube: failed clobber on %s' % self.ltcube)
            return False
        # check for presence of important history
        # eew -- primary header doesn't have info about extensions, so just
        #   open the file and use that handle to check header keys and
        #   extensions.
        lt = pyfits.open(self.ltcube)
        try:
            lt[0].header['RADIUS']
            lt[0].header['PIXSIZE']
        except KeyError:
            if not self.quiet:
                print('no header info in ltcube?')
        # check for weighted extension if we are using it: the weighted
        # exposure lives in a fourth HDU, so len(lt) must exceed 3.
        # (direct test instead of assert -- asserts vanish under python -O)
        if self.use_weighted_livetime and len(lt) <= 3:
            print('fail len(lt)>3:%d' % len(lt))
            lt.close()
            return False
        lt.close()  # done with the FITS handle; don't leak it
        #
        # DSS check
        #
        dss = dssman.DSSEntries(self.ltcube, header_key=0)
        if dss is None or len(dss) == 0:
            if self.legacy:
                if not self.quiet:
                    print('Accepting ltcube without DSS info since legacy specified')
                dss = self.dss
            else:
                # message fixed: this branch means DSS info is *missing*
                raise DataManException('No DSS info found in ltcube %s' % self.ltcube)
        if dss != self.dss:
            print('Failed dss comparison:\n ltcube %s,\n FT1 %s' % (dss,
                                                                    self.dss))
            return False
        #
        # compare GTI with that found in FT1 or binfile
        #
        gti = skymaps.Gti(self.ltcube)
        # minValue/computeOntime are methods: the original compared the
        # bound-method objects themselves, which made this check meaningless
        if (gti.minValue() != self.gti.minValue()) or (gti.computeOntime() !=
                                                       self.gti.computeOntime()):
            print('Failed gti check:\n  ltcube: %s \n binfile: %s' % (gti,
                                                                      self.gti))
            return self.legacy  #ignore if legacy, for now

        if (not self.quiet): print('Verified ltcube {0}'.format(self.ltcube))
        return True
Пример #5
0
 def _check_ltcube(self):
     """ Verify ltcube exists and is consistent with any existing data cuts.

         Returns False when a new ltcube must be generated, True when the
         existing one is accepted -- currently even when the GTI check
         fails (see the note near the bottom).
     """
     #if os.path.exists(self.ltcube) and self.legacy :
     #    print('Accepting ltcube without dss checks since legacy specified')
     #    return True
     # self.ltcube may be a glob pattern; only the first match is checked
     ltcubes = glob.glob(self.ltcube)
     if len(ltcubes)==0:
     #if self.clobber or (not os.path.exists(self.ltcube or '')):
         print 'Checking ltcube: will generate new {}'.format(self.ltcube)
         return False
     # check for presence of important history
     # eew -- primary header doesn't have info about extensions, so just
     #   open the file and use that handle to check header keys and
     #   extensions.
     # h = pyfits.getheader(self.ltcube,0)
     lt = pyfits.open(ltcubes[0])
     #   DISABLE this: seems to be normal
     # try:
     #     lt[0].header['RADIUS']; lt[0].header['PIXSIZE']
     # except KeyError:
     #     if not self.quiet: print 'no header info in ltcube?' #pass #return False
     # check for weighted extension if we are using it
     if self.use_weighted_livetime:
         #try: h['WEIGHTED_EXPOSURE']
         #except KeyError: return False
         # NOTE(review): assert is stripped under python -O, which would
         # silently skip this extension-count check
         try: assert(len(lt)>3)
         except AssertionError:
             print 'fail len(lt)>3:%d' %len(lt)
             return False
     #
     # DSS check
     #
     nodss=False
     dss = dssman.DSSEntries(ltcubes[0],header_key=0)
     if dss is None or len(dss)==0:
         nodss=True
         if True: # Permamently allow this self.legacy:
             #if not self.quiet: print 'Accepting ltcube without DSS info since legacy specified'
             dss=self.dss
         else:
             # NOTE(review): dead branch ("if True" above); message should
             # read "No DSS found" if this is ever re-enabled
             raise DataManException('DSS found in ltcube %s' % ltcubes[0])
     if dss != self.dss:
         print 'Failed dss comparison:\n ltcube %s,\n FT1 %s' % (dss, self.dss)
         return False
     #
     # compare GTI with that found in FT1 or binfile
     #
     gti = skymaps.Gti(ltcubes[0])
     # tolerate up to 1 second of ontime discrepancy
     tdiff = gti.computeOntime() - self.gti.computeOntime()
     if  (gti.minValue()!=self.gti.minValue()) or abs(tdiff)>1:
         print 'Failed gti check, time difference %.1f:\n  ltcube: %s \n binfile: %s' % (tdiff, gti, self.gti)
         # NOTE(review): accepts the cube despite the failed check (was
         # "return self.legacy") -- confirm this is still intended
         return True #self.legacy #ignore if legacy, for now

     if (not self.quiet): print('Verified ltcube {} {}'.format(ltcubes[0],
         '(without DSS)' if nodss else ''))
     return True
Пример #6
0
    def _check_binfile(self):
        """ Verify binfile exists and is consistent with any existing data cuts.

        Returns False (after printing the reason) if the binfile is missing,
        clobber is set, or the DSS/GTI checks fail; True when verified.
        Side effects: adopts the binfile's DSS entries when self.dss is
        unset, and adopts the binfile's GTI as self.gti.
        """
        if self.binfile is None:
            raise DataManException('No bin file specified')
        if not os.path.exists(self.binfile):
            print('File %s not found' % self.binfile)
            sys.stdout.flush()
            return False
        # (the "binfile is None" disjunct is dead -- already raised above)
        if self.clobber or self.binfile is None:
            print('self.clobber or self.binfile is None')
            sys.stdout.flush()
            return False
        #
        # Check DSS keywords
        #
        dss = dssman.DSSEntries(self.binfile, header_key=0)
        if (dss is None):
            print("dss is None")
            sys.stdout.flush()
            return False
        if self.dss is None:
            if not self.quiet: print('Extracting DSS from existing binfile')
            self.dss = dss
        if self.dss != dss:
            print('File %s Failed DSS keyword check: expected \n%s \nbut found \n%s' % (
                self.binfile, self.dss, dss))
            sys.stdout.flush()
            return False
        #
        #  Check GTI
        #
        gti = skymaps.Gti(self.binfile)
        if gti is None:
            raise DataManException('GTI not found in the binfile %s' %
                                   self.binfile)
        if not self.quiet: print('GTI from binfile %s' % gti)
        # Compare the binfile GTI against the one previously derived from
        # the FT1 files *before* adopting it: the original code assigned
        # self.gti = gti first, reducing the check below to gti == gti.
        # Also, minValue/computeOntime are methods and must be called --
        # comparing the bound methods themselves is always inequality.
        ft1_gti = self.gti
        self.gti = gti
        if (gti.minValue() != ft1_gti.minValue()) or (gti.computeOntime() !=
                                                      ft1_gti.computeOntime()):
            print('File %s Failed GTI check: \n  expect %s \n  found  %s' % (
                self.binfile, ft1_gti, gti))
            return self.legacy  # ignore if legacy, for now

        if (not self.quiet): print('Verified binfile {0}'.format(self.binfile))
        return True