Example #1
    def __init__(self, savedir, code, **kwargs):
        keyword_options.process(self, kwargs)

        self.savedir = expandvars(savedir)
        self.code = code

        print 'Putting jobs in %s' % self.savedir
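Every constructor in this listing follows the same pointlike convention: the class carries a `defaults` specification and `keyword_options.process(self, kwargs)` turns it, overridden by the caller's keyword arguments, into instance attributes. The real `uw.utilities.keyword_options` module does more than this; the stand-in below (with a hypothetical `JobBuilder` class and made-up default entries) is only a sketch of the behaviour these examples rely on:

# Simplified stand-in for keyword_options.process (illustration only).
def process(obj, kwargs):
    """Set attributes on obj from obj.defaults, overridden by kwargs."""
    for name, value, _description in obj.defaults:
        setattr(obj, name, value)
    for name, value in kwargs.items():
        if not hasattr(obj, name):
            raise KeyError('unrecognized keyword: %s' % name)
        setattr(obj, name, value)


class JobBuilder(object):
    # (name, default value, description) -- hypothetical entries
    defaults = (
        ('quiet',    False, 'suppress informational output'),
        ('max_jobs', 100,   'maximum number of jobs to submit'),
    )

    def __init__(self, savedir, code, **kwargs):
        process(self, kwargs)   # same call pattern as the examples here
        self.savedir = savedir
        self.code = code


builder = JobBuilder('/tmp/jobs', code='analysis.py', max_jobs=10)
print(builder.max_jobs)   # 10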
Example #2
    def __init__(self, savedir, keys, **kwargs):
        keyword_options.process(self, kwargs)

        self.savedir = expandvars(savedir)
        self.keys = keys

        self._merge()
Example #3
    def __init__(self, roi_dir, ps_manager, ds_manager, spectral_analysis,
                 **kwargs):
        """ roi_dir    -- the center of the ROI
            ps_manager -- an instance of ROIPointSourceManager
            ds_manager -- an instance of ROIDiffuseManager
        """
        keyword_options.process(self, kwargs)

        self.__dict__.update(**kwargs)
        self.roi_dir = roi_dir
        self.psm = ps_manager
        self.dsm = ds_manager
        self.bgm = self.dsm  ### bgm is deprecated, set here for convenience of interactive access

        self.sa = spectral_analysis
        self.logl = None
        self.prev_logl = None
        self.__setup_bands__()
        self.__warn_about_binning__()
        self.bin_centers = np.sort(list(set([b.e for b in self.bands])))
        self.bin_edges = np.sort(list(set(
            [b.emin for b in self.bands] + [b.emax for b in self.bands])))

        self.param_state, self.param_vals = None, None
        if self.skip_setup:
            if not self.quiet:
                print 'No likelihood setup done: skip_setup is set'
            return
        self.__pre_fit__()

        self.logl = self.prev_logl = -self.logLikelihood(
            self.get_parameters())  # make sure everything initialized
Example #4
    def __init__(self, band, sources, free, roi, **kwargs):
        """
           band    : bands.EnergyBand object
           sources : list of sources.Source objects
           free    : [array of bool | None]
                selects the models with variable parameters;
                if None, select all
           roi     : reference to the ROI
        """
        keyword_options.process(self, kwargs)
        # make a list of the Response objects
        self.bandsources = np.array(
            [s.response(band, quiet=self.quiet, roi=roi) for s in sources])

        self.band = band
        self.event_type_name = config.event_type_name(band.event_type)
        self.exposure_factor = band.exposure.correction
        self.data = band.pix_counts if band.has_pixels else []  # data from the band
        self.pixels = len(self.data)

        self.initialize(free)
        self.update()
        # special code to unweight if galactic diffuse too large
        self.unweight = self.make_unweight()
Example #5
    def __init__(self, ft1_file,  **kwargs):
        """
        """
        keyword_options.process(self, kwargs)
        self.ft1_hdus=ft1 = fits.open(ft1_file)
        self.tstart = ft1[1].header['TSTART'] # changed from 0 (redundant?) due to new Ballet FT1-generation

        # extract arrays for values of interest
        data = ft1['EVENTS'].data
        self.glon, self.glat, self.energy, self.et, self.z, self.theta =\
             [data[x] for x in 'L B ENERGY EVENT_TYPE ZENITH_ANGLE THETA'.split()]
        #self.front = np.array([x[-1] for x in self.et],bool) # et is an array of bool arrays; last entry True if Front
        # generate event_type masks
        self.et_mask={}
        for et in self.etypes:
            self.et_mask[et]= self.et[:,-1-et]
        self.data_cut = np.logical_and(self.theta<self.theta_cut, self.z<self.z_cut)
        if self.verbose>0:
            print 'Found {} events. Removed: {:.2f} %'.format(len(data), 100. - 100*sum(self.data_cut)/float(len(data)))

        # DataFrame with component values for energy and event type, nside
        t = {}
        band=0
        for ie in range(len(self.ebins)-1):
            for et in self.etypes:
                order = self.orders[et if et<2 else 0][ie] # use max if PSF
                nside = 2**order
                t[band]= dict(ie=ie, event_type=et, nside=nside)
                band+=1
        self.df = pd.DataFrame(t).T
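The nside = 2**order bookkeeping above follows the standard HEALPix convention; a quick reminder of the numbers involved (plain arithmetic, not tied to any particular data file):

# HEALPix bookkeeping used above: an "order" o corresponds to nside = 2**o,
# and a full-sky map at that nside has 12*nside**2 pixels.
order = 6
nside = 2 ** order              # 64
npix_healpix = 12 * nside ** 2  # 49152
print('nside=%d, npix=%d' % (nside, npix_healpix))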
Example #6
    def __init__(self, like, name, *args, **kwargs):
        keyword_options.process(self, kwargs)

        self.like = like
        self.name = name

        if self.ul_algorithm not in BaseGtlikeSED.ul_choices:
            raise Exception("Upper Limit Algorithm %s not in %s" % (self.ul_algorithm,str(BaseGtlikeSED.ul_choices)))

        if self.bin_edges is not None:
            if not BaseGtlikeSED.good_binning(like, self.bin_edges):
                raise Exception("bin_edges is not commensurate with the underlying energy binning of pyLikelihood.")
            
            self.bin_edges = np.asarray(self.bin_edges)
            energy = np.sqrt(self.bin_edges[1:]*self.bin_edges[:-1])
        else:
            # These energies are always in MeV
            self.bin_edges = like.energies
            energy = like.e_vals

        self.lower=self.bin_edges[:-1]
        self.upper=self.bin_edges[1:]

        source=self.like.logLike.getSource(name) 
        self.init_spectrum=source.spectrum()
        self.init_model=build_pointlike_model(self.init_spectrum)

        self.results = dict(
            Name=name,
            spectrum=name_to_spectral_dict(like,name, errors=True, covariance_matrix=True),
        )
        self._calculate(like)

        super(GtlikeSED,self).__init__(self.results, **keyword_options.defaults_to_kwargs(self, SED))
Example #7
    def __init__(self, roi, which, **kwargs):
        """ Compute an upper limit on the source extension, by the "PDG Method". """
        keyword_options.process(self, kwargs)

        self.roi = roi
        self.which = which

        self.init_ts = roi.TS(which, quick=False)

        if self.init_ts < 4:
            # Punt on extension upper limits for completely insignificant sources
            print 'Unable to compute extension upper limit for point-like source with too-small TS'
            self.extension_limit = None

        else:
            if not isinstance(self.spatial_model,type):
                raise Exception("The spatial model must be a type, like Gaussian, not an instance, like Gaussian()")

            # Note, since the input is the class, not the instance, the
            # position parameters have not yet been added on.
            n = self.spatial_model.param_names
            assert len(n) == 1 and n[0] == 'Sigma'

            self.saved_state = PointlikeState(roi)

            self.spatial_low_lim, self.spatial_hi_lim = self.spatial_model.default_limits[0]

            results = self._compute()

            self.saved_state.restore()
Example #8
    def __init__(self,roi,which,**kwargs):
        """ Object for calculating TS as a function of sigma. """


        self.roi = roi
        self.which = which

        keyword_options.process(self, kwargs)

        self.source = roi.get_source(which)

        if not hasattr(self.source,'spatial_model'):
            raise Exception("An extension profile can only be calculated for extended sources")

        self.spatial_model=self.source.spatial_model
        if not len(self.spatial_model.p)==3: 
            raise Exception("An extension profile can only be calculated for extended sources with 3 parameters (position + one extension)")

        self.fit_kwargs = dict(estimate_errors=False)
        if self.use_gradient is not None: self.fit_kwargs['use_gradient']=self.use_gradient

        self.ts_kwargs = dict(which=self.which)
        if self.quick is not None: self.ts_kwargs['quick']=self.quick

        self.fill()
Example #9
    def __init__(self, config, roi_spec, **kwargs):
        """config : configuration.Configuration object
            used to find model info
        roi_spec : integer or string or ...
            ROI specification, passed to load_sources in a subclass
        """
        keyword_options.process(self, kwargs)
        self.config = config
        self.config.diffuse = config.diffuse.copy()  # since something changes??

        if self.ecat is None:  #speed up if already loaded
            self.ecat = extended.ExtendedCatalog(self.config.extended,
                                                 quiet=self.quiet)

        # clear if called again
        while len(self) > 0:
            self.pop()

        # sources loaded by a subclass that must implement this function
        self.load_sources(roi_spec, **self.load_kw)

        if config.auxcat is not None:
            self.add_sources(config.auxcat)
        self.initialize()

        if len(self.parameters) == 0:
            print 'WARNING: there are no free parameters'
        print self.summary()
        self.selected_source = None
Example #10
    def __init__(self, like, name, bin_edges, **kwargs):
        """ Parameters:
            * like - pyLikelihood object
            * name - source to make an SED for
            * bin_edges - if specified, calculate the SED in these bins.
        """
        keyword_options.process(self, kwargs)
        self.like               = like
        self.name               = name

        if not BaseGtlikeSED.good_binning(self.like, bin_edges):
            raise Exception("bin_edges is not commensurate with the underlying energy binning of pyLikelihood.")

        source=self.like.logLike.getSource(name) 
        self.init_spectrum=source.spectrum()
        self.init_model=build_pointlike_model(self.init_spectrum)
        self.init_energes = self.like.energies[[0,-1]]
            
        bin_edges = np.asarray(bin_edges)

        self.lower_energy=bin_edges[:-1]
        self.upper_energy=bin_edges[1:]
        self.middle_energy=np.sqrt(self.lower_energy*self.upper_energy)

        if self.ul_algorithm not in self.ul_choices:
            raise Exception("Upper Limit Algorithm %s not in %s" % (self.ul_algorithm,str(self.ul_choices)))

        empty = lambda: np.empty_like(self.middle_energy)

        self._calculate()
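Several of these SED classes compute a representative energy per bin as the geometric mean of the bin edges; a tiny, self-contained illustration (the edge values are made up):

import numpy as np

# Geometric-mean bin centers, as in the SED constructors above: for
# logarithmic energy binning the representative energy of each bin is
# sqrt(emin * emax).
bin_edges = np.array([100., 1000., 10000.])   # MeV, hypothetical
lower, upper = bin_edges[:-1], bin_edges[1:]
middle = np.sqrt(lower * upper)
print(middle)   # roughly [316.2, 3162.3]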
Example #11
    def __init__(self, data_specification, **kwargs):
        """
        Create a new spectral analysis object.

        data_specification: an instance of DataSpec with links to the 
                            FT1/FT2, and/or binned data / livetime cube needed
                            for analysis (see docstring for that class). 
                            
                            Instance may be given as a pickle file.
        """
        if not isinstance(data_specification,dataman.DataSpec):
            if os.path.exists(data_specification):
                from cPickle import load, UnpicklingError
                try:
                    data_specification = load(file(data_specification))
                except UnpicklingError:
                    print 'Invalid pickle file for DataSpecification.'
                    raise Exception
        self.ae = self.dataspec = data_specification
        keyword_options.process(self, kwargs)

        #pixeldata name for backward compatibility
        self.dataman=self.pixeldata = self.dataspec()
        self.CALDBManager = pycaldb.CALDBManager(irf=self.irf,psf_irf=self.psf_irf,
            CALDB=self.CALDB,custom_irf_dir=self.custom_irf_dir)

        self.exposure  = ExposureManager(self.dataman,self.CALDBManager,verbose=self.verbose)
        self.psf = pypsf.CALDBPsf(self.CALDBManager)
Example #12
    def __init__(self, savedir, code, **kwargs):
        keyword_options.process(self, kwargs)

        self.savedir = expandvars(savedir)
        self.code = code

        print 'Putting jobs in %s' % self.savedir
Example #13
    def __init__(self, savedir, keys, **kwargs):
        keyword_options.process(self, kwargs)

        self.savedir = expandvars(savedir)
        self.keys = keys

        self._merge()
Example #14
    def __init__(self,roi,**kwargs):
        keyword_options.process(self, kwargs)

        if self.npix is None:
            self.npix = float(self.size)/self.pixelsize
        
        self.roi = roi

        self.selected_bands = tuple(self.roi.bands if self.conv_type < 0 else \
            [ band for band in self.roi.bands if band.ct == self.conv_type ])

        # by default, get the energy range and image center from the roi.
        if self.center is None: self.center=self.roi.roi_dir

        # bins in theta^2
        self.bin_edges_deg = np.linspace(0.0,self.size**2,self.npix+1)
        self.bin_centers_deg = (self.bin_edges_deg[1:] + self.bin_edges_deg[:-1])/2.0
        
        # two factors of radians b/c theta^2
        self.bin_edges_rad = np.radians(np.radians(self.bin_edges_deg))
        self.bin_centers_rad = np.radians(np.radians(self.bin_centers_deg))

        # the lower and upper angle for each bin.
        self.theta_pairs_rad = zip(np.sqrt(self.bin_edges_rad[:-1]),
                                   np.sqrt(self.bin_edges_rad[1:]))


        self.fill()
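The double call to np.radians above is worth spelling out: np.radians only rescales by pi/180, so applying it twice converts bins in deg^2 to rad^2. A minimal check (the bin values are arbitrary):

import numpy as np

# np.radians is linear (multiplies by pi/180), so applying it twice supplies
# the (pi/180)**2 factor needed to convert theta**2 bins from deg**2 to rad**2.
theta2_deg = np.array([0.0, 0.25, 1.0])            # bin edges in deg**2
theta2_rad = np.radians(np.radians(theta2_deg))
assert np.allclose(theta2_rad, theta2_deg * (np.pi / 180.) ** 2)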
Example #15
    def __init__(self, lcf, **kwargs):
        keyword_options.process(self, kwargs)

        self.lcf = lcf # uw.pulsar.lcfitters.LCFitter

        self.lct = lcf.template # uw.pulsar.lcfitters.LCTemplate

        self._fit()
Example #16
 def __init__(self, index, **kwargs):
     """ index : int
             HEALpix index for the ROI (RING)
     """
     keyword_options.process(self, kwargs)
     assert type(index) == types.IntType, 'Expect int type'
     self.myindex = index
     self.mskydir = self.skydir(index)
Example #17
 def __init__(self, **kwargs):
     keyword_options.process(self, kwargs)
     #ct0_file,ct1_file = get_irf_file(self.irf,CALDB=self.CALDB)
     cdbm = pycaldb.CALDBManager(self.irf)
     ct0_file, ct1_file = cdbm.get_aeff()
     self._read_aeff(ct0_file, ct1_file)
     if self.use_phidep:
         self._read_phi(ct0_file, ct1_file)
Example #18
    def __init__(self, results, **kwargs):
        keyword_options.process(self, kwargs)

        if isinstance(results,dict):
            self.results = results
        elif isinstance(results, str):
            self.results = loaddict(results)
        else:
            raise Exception("Unrecognized results %s" % results)
Example #19
    def __init__(self, output=None, **kwargs):
        """ **NB -- if ft1 is None, binfile MUST be set to a real file
            **NB -- if ft2 is None, either ltcube must be set to a real file or $FERMI/ft2.fits must exist
        """
        keyword_options.process(self, kwargs)
        self._set_bins()
        self.dss = None  # initialize
        self.gti = None

        def init_data():
            self.ft1files = self._parse_filename(self.ft1)
            if self.ft1files is not None:
                # Register FT1 DSS keywords
                self._get_ft1_dss()
                self._make_cuts()
                # Get GTI from FT1 if not already set
                if self.gti is None:
                    self.gti = self._get_GTI()
                if not self._check_binfile():
                    if self.nocreate:
                        raise DataManException('need to create %s' %
                                               self.binfile)
                    self._make_binfile()
            elif not self._check_binfile():
                raise ValueError(
                    'No FT1 files or valid binned data found. (Looking for %s)'
                    % self.binfile)

        init_data()

        def init_exposure():
            self.ft2files = self._parse_filename(self.ft2)
            if self.exposure_cube is not None:
                print 'using exposure cube files: ignore FT2'
                full = [
                    os.path.join(os.path.expandvars('$FERMI/data'), f)
                    for f in self.exposure_cube
                ]
                assert np.all(map(os.path.exists, full)), \
                    'Exposure cube file(s) %s not found' % full
                self.exposure_cube = full  #replace with full path
                return
            if self.ft2files is not None:
                if not self._check_ltcube():
                    if self.nocreate:
                        raise DataManException('need to create %s' %
                                               self.ltcube)
                    self._make_ltcube()
            elif not self._check_ltcube():
                raise ValueError('No FT2 files or valid livetime found.')

        init_exposure()
        # save version to allow custom processing for backwards compat.
        self.version = dataman_version
        if output is not None: self.dump(output)
Example #20
    def __init__(self, config_dir, roi_spec=None, xml_file=None, **kwargs):
        """Start pointlike v2 (like2) in the specified ROI
        
        parameters
        ----------
        config_dir : string
            file path to a folder containing a file config.txt
            see configuration.Configuration
        roi_spec : [None | integer | string | (ra,dec) tuple]
            If None, require that the input_model dict has a key 'xml_file';
            if an integer, it must be < 1728, the ROI number;
            if a string, assume a source name and load the ROI containing it.

        """
        keyword_options.process(self, kwargs)
        self.config = config = configuration.Configuration(
            config_dir, quiet=self.quiet, postpone=self.postpone)
        ecat = extended.ExtendedCatalog(config.extended)

        if isinstance(roi_spec, str):
            # try:
            #     roi_sources =from_xml.ROImodelFromXML(config, roi_spec)
            #     roi_index = roi_sources.index
            # except:
            #     print 'No ROI specification (an index) or presence of an xml file'
            #     raise
            # Change to just expecting the name of a source
            sourcelist = glob.glob('sources_*.csv')[0]
            df = pd.read_csv(sourcelist,
                             index_col=3 if roi_spec[0] == 'J' else 0)
            if roi_spec not in df.index:
                print 'Source name "{}" not found '.format(roi_spec)
                raise Exception
            roi_index = int(df.loc[roi_spec]['roiname'][-4:])
            print 'Loading ROI #{}, containing source "{}"'.format(
                roi_index, roi_spec)
        elif isinstance(roi_spec, int):
            roi_index = roi_spec
        elif type(roi_spec) == tuple and len(roi_spec) == 2:
            roi_index = Band(12).index(SkyDir(*roi_spec))

        else:
            raise Exception('Did not recognize roi_spec: %s' % (roi_spec))

        roi_sources = from_healpix.ROImodelFromHealpix(
            config,
            roi_index,
            ecat=ecat,
        )
        config.roi_spec = configuration.ROIspec(healpix_index=roi_index)

        self.name = config.roi_spec.name if config.roi_spec is not None else roi_spec

        roi_bands = bands.BandSet(config, roi_index)
        roi_bands.load_data()
        super(ROI, self).__init__(roi_bands, roi_sources)
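The bound of 1728 mentioned in the docstring is just the pixel count of the Band(12) HEALPix tessellation that pointlike uses for its ROIs; the arithmetic:

# Number of pointlike ROIs: a HEALPix map with nside = 12 has 12*nside**2
# pixels, hence ROI indices run from 0 to 1727.
nside = 12
n_rois = 12 * nside ** 2
print(n_rois)   # 1728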
Example #21
    def __init__(self, code, results, hypothesis, **kwargs):
        keyword_options.process(self, kwargs)

        self.code = code
        self.results = results
        self.hypothesis = hypothesis

        assert self.code in ['gtlike','pointlike']

        self._load_results()
Example #22
 def __init__(self, center, **kwargs):
     """ center -- a SkyDir giving the center of the grid on which to convolve bg
         kwargs are passed to Grid.
     """
     keyword_options.process(self, kwargs)
     defaults=dict(bounds_error=False)
     defaults.update(kwargs)
     # note do not use code in superclass needing psf, diffuse function
     super(ConvolvableGrid, self).__init__(center, None, None, **defaults)
     self.center = center
Example #23
    def __init__(self, code, results, hypothesis, **kwargs):
        keyword_options.process(self, kwargs)

        self.code = code
        self.results = results
        self.hypothesis = hypothesis

        assert self.code in ['gtlike', 'pointlike']

        self._load_results()
Example #24
    def __init__(self, roi, which, **kwargs):
        """ Required arguments:
                roi - ROIAnalysis object
                which - source for which to compute upper limit.
        """
        keyword_options.process(self, kwargs)
        self.roi = roi
        self.which = which

        self._compute()
Example #25
    def __init__(self, roi, name, **kwargs):
        keyword_options.process(self, kwargs)
        self.roi = roi
        self.name = name
        
        bf = BandFlux(self.roi, which=self.name, merge=self.merge, scale_factor=1)
        results = PointlikeSED.pointlike_sed_to_dict(bf, flux_units=self.flux_units, energy_units=self.energy_units)

        results['spectrum'] = name_to_spectral_dict(roi, name, errors=True, covariance_matrix=True)

        super(PointlikeSED,self).__init__(results, **keyword_options.defaults_to_kwargs(self, SED))
Example #26
    def __init__(self, roi, name, param_name, param_min, param_max, **kwargs):

        self.roi = roi
        self.name = name
        self.param_name = param_name
        self.param_min = param_min
        self.param_max = param_max

        keyword_options.process(self, kwargs)

        self._calculate()
Example #27
    def __init__(self, roi, name, param_name, param_min, param_max, **kwargs):

        self.roi = roi
        self.name = name
        self.param_name = param_name
        self.param_min = param_min
        self.param_max = param_max

        keyword_options.process(self, kwargs)

        self._calculate()
Example #28
    def __init__(self, input_model, emin, emax, **kwargs):
        """ Create an approximate power law spectrum. """

        raise Exception("This code doesn't work yet. I think you need the exposure to do the fit correctly.")
        self.input_model = input_model
        self.emin = emin
        self.emax = emax

        keyword_options.process(self, kwargs)

        self._calculate()
Example #29
    def __init__(self, roi, name, **kwargs):
        keyword_options.process(self, kwargs)

        self.roi = roi 
        self.name = name

        if self.emin is None and self.emax is None: 
            self.emin, self.emax = get_full_energy_range(roi)


        self._compute()
Example #30
    def __init__(self, like, name, **kwargs):
        keyword_options.process(self, kwargs)

        self.like = like
        self.name = name

        if self.emin is None and self.emax is None: 
            self.emin, self.emax = get_full_energy_range(like)
            self.e = np.sqrt(self.emin*self.emax)

        self._compute()
Example #31
    def __init__(self, roi, which, model0, model1, **kwargs):
        keyword_options.process(self, kwargs)

        state = PointlikeState(roi)

        self.roi=roi
        self.which=which
        self.model0=model0.copy()
        self.model1=model1.copy()

        self.compute()
        state.restore()
Example #32
 def __init__(self, ft2files, ft1files, **kwargs):
     keyword_options.process(self, kwargs)
     self.prev_vals = self.prev_ra = self.prev_dec = None  # initialize caching
     self.fields = [
         'START', 'STOP', 'LIVETIME', 'RA_SCZ', 'DEC_SCZ', 'RA_ZENITH',
         'DEC_ZENITH', 'RA_SCX', 'DEC_SCX'
     ]
     self._setup_gti(ft1files)
     self._setup_ft2(ft2files)
     self._update_gti()
     self._process_ft2()
     self._finish()
Example #33
    def __init__(self, roi, which, model0, model1, **kwargs):
        keyword_options.process(self, kwargs)

        state = PointlikeState(roi)

        self.roi = roi
        self.which = which
        self.model0 = model0.copy()
        self.model1 = model1.copy()

        self.compute()
        state.restore()
Example #34
    def __init__(self, roi, **kwargs):

        self.roi = roi

        keyword_options.process(self, kwargs)

        self.pf = self.create_pyfits()

        self.data = self.pf[0].data
        self.header = self.pf[0].header

        if self.fitsfile is not None:
            self.pf.writeto(self.fitsfile, clobber=True)
Example #35
    def __init__(self, roi, **kwargs):
        keyword_options.process(self, kwargs)

        roi.setup_energy_bands()
        self.emin = np.asarray([eb.emin for eb in roi.energy_bands])
        self.emax = np.asarray([eb.emax for eb in roi.energy_bands])

        which = self.which
        old_roi_p   = roi.get_parameters().copy()

        # extended hypothesis

        source=roi.get_source(which='IC443')
        sm = source.spatial_model
        manager,index=roi.mapper(which)
        roi.fit(estimate_errors=False)

        self.ll_ext,self.ll_pt = [],[]


        for eb in roi.energy_bands:
            self.ll_ext.append(
                -sum(band.logLikelihood() for band in eb.bands)
                )


        sm.shrink()
        manager.bgmodels[index].initialize_counts(roi.bands)
        roi.__update_state__()

        roi.fit(estimate_errors=False)

        # point hypothesis

        manager,index=roi.mapper('IC443')
        for eb in roi.energy_bands:
            self.ll_pt.append(
                -sum(band.logLikelihood() for band in eb.bands)
                )

        sm.unshrink()
        manager.bgmodels[index].initialize_counts(roi.bands)

        roi.set_parameters(old_roi_p)
        roi.__update_state__()

        self.ll_ext = np.asarray(self.ll_ext)
        self.ll_pt = np.asarray(self.ll_pt)

        self.ts_ext=2*(self.ll_ext-self.ll_pt)
        self.ts_ext[self.ts_ext<0]=0
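The last few lines above compute a per-band likelihood-ratio test statistic for extension, ts_ext = 2*(ll_ext - ll_pt), clipped at zero. A small numerical illustration with made-up log-likelihoods:

import numpy as np

# Per-band extension test statistic: twice the log-likelihood difference
# between the extended and point-source hypotheses, clipped at zero.
ll_ext = np.array([-1000.0, -950.5])   # hypothetical band log-likelihoods
ll_pt  = np.array([-1003.2, -950.7])
ts_ext = 2 * (ll_ext - ll_pt)
ts_ext[ts_ext < 0] = 0
print(ts_ext)   # roughly [6.4, 0.4]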
Example #36
    def __init__(self,roi,which=0,**kwargs):
        keyword_options.process(self, kwargs)

        self.roi,self.which = roi, which
        self.quiet = roi.quiet

        # we need a reference both to the center of the ROI, for use in calculating overlap,
        # and to the original position of the point source, which may or may not be the same
        self.rd = roi.roi_dir

        self.set_source_info()
        if self.bandfits: self.do_bandfits()
        
        self.tsref=0
        self.tsref = self.TSmap(self.sd) # source position not necessarily ROI center
Example #37
    def __init__(self, roi, name, param_name, **kwargs):

        self.roi = roi
        self.name = name
        self.param_name = param_name

        keyword_options.process(self, kwargs)

        if self.param_vals is not None:
            assert self.param_min is None and self.param_max is None and self.nparams is None
        else:
            assert self.param_min is not None and self.param_max is not None and self.nparams is not None
            self.param_vals = np.linspace(self.param_min, self.param_max, self.nparams)

        self._calculate()
Example #38
    def __init__(self, roi, name, param_name, **kwargs):

        self.roi = roi
        self.name = name
        self.param_name = param_name

        keyword_options.process(self, kwargs)

        if self.param_vals is not None:
            assert self.param_min is None and self.param_max is None and self.nparams is None
        else:
            assert self.param_min is not None and self.param_max is not None and self.nparams is not None
            self.param_vals = np.linspace(self.param_min, self.param_max,
                                          self.nparams)

        self._calculate()
Example #39
    def __init__(self,
                 spectral_analysis,
                 diffuse_source,
                 roi_dir,
                 name=None,
                 **kwargs):

        self.sa = spectral_analysis
        self.roi_dir = roi_dir
        self.diffuse_source = diffuse_source
        self.dmodel = diffuse_source.dmodel
        self.smodel = diffuse_source.smodel
        self.name = diffuse_source.name

        keyword_options.process(self, kwargs)
        self.setup()
Example #40
    def __init__(self, irf, **kwargs):

        keyword_options.process(self, kwargs)
        self.irf = irf

        if self.CALDB is None:
            try:
                self.CALDB = os.environ['CALDB']
            except:
                try:
                    from facilities import py_facilities
                    os_environ = py_facilities.commonUtilities_getEnvironment
                    self.CALDB = os_environ('CALDB')
                except:
                    raise Exception(
                        'Environment variable CALDB must be set, or findable by py_facilities package'
                    )

        if self.custom_irf_dir is not None:
            if not os.path.exists(self.custom_irf_dir):
                raise Exception("custom_irf_dir %s does not exist" %
                                self.custom_irf_dir)
        else:
            self.custom_irf_dir = os.environ.get('CUSTOM_IRF_DIR', None)
        if self.custom_irf_dir is not None and self.custom_irf_dir != '':
            if not self.quiet:
                print 'CALDBManager: using custom irf: "%s"' % self.custom_irf_dir

        self.bcf = join(self.CALDB, 'bcf')

        if not os.path.exists(self.bcf):
            self.CALDB = join(self.CALDB, 'data', 'glast', 'lat')
            self.bcf = join(self.CALDB, 'bcf')

            if not os.path.exists(self.bcf):
                raise Exception('Invalid CALDB directory %s.' % self.bcf)

        self.CALDB_index = os.path.join(self.CALDB, 'caldb.indx')

        if not os.path.exists(self.CALDB_index):
            raise Exception("caldb.indx file %s does not exist." %
                            self.CALDB_index)

        self.load_caldb_indx()
        self.construct_psf()
        if not self.quiet: print 'PSF: %s' % self.psf_files
        self.construct_aeff()
Example #41
    def __init__(self, roi, name, *args, **kwargs):
        self.roi = roi
        keyword_options.process(self, kwargs)

        self.pointlike_fit_kwargs = dict(use_gradient=False)

        self.name = name

        self._setup_savedir()

        self._setup_time_bins()

        saved_state = PointlikeState(roi)

        self._test_variability()

        saved_state.restore()
Example #42
    def __init__(self, roi, name, *args, **kwargs):
        self.roi = roi
        keyword_options.process(self, kwargs)

        self.pointlike_fit_kwargs = dict(use_gradient=False)

        self.name = name

        self._setup_savedir()

        self._setup_time_bins()

        saved_state = PointlikeState(roi)

        self._test_variability()

        saved_state.restore()
Example #43
    def __init__(self,**kwargs):
        keyword_options.process(self, kwargs)

        if self.point_sources == [] and self.diffuse_sources == []:
            self.point_sources, self.diffuse_sources = self.get_default_sources()

        ltcube = join(self.tempdir,'ltcube.fits')
        ds = DataSpecification(
            ft1files = join(self.tempdir,'ft1.fits'),
            ft2files = join(self.tempdir,'ft2.fits'),
            ltcube = ltcube, 
            binfile = join(self.tempdir,'binfile.fits')
        )

        sa = SpectralAnalysisMC(ds,
                                seed=self.seed,
                                emin=self.emin,
                                emax=self.emax,
                                binsperdec=self.binsperdec,
                                event_class=self.event_class,
                                conv_type=self.conv_type,
                                roi_dir=self.roi_dir,
                                minROI=self.maxROI,
                                maxROI=self.maxROI,
                                irf=self.irf,
                                use_weighted_livetime=True,
                                savedir=self.tempdir,
                                tstart=0,
                                tstop=self.simtime,
                                ltfrac=0.9,
                               )

        roi = sa.roi(roi_dir=self.roi_dir,
                     point_sources = self.point_sources,
                     diffuse_sources = self.diffuse_sources)

        self.roi = roi

        fix_pointlike_ltcube(ltcube)
Example #44
    def __init__(self,roi,bin_edges,nrows=1,grid_kwargs=dict(),**kwargs):

        default_grid_kwargs = dict(axes_pad=0.1, 
                                   cbar_location="top",
                                   cbar_mode="each",
                                   cbar_size="7%",
                                   cbar_pad="2%")

        self.grid_kwargs = default_grid_kwargs.copy()
        self.grid_kwargs.update(grid_kwargs)

        self.roi = roi
        keyword_options.process(self, kwargs)
        self.nrows=nrows

        self.bin_edges = bin_edges
        self.nplots = len(self.bin_edges)-1
        self.ncols= int(math.ceil(float(self.nplots)/self.nrows))

        for e in bin_edges:
            if not np.any(np.abs(e-roi.bin_edges) < 0.5):
                raise Exception("Energy %.1f inconsistent with ROI energy binning." % e)

        self.lower_energies = bin_edges[:-1]
        self.upper_energies = bin_edges[1:]

        state = PointlikeState(roi)
 
        # step 1, test consistency of each energy with binning in pointlike

        kwargs['title'] = '' # don't title the subplots
        self.maps = []
        for i,(lower,upper) in enumerate(zip(self.lower_energies, self.upper_energies)):
            roi.change_binning(fit_emin=lower,fit_emax=upper)
            self.maps.append(self.object(roi,**kwargs))

        state.restore()
Example #45
 def __init__(self, roi, diffuse_kwargs, **kwargs):
     """ diffuse_kwargs passed into get_gulli_diffuse
     """
     keyword_options.process(self, kwargs)
     self.roi = roi
     self.diffuse_kwargs = diffuse_kwargs
Example #46
    def __init__(self, roi, name, *args, **kwargs):
        keyword_options.process(self, kwargs)

        self.roi = roi
        self.name = name
        self._calculate()
Example #47
    def __init__(self, like, name, *args, **kwargs):
        keyword_options.process(self, kwargs)

        self.like = like
        self.name = name
        self._calculate()
Example #48
    def __init__(self, roi, **kwargs):
        """ Build a gtlike pyLikelihood object
            which is consistent with a pointlike roi. """
        keyword_options.process(self, kwargs)
        self.roi = roi

        if not roi.quiet: print 'Running a gtlike followup'

        self.old_dir=os.getcwd()
        if self.savedir is not None:
            self.savedata = True
            if not os.path.exists(self.savedir):
                os.makedirs(self.savedir)
        else:
            self.savedata = False
            self.savedir=mkdtemp(prefix=self.savedir_prefix)

        # put pfiles into savedir
        os.environ['PFILES']=self.savedir+';'+os.environ['PFILES'].split(';')[-1]

        if not roi.quiet: print 'Saving files to ',self.savedir

        cut_ft1=join(self.savedir,"ft1_cut.fits")
        input_srcmdl_file=join(self.savedir,'srcmdl.xml')
        expmap = join(self.savedir,"expmap.fits")

        ltcube=roi.sa.pixeldata.ltcube 
        ft2=Gtlike.get_ft2(roi)
        irfs=Gtlike.get_gtlike_irfs(roi)

        ct = roi.sa.pixeldata.conv_type
        radius = roi.sa.maxROI
        ra = roi.roi_dir.ra()
        dec = roi.roi_dir.dec()
        emin,emax=roi.bin_edges[0],roi.bin_edges[-1]

        Gtlike.save_xml(roi, input_srcmdl_file)

        evfile=Gtlike.make_evfile(roi,self.savedir)

        if not os.path.exists(cut_ft1):
            if not roi.quiet: print 'Running gtselect'
            gtselect=GtApp('gtselect','dataSubselector')
            gtselect.run(infile=evfile,
                         outfile=cut_ft1,
                         ra=ra, dec=dec, rad=radius,
                         tmin=0, tmax=0,
                         emin=emin, emax=emax,
                         zmax=180, convtype=ct,
                         chatter=self.chatter)
        else:
            if not roi.quiet: print '... Skipping gtselect'

        if not os.path.exists(expmap):
            # Run gtexpmap following suggestions from tutorial
            # pad 10deg on radius to account for nearby sources,
            # nlat has half degree pixels
            if not roi.quiet: print 'Running gtexpmap'
            gtexpmap=GtApp('gtexpmap')
            gtexpmap.run(evfile=cut_ft1,
                         scfile=ft2,
                         expcube=ltcube,
                         outfile=expmap,
                         irfs=irfs,
                         srcrad=radius+10,
                         nlong=int(np.ceil(0.5*(radius+10)*2)),
                         nlat=int(np.ceil(0.5*(radius+10)*2)),
                         nenergies=int(np.ceil(np.log10(emax)-np.log10(emin)))*4,
                         chatter=self.chatter,
                )
        else:
            if not roi.quiet: print '... Skipping gtexpmap'

        gtdiffrsp = GtApp('gtdiffrsp')
        if not roi.quiet: print 'Running gtdiffrsp'
        gtdiffrsp.run(evfile=cut_ft1,
                      scfile=ft2,
                      srcmdl=input_srcmdl_file,
                      irfs=irfs,
                      chatter=self.chatter,
                     )

        if not roi.quiet: print 'Creating Unbinned LIKE'
        obs = UnbinnedObs(eventFile=cut_ft1, scFile=ft2, expMap=expmap, expCube=ltcube, irfs=irfs)

        self.like = UnbinnedAnalysis(observation=obs,srcModel=input_srcmdl_file,optimizer=self.optimizer)

        if not roi.quiet: print 'Unbinned LIKE Created!'
Example #49
    def __init__(self, roi, **kwargs):
        """ Build a gtlike pyLikelihood object
            which is consistent with a pointlike roi. """
        keyword_options.process(self, kwargs)
        self.roi = roi

        if not roi.quiet: print 'Running a gtlike followup'

        self.old_dir=os.getcwd()
        if self.savedir is not None:
            self.savedata = True
            if not os.path.exists(self.savedir):
                os.makedirs(self.savedir)
        else:
            self.savedata = False
            self.savedir=mkdtemp(prefix=self.savedir_prefix)

        # put pfiles into savedir
        os.environ['PFILES']=self.savedir+';'+os.environ['PFILES'].split(';')[-1]

        if not roi.quiet: print 'Saving files to ',self.savedir

        if self.emin is None and self.emax is None and self.enumbins is None:
            self.emin,self.emax=roi.bin_edges[0],roi.bin_edges[-1]
            self.enumbins=len(roi.bin_edges)-1
        elif self.emin is not None and \
                self.emax is not None and \
                self.enumbins is not None:
            # all set
            pass
        else:
            raise Exception("emin, emax, and enumbins must all be set.")

        # Note that this formulation makes the gtlike ROI slightly smaller than
        # the pointlike ROI (so the gtlike ROI is inside the pointlike ROI)
        roi_radius=np.degrees(max(_.radius_in_rad for _ in roi.bands))
        if self.bigger_roi:
            npix=int(math.ceil(2.0*roi_radius/self.binsz))
        else:
            npix=int(math.ceil(np.sqrt(2.0)*roi_radius/self.binsz))

        ct = roi.sa.pixeldata.conv_type

        cmap_file=join(self.savedir,'ccube.fits')
        srcmap_file=join(self.savedir,'srcmap.fits')
        bexpmap_file=join(self.savedir,'bexpmap.fits')
        input_srcmdl_file=join(self.savedir,'srcmdl.xml')
        cut_ft1=join(self.savedir,"ft1_cut.fits")


        ft2=Gtlike.get_ft2(roi)
        ltcube=roi.sa.pixeldata.ltcube 

        if self.fix_pointlike_ltcube:
            print 'Fixing pointlike ltcube %s' % ltcube
            livetime.fix_pointlike_ltcube(ltcube)
        
        irfs=Gtlike.get_gtlike_irfs(roi)

        if self.galactic:
            x,y,coordsys_str=roi.roi_dir.l(),roi.roi_dir.b(),'GAL'
        else:
            x,y,coordsys_str=roi.roi_dir.ra(),roi.roi_dir.dec(),'CEL'

        Gtlike.save_xml(roi, input_srcmdl_file, extended_dir_name=self.extended_dir_name)

        evfile=Gtlike.make_evfile(roi,self.savedir)

        if not os.path.exists(cut_ft1):
            if not roi.quiet: print 'Running gtselect'
            gtselect=GtApp('gtselect','dataSubselector')
            gtselect.run(infile=evfile,
                         outfile=cut_ft1,
                         ra=0, dec=0, rad=180,
                         tmin=0, tmax=0,
                         emin=self.emin, emax=self.emax,
                         zmax=180, convtype=ct,
                         chatter=self.chatter)
        else:
            if not roi.quiet: print '... Skipping gtselect'

        if not os.path.exists(cmap_file):
            if not roi.quiet: print 'Running gtbin (ccube)'
            gtbin=GtApp('gtbin','evtbin')
            gtbin.run(algorithm='ccube',
                      nxpix=npix, nypix=npix, binsz=self.binsz,
                      evfile=cut_ft1,
                      outfile=cmap_file,
                      scfile=ft2,
                      xref=x, yref=y, axisrot=0, proj=self.proj,
                      ebinalg='LOG', emin=self.emin, emax=self.emax, enumbins=self.enumbins,
                      coordsys=coordsys_str,
                      chatter=self.chatter)
        else:
            if not roi.quiet: print '... Skipping gtbin (ccube)'

        if not os.path.exists(bexpmap_file):
            # Use the default binning all sky, 1deg/pixel
            if not roi.quiet: print 'Running gtexpcube'
            gtexpcube=GtApp('gtexpcube2','Likelihood')
            gtexpcube.run(infile=ltcube,
                          cmap='none',
                          ebinalg='LOG', emin=self.emin, emax=self.emax, enumbins=self.enumbins,
                          outfile=bexpmap_file, proj='CAR',
                          nxpix=360, nypix=180, binsz=1,
                          irfs=irfs,
                          coordsys=coordsys_str,
                          chatter=self.chatter)
        else:
            if not roi.quiet: print '... Skipping gtexpcube'

        if not os.path.exists(srcmap_file):
            if not roi.quiet: print 'Running gtsrcmaps'
            gtsrcmaps=GtApp('gtsrcmaps','Likelihood')
            gtsrcmaps.run(scfile=ft2,
                          expcube=ltcube,
                          cmap=cmap_file,
                          srcmdl=input_srcmdl_file,
                          bexpmap=bexpmap_file,
                          outfile=srcmap_file,
                          irfs=irfs,
                          rfactor=self.rfactor,
                          resample=self.resample,
                          minbinsz=self.minbinsz,
                          chatter=self.chatter)
        else:
            if not roi.quiet: print '... Skipping gtsrcmaps'

        if not roi.quiet: print 'Creating Binned LIKE'
        obs=BinnedObs(srcMaps=srcmap_file,expCube=ltcube,binnedExpMap=bexpmap_file,irfs=irfs)

        self.like = BinnedAnalysis(binnedData=obs,srcModel=input_srcmdl_file,optimizer=self.optimizer)

        if self.enable_edisp:
            if not roi.quiet: print 'Enabling energy dispersion'
            self.like.logLike.set_edisp_flag(True)


        if not roi.quiet: print 'Binned LIKE Created!'
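A note on the npix computation in this last example: the counts map is sized to the square inscribed in the circular pointlike ROI (side sqrt(2)*r, diagonal 2*r), which is why the comment says the gtlike ROI ends up inside the pointlike one. The arithmetic, with made-up numbers:

import math
import numpy as np

# A circle of radius r contains a square of side sqrt(2)*r (diagonal 2*r),
# so sizing the counts map this way keeps it inside the circular ROI.
roi_radius = 10.0   # deg, hypothetical
binsz = 0.1         # deg/pixel, hypothetical
npix = int(math.ceil(np.sqrt(2.0) * roi_radius / binsz))
print('%d pixels, %.1f deg on a side (ROI diameter %.1f deg)'
      % (npix, npix * binsz, 2 * roi_radius))   # 142 pixels, 14.2 deg, 20.0 deg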