Example 1
 def __call__(self, v, skydir=None):
     sd = skydir or SkyDir(Hep3Vector(v[0], v[1], v[2]))
     rval = 0
     for band in self.bands:
         PythonUtilities.arclength(band.rvals, band.wsdl, sd)
         mask = band.rvals < band.max_rad
         rval += (band.psf(band.rvals[mask], density=True) *
                  band.pix_counts[mask]).sum()
     return rval
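Examples 1 through 3 follow the same pattern: PythonUtilities.arclength fills a preallocated array with the angular separation (in radians) of every pixel direction in the band's WeightedSkyDirList from a candidate direction, the separations are cut at a maximum radius, and the PSF density is summed weighted by the observed pixel counts. Below is a minimal NumPy-only sketch of that pattern; the great-circle formula stands in for PythonUtilities.arclength, and psf_density, pix_vecs and pix_counts are illustrative placeholders rather than pointlike objects.

    import numpy as np

    def arclength(out, pix_vecs, center_vec):
        """Fill `out` with the great-circle separation (radians) of each unit
        vector in `pix_vecs` from `center_vec`; a plain-NumPy stand-in for
        PythonUtilities.arclength."""
        cos_sep = np.clip(pix_vecs.dot(center_vec), -1.0, 1.0)
        out[:] = np.arccos(cos_sep)

    def kde_value(center_vec, pix_vecs, pix_counts, psf_density, max_rad):
        """Sum of PSF density times pixel counts over pixels within max_rad,
        mirroring the __call__ methods above."""
        rvals = np.empty(len(pix_vecs), dtype=float)
        arclength(rvals, pix_vecs, center_vec)
        mask = rvals < max_rad
        return (psf_density(rvals[mask]) * pix_counts[mask]).sum()

    # toy usage: three pixels on the unit sphere, flat "PSF", 0.2 rad cut
    vecs = np.array([[1.0, 0.0, 0.0], [0.99, 0.14, 0.0], [0.0, 1.0, 0.0]])
    vecs /= np.linalg.norm(vecs, axis=1)[:, None]
    print(kde_value(np.array([1.0, 0.0, 0.0]), vecs, np.ones(3),
                    lambda r: np.ones_like(r), 0.2))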
Example 2
 def __call__(self, v, skydir=None):
     """ copied from roi_tsmap.HealpixKDEMap """
     sd = skydir or SkyDir(Hep3Vector(v[0], v[1], v[2]))
     rval = 0
     for i, band in enumerate(self.bands):
         if not band.has_pixels: continue
         rvals = np.empty(len(band.wsdl), dtype=float)
         PythonUtilities.arclength(rvals, band.wsdl, sd)
         mask = rvals < self.r95[i]
         rval += (band.psf(rvals[mask]) * band.pix_counts[mask]).sum()
     return rval
Example 3
 def call2(self, v, skydir=None):
     sd = skydir or SkyDir(Hep3Vector(v[0], v[1], v[2]))
     rval = 0
     for band in self.bands:
         if band.photons == 0: continue
         band.rvals = np.empty(len(band.wsdl), dtype=float)
         PythonUtilities.arclength(band.rvals, band.wsdl, sd)
         mask = band.rvals < band.r99
         rval += (band.psf(band.rvals[mask], density=True) *
                  band.pix_counts[mask]).sum()
     return rval
Example 4
    def extended_source_counts(self, extended_model):
        if type(extended_model) not in [
                ROIExtendedModel, ROIExtendedModelAnalytic
        ]:
            raise Exception("Unknown extended model.")

        roi = self.roi
        sm = extended_model.extended_source.model

        extended_counts = np.zeros_like(self.bin_centers_rad)

        for band, smaller_band in zip(self.selected_bands, self.smaller_bands):

            extended_model.set_state(smaller_band)

            if type(extended_model) == ROIExtendedModel:

                nside = RadialModel.get_nside(self.size, self.npix)

                temp_band = Band(nside)
                wsdl = WeightedSkyDirList(temp_band, self.center,
                                          np.radians(self.size), True)
                vals = extended_model._pix_value(wsdl)

                rvals = np.empty(len(wsdl), dtype=float)
                PythonUtilities.arclength(rvals, wsdl, self.center)

                # average value in each ring: weighted histogram / pixel counts
                bins = np.sqrt(self.bin_edges_rad)
                fraction = (np.histogram(rvals, weights=vals, bins=bins)[0] /
                            np.histogram(rvals, bins=bins)[0])

                # multiply intensities by solid angle in ring
                fraction *= RadialModel.solid_angle_cone(np.radians(
                    self.size)) / self.npix

            elif type(extended_model) == ROIExtendedModelAnalytic:

                fraction = np.empty_like(self.bin_centers_rad)

                for i, (theta_min, theta_max) in enumerate(self.theta_pairs_rad):
                    fraction[i] = (
                        extended_model._overlaps(self.center, band, theta_max) -
                        extended_model._overlaps(self.center, band, theta_min))

            # total counts from source * fraction of PDF in ring = model predictions in each ring.
            extended_counts += band.expected(sm) * fraction

        return extended_counts
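The ring-averaging step used in Examples 4 and 7 is just the ratio of a weighted histogram (sum of values per radial bin) to an unweighted one (number of pixels per bin). A minimal sketch, assuming only NumPy; rings with no pixels would produce NaN from the zero division, so the code above implicitly relies on every ring containing at least one pixel.

    import numpy as np

    def ring_average(rvals, vals, bin_edges):
        """Mean of `vals` in each radial bin: weighted histogram divided by
        the per-bin pixel counts."""
        sums, _ = np.histogram(rvals, weights=vals, bins=bin_edges)
        counts, _ = np.histogram(rvals, bins=bin_edges)
        return sums / counts

    # toy check: values equal to the radius average to the bin mid-points (roughly)
    r = np.linspace(0.0, 1.0, 1000)
    print(ring_average(r, r, np.linspace(0.0, 1.0, 5)))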
Example 5
 def num_overlap(self,
                 band,
                 roi_dir,
                 ps_dir,
                 radius_in_rad=None,
                 override_pdf=None):
     roi_rad = radius_in_rad or band.radius_in_rad
     if self.cache_hash != hash(band):
         # fragile due to radius dep.
         self.set_dir_cache(band, roi_dir, roi_rad)
     if override_pdf is None:
         band.psf.cpsf.wsdl_val(self.cache_diffs, ps_dir, self.cache_wsdl)
     else:
         difference = np.empty(len(self.cache_wsdl))
         PythonUtilities.arclength(difference, self.cache_wsdl, roi_dir)
         self.cache_diffs = override_pdf(difference)
     return self.cache_diffs.sum() * band.b.pixelArea()
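num_overlap in Example 5 estimates the fraction of the PSF contained in the aperture as a Riemann sum: the PSF density (or an override) is evaluated at every cached pixel direction, summed, and multiplied by the pixel solid angle. The self-contained check below illustrates the same idea on a flat grid, with a 2D Gaussian standing in for band.psf and square pixels standing in for the band's sky pixels; the names are placeholders, not pointlike API.

    import numpy as np

    # Gaussian "PSF" density, normalized over the plane (stand-in for band.psf)
    sigma = 0.5                                   # width in degrees
    psf = lambda r: np.exp(-0.5 * (r / sigma)**2) / (2.0 * np.pi * sigma**2)

    # regular grid of pixel centers inside a 3-degree aperture
    step = 0.02                                   # pixel size in degrees
    x, y = np.meshgrid(np.arange(-3, 3, step), np.arange(-3, 3, step))
    r = np.hypot(x, y)
    inside = r < 3.0

    pixel_area = step**2
    overlap = psf(r[inside]).sum() * pixel_area   # sum of density * pixel area
    print(overlap)                                # ~1: the aperture contains the whole PSF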
Example 6
    def setup_from_roi(self, hr, factor):

        band1 = hr.band
        band2 = Band(int(round(band1.nside() * factor)))
        rd = band1.dir(hr.index)

        # get pixels within a radius 10% greater than the base Healpix diagonal dimension
        radius = (np.pi / (3 * band1.nside()**2))**0.5 * 2**0.5 * 1.1
        wsdl = WeightedSkyDirList(band2, rd, radius, True)

        # then cut down to just the pixels inside the base Healpixel
        inds = np.asarray([band1.index(x) for x in wsdl])
        mask = inds == hr.index
        dirs = [wsdl[i] for i in xrange(len(mask)) if mask[i]]
        inds = np.asarray([band2.index(x) for x in dirs]).astype(int)

        # sanity check
        if abs(float(mask.sum()) / factor**2 - 1) > 0.01:
            print 'Warning: number of pixels found does not agree with expectations!'

        # loop through the bands and image pixels to calculate the KDE
        from libpointlike import DoubleVector
        #dv = DoubleVector()
        rvs = [np.empty(len(band.wsdl), dtype=float) for band in hr.roi.bands]
        img = np.zeros(len(inds))
        weights = [
            np.asarray([x.weight() for x in b.wsdl]).astype(int)
            for b in hr.roi.bands
        ]
        for idir, mydir in enumerate(dirs):
            #print 'Processing pixel %d'%(idir)
            for iband, band in enumerate(hr.roi.bands):
                # fill this band's preallocated separation buffer for the current pixel
                PythonUtilities.arclength(rvs[iband], band.wsdl, mydir)
                img[idir] += (band.psf(rvs[iband], density=True) *
                              weights[iband]).sum()

        self.band1 = band1
        self.band2 = band2
        self.previous_index = -1

        sorting = np.argsort(inds)
        self.inds = inds[sorting]
        self.img = img[sorting]

        self.index = hr.index
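The search radius in Example 6 comes from HEALPix geometry: a map with parameter nside has 12 * nside**2 equal-area pixels, so each covers pi / (3 * nside**2) steradians; the code treats a pixel as roughly a square of that area, takes its diagonal, and pads it by 10%. Refining nside by `factor` then splits every base pixel into about factor**2 sub-pixels, which is what the sanity check verifies. A short sketch of that arithmetic (the numbers are illustrative):

    import numpy as np

    def search_radius(nside, margin=1.1):
        """Radius (radians) ~10% larger than the diagonal of a base pixel,
        treating the equal-area pixel (pi / (3 * nside**2) sr) as a square."""
        pixel_area = np.pi / (3.0 * nside**2)
        return margin * np.sqrt(2.0 * pixel_area)

    nside, factor = 256, 4
    print(search_radius(nside))   # same value as the expression used in setup_from_roi
    print(factor**2)              # expected number of finer pixels per base pixel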
Example 7
    def otf_source_counts(self, bg):

        roi = self.roi

        mo = bg.smodel

        background_counts = np.zeros_like(self.bin_centers_rad)

        for band, smaller_band in zip(self.selected_bands, self.smaller_bands):

            ns, bg_points, bg_vector = ROIDiffuseModel_OTF.sub_energy_binning(
                band, bg.nsimps)

            nside = RadialModel.get_nside(self.size, self.npix)

            temp_band = Band(nside)
            wsdl = WeightedSkyDirList(temp_band, self.center,
                                      np.radians(self.size), True)

            ap_evals = np.empty([len(self.bin_centers_rad), len(bg_points)])

            for ne, e in enumerate(bg_points):

                bg.set_state(e, band.ct, smaller_band)

                rvals = np.empty(len(wsdl), dtype=float)
                PythonUtilities.arclength(rvals, wsdl, self.center)
                vals = bg._pix_value(wsdl)

                # average value in each ring: weighted histogram / pixel counts
                bins = np.sqrt(self.bin_edges_rad)
                ap_evals[:, ne] = (np.histogram(rvals, weights=vals, bins=bins)[0] /
                                   np.histogram(rvals, bins=bins)[0])

            # multiply intensities by solid angle in ring
            ap_evals *= RadialModel.solid_angle_cone(np.radians(
                self.size)) / self.npix

            ap_evals *= bg_vector
            mo_evals = mo(bg_points)
            background_counts += (ap_evals * mo_evals).sum(axis=1)

        return background_counts
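The final lines of otf_source_counts rely on NumPy broadcasting: ap_evals has shape (number of rings, number of energy sub-points), so multiplying by the length-n vectors bg_vector and mo_evals scales each energy column, and sum(axis=1) collapses the energy axis into one predicted count per ring. A shape-only sketch with placeholder numbers (not pointlike outputs):

    import numpy as np

    n_rings, n_points = 3, 4
    ap_evals = np.arange(n_rings * n_points, dtype=float).reshape(n_rings, n_points)
    bg_vector = np.array([0.1, 0.2, 0.2, 0.1])   # per-energy quadrature weights (placeholder)
    mo_evals = np.array([2.0, 1.5, 1.0, 0.5])    # spectral model at the sub-energies (placeholder)

    ring_counts = (ap_evals * bg_vector * mo_evals).sum(axis=1)
    print(ring_counts.shape)                     # (3,): one value per radial ring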
Example 8
    def __call__(self, skydir):
        """ This funciton is analogous to the BandCALDBPsf.__call__ function
            except that it always returns the density (probability per unit
            area). Also, it is different in that it takes in a skydir or WSDL 
            instead of a radial distance. """
        if isinstance(skydir, BaseWeightedSkyDirList):
            difference = np.empty(len(skydir), dtype=float)
            PythonUtilities.arclength(
                difference, skydir, self.extended_source.spatial_model.center)
            return self.val(difference)
        elif type(skydir) == np.ndarray:
            return self.val(skydir)
        elif type(skydir) == list and len(skydir) == 3:
            # convert the [x, y, z] triple to a SkyDir and re-dispatch so the
            # SkyDir branch below handles it
            return self(SkyDir(Hep3Vector(skydir[0], skydir[1], skydir[2])))

        elif type(skydir) == SkyDir:
            return float(
                self.val(
                    skydir.difference(
                        self.extended_source.spatial_model.center)))
        else:
            raise Exception("Unknown input to AnalyticConvolution.__call__()")
Example 9
    def _cache(self, skydir):
        """Cache results for a particular SkyDir.  Then can change the model
           minimal overhead."""

        if (skydir.ra() == self.cache_ra) and (skydir.dec() == self.cache_dec):
            return

        for i, band in enumerate(self.roi.bands):

            en, exp, pa = band.e, band.exp.value, band.b.pixelArea()

            # make a first-order correction for exposure variation
            band.ts_er = exp(skydir, en) / exp(self.rd, en)

            # unnormalized PSF evaluated at each data pixel
            PythonUtilities.arclength(band.rvals, band.wsdl, skydir)
            band.ts_pix_counts = pa * band.psf(band.rvals, density=True)

            # calculate overlap
            band.ts_overlap = self.ro(band, self.rd, skydir)

        self.cache_ra = skydir.ra()
        self.cache_dec = skydir.dec()
Example 10
    def __call__(self,
                 skydir,
                 repeat_diffuse=False,
                 bright_source_mask=None,
                 no_cache=False):
        """Return the TS for the position on the sky given by the argument.
        
           bright_sources = a mask to select sources to include with the
           diffuse when generating the TS map.
           
           repeat_diffuse [False] -- if set to True, will assume that the PSF eval.
                                     has already been done and the function is
                                     being called again with a change to the diffuse.

           no_cache       [False] -- will never pre-compute the PSF
        """

        bands = self.roi.bands
        bsm = bright_source_mask
        offset = skydir.difference(self.roi.roi_dir)

        if not repeat_diffuse or no_cache:

            for i, band in enumerate(bands):

                en, exp, pa = band.e, band.exp.value, band.b.pixelArea()

                # make a first-order correction for exposure variation
                band.ts_er = exp(skydir, en) / exp(self.rd, en)

                # separation of data from position
                PythonUtilities.arclength(band.rvals, band.wsdl, skydir)

                # screen out pixels too far
                max_rad = min(band.radius_in_rad - offset, band.max_rad)
                band.ts_mask = band.rvals <= max_rad

                # evaluate PSF at pixels
                band.ts_pix_counts = pa * band.psf(band.rvals[band.ts_mask],
                                                   density=True)

                # calculate overlap
                #band.ts_overlap = self.ro(band,self.rd,skydir)
                band.ts_overlap = band.psf.integral(max_rad)

        if not repeat_diffuse:

            for i, band in enumerate(bands):

                # pre-calculate the "pixel" part
                if band.has_pixels:
                    band.ts_pix_term = (
                        (band.ps_all_pix_counts[band.ts_mask] +
                         band.bg_all_pix_counts[band.ts_mask]) /
                        (band.ts_exp * band.ts_pix_counts))

        else:

            # include bright point sources and diffuse in the background model
            if bsm is not None:
                for i, band in enumerate(bands):
                    if band.has_pixels:
                        bps_term = (band.ps_counts[bsm] * band.overlaps[bsm] *
                                    band.ps_pix_counts[:, bsm]).sum(axis=1)
                        band.ts_pix_term = (bps_term + band.bg_all_pix_counts)[
                            band.ts_mask] / (band.ts_exp * band.ts_pix_counts)

            # include only the diffuse in the background model
            else:
                for i, band in enumerate(bands):
                    if band.has_pixels:
                        band.ts_pix_term = band.bg_all_pix_counts[
                            band.ts_mask] / (band.ts_exp * band.ts_pix_counts)

        # NB -- can save some computation by calculating f0/f1/f2 simultaneously, but it is
        # currently a minute fraction of the total time (above code dominates)
        J = np.log(10)

        def f0(n0, *args):
            n0 = 10**n0
            accum = 0
            for band in bands:
                pix_term = (band.pix_counts[band.ts_mask] *
                            np.log(1 + n0 / band.ts_pix_term)
                            ).sum() if band.has_pixels else 0
                ap_term = -n0 * band.ts_overlap * band.ts_exp * band.phase_factor
                accum += pix_term + ap_term
            return accum

        def f1(n0, *args):
            n0 = 10**n0
            accum = 0
            for band in bands:
                pix_term = -(band.pix_counts[band.ts_mask] *
                             (1 + band.ts_pix_term / n0)**-1
                             ).sum() if band.has_pixels else 0
                ap_term = n0 * band.ts_exp * band.ts_overlap * band.phase_factor
                accum += pix_term + ap_term
            return J * accum

        def f2(n0, *args):
            n0 = 10**n0
            accum = 0
            for band in bands:
                if band.has_pixels:
                    quot = band.ts_pix_term / n0
                    pix_term = -(band.pix_counts[band.ts_mask] * quot /
                                 (1 + quot)**2).sum()
                else:
                    pix_term = 0
                ap_term = n0 * band.ts_exp * band.ts_overlap * band.phase_factor
                accum += pix_term + ap_term
            return J * J * accum

        def TS(n0, *args):
            return 2 * f0(n0, *args)

        # assess along a grid of seed values to make sure we have a good starting position
        vals = [f0(x) for x in self.seeds]
        amax = np.argmax(vals)
        if amax == 0: return 0
        # for some reason, it helps to start *above* the critical point
        seed = self.seeds[amax] + 0.5

        # re-implementation of scipy version that uses half the calls!
        def my_newton(func, x0, fprime, tol=1e-2):
            p0 = x0
            for i in xrange(30):
                fval = func(x0)
                if fval == 0: return x0, True
                gval = fprime(x0)
                delt = fval / gval
                x0 -= delt
                if (abs(delt) < tol):
                    return x0, True
            return x0, False

        n0, conv = my_newton(f1, seed, fprime=f2)
        if conv: return TS(n0)
        else:
            print 'Warning! did not converge to a value or a value consistent with 0 flux.'
            print 'Trying again...'
            n0, conv = my_newton(f1, n0, fprime=f2)
            if conv:
                print 'Converged on 2nd Try'
                return TS(n0)
            print 'DID NOT CONVERGE AFTER TWO ATTEMPTS'
            return -1
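Example 10 maximizes the likelihood in two stages: f0 is evaluated on a coarse grid of seed values to find a good starting point, and the hand-rolled my_newton then polishes it by running Newton's method on the first derivative f1, with f2 supplied as its derivative. The standalone sketch below reproduces that flow on a toy concave objective; the functions and grid are illustrative, not pointlike quantities.

    import numpy as np

    def my_newton(func, x0, fprime, tol=1e-2, max_iter=30):
        """Newton iteration on `func`; returns (root_estimate, converged_flag)."""
        for _ in range(max_iter):
            fval = func(x0)
            if fval == 0:
                return x0, True
            step = fval / fprime(x0)
            x0 -= step
            if abs(step) < tol:
                return x0, True
        return x0, False

    # toy concave objective with its maximum at x = 3
    f0 = lambda x: -(x - 3.0)**2
    f1 = lambda x: -2.0 * (x - 3.0)   # first derivative: its root is the maximum
    f2 = lambda x: -2.0               # second derivative

    seeds = np.linspace(-5.0, 5.0, 11)
    seed = seeds[np.argmax([f0(x) for x in seeds])] + 0.5   # start just above the best grid point
    x_max, converged = my_newton(f1, seed, fprime=f2)
    print(x_max, converged)           # ~3.0, True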