Example #2
File: xpmdp.py Project: pabell/ximpol
def xpmdp(**kwargs):
    """Calculate the MDP.
    """
    logger.info('Loading the instrument response functions...')
    aeff = load_arf(kwargs['irfname'])
    modf = load_mrf(kwargs['irfname'])
    module_name = os.path.basename(kwargs['configfile']).replace('.py', '')
    ROI_MODEL = imp.load_source(module_name, kwargs['configfile']).ROI_MODEL
    logger.info(ROI_MODEL)

    # This is copied from xpobssim and should probably be factored out.
    # Actually, this should be a method of the ROI class. TBD
    if kwargs['tstart'] < ROI_MODEL.min_validity_time():
        kwargs['tstart'] = ROI_MODEL.min_validity_time()
        logger.info('Simulation start time set to %s...' % kwargs['tstart'])
    tstop = kwargs['tstart'] + kwargs['duration']
    if tstop > ROI_MODEL.max_validity_time():
        tstop = ROI_MODEL.max_validity_time()
        logger.info('Simulation stop time set to %s...' % tstop)
    kwargs['tstop'] = tstop
    observation_time = kwargs['tstop'] - kwargs['tstart']

    # This is copied from roi.py and should probably be factored out.
    # Again, the ROI class should be able to sum the count spectra of all the
    # components and expose the result.
    sources = ROI_MODEL.values()
    if len(sources) > 1:
        abort('Multiple sources not implemented, yet.')
    source = sources[0]
    if isinstance(source, xPeriodicPointSource):
        psamples = numpy.linspace(kwargs['phasemin'], kwargs['phasemax'], 100)
        logger.info('Sampling phases: %s' % psamples)
        count_spectrum = xCountSpectrum(source.energy_spectrum, aeff, psamples,
                                        scale=observation_time)
        time_integrated_spectrum = count_spectrum.build_time_integral()
    else:
        tsamples = source.sampling_time(kwargs['tstart'], kwargs['tstop'])
        logger.info('Sampling times: %s' % tsamples)
        count_spectrum = xCountSpectrum(source.energy_spectrum, aeff, tsamples)
        time_integrated_spectrum = count_spectrum.build_time_integral()

    # This should be a callable method in the binning module.
    ebinning = _make_binning(kwargs['ebinalg'], kwargs['emin'], kwargs['emax'],
                             kwargs['ebins'], kwargs['ebinning'])

    # And this might be implemented in the irf.mrf module.
    _x = time_integrated_spectrum.x
    _y = time_integrated_spectrum.y*modf(_x)
    mu_spectrum = xInterpolatedUnivariateSplineLinear(_x, _y)

    for _emin, _emax in list(zip(ebinning[:-1], ebinning[1:])) +\
        [(ebinning[0], ebinning[-1])]:
        num_counts = count_spectrum.num_expected_counts(emin=_emin, emax=_emax)
        mu_average = mu_spectrum.integral(_emin, _emax)/num_counts
        mdp = 4.29/mu_average/numpy.sqrt(num_counts)
        logger.info('%.2f--%.2f keV: %d counts in %d s, mu %.3f, MDP %.2f%%' %\
                    (_emin, _emax, num_counts, observation_time, mu_average,
                     100*mdp))
Example #3
File: xpmdp.py Project: lucabaldini/ximpol
def xpmdp(**kwargs):
    """Calculate the MDP.
    """
    logger.info('Loading the instrument response functions...')
    aeff = load_arf(kwargs['irfname'])
    modf = load_mrf(kwargs['irfname'])
    module_name = os.path.basename(kwargs['configfile']).replace('.py', '')
    ROI_MODEL = imp.load_source(module_name, kwargs['configfile']).ROI_MODEL
    logger.info(ROI_MODEL)

    # This is copied from xpobssim and should probably be factored out.
    # Actually, this should be a method of the ROI class. TBD
    if kwargs['tstart'] < ROI_MODEL.min_validity_time():
        kwargs['tstart'] = ROI_MODEL.min_validity_time()
        logger.info('Simulation start time set to %s...' % kwargs['tstart'])
    tstop = kwargs['tstart'] + kwargs['duration']
    if tstop > ROI_MODEL.max_validity_time():
        tstop = ROI_MODEL.max_validity_time()
        logger.info('Simulation stop time set to %s...' % tstop)
    kwargs['tstop'] = tstop

    # This is copied from roi.py and should probably be factored out.
    # Again, the ROI class should be able to sum the count spectra of all the
    # components and expose the result.
    sources = ROI_MODEL.values()
    if len(sources) > 1:
        abort('Multiple sources not implemented, yet.')
    source = sources[0]
    if isinstance(source, xPeriodicPointSource):
        observation_time = kwargs['tstop'] - kwargs['tstart']
        psamples = numpy.linspace(kwargs['phasemin'], kwargs['phasemax'], 100)
        logger.info('Sampling phases: %s' % psamples)
        count_spectrum = xCountSpectrum(source.energy_spectrum, aeff, psamples,
                                        source.column_density, source.redshift,
                                        scale_factor=observation_time)
    else:
        tsamples = source.sampling_time(kwargs['tstart'], kwargs['tstop'])
        logger.info('Sampling times: %s' % tsamples)
        count_spectrum = xCountSpectrum(source.energy_spectrum, aeff, tsamples,
                                        source.column_density, source.redshift)

    # Do the actual work.
    ebinning = _make_energy_binning(**kwargs)
    mdp_table = count_spectrum.build_mdp_table(ebinning, modf)
    logger.info(mdp_table)
    file_path = kwargs['outfile']
    if file_path is not None:
        logger.info('Writing output file path %s...' % file_path)
        open(file_path, 'w').write('%s\n\n%s' % (kwargs, mdp_table))
        logger.info('Done.')
    return mdp_table
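A minimal sketch of how xpmdp() might be invoked is given below; the keyword names are the ones read in the bodies above, while the concrete values (IRF name, configuration file, times and energy binning) are purely illustrative assumptions rather than ximpol defaults.

# Hypothetical invocation of xpmdp(); every value below is an illustrative
# assumption, not a default shipped with ximpol.
kwargs = dict(irfname='xipe_baseline',            # assumed IRF identifier
              configfile='config/my_source.py',   # assumed ROI configuration
              outfile=None,                       # do not write the table out
              tstart=0., duration=100000.,        # seconds
              phasemin=0., phasemax=1.,           # used for periodic sources
              ebinalg='LIN', emin=2., emax=8.,    # keV
              ebins=4, ebinning=None)
mdp_table = xpmdp(**kwargs)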
Example #4
File: roi.py Project: lucabaldini/ximpol
    def rvs_event_list(self, aeff, psf, modf, edisp, **kwargs):
        """Extract a random event list for the model component.

        TODO: here we should pass the sampling phase, instead?

        TODO: properly take into account the derivatives in the ephemeris.
        """
        # Create the event list and the count spectrum.
        event_list = xMonteCarloEventList()
        # Mind the count spectrum is made in phase!
        sampling_phase = numpy.linspace(0., 1., 100)
        count_spectrum = xCountSpectrum(self.energy_spectrum, aeff,
                                        sampling_phase, self.column_density,
                                        self.redshift)
        # All this is not properly taking into account the ephemeris.
        min_time = kwargs['tstart']
        max_time = kwargs['tstop']
        #min_time=sampling_time[0]
        #max_time = sampling_time[-1]
        delta_time = (max_time - min_time)
        period = self.ephemeris.period(min_time)
        # This is not accurate, as we are effectively discarding the last
        # fractional period. Need to think about it.
        num_periods = int(delta_time/period)
        num_expected_events = delta_time*count_spectrum.light_curve.norm()
        # Extract the number of events to be generated based on the integral
        # of the light curve over the simulation time.
        num_events = numpy.random.poisson(num_expected_events)
        logger.info('About to generate %d events...' % num_events)
        # Extract the event phases and sort them.
        col_phase = count_spectrum.light_curve.rvs(num_events)
        event_list.set_column('PHASE', col_phase)
        col_period = numpy.random.randint(0, num_periods, num_events)
        col_time = (col_period + col_phase)*period
        event_list.set_column('TIME', col_time)
        # Extract the MC energies and smear them with the energy dispersion.
        col_mc_energy = count_spectrum.rvs(col_phase)
        event_list.set_column('MC_ENERGY', col_mc_energy)
        col_pha = edisp.matrix.rvs(col_mc_energy)
        event_list.set_column('PHA', col_pha)
        event_list.set_column('ENERGY', edisp.ebounds(col_pha))
        # Extract the MC sky positions and smear them with the PSF.
        col_mc_ra, col_mc_dec = self.rvs_sky_coordinates(num_events)
        event_list.set_column('MC_RA', col_mc_ra)
        event_list.set_column('MC_DEC', col_mc_dec)
        col_ra, col_dec = psf.smear(col_mc_ra, col_mc_dec)
        event_list.set_column('RA', col_ra)
        event_list.set_column('DEC', col_dec)
        # Extract the photoelectron emission directions.
        pol_degree = self.polarization_degree(col_mc_energy, col_phase,
                                              col_mc_ra, col_mc_dec)
        pol_angle = self.polarization_angle(col_mc_energy, col_phase,
                                            col_mc_ra, col_mc_dec)
        col_pe_angle = modf.rvs_phi(col_mc_energy, pol_degree, pol_angle)
        event_list.set_column('PE_ANGLE', col_pe_angle)
        # Set the source ID.
        event_list.set_column('MC_SRC_ID', self.identifier)
        event_list.sort()
        return event_list
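To make the phase-to-time mapping above concrete, here is a minimal numeric sketch with assumed values: an event drawn at phase 0.25 and assigned to the third full period of a 10 s pulsar is placed at (3 + 0.25)*10 = 32.5 s from the start of the observation.

# Numeric sketch of col_time = (col_period + col_phase)*period; the period
# and the drawn values below are illustrative assumptions.
import numpy
period = 10.                                # assumed pulse period [s]
col_phase = numpy.array([0.25, 0.80])       # event phases in [0, 1)
col_period = numpy.array([3, 7])            # drawn full-period indices
col_time = (col_period + col_phase)*period  # -> [32.5, 78.0]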
Example #5
File: roi.py Project: lucabaldini/ximpol
    def rvs_event_list(self, aeff, psf, modf, edisp, **kwargs):
        """Extract a random event list for the model component.
        """
        # Create the event list and the count spectrum.
        event_list = xMonteCarloEventList()
        tsamples = self.sampling_time(kwargs['tstart'], kwargs['tstop'])
        logger.info('Sampling times: %s' % tsamples)
        count_spectrum = xCountSpectrum(self.energy_spectrum, aeff, tsamples,
                                        self.column_density, self.redshift)
        # Extract the number of events to be generated based on the integral
        # of the light curve over the simulation time.
        num_events = numpy.random.poisson(count_spectrum.light_curve.norm())
        logger.info('About to generate %d events...' % num_events)
        # Extract the event times and sort them.
        col_time = count_spectrum.light_curve.rvs(num_events)
        col_time.sort()
        event_list.set_column('TIME', col_time)
        # Extract the MC energies and smear them with the energy dispersion.
        col_mc_energy = count_spectrum.rvs(col_time)
        event_list.set_column('MC_ENERGY', col_mc_energy)
        col_pha = edisp.matrix.rvs(col_mc_energy)
        event_list.set_column('PHA', col_pha)
        event_list.set_column('ENERGY', edisp.ebounds(col_pha))
        # Extract the MC sky positions and smear them with the PSF.
        col_mc_ra, col_mc_dec = self.rvs_sky_coordinates(num_events)
        event_list.set_column('MC_RA', col_mc_ra)
        event_list.set_column('MC_DEC', col_mc_dec)
        col_ra, col_dec = psf.smear(col_mc_ra, col_mc_dec)
        event_list.set_column('RA', col_ra)
        event_list.set_column('DEC', col_dec)
        # Extract the photoelectron emission directions.
        pol_degree = self.polarization_degree(col_mc_energy, col_time,
                                              col_mc_ra, col_mc_dec)
        pol_angle = self.polarization_angle(col_mc_energy, col_time,
                                            col_mc_ra, col_mc_dec)
        col_pe_angle = modf.rvs_phi(col_mc_energy, pol_degree, pol_angle)
        event_list.set_column('PE_ANGLE', col_pe_angle)
        # Set the source ID.
        event_list.set_column('MC_SRC_ID', self.identifier)
        # Set the phase to rnd [0-1] for all non-periodic sources.
        phase = numpy.random.uniform(0., 1., len(col_pe_angle))
        event_list.set_column('PHASE', phase)
        return event_list
Example #6
    def mdp_table(self, column_density, index, exposure_time, eflux):
        """Return the MDP table for a point source with a power-law
        spectral shape with a given set of parameters and for a given
        observation time.

        There's a slight complication, here, due to the fact that the
        sensitivity calculator is rescaling the absorbed fluxes so that the
        input energy flux (in the web form) is that at the observer instead of
        that at the source. Therefore we need to do the same here.
        """
        tsamples = numpy.linspace(0.0, exposure_time, 2)
        norm = int_eflux2pl_norm(eflux, self.emin, self.emax, index, erg=True)
        energy_spectrum = power_law(norm, index)
        ism_trans = self.ism_model.transmission_factor(column_density)
        _x = numpy.linspace(self.emin, self.emax, 1000)
        _y = _x * energy_spectrum(_x, 0.0) * ism_trans(_x)
        absorbed_energy_spectrum = xInterpolatedUnivariateSplineLinear(_x, _y)
        absorbed_eflux = keV2erg(absorbed_energy_spectrum.norm())
        scale = eflux / absorbed_eflux
        count_spectrum = xCountSpectrum(energy_spectrum, self.aeff, tsamples,
                                        column_density, scale_factor=scale)
        mdp_table = count_spectrum.build_mdp_table(self.ebinning, self.modf)
        return mdp_table
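A hypothetical call of this method is sketched below; the parameter values are purely illustrative (presumably a hydrogen column density in cm^-2 and an energy flux in erg cm^-2 s^-1, given the keV2erg conversion in the body), and 'calculator' is just a placeholder name for an instance of the enclosing class, which is not part of this excerpt.

# Illustrative values only; 'calculator' stands for an instance of the
# (unshown) enclosing class.
table = calculator.mdp_table(column_density=1.0e21, index=2.0,
                             exposure_time=100000., eflux=1.0e-10)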
Example #7
def process_grb(grb_name, tstart=21600., duration=30000., prompt_duration=600):
    """
    """
    file_path = download_swift_grb_lc_file(grb_name)
    if file_path is None:
        return None
    pl_index = get_grb_spec_index(file_path)
    ra, dec = get_grb_position(file_path)
    light_curve = parse_light_curve(file_path, num_min_data=5.)
    if light_curve is None:
        return None
    t = light_curve.x
    grb_start, prompt_tstart = t[0], t[0]
    grb_stop = t[-1]
    prompt_tstop = t[0] + prompt_duration
    prompt_flux = light_curve.integral(prompt_tstart, prompt_tstop)
    logger.info('Integral energy flux in %.3f--%.3f s: %.3e erg cm^{-2}' %\
                (prompt_tstart, prompt_tstop, prompt_flux))
    tstart = max(tstart, t[0])
    tstop = min(tstart + duration, t[-1])
    logger.info('Effective time interval for the MDP: %.3f--%.3f s' %\
                (tstart, tstop))
    t = t[(t >= tstart)*(t <= tstop)]
    if len(t) < 2:
        return None
    scale_factor = int_eflux2pl_norm(1., 0.3, 10., pl_index, erg=True)
    pl_norm = light_curve.scale(scale_factor)  # Fix the label.
    def energy_spectrum(E, t):
        return pl_norm(t)*numpy.power(E, -pl_index)
    count_spectrum = xCountSpectrum(energy_spectrum, aeff, t)
    mdp_table = count_spectrum.build_mdp_table(ENERGY_BINNING, modf)
    logger.info(mdp_table)
    mdp = mdp_table.mdp_values()[0]
    eff_mu = [row.mu_effective for row in mdp_table.rows]
    counts = [row.num_signal for row in mdp_table.rows]
    grb_values = [ra, dec, pl_index, tstart, tstop, prompt_flux, prompt_tstart,\
                  prompt_tstop, grb_start, grb_stop, eff_mu[0], counts[0], mdp]
    return grb_values
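The function relies on module-level aeff, modf and ENERGY_BINNING objects that are not part of this excerpt. A hypothetical call would look like the following, where the GRB name is just an illustrative example:

# Hypothetical usage; 'GRB 130427A' is only an illustrative name.
values = process_grb('GRB 130427A', tstart=21600., duration=30000.)
# Returns None if no usable light curve is found, otherwise the list
# [ra, dec, pl_index, tstart, tstop, prompt_flux, prompt_tstart,
#  prompt_tstop, grb_start, grb_stop, eff_mu[0], counts[0], mdp].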
Example #8
                "flux_min": flux_min,
                "flux_max": flux_max,
                "p_opt_max": p_opt_max,
                "p_opt_min": p_opt_min,
            }
            src_list.append(src)


pl_norm_ref = int_eflux2pl_norm(FLUX_REF, E_MIN, E_MAX, PL_INDEX)
logger.info("PL normalization @ %.3e erg cm^-2 s^-1: %.3e keV^-1 cm^-2 s^-1" % (FLUX_REF, pl_norm_ref))
aeff = load_arf(DEFAULT_IRF_NAME)
modf = load_mrf(DEFAULT_IRF_NAME)
tsamples = numpy.array([0, OBS_TIME_REF])
ebinning = numpy.array([E_MIN, E_MAX])
energy_spectrum = power_law(pl_norm_ref, PL_INDEX)
count_spectrum = xCountSpectrum(energy_spectrum, aeff, tsamples)
mdp_table = count_spectrum.build_mdp_table(ebinning, modf)
mdp_ref = mdp_table.mdp_values()[-1]
logger.info("Reference MDP for %s s: %.3f" % (OBS_TIME_REF, mdp_ref))

blazar_list = parse_blazar_list(PRIORITY_ONLY)
mirror_list = [1, 3, 9, 17, 18]

numpy.random.seed(10)
_color = numpy.random.random((3, len(blazar_list)))
# numpy.random.seed(1)
# _disp = numpy.random.uniform(0.7, 2., len(blazar_list))

plt.figure("Average polarization degree", (11, 8))
_x = numpy.logspace(-13, -7, 100)
for obs_time in [1.0e3, 10.0e3, 100.0e3, 1.0e6]:
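Since the expected counts grow linearly with the observation time and the MDP scales as the inverse square root of the counts, the reference value computed above can be rescaled to other exposures. The snippet below is only a sketch of that relation, implied by the mdp = 4.29/mu/sqrt(counts) expression used in the earlier examples, not a reconstruction of the loop body omitted from this excerpt.

def scaled_mdp(obs_time):
    # MDP ~ 1/sqrt(counts) and counts ~ obs_time, hence the sqrt scaling.
    return mdp_ref*numpy.sqrt(OBS_TIME_REF/obs_time)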
Example #9
fmt = dict(xname='Time', xunits='s', yname='Energy', yunits='keV',
           zname='dN/dE', zunits='cm$^{-2}$ s$^{-1}$ keV$^{-1}$')
spectrum_spline = xInterpolatedBivariateSplineLinear(_t, _e, spectrum, **fmt)
spectrum_spline.plot(show=False, logz=True)
save_current_figure('source_spectrum.png', OUTPUT_FOLDER, clear=False)

plt.figure('Source spectrum slices')
for t in [T_MIN, 0.5*(T_MIN + T_MAX), T_MAX]:
    spectrum_spline.vslice(t).plot(show=False, logx=True, logy=True,
                                   label='t = %d s' % t)
plt.legend(bbox_to_anchor=(0.75, 0.95))
plt.axis([1, 10, None, None])
save_current_figure('source_spectrum_slices.png', OUTPUT_FOLDER, clear=False)

plt.figure('Count spectrum')
count_spectrum = xCountSpectrum(spectrum, aeff, _t)
count_spectrum.plot(show=False, logz=True)
save_current_figure('count_spectrum.png', OUTPUT_FOLDER, clear=False)

plt.figure('Count spectrum slices')
for t in [T_MIN, 0.5*(T_MIN + T_MAX), T_MAX]:
    count_spectrum.vslice(t).plot(show=False, logx=True, logy=True,
                                  label='t = %d s' % t)
plt.legend(bbox_to_anchor=(0.9, 0.95))
plt.axis([1, 10, None, None])
save_current_figure('count_spectrum_slices.png', OUTPUT_FOLDER, clear=False)

plt.figure('Light curve')
count_spectrum.light_curve.plot(show=False)
save_current_figure('light_curve.png', OUTPUT_FOLDER, clear=False)
    def test_power_law_stationary(self):
        """Test a time-independent power law.

        This creates a count spectrum with no time dependence, i.e., with
        only two (identical) interpolating time points on the auxiliary
        (time) axis. The power-law parameters (C and gamma) are constant

        >>> tmin = 0.
        >>> tmax = 100.
        >>> C = 1.
        >>> Gamma = 2.
        >>>
        >>> def powerlaw(E, t):
        >>>     return C*numpy.power(E, -Gamma)
        >>>
        >>> _t = numpy.linspace(tmin, tmax, 2)
        >>> count_spectrum = xCountSpectrum(powerlaw, self.aeff, _t)

        and the underlying xUnivariateAuxGenerator looks like this:

        .. image:: ../figures/test_power_law_stationary_2d.png

        Then a vertical slice (i.e., an interpolated linear spline) is taken
        in the middle of the auxiliary axis

        >>> tref = 0.5*(tmin + tmax)
        >>> ref_slice = count_spectrum.slice(tref)

        and the y-values of the spline are compared with the direct product of
        the effective area and the input spectrum:

        >>> _x = self.aeff.x
        >>> _y = C*numpy.power(_x, -Gamma)*self.aeff.y

        (Note that in general the two are technically not the same thing, as
        going from the count spectrum to the slice we do interpolate in time,
        although in this particular case the interpolation is trivial).
        If everything goes well, they should be on top of each other.
        The figure below is also showing the original power-law spectrum
        multiplied by the peak effective area.

        .. image:: ../figures/test_power_law_stationary_slice.png

        """
        tmin = 0.
        tmax = 100.
        tref = 0.5*(tmin + tmax)
        C = 1.
        Gamma = 2.

        def powerlaw(E, t):
            """Function defining a time-dependent energy spectrum.
            """
            return C*numpy.power(E, -Gamma)

        _t = numpy.linspace(tmin, tmax, 2)
        count_spectrum = xCountSpectrum(powerlaw, self.aeff, _t)
        count_spectrum.plot(show=False)
        overlay_tag(color='white')
        save_current_figure('test_power_law_stationary_2d.png',
                            show=self.interactive)

        ref_slice = count_spectrum.slice(tref)
        _x = self.aeff.x
        _y = C*numpy.power(_x, -Gamma)*self.aeff.y.max()
        plt.plot(_x, _y, '-', label='Original power-law spectrum')
        _y = C*numpy.power(_x, -Gamma)*self.aeff.y
        _mask = _y > 0.
        _x = _x[_mask]
        _y = _y[_mask]
        delta = abs((_y - ref_slice(_x))/_y).max()
        self.assertTrue(delta < 1e-3, 'max deviation %.9f' % delta)
        plt.plot(_x, _y, 'o', label='Direct convolution with aeff')
        ref_slice.plot(logx=True, logy=True, show=False,
                       label='xCountSpectrum output')
        overlay_tag()
        plt.text(0.1, 0.1, 'Max. difference = %.3e' % delta,
                 transform=plt.gca().transAxes)
        plt.legend(bbox_to_anchor=(0.75, 0.5))
        plt.axis([0.6, 10, None, None])
        save_current_figure('test_power_law_stationary_slice.png',
                            show=self.interactive)
    def test_power_law_rvs(self, num_events=1000000):
        """Test the generation of event energies from a count power-law
        spectrum convoluted with the effective area.

        This turned out to be more tricky than we anticipated. Since the
        convolution of the source spectrum with the effective area falls
        pretty quickly at high energy, there's typically very few events above
        a few keV and, as a consequence, the slope of the corresponding ppf is
        fairly steep close to one.

        .. image:: ../figures/test_power_law_rvs_vppf.png

        This implies that the ppf in each slice must be properly sampled
        close to 1 (initial tests showed, e.g., that a uniform grid with
        100 points between 0 and 1 was not enough to throw meaningful random
        numbers for a typical power-law source spectrum). This is particularly
        true for soft spectral indices---which is why we picked `Gamma = 3.`
        for this test.

        .. image:: ../figures/test_power_law_rvs_counts.png

        """
        tmin = 0.
        tmax = 100.
        tref = 0.5*(tmin + tmax)
        C = 1.
        Gamma = 3.

        def powerlaw(E, t):
            """Function defining a time-dependent energy spectrum.
            """
            return C*numpy.power(E, -Gamma)

        _t = numpy.linspace(tmin, tmax, 2)
        count_spectrum = xCountSpectrum(powerlaw, self.aeff, _t)

        count_spectrum.vppf.vslice(tref).plot(show=False, overlay=True)
        overlay_tag()
        save_current_figure('test_power_law_rvs_vppf.png',
                            show=self.interactive)

        ref_slice = count_spectrum.slice(tref)
        _time = numpy.full(num_events, tref)
        _energy = count_spectrum.rvs(_time)
        _binning = numpy.linspace(self.aeff.xmin(), self.aeff.xmax(), 100)
        obs, bins, patches = plt.hist(_energy, bins=_binning,
                                      histtype='step', label='Random energies')
        plt.yscale('log')
        # We want to overlay the reference count-spectrum slice, normalized
        # to the total number of events simulated.
        bin_width = (bins[1] - bins[0])
        scale = num_events*bin_width/ref_slice.norm()
        _x = 0.5*(bins[:-1] + bins[1:])
        _y = scale*ref_slice(_x)
        plt.plot(_x, _y, label='Underlying pdf')
        # And, for the chisquare, we do correctly integrate the slice in each
        # energy bin, rather than evaluating it at the bin center.
        exp = []
        scale = num_events/ref_slice.norm()
        for _emin, _emax in zip(bins[:-1], bins[1:]):
            exp.append(scale*ref_slice.integral(_emin, _emax))
        exp = numpy.array(exp)
        _mask = exp > 0.
        exp = exp[_mask]
        obs = obs[_mask]
        chi2 = ((exp - obs)**2/exp).sum()
        ndof = len(obs)
        chi2_min = ndof - 3*numpy.sqrt(2*ndof)
        chi2_max = ndof + 3*numpy.sqrt(2*ndof)
        self.assertTrue(chi2 > chi2_min, 'chisquare too low (%.2f/%d)' %\
                        (chi2, ndof))
        self.assertTrue(chi2 < chi2_max, 'chisquare too high (%.2f/%d)' %\
                        (chi2, ndof))
        plt.text(0.5, 0.1, r'$\chi^2$/ndof = %.2f/%d' % (chi2, ndof),
                 transform=plt.gca().transAxes)
        plt.legend(bbox_to_anchor=(0.85, 0.75))
        overlay_tag()
        save_current_figure('test_power_law_rvs_counts.png',
                            show=self.interactive)
    def test_power_law_variable(self):
        """Test a time-dependent power law.

        This creates a time-dependent count spectrum, where the two parameters
        of the underlying power law (C and Gamma) vary linearly with time, in
        opposite direction, between 1 and 2.

        >>> def C(t):
        >>>     return 1. + (t - tmin)/(tmax - tmin)
        >>>
        >>> def Gamma(t):
        >>>    return 2. - (t - tmin)/(tmax - tmin)
        >>>
        >>> def powerlaw(E, t):
        >>>    return C(t)*numpy.power(E, -Gamma(t))

        (Beware: this does not mean that you can interpolate linearly between
        the two time extremes, as both parameters vary at the same time and
        the spectral shape does not evolve linearly with time---we're sampling
        the time axis with 100 points).
        The underlying xUnivariateAuxGenerator looks like this:

        .. image:: ../figures/test_power_law_variable_2d.png

        Then a vertical slice (i.e., an interpolated linear spline) is taken
        in the middle of the auxiliary axis and the y-values of the spline
        are compared with the direct product of the effective area and the
        count spectrum (evaluated at the same time). If everything goes well,
        they should be on top of each other. The figure below is also showing
        the original power-law spectrum multiplied by the peak effective area.

        .. image:: ../figures/test_power_law_variable_slice.png

        Finally, we do test the light-curve building by comparing it with the
        values from a direct integration of the vertical slices on a
        fixed-spacing grid. Note that, since the normalization increases with
        time and the spectral index becomes harder, the light-curve increases
        more than linearly.

        .. image:: ../figures/test_power_law_variable_lc.png

        """
        tmin = 0.
        tmax = 100.
        tref = 0.5*(tmin + tmax)

        def C(t):
            """Time-dependent C---equals to 1 @ tmin and 2 @ tmax.
            """
            return 1. + (t - tmin)/(tmax - tmin)

        def Gamma(t):
            """Time-dependent C---equals to 2 @ tmin and 1 @ tmax.
            """
            return 2. - (t - tmin)/(tmax - tmin)

        def powerlaw(E, t):
            """Function defining a time-dependent energy spectrum.
            """
            return C(t)*numpy.power(E, -Gamma(t))

        _t = numpy.linspace(tmin, tmax, 100)
        count_spectrum = xCountSpectrum(powerlaw, self.aeff, _t)
        count_spectrum.plot(show=False)
        overlay_tag(color='white')
        save_current_figure('test_power_law_variable_2d.png',
                            show=self.interactive)

        ref_slice = count_spectrum.slice(tref)
        _x = self.aeff.x
        _y = self.aeff.y.max()*C(tref)*numpy.power(_x, -Gamma(tref))
        plt.plot(_x, _y, '-', label='Original power-law spectrum')
        _y = C(tref)*numpy.power(_x, -Gamma(tref))*self.aeff.y
        _mask = _y > 0.
        _x = _x[_mask]
        _y = _y[_mask]
        delta = abs((_y - ref_slice(_x))/_y).max()
        self.assertTrue(delta < 1e-3, 'max deviation %.9f' % delta)
        plt.plot(_x, _y, 'o', label='Direct convolution with aeff')
        ref_slice.plot(logx=True, logy=True, show=False,
                       label='xCountSpectrum output')
        overlay_tag()
        plt.text(0.1, 0.1, 'Max. difference = %.3e' % delta,
                 transform=plt.gca().transAxes)
        plt.legend(bbox_to_anchor=(0.75, 0.5))
        plt.axis([0.6, 10, None, None])
        save_current_figure('test_power_law_variable_slice.png',
                            show=self.interactive)

        _x = numpy.linspace(tmin, tmax, 33)
        _y = []
        for _xp in _x:
            _y.append(count_spectrum.slice(_xp).norm())
        _y = numpy.array(_y)
        plt.plot(_x, _y, 'o', label='Direct integral flux values')
        delta = abs((_y - count_spectrum.light_curve(_x))/_y).max()
        self.assertTrue(delta < 1e-3, 'max deviation %.9f' % delta)
        count_spectrum.light_curve.plot(show=False,
                                        label='xCountSpectrum light-curve')
        overlay_tag()
        plt.legend(bbox_to_anchor=(0.65, 0.75))
        plt.text(0.5, 0.1, 'Max. difference = %.3e' % delta,
                 transform=plt.gca().transAxes)
        save_current_figure('test_power_law_variable_lc.png',
                            show=self.interactive)