Example #1
def test_calc_flux_pha_unabsorbed(make_data_path, clean_astro_ui):
    """Can we calculate an unabsorbed flux?"""

    # The idea is that with a model expression of
    #    const1d.scale * powlaw1d.pl
    # when scale is not 1 (and not integrated) then we can
    # just look to see if the "absorbed" flux is scale * the
    # "unabsorbed" flux.
    #
    infile = make_data_path('3c273.pi')
    ui.load_pha(infile)

    scale = ui.create_model_component('const1d', 'scale')
    pl = ui.create_model_component('powlaw1d', 'pl')

    scale.c0 = 0.8
    scale.integrate = False
    pl.gamma = 1.5
    pl.ampl = 1e-4

    ui.set_source(scale * pl)

    pflux_abs = ui.calc_photon_flux(0.5, 7)
    pflux_unabs = ui.calc_photon_flux(0.5, 7, model=pl)

    eflux_abs = ui.calc_energy_flux(0.5, 7)
    eflux_unabs = ui.calc_energy_flux(0.5, 7, model=pl)

    pflux_scale = pflux_abs / pflux_unabs
    eflux_scale = eflux_abs / eflux_unabs

    assert pflux_scale == pytest.approx(0.8)
    assert eflux_scale == pytest.approx(0.8)
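A quick way to see why a ratio of 0.8 is expected is to evaluate the scaled and unscaled models directly on an energy grid. This is a sketch, not part of the test; it only assumes the Const1D and PowLaw1D classes from sherpa.models.basic.

import numpy as np
from sherpa.models.basic import Const1D, PowLaw1D

scale = Const1D('scale')
scale.c0 = 0.8
scale.integrate = False   # apply c0 per bin instead of integrating it
pl = PowLaw1D('pl')

egrid = np.linspace(0.5, 7, 14)
lo, hi = egrid[:-1], egrid[1:]
ratio = (scale * pl)(lo, hi) / pl(lo, hi)
# every element of ratio is 0.8, the same factor recovered by the flux calls above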
Example #2
    def setUp(self):

        self._old_logger_level = logger.getEffectiveLevel()
        logger.setLevel(logging.ERROR)

        ui.set_stat('wstat')

        infile = self.make_path('3c273.pi')
        ui.load_pha(1, infile)

        # Change the backscale value slightly so that the
        # results are different to other runs with this file.
        #
        nbins = ui.get_data(1).get_dep(False).size
        bscal = 0.9 * np.ones(nbins) * ui.get_backscal(1)
        ui.set_backscal(1, backscale=bscal)

        ui.set_source(1, ui.powlaw1d.pl)

        # The powerlaw slope and normalization are
        # intended to be "a reasonable approximation"
        # to the data, just to make sure that any statistic
        # calculation doesn't blow up too much.
        #
        ui.set_par("pl.gamma", 1.7)
        ui.set_par("pl.ampl", 1.7e-4)
Example #3
def test_plot_pvalue(make_data_path, clean_astro_ui, hide_log_output):

    fname = make_data_path("qso.pi")
    ui.load_pha(fname)

    ui.set_stat('cstat')
    ui.set_method("neldermead")

    ui.group_counts(10)
    ui.notice(0.3, 8)

    ui.set_model("xsphabs.abs1*xspowerlaw.p1")
    ui.set_model("abs1*(p1+gauss1d.g1)")

    # move the fit close to the best fit to save a small amount
    # of time.
    abs1.nh = 0.05
    p1.phoindex = 1.28
    p1.norm = 2e-4
    g1.ampl = 1.8e-5

    g1.pos = 3.
    ui.freeze(g1.pos)
    g1.fwhm = 0.1
    ui.freeze(g1.fwhm)

    ui.fit()
    ui.plot_pvalue(p1, p1 + g1, num=100)

    tmp = ui.get_pvalue_results()

    assert tmp.null == pytest.approx(210.34566845619273)
    assert tmp.alt == pytest.approx(207.66618095925094)
    assert tmp.lr == pytest.approx(2.679487496941789)
Example #4
    def load_pha(self, id, arg=None, use_errors=False):
        """Load multiple data arrays.

        This extends ``sherpa.astro.ui.load_arrays`` to load multiple
        data sets with one call.

        The usual ``filename`` argument can be a stack file with multiple
        data files defined in it. In this case, the load function will be
        called as many times as datasets are included in the stack file.
        """
        if arg is None:
            id, arg = arg, id

        if id is not None:
            if self._default_instance:
                ui.load_pha(id, arg, use_errors)
                return
            else:
                raise AttributeError(load_error_msg(id))

        # File Stacks. If the file argument is a stack file, expand the
        # file and call this function for each file in the stack.
        try:
            for infile in stk.build(arg):
                self._load_func(ui.load_pha, infile, use_errors)
        except (NameError, OSError, IOErr):
            self._load_func(ui.load_pha, arg, use_errors)
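A minimal usage sketch for the stack-aware load_pha above, assuming it is provided by the DataStack class in sherpa.astro.datastack; the stack-file and PHA file names are hypothetical.

from sherpa.astro import datastack

ds = datastack.DataStack()
# "@specs.lis" is a hypothetical stack file with one PHA file per line;
# each entry is loaded into its own dataset within the stack.
ds.load_pha("@specs.lis")
# Passing an explicit id is only allowed on the default instance; any
# other DataStack instance raises an AttributeError, as coded above.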
Example #5
def test_calc_flux_pha_analysis(elo, ehi, setting, lo, hi, make_data_path,
                                clean_astro_ui):
    """Do calc_photon/energy_flux return the expected results: fluxes + analysis setting

    Basic test for different analysis settings: the
    same range (modulo precision of conversion) gives the
    same results.
    """

    infile = make_data_path('3c273.pi')
    pl = ui.create_model_component('powlaw1d', 'pl')

    ui.load_pha(infile)
    ui.set_source(pl)

    pflux = ui.calc_photon_flux(elo, ehi)
    eflux = ui.calc_energy_flux(elo, ehi)

    ui.set_analysis(setting)
    pflux2 = ui.calc_photon_flux(lo, hi)
    eflux2 = ui.calc_energy_flux(lo, hi)

    # use approx here since the bin edges are not guaranteed
    # to line up, and use a large tolerance.
    #
    assert pflux2 == pytest.approx(pflux, rel=1e-2)

    eflux = np.log10(eflux)
    eflux2 = np.log10(eflux2)
    assert eflux2 == pytest.approx(eflux, rel=1e-3)
Example #6
    def test_ARFModelPHA(self):
        from sherpa.astro import ui
        ui.load_pha(self.make_path("3c120_meg_1.pha"))

        # remove the RMF to ensure this is an ARF-only analysis
        # (which is what is needed to trigger the bug that led to #699)
        ui.get_data().set_rmf(None)

        ui.group_counts(20)
        ui.notice(0.5, 6)
        ui.subtract()
        ui.set_model(ui.xsphabs.abs1 * (ui.xsapec.bubble + ui.powlaw1d.p1))
        ui.set_xsabund('angr')
        ui.set_xsxsect('vern')
        abs1.nh = 0.163
        abs1.nh.freeze()
        p1.ampl = 0.017
        p1.gamma = 1.9
        bubble.kt = 0.5
        bubble.norm = 4.2e-5
        tol = 1.0e-2
        ui.set_method_opt('ftol', tol)
        ui.fit()
        result = ui.get_fit_results()
        assert result.numpoints == self._fit_using_ARFModelPHA['numpoints']
        assert result.dof == self._fit_using_ARFModelPHA['dof']
Example #7
def test_grouped_pha_all_bad_response_bg_warning(elo, ehi, nbins, bkg_id,
                                                 caplog, make_data_path,
                                                 clean_astro_ui):
    """Check we get the warning messages with background filtering"""

    ui.load_pha('check', make_data_path('3c273.pi'))

    ui.set_quality('check', 2 * numpy.ones(1024, dtype=numpy.int16), bkg_id=1)
    ui.ignore_bad('check', bkg_id=1)

    with caplog.at_level(logging.INFO, logger='sherpa'):
        ui.notice_id('check', elo, ehi, bkg_id=bkg_id)

    # filtering has or hasn't happened
    nsrc = ui.get_dep('check', filter=True).size
    nback = ui.get_dep('check', filter=True, bkg_id=1).size

    if bkg_id is None:
        assert nsrc == nbins
        assert nback == 0
    else:
        assert nsrc == 46  # ie no filter
        assert nback == 0

    # did we get a warning message from the background?
    assert len(caplog.records) == 1
    name, lvl, msg = caplog.record_tuples[0]
    assert name == 'sherpa.astro.data'
    assert lvl == logging.INFO
    assert msg.startswith('Skipping dataset ')
    assert msg.endswith('/3c273_bg.pi: mask excludes all data')
Example #8
    def setUp(self):

        self._old_logger_level = logger.getEffectiveLevel()
        logger.setLevel(logging.ERROR)

        ui.set_stat('wstat')

        infile = self.make_path('3c273.pi')
        ui.load_pha(1, infile)

        # Change the backscale value slightly so that the
        # results are different to other runs with this file.
        #
        nbins = ui.get_data(1).get_dep(False).size
        bscal = 0.9 * np.ones(nbins) * ui.get_backscal(1)
        ui.set_backscal(1, backscale=bscal)

        ui.set_source(1, ui.powlaw1d.pl)

        # The powerlaw slope and normalization are
        # intended to be "a reasonable approximation"
        # to the data, just to make sure that any statistic
        # calculation doesn't blow up too much.
        #
        ui.set_par("pl.gamma", 1.7)
        ui.set_par("pl.ampl", 1.7e-4)
Example #9
def testWrite(make_data_path):

    fname = make_data_path('3c273.pi')
    ui.load_pha(1, fname)
    pha_orig = ui.get_data(1)

    ofh = tempfile.NamedTemporaryFile(suffix='sherpa_test')
    ui.save_pha(1, ofh.name, ascii=False, clobber=True)

    # limited checks
    pha = ui.unpack_pha(ofh.name)
    assert isinstance(pha, DataPHA)

    for key in ["channel", "counts"]:
        newval = getattr(pha, key)
        oldval = getattr(pha_orig, key)
        assert_allclose(oldval, newval, err_msg=key)

    # at present grouping and quality are not written out

    for key in ["exposure", "backscal", "areascal"]:
        newval = getattr(pha, key)
        oldval = getattr(pha_orig, key)
        assert newval == pytest.approx(oldval), key
    """
Example #10
    def load_pha(self, id, arg=None, use_errors=False):
        """Load multiple data arrays.

        This extends ``sherpa.astro.ui.load_arrays`` to load multiple
        data sets with one call.

        The usual ``filename`` argument can be a stack file with multiple
        data files defined in it. In this case, the load function will be
        called as many times as datasets are included in the stack file.
        """
        if arg is None:
            id, arg = arg, id

        if id is not None:
            if self._default_instance:
                ui.load_pha(id, arg, use_errors)
                return
            else:
                raise AttributeError(load_error_msg(id))

        # File Stacks. If the file argument is a stack file, expand the
        # file and call this function for each file in the stack.
        try:
            files = stk.build(arg)
            for file in files:
                self._load_func(ui.load_pha, file, use_errors)
        except Exception:
            self._load_func(ui.load_pha, arg, use_errors)
Example #11
    def __init__(self, name, filename):  # name is used as the dataset id
        self.name = name
        sau.load_pha(name, filename)
        self.data = sau.get_data(name)
        # use the responses attached to the PHA data set rather than the
        # data object itself
        self.arf = self.data.get_arf()
        self.rmf = self.data.get_rmf()

        try:  # Read keywords from pha header
            self.threshold = self.data.header['ETH']
        except KeyError:
            print(" ! WARNING: no threshold found, using 200 GeV")
            self.threshold = 2e8  # default value 200 GeV
        self.emax = 1e11  # default value 100 TeV

        try:
            self.zenith = self.data.header['ZENITH']
        except KeyError:
            print("WARNING: no mean zenith angle found, using 45 deg")
            self.zenith = 45.0  # default value 45 deg

        try:
            self.offset = self.data.header['OFFSET']
        except KeyError:
            print("WARNING: no offset angle found, using 1.0 deg")
            self.offset = 1.0  # default value 1.0 deg

        try:
            self.telcode = self.data.header['TELCODE']
        except KeyError:
            print("WARNING: no telcode found, using 0")
            self.telcode = 0  # default value 0
Example #12
def test_eqwith_err1(make_data_path, restore_xspec_settings):

    def check1(e0, e1, e2):
        assert e0 == approx(0.028335201547206704, rel=1.0e-3)
        assert e1 == approx(-0.00744118799274448756, rel=1.0e-3)
        assert e2 == approx(0.0706249544851336, rel=1.0e-3)

    ui.set_xsabund('angr')
    ui.set_xsxsect('bcmc')

    ui.load_pha(make_data_path('3c273.pi'))
    ui.notice(0.5, 7.0)
    ui.set_stat("chi2datavar")
    ui.set_method("simplex")
    ui.set_model('powlaw1d.p1+gauss1d.g1')
    g1.fwhm = 0.1
    g1.pos = 2.0
    ui.freeze(g1.pos, g1.fwhm)
    ui.fit()

    numpy.random.seed(2345)
    e = ui.eqwidth(p1, p1 + g1, error=True, niter=100)
    check1(e[0], e[1], e[2])
    params = e[3]

    numpy.random.seed(2345)
    e = ui.eqwidth(p1, p1 + g1, error=True, params=params, niter=100)
    check1(e[0], e[1], e[2])

    parvals = ui.get_fit_results().parvals
    assert parvals[0] == approx(1.9055272902160334, rel=1.0e-3)
    assert parvals[1] == approx(0.00017387966749772638, rel=1.0e-3)
    assert parvals[2] == approx(1.279415076070516e-05, rel=1.0e-3)
Example #13
    def test_fits_io(self):
        """
        Test that basic FITS I/O functions work.

        This test ensures that the FITS backend can be used to perform basic
        I/O functions.
        """

        from sherpa.astro import datastack
        folder = os.path.dirname(datastack.__file__)
        infile = os.path.join(folder, "tests", "data",
                              "acisf07867_000N001_r0002_pha3.fits")

        ui.load_pha(infile)
        with NamedTemporaryFile() as f:
            ui.save_pha(f.name, ascii=False, clobber=True)

            # And can we read it back in?
            ui.load_pha(2, f.name)

        # Check the data is the same (note: although counts/channels are integers
        # we use approximate equality checks here as easier to do).
        d1 = ui.get_data(1)
        d2 = ui.get_data(2)
        assert_almost_equal(d2.channel, d1.channel)
        assert_almost_equal(d2.counts, d1.counts)
        assert_almost_equal(d2.exposure, d1.exposure)
        assert_almost_equal(np.log10(d2.backscal), np.log10(d1.backscal))
Example #14
    def test_sherpa_fit(self, tmpdir):
        # this is to make sure that the written PHA files work with sherpa
        import sherpa.astro.ui as sau
        from sherpa.models import PowLaw1D

        # TODO: this works a little bit, but some info and warnings
        # from Sherpa remain. Not sure what to do, OK as-is for now.
        import logging

        logging.getLogger("sherpa").setLevel("ERROR")

        for obs in self.obs_list:
            obs.to_ogip_files(str(tmpdir), use_sherpa=True)

        filename = tmpdir / "pha_obs23523.fits"
        sau.load_pha(str(filename))
        sau.set_stat("wstat")
        model = PowLaw1D("powlaw1d.default")
        model.ref = 1e9
        model.ampl = 1
        model.gamma = 2
        sau.set_model(model * 1e-20)
        sau.fit()
        assert_allclose(model.pars[0].val, 2.732, rtol=1e-3)
        assert_allclose(model.pars[2].val, 4.647, rtol=1e-3)
Example #15
def test_bug_276(make_data_path):
    ui.load_pha(make_data_path('3c273.pi'))
    ui.set_model('polynom1d.p1')
    ui.fit()
    ui.covar()
    scal = ui.get_covar_results().parmaxes
    ui.sample_flux(ui.get_model_component('p1'), 0.5, 1, num=5, correlated=False, scales=scal)
Example #16
def test_eqwith_err1(make_data_path, restore_xspec_settings):

    def check1(e0, e1, e2):
        assert e0 == pytest.approx(0.028335201547206704, rel=1.0e-3)
        assert e1 == pytest.approx(-0.00744118799274448756, rel=1.0e-3)
        assert e2 == pytest.approx(0.0706249544851336, rel=1.0e-3)

    ui.set_xsabund('angr')
    ui.set_xsxsect('bcmc')

    ui.load_pha(make_data_path('3c273.pi'))
    ui.notice(0.5, 7.0)
    ui.set_stat("chi2datavar")
    ui.set_method("simplex")
    ui.set_model('powlaw1d.p1+gauss1d.g1')
    g1.fwhm = 0.1
    g1.pos = 2.0
    ui.freeze(g1.pos, g1.fwhm)
    ui.fit()

    np.random.seed(2345)
    e = ui.eqwidth(p1, p1 + g1, error=True, niter=100)
    check1(e[0], e[1], e[2])
    params = e[3]

    np.random.seed(2345)
    e = ui.eqwidth(p1, p1 + g1, error=True, params=params, niter=100)
    check1(e[0], e[1], e[2])

    parvals = ui.get_fit_results().parvals
    assert parvals[0] == pytest.approx(1.9055272902160334, rel=1.0e-3)
    assert parvals[1] == pytest.approx(0.00017387966749772638, rel=1.0e-3)
    assert parvals[2] == pytest.approx(1.279415076070516e-05, rel=1.0e-3)
Example #17
    def __init__(self, name, filename):  # name is used as the dataset id
        self.name = name
        sau.load_pha(name, filename)
        self.data = sau.get_data(name)
        # use the responses attached to the PHA data set rather than the
        # data object itself
        self.arf = self.data.get_arf()
        self.rmf = self.data.get_rmf()

        try:  # Read keywords from pha header
            self.threshold = self.data.header['ETH']
        except KeyError:
            print(" ! WARNING: no threshold found, using 200 GeV")
            self.threshold = 2e8  # default value 200 GeV
        self.emax = 1e11  # default value 100 TeV

        try:
            self.zenith = self.data.header['ZENITH']
        except KeyError:
            print("WARNING: no mean zenith angle found, using 45 deg")
            self.zenith = 45.0  # default value 45 deg

        try:
            self.offset = self.data.header['OFFSET']
        except KeyError:
            print("WARNING: no offset angle found, using 1.0 deg")
            self.offset = 1.0  # default value 1.0 deg

        try:
            self.telcode = self.data.header['TELCODE']
        except KeyError:
            print("WARNING: no telcode found, using 0")
            self.telcode = 0  # default value 0
Example #18
def test_wstat_calc_stat_info(hide_logging, make_data_path, clean_astro_ui):
    "bug #147"
    ui.load_pha("stat", make_data_path("3c273.pi"))
    ui.set_source("stat", ui.powlaw1d.p1)
    ui.set_stat("wstat")
    ui.fit("stat")
    ui.get_stat_info()
Example #19
def setup_model(make_data_path):
    """Set up a model that is reasonably close to the data.

    Returns the expected statistic values for various filters.
    """

    infile = make_data_path('q1127_src1_grp30.pi')

    ui.clean()
    ui.load_pha(infile)
    ui.subtract()

    ui.set_stat('chi2datavar')
    ui.set_source(ui.powlaw1d.pl)

    pl = ui.get_model_component('pl')
    pl.ampl = 5.28e-4
    pl.gamma = 1.04

    # These statistic values were created using CIAO 4.9 on a
    # Ubuntu machine. The quality=2 values are for high energies
    # (above ~ 10 keV or so), and so a filter of 0.5-8.0 keV should
    # give the same answer with or without ignore_bad.
    #
    return {
        'all': 2716.7086246284807,
        'bad': 2716.682482792285,
        '0.5-8.0': 1127.7165108405597
    }
Example #20
 def test_warnings_are_gone_pha(self):
     pha = self.make_path("3c273.pi")
     with warnings.catch_warnings(record=True) as w:
         warnings.simplefilter("always")
         with NamedTemporaryFile() as f:
             ui.load_pha(pha)
             ui.save_data(1, f.name, ascii=False, clobber=True)
         assert len(w) == 0
Example #21
 def test_warnings_are_gone_pha(self):
     pha = self.make_path("3c273.pi")
     with warnings.catch_warnings(record=True) as w:
         warnings.simplefilter("always")
         with NamedTemporaryFile() as f:
             ui.load_pha(pha)
             ui.save_data(1, f.name, ascii=False, clobber=True)
         assert len(w) == 0
Example #22
def test_background():

    tmpdir = tempfile.mkdtemp()
    curdir = os.getcwd()
    os.chdir(tmpdir)

    kT_sim = 1.0
    Z_sim = 0.0
    norm_sim = 4.0e-2
    nH_sim = 0.04
    redshift = 0.01

    exp_time = (200., "ks")
    area = (1000., "cm**2")

    wcs = create_dummy_wcs()

    abs_model = WabsModel(nH_sim)

    events = EventList.create_empty_list(exp_time, area, wcs)

    spec_model = TableApecModel(0.05, 12.0, 5000, thermal_broad=False)
    spec = spec_model.return_spectrum(kT_sim, Z_sim, redshift, norm_sim)

    new_events = events.add_background(spec_model.ebins, spec, prng=prng,
                                       absorb_model=abs_model)

    new_events = ACIS_I(new_events, rebin=False, convolve_psf=False, prng=prng)

    new_events.write_spectrum("background_evt.pi", clobber=True)

    os.system("cp %s ." % new_events.parameters["ARF"])
    os.system("cp %s ." % new_events.parameters["RMF"])

    load_user_model(mymodel, "wapec")
    add_user_pars("wapec", ["nH", "kT", "metallicity", "redshift", "norm"],
                  [0.01, 4.0, 0.2, redshift, norm_sim*0.8],
                  parmins=[0.0, 0.1, 0.0, -20.0, 0.0],
                  parmaxs=[10.0, 20.0, 10.0, 20.0, 1.0e9],
                  parfrozen=[False, False, False, True, False])

    load_pha("background_evt.pi")
    set_stat("cstat")
    set_method("simplex")
    ignore(":0.5, 8.0:")
    set_model("wapec")
    fit()
    set_covar_opt("sigma", 1.6)
    covar()
    res = get_covar_results()

    assert np.abs(res.parvals[0]-nH_sim) < res.parmaxes[0]
    assert np.abs(res.parvals[1]-kT_sim) < res.parmaxes[1]
    assert np.abs(res.parvals[2]-Z_sim) < res.parmaxes[2]
    assert np.abs(res.parvals[3]-norm_sim) < res.parmaxes[3]

    os.chdir(curdir)
    shutil.rmtree(tmpdir)
Example #23
def test_background():

    tmpdir = tempfile.mkdtemp()
    curdir = os.getcwd()
    os.chdir(tmpdir)

    kT_sim = 1.0
    Z_sim = 0.0
    norm_sim = 4.0e-2
    nH_sim = 0.04
    redshift = 0.01

    exp_time = (200., "ks")
    area = (1000., "cm**2")
    fov = (10.0, "arcmin")

    prng = 24

    agen = ApecGenerator(0.05, 12.0, 5000, broadening=False)
    spec = agen.get_spectrum(kT_sim, Z_sim, redshift, norm_sim)
    spec.apply_foreground_absorption(nH_sim)

    events = make_background(area, exp_time, fov, (30.0, 45.0), spec, prng=prng)
    events.write_simput_file("bkgnd", overwrite=True)

    instrument_simulator("bkgnd_simput.fits", "bkgnd_evt.fits", 
                         exp_time, "sq_acisi_cy19", [30.0, 45.0],
                         overwrite=True, foreground=False, ptsrc_bkgnd=False,
                         instr_bkgnd=False,
                         prng=prng)

    write_spectrum("bkgnd_evt.fits", "background_evt.pi", overwrite=True)

    os.system("cp %s %s ." % (arf.filename, rmf.filename))

    load_user_model(mymodel, "wapec")
    add_user_pars("wapec", ["nH", "kT", "metallicity", "redshift", "norm"],
                  [0.01, 4.0, 0.2, redshift, norm_sim*0.8],
                  parmins=[0.0, 0.1, 0.0, -20.0, 0.0],
                  parmaxs=[10.0, 20.0, 10.0, 20.0, 1.0e9],
                  parfrozen=[False, False, False, True, False])

    load_pha("background_evt.pi")
    set_stat("cstat")
    set_method("simplex")
    ignore(":0.5, 8.0:")
    set_model("wapec")
    fit()
    res = get_fit_results()

    assert np.abs(res.parvals[0]-nH_sim)/nH_sim < 0.1
    assert np.abs(res.parvals[1]-kT_sim)/kT_sim < 0.05
    assert np.abs(res.parvals[2]-Z_sim) < 0.05
    assert np.abs(res.parvals[3]-norm_sim)/norm_sim < 0.05

    os.chdir(curdir)
    shutil.rmtree(tmpdir)
Example #24
    def setUp(self):
        # hide warning messages from file I/O
        self._old_logger_level = logger.level
        logger.setLevel(logging.ERROR)

        self._id = 1
        fname = self.make_path('3c273.pi')
        ui.load_pha(self._id, fname)
        self._pha = ui.get_data(self._id)
Example #25
    def setUp(self):
        # hide warning messages from file I/O
        self._old_logger_level = logger.level
        logger.setLevel(logging.ERROR)

        self._id = 1
        fname = self.make_path('3c273.pi')
        ui.load_pha(self._id, fname)
        self._pha = ui.get_data(self._id)
Example #26
    def __init__(self, name, filename=None):
        self.name = name
        if filename is not None:
            sau.load_pha(name, filename)
            self.data = sau.get_data(name)
            self.arf = self.data.get_arf()
            self.rmf = self.data.get_rmf()

            # Read keywords from pha header
            try:
                self.threshold = self.data.header['ETH']
            except KeyError:
                print("WARNING: no threshold found using 200 GeV")
                self.threshold = 2e8  # default value 200 GeV
                self.emax = 1e11  # default value 100 TeV

            try:
                self.zenith = self.data.header['ZENITH']
            except KeyError:
                print("WARNING: no mean zenith angle found using 45 deg")
                self.zenith = 45.0  # default value 45 deg

            try:
                self.offset = self.data.header['OFFSET']
            except KeyError:
                print("WARNING: no offset angle found using 1.0 deg")
                self.offset = 1.0  # default value 1 deg

            try:
                self.n_tels = self.data.header['N_TELS']
            except KeyError:
                print("WARNING: no number of telescopes found using 0")
                self.n_tels = 0  # default value

            try:
                self.eff = self.data.header['EFFICIEN']
            except KeyError:
                print("WARNING: no efficiency found using 1.0")
                self.eff = 1.00  # default value

            try:
                self.tstart = self.data.header['TSTART']
            except KeyError:
                print("WARNING: no tstart found using 0")
                self.tstart = 0.  # default value

            try:
                self.tstop = self.data.header['TSTOP']
            except KeyError:
                print("WARNING: no tstop found using tsart+1800")
                self.tstop = self.tstart + 1800  # default value

        else:
            self.data = sau.get_data(name)
            self.arf = self.data.get_arf()
            self.rmf = self.data.get_rmf()
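The repeated try/except blocks above could be collapsed into a small helper. This is only a sketch, assuming self.data.header behaves like a dict; the helper name is made up.

    def _header_value(self, key, default, unit=""):
        """Return a header keyword, or a default (with a warning) if missing."""
        try:
            return self.data.header[key]
        except KeyError:
            print("WARNING: no %s found, using %s %s" % (key, default, unit))
            return default

    # e.g. self.zenith = self._header_value('ZENITH', 45.0, 'deg')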
Example #27
def test_load_pha2(make_data_path, caplog):
    """Basic test that a pha2 file can be read in."""

    basename = '3c120_pha2'

    orig_ids = ui.list_data_ids()
    assert orig_ids == []

    # The file is stored gzip-encoded
    infile = make_data_path(basename)
    ui.load_pha(infile)

    pha_ids = ui.list_data_ids()
    assert len(pha_ids) == 12

    # list_data_ids doesn't guarantee an order
    # Do an explicit check, rather than via a set (testing
    # all at once) to make it easier to see what is missing
    # (if any)
    #
    for i in range(1, 13):
        assert i in pha_ids

    for i in range(1, 13):
        d = ui.get_data(i)
        validate_pha(d, bkg=True)

        # There is no indication of what "part" this data set
        # represents in the file name
        #
        assert d.name == infile

        b = ui.get_bkg(i, bkg_id=1)
        validate_pha(b, bkg=False)
        assert b.name == infile

        b = ui.get_bkg(i, bkg_id=2)
        validate_pha(b, bkg=False)
        assert b.name == infile

    # Test Log messages
    msg_one = "systematic errors were not found in file '{}'".format(infile)
    msg_two = """statistical errors were found in file '{}' 
but not used; to use them, re-read with use_errors=True""".format(infile)
    msg_three = "read background_up into a dataset from file {}".format(infile)
    msg_four = "read background_down into a dataset from file {}".format(
        infile)
    msg_five = "Multiple data sets have been input: 1-12"

    assert caplog.record_tuples == [
        ('sherpa.astro.io', logging.WARNING, msg_one),
        ('sherpa.astro.io', logging.INFO, msg_two),
        ('sherpa.astro.io', logging.INFO, msg_three),
        ('sherpa.astro.io', logging.INFO, msg_four),
        ('sherpa.astro.ui.utils', logging.INFO, msg_five),
    ]
Example #28
def test_point_source():

    tmpdir = tempfile.mkdtemp()
    curdir = os.getcwd()
    os.chdir(tmpdir)

    nH_sim = 0.02
    norm_sim = 1.0e-4
    alpha_sim = 0.95
    redshift = 0.02

    exp_time = (100., "ks")
    area = (3000., "cm**2")

    spec = Spectrum.from_powerlaw(alpha_sim, redshift, norm_sim, 
                                  emin=0.1, emax=11.5, nbins=2000)

    spec.apply_foreground_absorption(nH_sim, model="tbabs")

    positions = [(30.01, 45.0)]

    events = make_point_sources(area, exp_time, positions, (30.0, 45.0),
                                spec, prng=prng)

    events.write_simput_file("ptsrc", overwrite=True)

    instrument_simulator("ptsrc_simput.fits", "ptsrc_evt.fits",
                         exp_time, "sq_aciss_cy19", [30.0, 45.0],
                         overwrite=True, foreground=False, ptsrc_bkgnd=False,
                         instr_bkgnd=False,
                         prng=prng)

    write_spectrum("ptsrc_evt.fits", "point_source_evt.pi", overwrite=True)

    os.system("cp %s %s ." % (arf.filename, rmf.filename))

    load_user_model(mymodel, "tplaw")
    add_user_pars("tplaw", ["nH", "norm", "redshift", "alpha"],
                  [0.02, norm_sim*0.8, redshift, 0.9],
                  parmins=[0.0, 0.0, 0.0, 0.1],
                  parmaxs=[10.0, 1.0e9, 10.0, 10.0],
                  parfrozen=[True, False, True, False])

    load_pha("point_source_evt.pi")
    set_stat("cstat")
    set_method("simplex")
    ignore(":0.4, 9.0:")
    set_model("tplaw")
    fit()
    res = get_fit_results()

    assert np.abs(res.parvals[0]-norm_sim)/norm_sim < 0.05
    assert np.abs(res.parvals[1]-alpha_sim)/alpha_sim < 0.05

    os.chdir(curdir)
    shutil.rmtree(tmpdir)
Example #29
def test_point_source():

    tmpdir = tempfile.mkdtemp()
    curdir = os.getcwd()
    os.chdir(tmpdir)

    nH_sim = 0.02
    norm_sim = 1.0e-4
    alpha_sim = 0.95
    redshift = 0.02

    exp_time = (100., "ks")
    area = (3000., "cm**2")

    spec = Spectrum.from_powerlaw(alpha_sim, redshift, norm_sim, 
                                  emin=0.1, emax=11.5, nbins=2000)

    spec.apply_foreground_absorption(nH_sim, model="tbabs")

    positions = [(30.01, 45.0)]

    events = make_point_sources(area, exp_time, positions, (30.0, 45.0),
                                spec, prng=prng)

    events.write_simput_file("ptsrc", overwrite=True)

    instrument_simulator("ptsrc_simput.fits", "ptsrc_evt.fits",
                         exp_time, "sq_aciss_cy20", [30.0, 45.0],
                         overwrite=True, foreground=False, ptsrc_bkgnd=False,
                         instr_bkgnd=False,
                         prng=prng)

    write_spectrum("ptsrc_evt.fits", "point_source_evt.pi", overwrite=True)

    os.system("cp %s %s ." % (arf.filename, rmf.filename))

    load_user_model(mymodel, "tplaw")
    add_user_pars("tplaw", ["nH", "norm", "redshift", "alpha"],
                  [0.02, norm_sim*0.8, redshift, 0.9],
                  parmins=[0.0, 0.0, 0.0, 0.1],
                  parmaxs=[10.0, 1.0e9, 10.0, 10.0],
                  parfrozen=[True, False, True, False])

    load_pha("point_source_evt.pi")
    set_stat("cstat")
    set_method("simplex")
    ignore(":0.4, 9.0:")
    set_model("tplaw")
    fit()
    res = get_fit_results()

    assert np.abs(res.parvals[0]-norm_sim)/norm_sim < 0.05
    assert np.abs(res.parvals[1]-alpha_sim)/alpha_sim < 0.05

    os.chdir(curdir)
    shutil.rmtree(tmpdir)
Example #30
    def test_sherpa(self, tmpdir, extraction):
        """Same as above for files to be used with sherpa"""
        extraction.run(outdir=tmpdir, use_sherpa=True)

        import sherpa.astro.ui as sau
        sau.load_pha(str(tmpdir / 'ogip_data' / 'pha_obs23523.fits'))
        arf = sau.get_arf()
        actual = arf._arf._specresp
        desired = extraction.observations[0].aeff.data.data.value
        assert_allclose(actual, desired)
Example #31
    def test_fits_io(self):
        """
        Test that basic FITS I/O functions work.

        This test ensures that the FITS backend can be used to perform basic
        I/O functions.
        """
        ui.load_pha(self.fits)
        with NamedTemporaryFile() as f:
            ui.save_pha(f.name, ascii=False, clobber=True)
Example #32
def test_calc_flux_pha_invalid_model(func, make_data_path, clean_astro_ui):
    """Don't allow strings for model parameter"""

    infile = make_data_path('3c273.pi')
    ui.load_pha(infile)
    ui.set_source('powlaw1d.pl')

    emsg = "'model' must be a model object"
    with pytest.raises(ArgumentTypeErr, match=emsg):
        func(0.5, 7, model='pl')
Example #33
    def test_fits_io(self):
        """
        Test that basic FITS I/O functions work.

        This test ensures that the FITS backend can be used to perform basic
        I/O functions.
        """
        ui.load_pha(self.fits)
        with NamedTemporaryFile() as f:
            ui.save_pha(f.name, ascii=False, clobber=True)
Example #34
    def test_sherpa(self, tmpdir, extraction):
        """Same as above for files to be used with sherpa"""
        extraction.run(outdir=tmpdir, use_sherpa=True)

        import sherpa.astro.ui as sau
        sau.load_pha(str(tmpdir / 'ogip_data' / 'pha_obs23523.fits'))
        arf = sau.get_arf()
        actual = arf._arf._specresp
        desired = extraction.observations[0].aeff.data.data.value
        assert_allclose(actual, desired)
Example #35
 def test_get_stat_info(self):
     fname_3c273 = self.make_path("3c273.pi")
     ui.load_pha(fname_3c273)
     src = ui.xspowerlaw.pl
     ui.set_source(src)
     ui.guess('pl')
     ui.set_stat('wstat')
     stat_info = ui.get_stat_info()[0]
     assert stat_info.dof == 44
     assert stat_info.numpoints == 46
Example #36
def test_more_ui_string_model_with_rmf(make_data_path):

    ui.load_pha("foo", make_data_path('pi2286.fits'))
    ui.load_rmf("foo", make_data_path('rmf2286.fits'))

    # Check that get_rmf(id)('modelexpression') works. If it
    # raises an error the test will fail.
    #
    m = ui.get_rmf("foo")("powlaw1d.pl1")
    assert isinstance(m, RMFModelPHA)
Example #37
 def test_get_stat_info(self):
     fname_3c273 = self.make_path("3c273.pi")
     ui.load_pha(fname_3c273)
     src = ui.xspowerlaw.pl
     ui.set_source(src)
     ui.guess('pl')
     ui.set_stat('wstat')
     stat_info = ui.get_stat_info()[0]
     assert stat_info.dof == 44
     assert stat_info.numpoints == 46
Example #38
def test_load_multi_arfsrmfs(make_data_path, clean_astro_ui):
    """Added in #728 to ensure cache parameter is sent along by
    MultiResponseSumModel (fix #717).

    This has since been simplified to switch from xsapec to
    powlaw1d as it drops the need for XSPEC and is a simpler
    model, so is less affected by changes in the model code.

    A fit of the Sherpa powerlaw-model to 3c273.pi with a
    single response in CIAO 4.11 (background subtracted,
    0.5-7 keV) returns gamma = 1.9298, ampl = 1.73862e-4
    so doubling the response should halve the amplitude but
    leave the gamma value the same when using two responses,
    as below. This is with chi2datavar.
    """

    pha_pi = make_data_path("3c273.pi")
    ui.load_pha(1, pha_pi)
    ui.load_pha(2, pha_pi)

    arf = make_data_path("3c273.arf")
    rmf = make_data_path("3c273.rmf")

    ui.load_multi_arfs(1, [arf, arf], [1, 2])
    ui.load_multi_arfs(2, [arf, arf], [1, 2])

    ui.load_multi_rmfs(1, [rmf, rmf], [1, 2])
    ui.load_multi_rmfs(2, [rmf, rmf], [1, 2])

    ui.notice(0.5, 7)
    ui.subtract(1)
    ui.subtract(2)

    src = ui.create_model_component('powlaw1d', 'src')
    ui.set_model(1, src)
    ui.set_model(2, src)

    # ensure the test is repeatable by running with a known
    # statistic and method
    #
    ui.set_method('levmar')
    ui.set_stat('chi2datavar')

    # Really what we care about for fixing #717 is that
    # fit does not error out, but it's useful to know that
    # the fit has changed the parameter values (which were
    # both 1 before the fit).
    #
    ui.fit()
    fr = ui.get_fit_results()
    assert fr.succeeded
    assert fr.datasets == (1, 2)

    assert src.gamma.val == pytest.approx(1.9298, rel=1.0e-4)
    assert src.ampl.val == pytest.approx(1.73862e-4 / 2, rel=1.0e-4)
Example #39
def basic_pha1(make_data_path):
    """Create a basic PHA-1 data set/setup"""

    ui.set_default_id('tst')
    ui.load_pha(make_data_path('3c273.pi'))
    ui.subtract()
    ui.notice(0.5, 7)
    ui.set_source(ui.powlaw1d.pl)
    pl = ui.get_model_component('pl')
    pl.gamma = 1.93
    pl.ampl = 1.74e-4
Example #40
    def testReadImplicit(self):
        """Exclude .gz from the file name"""

        idval = "13"
        fname = self.head + '_pha3.fits'
        ui.load_pha(idval, fname)

        self.validate_pha(idval)

        pha = ui.get_data(idval)
        bpha = ui.get_bkg(idval, bkg_id=1)
        self.assertEqual(pha.name, bpha.name)
Example #41
    def test_sherpa(self, tmpdir, extraction):
        """Same as above for files to be used with sherpa"""
        import sherpa.astro.ui as sau

        extraction.run()
        extraction.write(outdir=tmpdir, use_sherpa=True, overwrite=True)
        sau.load_pha(str(tmpdir / "ogip_data" / "pha_obs23523.fits"))
        arf = sau.get_arf()

        actual = arf._arf._specresp
        desired = extraction.spectrum_observations[0].aeff.data.data.to_value("cm2")
        assert_allclose(actual, desired)
Example #42
def test_bug_276(make_data_path):
    ui.load_pha(make_data_path('3c273.pi'))
    ui.set_model('polynom1d.p1')
    ui.fit()
    ui.covar()
    scal = ui.get_covar_results().parmaxes
    ui.sample_flux(ui.get_model_component('p1'),
                   0.5,
                   1,
                   num=5,
                   correlated=False,
                   scales=scal)
Example #43
    def test_sherpa(self, tmpdir, extraction):
        """Same as above for files to be used with sherpa"""
        import sherpa.astro.ui as sau

        extraction.run()
        extraction.write(outdir=tmpdir, use_sherpa=True, overwrite=True)
        sau.load_pha(str(tmpdir / "ogip_data" / "pha_obs23523.fits"))
        arf = sau.get_arf()

        actual = arf._arf._specresp
        desired = extraction.spectrum_observations[0].aeff.data.data.value
        assert_allclose(actual, desired)
Example #44
 def test_string_model_with_rmf(self):
     ui.load_pha("foo", self.pha)
     ui.load_rmf("foo", self.rmf)
     # Check that get_rmf(id)('modelexpression') works
     caught = False
     try:
         m = ui.get_rmf("foo")("powlaw1d.pl1")
     except:
         caught = True
     if caught:
         self.fail("Exception caught when it shouldn't")
     from sherpa.astro.instrument import RMFModelPHA
     self.assertTrue(isinstance(m, RMFModelPHA))
Example #45
    def testReadExplicit(self):
        """Include .gz in the file name"""

        idval = 12
        fname = self.head + '_pha3.fits.gz'
        ui.load_pha(idval, fname)

        self.validate_pha(idval)

        # TODO: does this indicate that the file name, as read in,
        #       should have the .gz added to it to match the data
        #       read in, or left as is?
        pha = ui.get_data(idval)
        bpha = ui.get_bkg(idval, bkg_id=1)
        self.assertEqual(pha.name, bpha.name + '.gz')
Example #46
def test_sherpa_fit(tmpdir):
    # this is to make sure that the written PHA files work with sherpa
    pha1 = gammapy_extra.filename("datasets/hess-crab4_pha/pha_obs23592.fits")

    import sherpa.astro.ui as sau
    from sherpa.models import PowLaw1D
    sau.load_pha(pha1)
    sau.set_stat('wstat')
    model = PowLaw1D('powlaw1d.default')
    model.ref = 1e9
    model.ampl = 1
    model.gamma = 2
    sau.set_model(model * 1e-20)
    sau.fit()
    assert_allclose(model.pars[0].val, 2.0281484215403616, atol=1e-4)
    assert_allclose(model.pars[2].val, 2.3528406790143097, atol=1e-4)
Example #47
    def test_sherpa_fit(self, tmpdir):
        # this is to make sure that the written PHA files work with sherpa
        import sherpa.astro.ui as sau
        from sherpa.models import PowLaw1D

        self.obs_list.write(tmpdir, use_sherpa=True)
        filename = tmpdir / 'pha_obs23523.fits'
        sau.load_pha(str(filename))
        sau.set_stat('wstat')
        model = PowLaw1D('powlaw1d.default')
        model.ref = 1e9
        model.ampl = 1
        model.gamma = 2
        sau.set_model(model * 1e-20)
        sau.fit()
        assert_allclose(model.pars[0].val, 2.0881699260935838)
        assert_allclose(model.pars[2].val, 1.6234222129479836)
Example #48
def test_can_use_pspc_data(make_data_path):
    """A basic check that we can read in and use the ROSAT PSPC data.

    Unlike the previous tests, that directly access the io module,
    this uses the ui interface.
    """

    # The PSPC PHA file does not have the ANCRFILE/RESPFILE keywords
    # set up, so the responses have to be manually added.
    #
    ui.load_pha(make_data_path(PHAFILE), use_errors=True)
    assert ui.get_analysis() == 'channel'

    ui.load_rmf(make_data_path(RMFFILE))
    assert ui.get_analysis() == 'energy'

    ui.set_source(ui.powlaw1d.pl)
    ui.set_par('pl.gamma', 1.7)
    ui.set_par('pl.ampl', 2e-6)

    s = ui.get_stat_info()[0]
    assert s.numpoints == 63
    assert s.dof == 61

    # Value obtained from XSPEC 12.9.1p; Sherpa returns
    # sexpected = 973.2270845920297
    sexpected = 973.23
    assert_allclose(s.statval, sexpected, rtol=0, atol=0.005)

    # apply an energy filter to remove the "bogus" points
    ui.ignore(None, 0.05)

    s = ui.get_stat_info()[0]
    assert s.numpoints == 62
    assert s.dof == 60
    assert_allclose(s.statval, sexpected, rtol=0, atol=0.005)

    ui.ignore(2.01, None)

    s = ui.get_stat_info()[0]
    assert s.numpoints == 7
    assert s.dof == 5

    assert_allclose(s.statval, sexpected, rtol=0, atol=0.005)
Example #49
    def load_pha(self, id, arg=None, use_errors=False):
        if arg is None:
            id, arg = arg, id

        if id is not None:
            if self is DATASTACK:
                ui.load_pha(id, arg, use_errors)
                return
            else:
                raise AttributeError("When called from a datastack instance, an ID cannot be provided to a load function ({})".format(id))

        # File Stacks. If the file argument is a stack file, expand the file and call this function for each file
        #   in the stack.
        try:
            files = stk.build(arg)
            for file in files:
                self._load_func(ui.load_pha, file, use_errors)
        except Exception:
            self._load_func(ui.load_pha, arg, use_errors)
Example #50
    def setUp(self):

        self._old_logger_level = logger.getEffectiveLevel()
        logger.setLevel(logging.ERROR)

        ui.set_stat('wstat')

        infile1 = self.make_path('3c273.pi')
        infile2 = self.make_path('9774.pi')
        ui.load_pha(1, infile1)
        ui.load_pha(2, infile2)

        # Since 9774.pi isn't grouped, group it. Note that this
        # call groups the background to 20 counts per bin. In this
        # case we do not want that; instead we want to use the same
        # grouping scheme as the source file.
        #
        # Note: this is related to issue 227
        #
        ui.group_counts(2, 20)
        ui.set_grouping(2, bkg_id=1, val=ui.get_grouping(2))

        # There's no need to have the same model in both datasets,
        # but assume the same source model can be used, with a
        # normalization difference.
        #
        ui.set_source(1, ui.powlaw1d.pl1)
        ui.set_source(2, ui.const1d.c2 * ui.get_source(1))

        # The powerlaw slope and normalization are
        # intended to be "a reasonable approximation"
        # to the data, just to make sure that any statistic
        # calculation doesn't blow up too much.
        #
        # Note: the model values for 3c273 are slightly different
        #       to the single-PHA-file case, so stat results are
        #       slightly different.
        #
        ui.set_par("pl1.gamma", 1.7)
        ui.set_par("pl1.ampl", 1.6e-4)
        ui.set_par("c2.c0", 45)
Example #51
    def load_pha(self, specfile, annulus):
        """
        Load a pha file and add to the datasets for stacked analysis.

        :param specfile: extracted source PHA/PI spectrum file
        :param annulus: annulus for spectrum file
        """
        dataid = len(self.datasets)
        print('Loading spectrum file %s as dataset id %d' % (specfile, dataid))
        SherpaUI.load_pha(dataid, specfile)

        try:
            obsid = int(pycrates.read_file(specfile).get_key_value('OBS_ID'))
        except (TypeError, ValueError):
            obsid = 0
        dataset = dict(file=specfile,
                       obsid=obsid,
                       id=dataid,
                       annulus=annulus
                       )
        self.datasets.append(dataset)
        self.obsids.add(obsid)
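A short usage sketch for the annulus-aware load_pha above. The container name and file names are hypothetical; dataset ids simply follow the order in which spectra are added.

stack = SpecStack()                        # hypothetical object exposing the method above
stack.load_pha('ann0_src.pi', annulus=0)   # becomes dataset id 0
stack.load_pha('ann1_src.pi', annulus=1)   # becomes dataset id 1
# stack.datasets now records file, obsid, dataset id and annulus for each spectrum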
Example #52
    def test_sherpa_fit(self, tmpdir):
        # this is to make sure that the written PHA files work with sherpa
        import sherpa.astro.ui as sau
        from sherpa.models import PowLaw1D

        # TODO: this works a little bit, but some info and warnings
        # from Sherpa remain. Not sure what to do, OK as-is for now.
        import logging

        logging.getLogger("sherpa").setLevel("ERROR")

        self.obs_list.write(tmpdir, use_sherpa=True)
        filename = tmpdir / "pha_obs23523.fits"
        sau.load_pha(str(filename))
        sau.set_stat("wstat")
        model = PowLaw1D("powlaw1d.default")
        model.ref = 1e9
        model.ampl = 1
        model.gamma = 2
        sau.set_model(model * 1e-20)
        sau.fit()
        assert_allclose(model.pars[0].val, 2.732, rtol=1e-3)
        assert_allclose(model.pars[2].val, 4.647, rtol=1e-3)
Example #53
    def setUp(self):

        self._old_logger_level = logger.getEffectiveLevel()
        logger.setLevel(logging.ERROR)

        ui.set_stat('wstat')

        infile = self.make_path('9774.pi')
        ui.load_pha(1, infile)

        ui.group_counts(1, 20)

        # Unlike the test_wstat_two_scalar case, the grouping
        # is not copied over.
        # ui.set_grouping(1, bkg_id=1, val=ui.get_grouping(1))

        ui.set_source(1, ui.const1d.c1 * ui.powlaw1d.pl1)

        # These should be the same as test_wstat_two_scalar
        #
        ui.set_par("pl1.gamma", 1.7)
        ui.set_par("pl1.ampl", 1.6e-4)
        ui.set_par("c1.c0", 45)
Example #54
    def setUp(self):

        # defensive programming (one of the tests has been seen to fail
        # when the whole test suite is run without this)
        ui.clean()

        self._old_logger_level = logger.getEffectiveLevel()
        logger.setLevel(logging.ERROR)

        ui.set_stat('wstat')

        infile = self.make_path('3c273.pi')
        ui.load_pha(1, infile)

        ui.set_source(1, ui.powlaw1d.pl)

        # The powerlaw slope and normalization are
        # intended to be "a reasonable approximation"
        # to the data, just to make sure that any statistic
        # calculation doesn't blow up too much.
        #
        ui.set_par("pl.gamma", 1.782)
        ui.set_par("pl.ampl", 1.622e-4)
Example #55
def test_vapec_beta_model():

    bms = BetaModelSource()

    tmpdir = tempfile.mkdtemp()
    curdir = os.getcwd()
    os.chdir(tmpdir)

    prng = 45

    ds = bms.ds

    A = 30000.
    exp_time = 1.0e4
    redshift = 0.05
    nH_sim = 0.02

    sphere = ds.sphere("c", (0.5, "Mpc"))

    kT_sim = bms.kT
    Z_sim = bms.Z
    O_sim = bms.O
    Ca_sim = bms.Ca

    var_elem = {"O": ("stream", "oxygen"),
                "Ca": ("stream", "calcium")}

    thermal_model = ThermalSourceModel("apec", 0.1, 11.5, 20000,
                                       var_elem=var_elem,
                                       Zmet=("gas","metallicity"), 
                                       prng=prng)

    photons = PhotonList.from_data_source(sphere, redshift, A, exp_time,
                                          thermal_model)

    D_A = photons.parameters["fid_d_a"]

    norm_sim = sphere.quantities.total_quantity("emission_measure")
    norm_sim *= 1.0e-14/(4*np.pi*D_A*D_A*(1.+redshift)*(1.+redshift))
    norm_sim = float(norm_sim.in_cgs())

    events = photons.project_photons("z", [30.0, 45.0], absorb_model="tbabs",
                                     nH=nH_sim, prng=prng, no_shifting=True)

    new_events = Lynx_Calorimeter(events, prng=prng)

    os.system("cp %s %s ." % (arf.filename, rmf.filename))

    new_events.write_channel_spectrum("var_abund_beta_model_evt.pha", overwrite=True)

    load_user_model(mymodel_var, "tbapec")
    add_user_pars("tbapec", ["nH", "kT", "abund", "redshift", "norm", "O", "Ca"],
                  [nH_sim, 4.0, Z_sim, redshift, norm_sim*0.8, 0.3, 0.5],
                  parmins=[0.0, 0.1, 0.0, -20.0, 0.0, 0.0, 0.0],
                  parmaxs=[10.0, 20.0, 10.0, 20.0, 1.0e9, 10.0, 10.0],
                  parfrozen=[True, False, True, True, False, False, False])

    load_pha("var_abund_beta_model_evt.pha")
    set_stat("cstat")
    set_method("levmar")
    ignore(":0.6, 8.0:")
    set_model("tbapec")
    fit()
    res = get_fit_results()

    assert np.abs(res.parvals[0]-kT_sim)/kT_sim < 0.05
    assert np.abs(res.parvals[1]-norm_sim)/norm_sim < 0.05
    assert np.abs(res.parvals[2]-O_sim)/O_sim < 0.05
    assert np.abs(res.parvals[3]-Ca_sim)/Ca_sim < 0.15

    os.chdir(curdir)
    shutil.rmtree(tmpdir)
Example #56
 def test_bug38(self):
     ui.load_pha("3c273", self.pha3c273)
     ui.notice_id("3c273", 0.3, 2)
     ui.group_counts("3c273", 30)
     ui.group_counts("3c273", 15)
Example #57
 def test_bug38(self):
     ui.load_pha('3c273', self.pha3c273)
     ui.notice_id('3c273', 0.3, 2)
     ui.group_counts('3c273', 30)
     ui.group_counts('3c273', 15)
Example #58
 def test_wstat_calc_stat_info(self):
     ui.load_pha("stat", self.make_path("3c273.pi"))
     ui.set_source("stat", ui.powlaw1d.p1)
     ui.set_stat("wstat")
     ui.fit("stat")
     ui.get_stat_info()