Code example #1
def test_query_case_7(ds_setup, ds_datadir):

    datadir = ds_datadir
    stkname = '@' + '/'.join((datadir, 'pha.lis'))
    datastack.load_pha(stkname)

    f = datastack.query_by_header_keyword('INSTRUME', 'ACIS')

    assert f == [1, 2]

    f = datastack.query_by_obsid(7867)

    assert f == [2]

    ds = datastack.DataStack()

    ds.load_pha(stkname)

    f = ds.query_by_obsid('4938')

    assert f == [3]

    f = datastack.query_by_obsid(ds, '7867')

    assert f == [4]
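This test, like the other module-level snippets on this page, relies on numpy, sherpa.astro.ui, sherpa.astro.datastack, and two pytest fixtures (ds_setup and ds_datadir) that are defined in the project's test configuration rather than in any excerpt shown here. A minimal sketch of that shared setup, with fixture bodies inferred from the calls visible in the examples (an approximation, not the project's actual conftest.py), could look like the following. Helpers such as acis_bkg_model and validate_show_stack, used in later examples, also come from elsewhere in the test suite.

import os

import numpy as np
import pytest

from sherpa.astro import ui
from sherpa.astro import datastack


@pytest.fixture
def ds_datadir():
    # Illustrative: path to the directory holding pha.lis, 3c273.pi, etc.
    return os.path.join(os.path.dirname(__file__), 'data')


@pytest.fixture
def ds_setup():
    # Start and finish each test with a clean global datastack/UI session,
    # using the same datastack.clean() call seen in examples #14 and #15.
    datastack.clean()
    yield
    datastack.clean()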
Code example #2
def test_show_stack(ds_setup, ds_datadir, capsys):
    """Test the show_stack and html representation for a stack
    """

    # These files use MJD_OBS in the header
    ls = '@' + '/'.join((ds_datadir, 'pha.lis'))
    datastack.load_pha(ls)
    # clear out the current output
    captured = capsys.readouterr()

    datastack.show_stack()
    captured = capsys.readouterr()
    lines = captured.out.split('\n')
    assert len(lines) == 4
    assert 'id|name|OBS_ID|MJD_OBS' in lines[0]
    assert f'1|{ds_datadir}/acisf04938_000N002_r0043_pha3.fits|4938|53493.55' in lines[1]
    assert f'2|{ds_datadir}/acisf07867_000N001_r0002_pha3.fits|7867|54374.00' in lines[2]

    # Now, check the html representation
    # We do not want to hard-code the exact html here to allow
    # minor formatting changes without breaking this test since this
    # test is not about format, but about content.
    # So, we just test that a few tokens that we expect are present and
    # others are not.
    html = datastack.DATASTACK._repr_html_()
    for token in ['datastack with 2 datasets',
                  'acisf04938_000N002_r0043_pha3.fits',
                  'acisf07867_000N001_r0002_pha3.fits',
                  'MJD_OBS', 'OBS_ID']:
        assert token in html
    for token in ['MJD-OBS', 'GRATING', 'INSTRUME']:
        assert token not in html
Code example #3
def test_operations_datastack_group(ds_setup, ds_datadir):
    '''We are testing one of several grouping schemes here.'''
    datadir = ds_datadir
    datastack.load_pha("myid", '/'.join((datadir, "3c273.pi")))
    d1 = datastack.get_data('myid')
    datastack.group_counts('myid', 5)
    assert np.allclose(d1.get_dep(filter=True)[15:20], [5., 5., 6., 7., 10.])
    datastack.ungroup('myid')
    assert np.all(d1.get_dep(filter=True)[15:20] == [3., 7., 1., 6., 4.])
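For context, group_counts(id, 5) combines consecutive channels until each group contains at least 5 counts, so the grouped values asserted above are sums over different channels and not a transformation of the same 15:20 slice of the ungrouped array. A toy illustration of that rule follows (an addition for clarity, not Sherpa's implementation, which also handles trailing channels via quality flags):

def group_min_counts(counts, minimum=5):
    # Toy greedy grouping: sum consecutive channels until the running
    # total reaches `minimum`, then start a new group.  Any leftover
    # channels at the end are simply ignored here.
    groups, total = [], 0
    for c in counts:
        total += c
        if total >= minimum:
            groups.append(total)
            total = 0
    return groups

print(group_min_counts([3, 7, 1, 6, 4]))  # [10, 7]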
Code example #4
def test_show_stack(ds_setup, ds_datadir, capsys):
    """Test the show_stack handling: MJD_OBS
    """

    # These files use MJD_OBS in the header
    ls = '@' + '/'.join((ds_datadir, 'pha.lis'))
    datastack.load_pha(ls)

    validate_show_stack(capsys, ds_datadir, 'MJD_OBS', 'MJD_OBS')
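Examples #4, #8, #9, and #10 all delegate their checks to a validate_show_stack helper that is not shown in any excerpt on this page. Building on the inline assertions of example #2, a rough sketch of such a helper might look like this (it assumes the same imports as the setup sketch above; the handling of mixed or missing MJD keywords is a guess, not the project's actual helper):

def validate_show_stack(capsys, datadir, key1, key2):
    """Sketch: check show_stack output for the two-file pha.lis stack.

    key1/key2 name the MJD keyword carried by dataset 1/2 ('MJD_OBS',
    'MJD-OBS', or None when the keyword has been removed).
    """
    capsys.readouterr()          # discard any earlier output
    datastack.show_stack()
    lines = capsys.readouterr().out.split('\n')

    # Header line, one row per dataset, plus the trailing newline,
    # matching the layout checked inline in example #2.
    assert len(lines) == 4
    assert 'id|name|OBS_ID' in lines[0]

    assert f'1|{datadir}/acisf04938_000N002_r0043_pha3.fits|4938' in lines[1]
    assert f'2|{datadir}/acisf07867_000N001_r0002_pha3.fits|7867' in lines[2]

    # Only check the MJD values when the corresponding keyword exists.
    if key1 is not None:
        assert '53493.55' in lines[1]
    if key2 is not None:
        assert '54374.00' in lines[2]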
Code example #5
def test_operations_datastack_subtract(ds_setup, ds_datadir):

    datadir = ds_datadir
    datastack.load_pha("myid", '/'.join((datadir, "3c273.pi")))
    d1 = datastack.get_data('myid')
    assert np.all(d1.get_dep()[15:20] == [3., 7., 1., 6., 4.])
    datastack.subtract('myid')
    assert np.allclose(d1.get_dep()[15:20], [2.86507936, 6.86507936, 1.,
                                             6., 4.])
    datastack.unsubtract('myid')
    assert np.all(d1.get_dep()[15:20] == [3., 7., 1., 6., 4.])
Code example #6
def test_pha_case_6(ds_setup, ds_datadir):

    datadir = ds_datadir
    ls = '@' + '/'.join((datadir, 'pha.lis'))
    rmf1 = '/'.join((datadir, "acisf04938_000N002_r0043_rmf3.fits"))
    rmf2 = '/'.join((datadir, "acisf07867_000N001_r0002_rmf3.fits"))
    arf1 = '/'.join((datadir, "acisf04938_000N002_r0043_arf3.fits"))
    arf2 = '/'.join((datadir, "acisf07867_000N001_r0002_arf3.fits"))
    datastack.load_pha(ls)

    datastack.load_bkg_rmf([], rmf1)
    datastack.load_bkg_rmf([], rmf2)

    datastack.load_bkg_arf([], arf1)
    datastack.load_bkg_arf([], arf2)

    # Define background models
    bkg_arfs = datastack.get_bkg_arf([])
    bkg_scales = datastack.get_bkg_scale([])
    bkg_models = [
        ui.const1d.c1 * acis_bkg_model('acis7s'),
        ui.const1d.c2 * acis_bkg_model('acis7s')
    ]
    bkg_rsps = datastack.get_response([], bkg_id=1)
    for i in range(2):
        id_ = i + 1
        # Make the ARF spectral response flat.  This is required for using
        # the acis_bkg_model.
        bkg_arfs[i].specresp = bkg_arfs[i].specresp * 0 + 1.
        datastack.set_bkg_full_model(id_, bkg_rsps[i](bkg_models[i]))

    # Fit background
    datastack.notice(0.5, 8.)
    datastack.set_method("neldermead")
    datastack.set_stat("cash")

    datastack.thaw(c1.c0)
    datastack.thaw(c2.c0)
    datastack.fit_bkg()
    datastack.freeze(c1.c0)
    datastack.freeze(c2.c0)

    # Define source models
    rsps = datastack.get_response([])
    src_model = ui.powlaw1d.pow1
    src_models = [src_model, src_model * ui.const1d.ratio_12]
    for i in range(2):
        id_ = i + 1
        datastack.set_full_model(id_,
                                 (rsps[i](src_models[i]) +
                                  bkg_scales[i] * bkg_rsps[i](bkg_models[i])))

    datastack.fit()
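Once datastack.fit() returns, the joint fit can be inspected through the regular Sherpa UI session. The lines below are an added usage note (get_fit_results is a standard sherpa.astro.ui call, but this snippet is not part of the original test):

# Inspect the most recent (simultaneous) fit stored in the UI session.
res = ui.get_fit_results()
print(res.succeeded)                          # optimiser success flag
print(dict(zip(res.parnames, res.parvals)))   # best-fit parameter values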
Code example #7
    def test_case_6(self):
        datadir = '/'.join((self._this_dir, 'data'))
        ls = '@'+'/'.join((datadir, 'pha.lis'))
        rmf1 = '/'.join((datadir, "acisf04938_000N002_r0043_rmf3.fits"))
        rmf2 = '/'.join((datadir, "acisf07867_000N001_r0002_rmf3.fits"))
        arf1 = '/'.join((datadir, "acisf04938_000N002_r0043_arf3.fits"))
        arf2 = '/'.join((datadir, "acisf07867_000N001_r0002_arf3.fits"))
        datastack.load_pha(ls)

        datastack.load_bkg_rmf([], rmf1)
        datastack.load_bkg_rmf([], rmf2)

        datastack.load_bkg_arf([], arf1)
        datastack.load_bkg_arf([], arf2)

        # Define background models
        bkg_arfs = datastack.get_bkg_arf([])
        bkg_scales = datastack.get_bkg_scale([])
        bkg_models = [ui.const1d.c1 * acis_bkg_model('acis7s'),
                      ui.const1d.c2 * acis_bkg_model('acis7s')]
        bkg_rsps = datastack.get_response([], bkg_id=1)
        for i in range(2):
            id_ = i + 1
            # Make the ARF spectral response flat.  This is required for using
            # the acis_bkg_model.
            bkg_arfs[i].specresp = bkg_arfs[i].specresp * 0 + 1.
            datastack.set_bkg_full_model(id_, bkg_rsps[i](bkg_models[i]))

        # Fit background
        datastack.notice(0.5, 8.)
        datastack.set_method("neldermead")
        datastack.set_stat("cash")

        datastack.thaw(c1.c0)
        datastack.thaw(c2.c0)
        datastack.fit_bkg()
        datastack.freeze(c1.c0)
        datastack.freeze(c2.c0)

        # Define source models
        rsps = datastack.get_response([])
        src_model = ui.powlaw1d.pow1
        src_models = [src_model,
                      src_model * ui.const1d.ratio_12]
        for i in range(2):
            id_ = i + 1
            datastack.set_full_model(id_, (rsps[i](src_models[i]) +
                                           bkg_scales[i] *
                                           bkg_rsps[i](bkg_models[i])))

        datastack.fit()
Code example #8
def test_show_stack4(ds_setup, ds_datadir, capsys):
    """Test the show_stack handling: No MJD_OBS or MJD-OBS keyword
    """

    ls = '@' + '/'.join((ds_datadir, 'pha.lis'))
    datastack.load_pha(ls)

    # Remove the MJD_OBS keyword.
    #
    for idval in [1, 2]:
        d = datastack.get_data(idval)
        del d.header['MJD_OBS']

    validate_show_stack(capsys, ds_datadir, None, None)
Code example #9
def test_show_stack3(ds_setup, ds_datadir, capsys):
    """Test the show_stack handling: mixed MJD_OBS and MJD-OBS
    """

    # These files use MJD_OBS in the header
    ls = '@' + '/'.join((ds_datadir, 'pha.lis'))
    datastack.load_pha(ls)

    # Change to MJD-OBS (second file only)
    #
    for idval in [2]:
        d = datastack.get_data(idval)
        mjdobs = d.header['MJD_OBS']
        d.header['MJD-OBS'] = mjdobs
        del d.header['MJD_OBS']

    validate_show_stack(capsys, ds_datadir, 'MJD_OBS', 'MJD-OBS')
Code example #10
def test_show_stack2(ds_setup, ds_datadir, capsys):
    """Test the show_stack handling: MJD-OBS

    This is test_show_stack but with the data files adjusted
    to have MJD-OBS rather than MJD_OBS keywords
    """

    # These files use MJD_OBS in the header
    ls = '@' + '/'.join((ds_datadir, 'pha.lis'))
    datastack.load_pha(ls)

    # Change to MJD-OBS
    #
    for idval in [1, 2]:
        d = datastack.get_data(idval)
        mjdobs = d.header['MJD_OBS']
        d.header['MJD-OBS'] = mjdobs
        del d.header['MJD_OBS']

    validate_show_stack(capsys, ds_datadir, 'MJD-OBS', 'MJD-OBS')
Code example #11
File: test_datastack.py  Project: vorugantia/sherpa
    def test_case_7(self):
        datastack.load_pha('@' + '/'.join((self._this_dir, 'data', 'pha.lis')))

        f = datastack.query_by_header_keyword('INSTRUME', 'ACIS')

        assert f == [1, 2]

        f = datastack.query_by_obsid(7867)

        assert f == [2]

        ds = datastack.DataStack()

        ds.load_pha('@' + '/'.join((self._this_dir, 'data', 'pha.lis')))

        f = ds.query_by_obsid('4938')

        assert f == [3]

        f = datastack.query_by_obsid(ds, '7867')

        assert f == [4]
Code example #12
    def test_case_7(self):
        datastack.load_pha('@'+'/'.join((self._this_dir, 'data', 'pha.lis')))

        f = datastack.query_by_header_keyword('INSTRUME', 'ACIS')

        assert f == [1,2]

        f = datastack.query_by_obsid('7867')

        assert f == [2]

        ds = datastack.DataStack()

        ds.load_pha('@'+'/'.join((self._this_dir, 'data', 'pha.lis')))

        f = ds.query_by_obsid('4938')

        assert f == [3]

        f = datastack.query_by_obsid(ds, '7867')

        assert f == [4]
Code example #13
def test_query_missing_keyword(ds_setup, ds_datadir):
    """What happens when the keyword does not exist?

    This only checks a case where the keyword is missing in
    both files.
    """

    datadir = ds_datadir
    stkname = '@' + '/'.join((datadir, 'pha.lis'))

    # Note: since there is a conversion from float to string,
    # there's a possibility this check may fail on some systems
    #
    key1 = 'EXPOSUR2'
    val1 = '50441.752296469'
    key2 = 'EXPOSUR7'
    val2 = '21860.439777374'

    datastack.load_pha(stkname)
    f = datastack.query_by_header_keyword('MISSKEY', 'ACIS')
    assert f == []

    f = datastack.query_by_header_keyword(key1, val1)
    assert f == [1]

    f = datastack.query_by_header_keyword(key2, val2)
    assert f == [2]

    ds = datastack.DataStack()
    ds.load_pha(stkname)
    f = ds.query_by_header_keyword('MISSKEY', 'ACIS')
    assert f == []

    f = ds.query_by_header_keyword(key1, val1)
    assert f == [3]

    f = ds.query_by_header_keyword(key2, val2)
    assert f == [4]
Code example #14
File: test_datastack.py  Project: vorugantia/sherpa
    def test_case_1(self):
        datadir = '/'.join((self._this_dir, 'data'))
        ls = '@' + '/'.join((datadir, '3c273.lis'))
        datastack.load_pha(ls, use_errors=True)

        assert 2 == len(datastack.DATASTACK.datasets)
        assert 2 == len(ui._session._data)

        datastack.load_pha("myid", '/'.join((datadir, "3c273.pi")))

        assert 2 == len(datastack.DATASTACK.datasets)
        assert 3 == len(ui._session._data)

        datastack.load_pha('/'.join((datadir, "3c273.pi")))

        assert 3 == len(datastack.DATASTACK.datasets)
        assert 4 == len(ui._session._data)

        datastack.load_pha([], '/'.join((datadir, "3c273.pi")))

        assert 4 == len(datastack.DATASTACK.datasets)
        assert 5 == len(ui._session._data)

        ds = datastack.DataStack()

        datastack.load_pha(ds, ls)

        assert 4 == len(datastack.DATASTACK.datasets)
        assert 7 == len(ui._session._data)
        assert 2 == len(ds.datasets)

        datastack.load_pha(ds, '/'.join((datadir, "3c273.pi")))

        assert 4 == len(datastack.DATASTACK.datasets)
        assert 8 == len(ui._session._data)
        assert 3 == len(ds.datasets)

        dids = datastack.DATASTACK.get_stack_ids()
        assert dids == [1, 2, 3, 4]

        sids = set(ui._session._data.keys())
        assert sids == {1, 2, 3, 4, 5, 6, 7, "myid"}

        datastack.set_source([1, 2], "powlaw1d.pID")
        datastack.set_source([3, 4], "brokenpowerlaw.bpID")

        dsids = ds.get_stack_ids()
        assert dsids == [5, 6, 7]

        p1 = ui._session._model_components['p1']
        p2 = ui._session._model_components['p2']
        bp3 = ui._session._model_components['bp3']
        bp4 = ui._session._model_components['bp4']

        assert p1 is not None
        assert p2 is not None
        assert bp3 is not None
        assert bp4 is not None

        datastack.set_source(1, "polynom1d.poly1")
        datastack.set_source([2, 3, 4], "atten.attID")

        poly1 = ui._session._model_components['poly1']
        a2 = ui._session._model_components['att2']
        a3 = ui._session._model_components['att3']
        a4 = ui._session._model_components['att4']

        assert poly1 is not None
        assert a2 is not None
        assert a3 is not None
        assert a4 is not None

        datastack.clean()

        assert 0 == len(datastack.DATASTACK.datasets)
        assert 0 == len(ui._session._data)
        assert 3 == len(ds.datasets)
Code example #15
    def test_case_1(self):
        datadir = '/'.join((self._this_dir, 'data'))
        ls = '@'+'/'.join((datadir, '3c273.lis'))
        datastack.load_pha(ls, use_errors=True)

        assert 2 == len(datastack.DATASTACK.datasets)
        assert 2 == len(ui._session._data)

        datastack.load_pha("myid", '/'.join((datadir, "3c273.pi")))

        assert 2 == len(datastack.DATASTACK.datasets)
        assert 3 == len(ui._session._data)

        datastack.load_pha('/'.join((datadir, "3c273.pi")))

        assert 3 == len(datastack.DATASTACK.datasets)
        assert 4 == len(ui._session._data)

        datastack.load_pha([], '/'.join((datadir, "3c273.pi")))

        assert 4 == len(datastack.DATASTACK.datasets)
        assert 5 == len(ui._session._data)

        ds = datastack.DataStack()

        datastack.load_pha(ds, ls)

        assert 4 == len(datastack.DATASTACK.datasets)
        assert 7 == len(ui._session._data)
        assert 2 == len(ds.datasets)

        datastack.load_pha(ds, '/'.join((datadir, "3c273.pi")))

        assert 4 == len(datastack.DATASTACK.datasets)
        assert 8 == len(ui._session._data)
        assert 3 == len(ds.datasets)

        dids = datastack.DATASTACK.get_stack_ids()
        assert dids == [1,2,3,4]

        sids = set(ui._session._data.keys())
        assert sids == {1, 2, 3, 4, 5, 6, 7, "myid"}

        datastack.set_source([1,2], "powlaw1d.pID")
        datastack.set_source([3,4], "brokenpowerlaw.bpID")

        dsids = ds.get_stack_ids()
        assert dsids == [5,6,7]

        p1 = ui._session._model_components['p1']
        p2 = ui._session._model_components['p2']
        bp3 = ui._session._model_components['bp3']
        bp4 = ui._session._model_components['bp4']

        assert p1 is not None
        assert p2 is not None
        assert bp3 is not None
        assert bp4 is not None

        datastack.set_source(1, "polynom1d.poly1")
        datastack.set_source([2,3,4], "atten.attID")

        poly1 = ui._session._model_components['poly1']
        a2 = ui._session._model_components['att2']
        a3 = ui._session._model_components['att3']
        a4 = ui._session._model_components['att4']

        assert poly1 is not None
        assert a2 is not None
        assert a3 is not None
        assert a4 is not None

        datastack.clean()

        assert 0 == len(datastack.DATASTACK.datasets)
        assert 0 == len(ui._session._data)
        assert 3 == len(ds.datasets)