Example No. 1
def xenonnt_online(output_folder='./strax_data',
                   we_are_the_daq=False,
                   **kwargs):
    """XENONnT online processing and analysis"""
    context_options = {**straxen.contexts.common_opts, **kwargs}

    st = strax.Context(storage=[
        straxen.RunDB(readonly=not we_are_the_daq,
                      runid_field='number',
                      new_data_path=output_folder),
    ],
                       config=straxen.contexts.xnt_common_config,
                       **context_options)
    st.register([straxen.DAQReader, straxen.LEDCalibration])

    if not we_are_the_daq:
        st.storage += [
            strax.DataDirectory('/dali/lgrandi/xenonnt/raw',
                                readonly=True,
                                take_only=straxen.DAQReader.provides),
            strax.DataDirectory('/dali/lgrandi/xenonnt/processed',
                                readonly=True)
        ]
        if output_folder:
            st.storage.append(strax.DataDirectory(output_folder))

        st.context_config['forbid_creation_of'] = ('raw_records', 'records')

    return st
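
A minimal usage sketch for the context factory above; the run number is hypothetical and RunDB access (via utilix) is assumed to be configured:

st = xenonnt_online(output_folder='./strax_data')
# '024000' is a hypothetical run number known to the run database.
if st.is_stored('024000', 'led_calibration'):
    led = st.get_array('024000', 'led_calibration')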
Example No. 2
def test_fuzzy_matching():
    with tempfile.TemporaryDirectory() as temp_dir:
        st = strax.Context(storage=strax.DataDirectory(temp_dir),
                           register=[Records, Peaks])

        st.make(run_id=run_id, targets='peaks')

        # Changing an option causes data not to match
        st.set_config(dict(some_option=1))
        assert not st.is_stored(run_id, 'peaks')
        assert st.list_available('peaks') == []

        # In fuzzy context, data does match
        st2 = st.new_context(fuzzy_for=('peaks', ))
        assert st2.is_stored(run_id, 'peaks')
        assert st2.list_available('peaks') == [run_id]

        # And we can actually load it
        st2.get_meta(run_id, 'peaks')
        st2.get_array(run_id, 'peaks')

        # Fuzzy for options also works
        st3 = st.new_context(fuzzy_for_options=('some_option', ))
        assert st3.is_stored(run_id, 'peaks')

    # No saving occurs at all while fuzzy matching
    with tempfile.TemporaryDirectory() as temp_dir:
        st = strax.Context(storage=strax.DataDirectory(temp_dir),
                           register=[Records, Peaks],
                           fuzzy_for=('records', ))
        st.make(run_id, 'peaks')
        assert not st.is_stored(run_id, 'peaks')
        assert not st.is_stored(run_id, 'records')
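
In day-to-day analysis, a fuzzy context is usually derived from an existing, fully configured context rather than built from scratch; a minimal sketch, assuming st is such a context and 'gain_model' is one of its registered options:

# Data written with a different 'gain_model' value becomes loadable,
# but nothing new is written while fuzzy matching is active.
st_fuzzy = st.new_context(fuzzy_for_options=('gain_model',))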
Example No. 3
def demo():
    """Return strax context used in the straxen demo notebook"""
    straxen.download_test_data()

    st = strax.Context(storage=[
        strax.DataDirectory('./strax_data'),
        strax.DataDirectory('./strax_test_data',
                            deep_scan=True,
                            provide_run_metadata=True,
                            readonly=True)
    ],
                       forbid_creation_of=straxen.daqreader.DAQReader.provides,
                       config=dict(**x1t_common_config),
                       **x1t_context_config)

    # Use configs that are always available
    st.set_config(
        dict(
            hev_gain_model=('1T_to_pe_placeholder', False),
            gain_model=('1T_to_pe_placeholder', False),
            elife_conf=('elife_constant', 1e6),
            electron_drift_velocity=("electron_drift_velocity_constant",
                                     1.3325e-4),
        ))
    return st
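
A usage sketch for the demo context; the run id is assumed to be the XENON1T test run that straxen.download_test_data() fetches:

st = demo()
# '180215_1029' is assumed to be the run shipped with the straxen test data.
events = st.get_df('180215_1029', 'event_info')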
Example No. 4
def test_storage_converter():
    with tempfile.TemporaryDirectory() as temp_dir:
        st = strax.Context(storage=strax.DataDirectory(temp_dir),
                           register=[Records, Peaks])
        st.make(run_id=run_id, targets='peaks')

        with tempfile.TemporaryDirectory() as temp_dir_2:
            st = strax.Context(storage=[
                strax.DataDirectory(temp_dir, readonly=True),
                strax.DataDirectory(temp_dir_2)
            ],
                               register=[Records, Peaks],
                               storage_converter=True)
            store_1, store_2 = st.storage

            # Data is now in store 1, but not store 2
            key = st._key_for(run_id, 'peaks')
            store_1.find(key)
            with pytest.raises(strax.DataNotAvailable):
                store_2.find(key)

            st.make(run_id, 'peaks')

            # Data is now in both stores
            store_1.find(key)
            store_2.find(key)
Example No. 5
def demo():
    """Return strax context used in the straxen demo notebook"""
    straxen.download_test_data()

    st = strax.Context(storage=[
        strax.DataDirectory('./strax_data'),
        strax.DataDirectory('./strax_test_data',
                            deep_scan=True,
                            provide_run_metadata=True,
                            readonly=True)
    ],
                       forbid_creation_of=straxen.daqreader.DAQReader.provides,
                       config=dict(**x1t_common_config),
                       **x1t_context_config)

    # Use configs that are always available
    st.set_config(
        dict(
            hev_gain_model=(
                'to_pe_per_run', straxen.aux_repo +
                '3548132b55f81a43654dba5141366041e1daaf01/strax_files/to_pe.npy'
            ),
            gain_model=(
                'to_pe_per_run', straxen.aux_repo +
                '3548132b55f81a43654dba5141366041e1daaf01/strax_files/to_pe.npy'
            ),
        ))
    st.register(straxen.RecordsFromPax)
    return st
Example No. 6
def demo():
    """Return strax context used in the straxen demo notebook"""
    straxen.download_test_data()

    st = strax.Context(storage=[
        strax.DataDirectory('./strax_data'),
        strax.DataDirectory('./strax_test_data',
                            deep_scan=True,
                            provide_run_metadata=True,
                            readonly=True)
    ],
                       forbid_creation_of=straxen.daqreader.DAQReader.provides,
                       config=dict(**x1t_common_config),
                       **x1t_context_config)

    # Use configs that are always available
    st.set_config(
        dict(
            hev_gain_model=('1T_to_pe_placeholder', False),
            gain_model=('1T_to_pe_placeholder', False),
            elife=1e6,
            electron_drift_velocity=("electron_drift_velocity_constant",
                                     1.3325e-4),
            se_gain=28.2,
            avg_se_gain=28.2,
            rel_extraction_eff=1.0,
            s1_xyz_map=
            f'itp_map://resource://{pax_file("XENON1T_s1_xyz_lce_true_kr83m_SR1_pax-680_fdc-3d_v0.json")}?fmt=json',
            s2_xy_map=
            f'itp_map://resource://{pax_file("XENON1T_s2_xy_ly_SR1_v2.2.json")}?fmt=json',
        ))
    return st
Example No. 7
def demo():
    """Return strax context used in the straxen demo notebook"""
    return strax.Context(
            storage=[strax.DataDirectory('./strax_data'),
                     strax.DataDirectory('./strax_test_data')],
            register=straxen.plugins.pax_interface.RecordsFromPax,
            forbid_creation_of=('raw_records',),
            **common_opts)
Example No. 8
def demo():
    """Return strax context used in the nEXO_strax demo notebook"""
    sx.download_test_data()
    return strax.Context(storage=[
        strax.DataDirectory('./strax_data'),
        strax.DataDirectory('./strax_test_data')
    ],
                         register=sx.RecordsFromPax,
                         forbid_creation_of=('raw_records', ),
                         **common_opts)
Example No. 9
def fake_daq():
    """Context for processing fake DAQ data in the current directory"""
    return strax.Context(
        storage=[
            strax.DataDirectory('./strax_data', provide_run_metadata=False),
            # Fake DAQ puts run doc JSON in same folder:
            strax.DataDirectory('./from_fake_daq', readonly=True)
        ],
        config=dict(input_dir='./from_fake_daq'),
        **common_opts)
Example No. 10
def demo():
    """Return strax context used in the straxen demo notebook"""
    return straxen.XENONContext(
        storage=[
            strax.DataDirectory('./strax_data'),
            strax.DataDirectory(straxen.straxen_dir + '/data', readonly=True)
        ],
        register=straxen.plugins.pax_interface.RecordsFromPax,
        register_all=straxen.plugins.plugins,
        forbid_creation_of=('raw_records', ),
    )
Example No. 11
def merge(
        runid_str,  # run number padded with 0s
        dtype,  # data type 'level' e.g. records, peaklets
        st,  # strax context
        path  # path where the data is stored
):

    # get the storage paths, since we will need to reset them later
    _storage_paths = [storage.path for storage in st.storage]

    # initialize plugin needed for processing
    plugin = st._get_plugins((dtype, ), runid_str)[dtype]
    st._set_plugin_config(plugin, runid_str, tolerant=False)
    plugin.setup()

    for keystring in plugin.provides:
        key = strax.DataKey(runid_str, keystring, plugin.lineage)
        saver = st.storage[0].saver(key, plugin.metadata(runid_str, keystring))
        # monkey patch the saver
        tmpname = os.path.split(saver.tempdirname)[1]
        dirname = os.path.split(saver.dirname)[1]
        saver.tempdirname = os.path.join(path, tmpname)
        saver.dirname = os.path.join(path, dirname)
        saver.is_forked = True
        # merge the jsons
        saver.close()

    # change the storage frontend to use the merged data
    st.storage[0] = strax.DataDirectory(path)

    # rechunk the data if we can
    for keystring in plugin.provides:
        rechunk = True
        if isinstance(plugin.rechunk_on_save, immutabledict):
            if not plugin.rechunk_on_save[keystring]:
                rechunk = False
        else:
            if not plugin.rechunk_on_save:
                rechunk = False

        if rechunk:
            print(f"Rechunking {keystring}")
            st.copy_to_frontend(runid_str, keystring, 1, rechunk=True)
        else:
            print(
                f"Not rechunking {keystring}. Just copy to the staging directory."
            )
            key = st.key_for(runid_str, keystring)
            src = os.path.join(st.storage[0].path, str(key))
            dest = os.path.join(st.storage[1].path, str(key))
            shutil.copytree(src, dest)

    # reset in case we need to merge more data
    st.storage = [strax.DataDirectory(p) for p in _storage_paths]
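
A sketch of how merge might be invoked after a batch job has written per-chunk output to a staging directory; the run number and path are hypothetical, and st is assumed to be a context set up as in the surrounding processing script:

# Zero-padded run number, matching the straxen run-id convention.
merge('024000', 'peaklets', st, '/scratch/username/strax_data_tmp')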
Example No. 12
def xenonnt_online(output_folder='./strax_data',
                   we_are_the_daq=False,
                   _minimum_run_number=7157,
                   _database_init=True,
                   **kwargs):
    """XENONnT online processing and analysis"""
    context_options = {**straxen.contexts.common_opts, **kwargs}

    st = strax.Context(config=straxen.contexts.xnt_common_config,
                       **context_options)
    st.register_all(have_nT_plugins)
    st.register([straxen.DAQReader, straxen.LEDCalibration])

    st.storage = [
        straxen.RunDB(readonly=not we_are_the_daq,
                      minimum_run_number=_minimum_run_number,
                      runid_field='number',
                      new_data_path=output_folder,
                      rucio_path='/dali/lgrandi/rucio/')
    ] if _database_init else []
    if not we_are_the_daq:
        st.storage += [
            strax.DataDirectory('/dali/lgrandi/xenonnt/raw',
                                readonly=True,
                                take_only=straxen.DAQReader.provides),
            strax.DataDirectory('/dali/lgrandi/xenonnt/processed',
                                readonly=True)
        ]
        if output_folder:
            st.storage.append(strax.DataDirectory(output_folder))

        st.context_config[
            'forbid_creation_of'] = straxen.daqreader.DAQReader.provides + (
                'records', )
    # Only the online monitor backend for the DAQ
    elif _database_init:
        st.storage += [
            straxen.OnlineMonitor(readonly=not we_are_the_daq,
                                  take_only=(
                                      'veto_intervals',
                                      'online_peak_monitor',
                                      'online_veto_monitor',
                                  ))
        ]

    # Remap the data if it was taken before the channel swap (caused by
    # wrongly cabled signal connectors). This affects runs older than run
    # 8797; runs newer than 8796 are not affected. See:
    # https://github.com/XENONnT/straxen/pull/166 and
    # https://xe1t-wiki.lngs.infn.it/doku.php?id=xenon:xenonnt:dsg:daq:sector_swap
    st.set_context_config(
        {'apply_data_function': (straxen.common.remap_old, )})
    return st
Example No. 13
def demo():
    """Return strax context used in the straxen demo notebook"""
    straxen.download_test_data()
    return strax.Context(storage=[
        strax.DataDirectory('./strax_data'),
        strax.DataDirectory('./strax_test_data',
                            deep_scan=True,
                            provide_run_metadata=True,
                            readonly=True)
    ],
                         register=straxen.RecordsFromPax,
                         forbid_creation_of=('raw_records', ),
                         config=dict(**x1t_common_config),
                         **x1t_context_config)
Example No. 14
def MC_test():
    return strax.Context(storage=[
        strax.DataDirectory('./strax_data'),
    ],
                         # register=
                         # register_all = [sx.nEXO_MC_reader,]
                         )
Example No. 15
def test_random_access():
    """Test basic random access
    TODO: test random access when time info is not provided directly
    """
    with tempfile.TemporaryDirectory() as temp_dir:
        # Hack to enable testing if only required chunks are loaded
        Peaks.rechunk_on_save = False

        st = strax.Context(storage=strax.DataDirectory(temp_dir),
                           register=[Records, Peaks])

        with pytest.raises(strax.DataNotAvailable):
            # Time range selection requires data already available
            st.get_df(run_id, 'peaks', time_range=(3, 5))

        st.make(run_id=run_id, targets='peaks')

        # Second part of hack: corrupt data by removing one chunk
        dirname = str(st._key_for(run_id, 'peaks'))
        os.remove(
            os.path.join(temp_dir, dirname,
                         strax.dirname_to_prefix(dirname) + '-000000'))

        with pytest.raises(FileNotFoundError):
            st.get_array(run_id, 'peaks')

        df = st.get_array(run_id, 'peaks', time_range=(3, 5))
        assert len(df) == 2 * recs_per_chunk
        assert df['time'].min() == 3
        assert df['time'].max() == 4
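
Outside tests, the same time_range argument works on the user-facing loaders, provided the data is already stored; a sketch with a hypothetical one-second window (times in nanoseconds since the unix epoch):

start, stop = 1_600_000_000_000_000_000, 1_600_000_001_000_000_000
peaks = st.get_array(run_id, 'peaks', time_range=(start, stop))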
Example No. 16
def _run_plugins(st, make_all=False, run_id=run_id, **process_kwargs):
    """
    Try all plugins (except the DAQReader) for a given context (st) to see if
    we can really push some (empty) data through them, without nasty problems
    such as referring to a non-existent dali folder.
    """

    with tempfile.TemporaryDirectory() as temp_dir:
        st.storage = [strax.DataDirectory(temp_dir)]

        # As we use a temporary directory we should have a clean start
        assert not st.is_stored(run_id, 'raw_records'), 'have RR???'

        # Create event info
        target = 'event_info'
        st.make(run_id=run_id, targets=target, **process_kwargs)

        # The stuff should be there
        assert st.is_stored(run_id, target), f'Could not make {target}'

        if not make_all:
            return

        end_targets = set(st._get_end_targets(st._plugin_class_registry))
        for p in end_targets - set(forbidden_plugins):
            st.make(run_id, p)
        # Now make sure we can get some data for all plugins
        all_datatypes = set(st._plugin_class_registry.keys())
        for p in all_datatypes - set(forbidden_plugins):
            should_be_stored = (st._plugin_class_registry[p].save_when ==
                                strax.SaveWhen.ALWAYS)
            if should_be_stored:
                is_stored = st.is_stored(run_id, p)
                assert is_stored, f"{p} did not save correctly!"
    print("Wonderful, all plugins work (= at least they don't fail), bye bye")
Example No. 17
def test_run_selection():
    mock_rundb = [
        dict(name='0', mode='funny', tags=[dict(name='bad')]),
        dict(name='1',
             mode='nice',
             tags=[dict(name='interesting'),
                   dict(name='bad')]),
        dict(name='2', mode='nice', tags=[dict(name='interesting')])
    ]

    with tempfile.TemporaryDirectory() as temp_dir:
        sf = strax.DataDirectory(path=temp_dir)

        # Write mock runs db
        for d in mock_rundb:
            sf.write_run_metadata(d['name'], d)

        st = strax.Context(storage=sf)
        assert len(st.scan_runs()) == len(mock_rundb)
        assert st.run_metadata('0') == mock_rundb[0]

        assert len(st.select_runs(run_mode='nice')) == 2
        assert len(st.select_runs(include_tags='interesting')) == 2
        assert len(st.select_runs(include_tags='interesting',
                                  exclude_tags='bad')) == 1
        assert len(st.select_runs(include_tags='interesting',
                                  run_mode='nice')) == 2
Example No. 18
def _run_plugins(st, make_all=False, run_id=test_run_id_1T, **process_kwargs):
    """
    Try all plugins (except the DAQReader) for a given context (st) to see if
    we can really push some (empty) data through them, without nasty problems
    such as referring to a non-existent dali folder.
    """

    with tempfile.TemporaryDirectory() as temp_dir:
        st.storage = [strax.DataDirectory(temp_dir)]
        # As we use a temporary directory we should have a clean start
        assert not st.is_stored(run_id, 'raw_records'), 'have RR???'

        if not make_all:
            return

        end_targets = set(st._get_end_targets(st._plugin_class_registry))
        if st.context_config['allow_multiprocess']:
            st.make(run_id,
                    list(end_targets),
                    allow_multiple=True,
                    **process_kwargs)
        else:
            for data_type in end_targets:
                st.make(run_id, data_type)
        # Now make sure we can get some data for all plugins
        all_datatypes = set(st._plugin_class_registry.keys())
        for data_type in all_datatypes:
            savewhen = st._plugin_class_registry[data_type].save_when
            if isinstance(savewhen, (dict, immutabledict)):
                savewhen = savewhen[data_type]
            should_be_stored = savewhen == strax.SaveWhen.ALWAYS
            if should_be_stored:
                is_stored = st.is_stored(run_id, data_type)
                assert is_stored, f"{data_type} did not save correctly!"
    print("Wonderful, all plugins work (= at least they don't fail), bye bye")
Example No. 19
def test_copy_to_frontend():
    """
    Write some data, add a new storage frontend and make sure that our
    copy to that frontend is successful
    """
    # We need two directories for the test
    with tempfile.TemporaryDirectory() as temp_dir:
        with tempfile.TemporaryDirectory() as temp_dir_2:
            context = _get_context(temp_dir)
            # Make some data
            context.get_array(run_id, 'records')
            assert context.is_stored(run_id, 'records')

            # Add the second frontend
            context.storage += [strax.DataDirectory(temp_dir_2)]
            context.copy_to_frontend(run_id,
                                     'records',
                                     target_compressor='lz4')

            # Make sure both frontends have the same data.
            assert os.listdir(temp_dir) == os.listdir(temp_dir_2)
            rec_folder = os.listdir(temp_dir)[0]
            assert (os.listdir(os.path.join(temp_dir, rec_folder)) ==
                    os.listdir(os.path.join(temp_dir_2, rec_folder)))
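
Outside tests, the same call can migrate already-processed data between storage frontends; a sketch, assuming st is a configured context, strax is imported, the run id is hypothetical, and the target directory is writable:

st.storage += [strax.DataDirectory('/path/to/long_term_storage')]
# Copy to the frontend added above (index 1) and recompress with zstd.
st.copy_to_frontend('024000', 'records',
                    target_frontend_id=1,
                    target_compressor='zstd')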
Example No. 20
def _get_context(temp_dir=tempfile.gettempdir()) -> strax.Context:
    """Get a context for the tests below"""
    context = strax.Context(
        storage=strax.DataDirectory(temp_dir, deep_scan=True),
        register=[Records, Peaks],
        use_per_run_defaults=True,
    )
    return context
Example No. 21
def fake_daq():
    """Context for processing fake DAQ data in the current directory"""
    st = strax.Context(
        storage=[strax.DataDirectory('./strax_data'),
                 # Fake DAQ puts run doc JSON in same folder:
                 strax.DataDirectory('./from_fake_daq',
                                     provide_run_metadata=True,
                                     readonly=True)],
        config=dict(daq_input_dir='./from_fake_daq',
                    daq_chunk_duration=int(2e9),
                    daq_compressor='lz4',
                    n_readout_threads=8,
                    daq_overlap_chunk_duration=int(2e8),
                    **x1t_common_config),
        **x1t_context_config)
    st.register(straxen.Fake1TDAQReader)
    return st
Example No. 22
def strax_workshop_dali():
    return strax.Context(
        storage=[
            strax.DataDirectory('/dali/lgrandi/aalbers/strax_data_raw',
                                take_only='raw_records',
                                deep_scan=False,
                                readonly=True),
            strax.DataDirectory('/dali/lgrandi/aalbers/strax_data',
                                readonly=True,
                                provide_run_metadata=False),
            strax.DataDirectory('./strax_data', provide_run_metadata=False)
        ],
        register=sx.plugins.pax_interface.RecordsFromPax,
        # When asking for runs that don't exist, throw an error rather than
        # starting the pax converter
        forbid_creation_of=('raw_records', ),
        **common_opts)
Example No. 23
def setUp(self):
    self.test_run_id = '0'
    self.target = 'records'
    self.path = os.path.join(tempfile.gettempdir(), 'strax_data')
    self.st = strax.Context(use_per_run_defaults=True,
                            register=[Records],
                            storage=[strax.DataDirectory(self.path)])
    assert not self.st.is_stored(self.test_run_id, self.target)
Example No. 24
def xenon1t_analysis(local_only=False):
    """Return strax context used for XENON1T re-analysis with
    the latest strax version
    """
    return strax.Context(
        storage=[
            sx.RunDB(local_only=local_only),
            strax.DataDirectory('/dali/lgrandi/aalbers/strax_data_raw',
                                take_only='raw_records',
                                deep_scan=False,
                                readonly=True),
            strax.DataDirectory('./strax_data'),
        ],
        register=sx.RecordsFromPax,
        # When asking for runs that don't exist, throw an error rather than
        # starting the pax converter
        forbid_creation_of=('raw_records', ),
        **common_opts)
Example No. 25
def xenon1t_simulation(output_folder='./strax_data'):
    import wfsim
    st = strax.Context(storage=strax.DataDirectory(output_folder),
                       config=dict(fax_config='fax_config_1t.json',
                                   detector='XENON1T',
                                   **x1t_common_config),
                       **x1t_context_config)
    st.register(wfsim.RawRecordsFromFax1T)
    return st
Example No. 26
def test_byte_strings_as_run_id():
    with tempfile.TemporaryDirectory() as temp_dir:
        st = strax.Context(storage=strax.DataDirectory(temp_dir,
                                                       deep_scan=True),
                           register=[Records])

        records_bytes = st.get_array(b'0', 'records')
        records = st.get_array('0', 'records')
        assert np.all(records_bytes == records)
Example No. 27
def test_nt_minianalyses():
    """Number of tests to be run on nT like configs"""
    if not straxen.utilix_is_configured():
        return
    with tempfile.TemporaryDirectory() as temp_dir:
        try:
            print("Temporary directory is ", temp_dir)
            os.chdir(temp_dir)
            from .test_plugins import DummyRawRecords, testing_config_nT, test_run_id_nT
            st = straxen.contexts.xenonnt_online()
            rundb = st.storage[0]
            rundb.readonly = True
            st.storage = [rundb, strax.DataDirectory(temp_dir)]

            # We want to test the FDC map that only works with CMT
            test_conf = testing_config_nT.copy()
            del test_conf['fdc_map']

            st.set_config(test_conf)
            st.set_context_config(dict(forbid_creation_of=()))
            st.register(DummyRawRecords)

            rr = st.get_array(test_run_id_nT, 'raw_records')
            st.make(test_run_id_nT, 'records')
            st.make(test_run_id_nT, 'peak_basics')

            st.daq_plot(
                test_run_id_nT,
                time_range=(rr['time'][0], strax.endtime(rr)[-1]),
                vmin=0.1,
                vmax=1,
            )

            st.plot_records_matrix(
                test_run_id_nT,
                time_range=(rr['time'][0], strax.endtime(rr)[-1]),
                vmin=0.1,
                vmax=1,
                group_by='ADC ID',
            )
            plt_clf()

            st.make(test_run_id_nT, 'event_info')
            st.load_corrected_positions(
                test_run_id_nT,
                time_range=(rr['time'][0], strax.endtime(rr)[-1]),
            )
            # This would be nice to add but with empty events it does not work
            # st.event_display(test_run_id_nT,
            #                  time_range=(rr['time'][0],
            #                              strax.endtime(rr)[-1]),
            #                  )
        # On Windows, you cannot delete the current process's
        # working directory, so we have to chdir out first.
        finally:
            os.chdir('..')
Example No. 28
def xenon1t_simulation(output_folder='./strax_data'):
    import wfsim
    st = strax.Context(storage=strax.DataDirectory(output_folder),
                       config=dict(fax_config='fax_config_1t.json',
                                   detector='XENON1T',
                                   check_raw_record_overlaps=False,
                                   **straxen.contexts.x1t_common_config),
                       **straxen.contexts.common_opts)
    st.register(wfsim.RawRecordsFromFax1T)
    return st
Example No. 29
def xenon1t_dali(output_folder='./strax_data', build_lowlevel=False):
    return strax.Context(
        storage=[
            strax.DataDirectory('/dali/lgrandi/xenon1t/strax_converted/raw',
                                take_only='raw_records',
                                provide_run_metadata=True,
                                readonly=True),
            strax.DataDirectory(
                '/dali/lgrandi/xenon1t/strax_converted/processed',
                readonly=True),
            strax.DataDirectory(output_folder)
        ],
        register=straxen.RecordsFromPax,
        config=dict(**x1t_common_config),
        # When asking for runs that don't exist, throw an error rather than
        # starting the pax converter
        forbid_creation_of=(('raw_records', ) if build_lowlevel else
                            ('raw_records', 'records', 'peaklets')),
        **x1t_context_config)
Example No. 30
    def get_plugin(config):
        with tempfile.TemporaryDirectory() as temp_dir:
            context = strax.Context(
                storage=strax.DataDirectory(temp_dir, deep_scan=True),
                config=config,
                register=[DummyPlugin],
                use_per_run_defaults=True,
            )

            return context.get_single_plugin('321', 'dummy_data')
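
A sketch of how such a helper might be used in a test; 'some_dummy_option' is a hypothetical option assumed to be declared on DummyPlugin:

plugin = get_plugin(dict(some_dummy_option=42))
# get_single_plugin returns an initialized plugin with its config resolved.
assert plugin.config['some_dummy_option'] == 42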