Example #1: test_csv_clusters_reader
def test_csv_clusters_reader(tmp_path, fatras, conf_const, trk_geo, rng):
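    # Run Fatras simulation plus planar-cluster digitization, write the clusters
    # to CSV, then read them back with CsvPlanarClusterReader and check that the
    # expected collections are present in every event.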
    # throwaway sequencer: only needed so the fatras fixture can construct its
    # algorithms; the sequence is rebuilt below with a custom digitization step
    s = Sequencer(numThreads=1, events=10)
    evGen, simAlg, _ = fatras(s)
    s = Sequencer(numThreads=1, events=10)
    s.addReader(evGen)
    s.addAlgorithm(simAlg)
    digiAlg = PlanarSteppingAlgorithm(
        level=acts.logging.WARNING,
        inputSimHits=simAlg.config.outputSimHits,
        outputClusters="clusters",
        outputSourceLinks="sourcelinks",
        outputDigiSourceLinks="digiSourceLink",
        outputMeasurements="measurements",
        outputMeasurementParticlesMap="meas_ptcl_map",
        outputMeasurementSimHitsMap="meas_sh_map",
        trackingGeometry=trk_geo,
        randomNumbers=rng,
        planarModuleStepper=PlanarModuleStepper(),
    )
    s.addAlgorithm(digiAlg)

    out = tmp_path / "csv"
    out.mkdir()

    s.addWriter(
        CsvPlanarClusterWriter(
            level=acts.logging.WARNING,
            outputDir=str(out),
            inputSimHits=simAlg.config.outputSimHits,
            inputClusters=digiAlg.config.outputClusters,
            trackingGeometry=trk_geo,
        ))

    s.run()

    s = Sequencer(numThreads=1)

    s.addReader(
        conf_const(
            CsvPlanarClusterReader,
            level=acts.logging.WARNING,
            outputClusters="clusters",
            inputDir=str(out),
            outputHitIds="hits",
            outputMeasurementParticlesMap="meas_ptcl_map",
            outputSimHits="simhits",
            trackingGeometry=trk_geo,
        ))

    algs = [
        AssertCollectionExistsAlg(k, f"check_alg_{k}", acts.logging.WARNING)
        for k in ("clusters", "simhits", "meas_ptcl_map")
    ]
    for alg in algs:
        s.addAlgorithm(alg)

    s.run()

    for alg in algs:
        assert alg.events_seen == 10
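Each example ends with AssertCollectionExistsAlg, a small helper algorithm from the ACTS Python test suite that counts processed events and asserts that the named whiteboard collections exist. A minimal sketch of such a helper is shown below, assuming the acts.examples.IAlgorithm binding with an execute(self, context) hook and a context.eventStore.exists(...) query; exact names and signatures may differ between ACTS versions.

import acts
import acts.examples


class AssertCollectionExistsAlg(acts.examples.IAlgorithm):
    # Hedged sketch, not the verbatim test-suite implementation.
    def __init__(self, collections, name, level=acts.logging.INFO):
        acts.examples.IAlgorithm.__init__(self, name, level)
        # Accept a single collection name or an iterable of names.
        if isinstance(collections, str):
            collections = [collections]
        self.collections = collections
        self.events_seen = 0

    def execute(self, context):
        # The event store (whiteboard) holds all per-event collections.
        for collection in self.collections:
            assert context.eventStore.exists(collection), f"{collection} does not exist"
        self.events_seen += 1
        return acts.examples.ProcessCode.SUCCESS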
Example #2: test_csv_simhits_reader
def test_csv_simhits_reader(tmp_path, fatras, conf_const):
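    # Round trip: write sim hits with CsvSimHitWriter, then read them back with
    # CsvSimHitReader and verify the "simhits" collection in all 10 events.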
    s = Sequencer(numThreads=1, events=10)
    evGen, simAlg, digiAlg = fatras(s)

    out = tmp_path / "csv"
    out.mkdir()

    s.addWriter(
        CsvSimHitWriter(
            level=acts.logging.INFO,
            inputSimHits=simAlg.config.outputSimHits,
            outputDir=str(out),
            outputStem="hits",
        ))

    s.run()

    s = Sequencer(numThreads=1)

    s.addReader(
        conf_const(
            CsvSimHitReader,
            level=acts.logging.INFO,
            inputDir=str(out),
            inputStem="hits",
            outputSimHits="simhits",
        ))

    alg = AssertCollectionExistsAlg("simhits", "check_alg",
                                    acts.logging.WARNING)
    s.addAlgorithm(alg)

    s.run()

    assert alg.events_seen == 10
Example #3: test_root_material_track_reader
def test_root_material_track_reader(material_recording):
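    # Read back the Geant4 material tracks produced by the material_recording
    # fixture and verify that the "material-tracks" collection is populated.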

    # recreate sequencer

    s = Sequencer(numThreads=1)

    s.addReader(
        RootMaterialTrackReader(
            level=acts.logging.INFO,
            fileList=[str(material_recording / "geant4_material_tracks.root")],
        ))

    alg = AssertCollectionExistsAlg("material-tracks", "check_alg",
                                    acts.logging.WARNING)
    s.addAlgorithm(alg)

    s.run()

    assert alg.events_seen == 2
Example #4: test_event_recording
def test_event_recording(tmp_path):
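    # Run the event_recording.py example script in a subprocess, then read the
    # produced HepMC3 files back with HepMC3AsciiReader and check one event is seen.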

    script = (
        Path(__file__).parent.parent.parent.parent
        / "Examples"
        / "Scripts"
        / "Python"
        / "event_recording.py"
    )
    assert script.exists()

    env = os.environ.copy()
    env["NEVENTS"] = "1"
    subprocess.check_call([str(script)], cwd=tmp_path, env=env)

    from acts.examples.hepmc3 import HepMC3AsciiReader

    out_path = tmp_path / "hepmc3"
    # out_path.mkdir()

    assert len([f for f in out_path.iterdir() if f.name.endswith("events.hepmc3")]) > 0
    assert all([f.stat().st_size > 100 for f in out_path.iterdir()])

    s = Sequencer(numThreads=1)

    s.addReader(
        HepMC3AsciiReader(
            level=acts.logging.INFO,
            inputDir=str(out_path),
            inputStem="events",
            outputEvents="hepmc-events",
        )
    )

    alg = AssertCollectionExistsAlg(
        "hepmc-events", name="check_alg", level=acts.logging.INFO
    )
    s.addAlgorithm(alg)

    s.run()

    assert alg.events_seen == 1
Example #5: test_csv_meas_reader
def test_csv_meas_reader(tmp_path, fatras, trk_geo, conf_const):
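    # Write digitized measurements to CSV, then read them back with
    # CsvMeasurementReader and check measurements, sim-hit map and source links.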
    s = Sequencer(numThreads=1, events=10)
    evGen, simAlg, digiAlg = fatras(s)

    out = tmp_path / "csv"
    out.mkdir()

    config = CsvMeasurementWriter.Config(
        inputMeasurements=digiAlg.config.outputMeasurements,
        inputClusters=digiAlg.config.outputClusters,
        inputSimHits=simAlg.config.outputSimHits,
        inputMeasurementSimHitsMap=digiAlg.config.outputMeasurementSimHitsMap,
        outputDir=str(out),
    )
    s.addWriter(CsvMeasurementWriter(level=acts.logging.INFO, config=config))
    s.run()

    # read back in
    s = Sequencer(numThreads=1)

    s.addReader(
        conf_const(
            CsvMeasurementReader,
            level=acts.logging.WARNING,
            outputMeasurements="measurements",
            outputMeasurementSimHitsMap="simhitsmap",
            outputSourceLinks="sourcelinks",
            inputDir=str(out),
        ))

    algs = [
        AssertCollectionExistsAlg(k, f"check_alg_{k}", acts.logging.WARNING)
        for k in ("measurements", "simhitsmap", "sourcelinks")
    ]
    for alg in algs:
        s.addAlgorithm(alg)

    s.run()

    for alg in algs:
        assert alg.events_seen == 10
Example #6: test_vertex_fitting
def test_vertex_fitting(tmp_path):
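    # Run truth vertex fitting on the OpenDataDetector geometry and verify that
    # the "fittedVertices" collection is produced for every event.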
    detector, trackingGeometry, decorators = getOpenDataDetector()

    field = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T))

    from vertex_fitting import runVertexFitting, VertexFinder

    s = Sequencer(events=100)

    runVertexFitting(
        field,
        vertexFinder=VertexFinder.Truth,
        outputDir=tmp_path,
        s=s,
    )

    alg = AssertCollectionExistsAlg(["fittedVertices"], name="check_alg")
    s.addAlgorithm(alg)

    s.run()
    assert alg.events_seen == s.config.events
Example #7: test_csv_particle_reader
def test_csv_particle_reader(tmp_path, conf_const, ptcl_gun):
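    # Write generated particles to CSV with CsvParticleWriter, then read them back
    # with CsvParticleReader and verify the "input_particles" collection.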
    s = Sequencer(numThreads=1, events=10, logLevel=acts.logging.WARNING)
    evGen = ptcl_gun(s)

    out = tmp_path / "csv"

    out.mkdir()

    s.addWriter(
        conf_const(
            CsvParticleWriter,
            acts.logging.WARNING,
            inputParticles=evGen.config.outputParticles,
            outputStem="particle",
            outputDir=str(out),
        ))

    s.run()

    # reset the sequencer for the read-back pass
    s = Sequencer(numThreads=1, logLevel=acts.logging.WARNING)

    s.addReader(
        conf_const(
            CsvParticleReader,
            acts.logging.WARNING,
            inputDir=str(out),
            inputStem="particle",
            outputParticles="input_particles",
        ))

    alg = AssertCollectionExistsAlg("input_particles", "check_alg",
                                    acts.logging.WARNING)

    s.addAlgorithm(alg)

    s.run()

    assert alg.events_seen == 10
Example #8: test_root_particle_reader
def test_root_particle_reader(tmp_path, conf_const, ptcl_gun):
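    # Round trip through ROOT: write particles with RootParticleWriter, then read
    # them back with RootParticleReader and verify the collection in all 10 events.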
    # need to write out some particles first
    s = Sequencer(numThreads=1, events=10, logLevel=acts.logging.WARNING)
    evGen = ptcl_gun(s)

    file = tmp_path / "particles.root"
    s.addWriter(
        conf_const(
            RootParticleWriter,
            acts.logging.WARNING,
            inputParticles=evGen.config.outputParticles,
            filePath=str(file),
        ))

    s.run()

    del s  # to properly close the ROOT file

    # reset sequencer for reading

    s2 = Sequencer(numThreads=1, logLevel=acts.logging.WARNING)

    s2.addReader(
        conf_const(
            RootParticleReader,
            acts.logging.WARNING,
            particleCollection="input_particles",
            filePath=str(file),
        ))

    alg = AssertCollectionExistsAlg("input_particles", "check_alg",
                                    acts.logging.WARNING)
    s2.addAlgorithm(alg)

    s2.run()

    assert alg.events_seen == 10
Example #9: test_hepmc3_histogram
def test_hepmc3_histogram(hepmc_data, tmp_path):
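    # Read HepMC3 events from the hepmc_data fixture and extract inelastic
    # processes; the nuclear-interaction parameters writer is disabled below
    # because of a known segfault (see acts issue #914).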

    from acts.examples.hepmc3 import (
        HepMC3AsciiReader,
        HepMCProcessExtractor,
    )

    s = Sequencer(numThreads=1)

    s.addReader(
        HepMC3AsciiReader(
            level=acts.logging.INFO,
            inputDir=str(hepmc_data.parent),
            inputStem="events",
            outputEvents="hepmc-events",
        ))

    s.addAlgorithm(
        HepMCProcessExtractor(
            level=acts.logging.INFO,
            inputEvents="hepmc-events",
            extractionProcess="Inelastic",
        ))

    # This segfaults, see https://github.com/acts-project/acts/issues/914
    # s.addWriter(
    #     RootNuclearInteractionParametersWriter(
    #         level=acts.logging.INFO, inputSimulationProcesses="event-fraction"
    #     )
    # )

    alg = AssertCollectionExistsAlg("hepmc-events",
                                    name="check_alg",
                                    level=acts.logging.INFO)
    s.addAlgorithm(alg)

    s.run()
Example #10: test_vertex_fitting_reading
def test_vertex_fitting_reading(
    tmp_path, ptcl_gun, rng, finder, inputTracks, entries, assert_root_hash
):
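    # Depending on the inputTracks parametrization, either run truth tracking to
    # produce a track summary or just write generated particles, then run vertex
    # fitting with the selected finder and validate the performance ROOT output.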

    ptcl_file = tmp_path / "particles.root"

    detector, trackingGeometry, decorators = GenericDetector.create()
    field = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T))

    from vertex_fitting import runVertexFitting, VertexFinder

    inputTrackSummary = None
    if inputTracks:
        from truth_tracking import runTruthTracking

        s2 = Sequencer(numThreads=1, events=100)
        runTruthTracking(
            trackingGeometry,
            field,
            digiConfigFile=Path(
                Path(__file__).parent.parent.parent.parent
                / "Examples/Algorithms/Digitization/share/default-smearing-config-generic.json"
            ),
            outputDir=tmp_path,
            s=s2,
        )
        s2.run()
        del s2
        inputTrackSummary = tmp_path / "tracksummary_fitter.root"
        assert inputTrackSummary.exists()
        assert ptcl_file.exists()
    else:
        s0 = Sequencer(events=100, numThreads=1)
        evGen = ptcl_gun(s0)
        s0.addWriter(
            RootParticleWriter(
                level=acts.logging.INFO,
                inputParticles=evGen.config.outputParticles,
                filePath=str(ptcl_file),
            )
        )
        s0.run()
        del s0

        assert ptcl_file.exists()

    finder = VertexFinder[finder]

    s3 = Sequencer(numThreads=1)

    runVertexFitting(
        field,
        inputParticlePath=ptcl_file,
        inputTrackSummary=inputTrackSummary,
        outputDir=tmp_path,
        vertexFinder=finder,
        s=s3,
    )

    alg = AssertCollectionExistsAlg(["fittedVertices"], name="check_alg")
    s3.addAlgorithm(alg)

    s3.run()

    vertexing_file = tmp_path / "performance_vertexing.root"
    assert vertexing_file.exists()

    assert_entries(vertexing_file, "vertexing", entries)
    assert_root_hash(vertexing_file.name, vertexing_file)
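The final assertions use assert_entries and assert_root_hash, helpers provided by the test suite. A hypothetical stand-in for assert_entries is sketched below, assuming uproot is available in the test environment; the actual helper may be implemented with PyROOT instead.

import uproot  # assumption: uproot is installed in the test environment


def assert_entries(root_file, tree_name, expected):
    # Open the ROOT file and compare the entry count of the named tree.
    with uproot.open(str(root_file)) as f:
        assert f[tree_name].num_entries == expected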