def test_digitization_example(trk_geo, tmp_path, assert_root_hash):
    """Run the digitization example and check its CSV and ROOT outputs."""
    from digitization import configureDigitization

    seq = Sequencer(events=10, numThreads=-1)

    csv_path = tmp_path / "csv"
    meas_file = tmp_path / "measurements.root"
    assert not meas_file.exists()
    assert not csv_path.exists()

    bfield = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T))
    configureDigitization(trk_geo, bfield, outputDir=tmp_path, s=seq)

    seq.run()

    assert meas_file.exists()
    assert csv_path.exists()

    # three CSV files are produced per processed event
    assert len(list(csv_path.iterdir())) == 3 * seq.config.events
    assert all(f.stat().st_size > 50 for f in csv_path.iterdir())

    # expected entry counts per volume tree in the measurement file
    expected = {
        8: 407,
        9: 0,
        12: 11,
        13: 375,
        14: 2,
        16: 25,
        17: 146,
        18: 9,
    }
    for vol_id, n_entries in expected.items():
        assert_entries(meas_file, f"vol{vol_id}", n_entries)
    assert_root_hash(meas_file.name, meas_file)
def test_truth_tracking(tmp_path, assert_root_hash, revFiltMomThresh):
    """Run truth tracking on the OpenDataDetector and verify ROOT outputs."""
    from truth_tracking import runTruthTracking

    detector, trackingGeometry, _ = getOpenDataDetector()

    field = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T))

    s = Sequencer(events=10, numThreads=1)

    # (file name, tree name or None for existence-only, expected entries)
    expected_outputs = [
        ("trackstates_fitter.root", "trackstates", 19),
        ("tracksummary_fitter.root", "tracksummary", 10),
        ("performance_track_finder.root", "track_finder_tracks", 19),
        ("performance_track_fitter.root", None, -1),
    ]
    for name, _, _ in expected_outputs:
        assert not (tmp_path / name).exists()

    runTruthTracking(
        trackingGeometry,
        field,
        digiConfigFile=Path(
            "thirdparty/OpenDataDetector/config/odd-digi-smearing-config.json",
        ),
        outputDir=tmp_path,
        reverseFilteringMomThreshold=revFiltMomThresh,
        s=s,
    )

    s.run()
    # ROOT files are finalized in the writer destructors
    del s

    for name, tree, entries in expected_outputs:
        path = tmp_path / name
        assert path.exists()
        assert path.stat().st_size > 1024
        if tree is not None:
            assert_entries(path, tree, entries)
            assert_root_hash(name, path)
def test_digitization_example_input(trk_geo, tmp_path, assert_root_hash):
    """Generate particles with the particle gun, then feed them to digitization."""
    from particle_gun import runParticleGun
    from digitization import configureDigitization

    # stage 1: produce the input particle file
    ptcl_dir = tmp_path / "ptcl"
    ptcl_dir.mkdir()
    pgs = Sequencer(events=20, numThreads=-1)
    runParticleGun(str(ptcl_dir), s=pgs)
    pgs.run()

    # stage 2: digitize the generated particles
    digi_seq = Sequencer(numThreads=-1)

    csv_path = tmp_path / "csv"
    meas_file = tmp_path / "measurements.root"
    assert not meas_file.exists()
    assert not csv_path.exists()

    assert_root_hash(
        "particles.root",
        ptcl_dir / "particles.root",
    )

    bfield = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T))
    configureDigitization(
        trk_geo,
        bfield,
        outputDir=tmp_path,
        particlesInput=ptcl_dir / "particles.root",
        s=digi_seq,
    )

    digi_seq.run()

    assert meas_file.exists()
    assert csv_path.exists()

    # three CSV files per generated event
    assert len(list(csv_path.iterdir())) == 3 * pgs.config.events
    assert all(f.stat().st_size > 50 for f in csv_path.iterdir())

    # expected entry counts per volume tree in the measurement file
    expected = {
        7: 0,
        8: 193,
        9: 0,
        12: 1,
        13: 183,
        14: 6,
        16: 3,
        17: 76,
        18: 10,
    }
    for vol_id, n_entries in expected.items():
        assert_entries(meas_file, f"vol{vol_id}", n_entries)
    assert_root_hash(meas_file.name, meas_file)
def runParticleGun(outputDir, s=None):
    """Configure a particle-gun event chain with CSV and ROOT output.

    Parameters:
        outputDir: directory that receives the "csv" subdirectory and
            the "particles.root" file.
        s: optional Sequencer to extend; a default one (10 events, all
            available threads) is created when omitted.

    Returns:
        The configured Sequencer, ready to run.
    """
    s = s or Sequencer(events=10, numThreads=-1)

    # Preliminaries
    rnd = RandomNumbers(seed=228)

    # Input: a fixed vertex at the origin with two particles per event
    vtxGen = GaussianVertexGenerator()
    vtxGen.stddev = Vector4(0, 0, 0, 0)

    ptclGen = ParametricParticleGenerator(
        p=(1 * u.GeV, 10 * u.GeV), eta=(-4, 4), numParticles=2
    )

    g = EventGenerator.Generator()
    g.multiplicity = FixedMultiplicityGenerator(n=1)
    g.vertex = vtxGen
    g.particles = ptclGen

    evGen = EventGenerator(
        level=acts.logging.INFO,
        generators=[g],
        outputParticles="particles_input",
        randomNumbers=rnd,
    )

    s.addReader(evGen)

    s.addAlgorithm(
        ParticlesPrinter(
            level=acts.logging.INFO, inputParticles=evGen.config.outputParticles
        )
    )

    csv_dir = os.path.join(outputDir, "csv")
    # race-free creation (also creates missing parents), instead of the
    # check-then-mkdir pattern that can fail between the two calls
    os.makedirs(csv_dir, exist_ok=True)

    s.addWriter(
        CsvParticleWriter(
            level=acts.logging.INFO,
            inputParticles=evGen.config.outputParticles,
            outputDir=csv_dir,
            outputStem="particles",
        ),
    )

    root_file = os.path.join(outputDir, "particles.root")

    s.addWriter(
        RootParticleWriter(
            level=acts.logging.INFO,
            inputParticles=evGen.config.outputParticles,
            filePath=root_file,
        )
    )

    return s
def runParticleGun(outputDir, s=None):
    """Attach a particle gun with CSV and ROOT output to the sequencer."""
    s = s or Sequencer(events=10, numThreads=-1)
    s.config.logLevel = acts.logging.INFO

    out_dir = Path(outputDir)
    configured = addParticleGun(
        s,
        EtaConfig(-4.0, 4.0),
        ParticleConfig(2),
        outputDirCsv=out_dir / "csv",
        outputDirRoot=out_dir,
        printParticles=True,
    )
    return configured
def test_csv_multitrajectory_writer(tmp_path):
    """Fit truth tracks, then dump the trajectories to per-event CSV files."""
    detector, trackingGeometry, decorators = GenericDetector.create()

    field = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T))

    from truth_tracking import runTruthTracking

    s = Sequencer(numThreads=1, events=10)

    runTruthTracking(
        trackingGeometry,
        field,
        digiConfigFile=Path(
            str(
                Path(__file__).parent.parent.parent.parent
                / "Examples/Algorithms/Digitization/share/default-smearing-config-generic.json"
            )
        ),
        outputDir=tmp_path,
        s=s,
    )

    csv_dir = tmp_path / "csv"
    csv_dir.mkdir()
    s.addWriter(
        CsvMultiTrajectoryWriter(
            level=acts.logging.INFO,
            inputTrajectories="trajectories",
            inputMeasurementParticlesMap="measurement_particles_map",
            outputDir=str(csv_dir),
        )
    )

    s.run()
    # CSV files are flushed when the sequencer is destroyed
    del s

    written = [f for f in csv_dir.iterdir() if f.is_file()]
    assert len(written) == 10
    assert all(f.stat().st_size > 20 for f in csv_dir.iterdir())
def test_hepmc3_histogram(hepmc_data, tmp_path):
    """Read HepMC3 events and extract the inelastic interaction processes."""
    from acts.examples.hepmc3 import (
        HepMC3AsciiReader,
        HepMCProcessExtractor,
    )

    s = Sequencer(numThreads=1)

    s.addReader(
        HepMC3AsciiReader(
            level=acts.logging.INFO,
            inputDir=str(hepmc_data.parent),
            inputStem="events",
            outputEvents="hepmc-events",
        )
    )

    s.addAlgorithm(
        HepMCProcessExtractor(
            level=acts.logging.INFO,
            inputEvents="hepmc-events",
            extractionProcess="Inelastic",
        )
    )

    # This segfaults, see https://github.com/acts-project/acts/issues/914
    # s.addWriter(
    #     RootNuclearInteractionParametersWriter(
    #         level=acts.logging.INFO, inputSimulationProcesses="event-fraction"
    #     )
    # )

    checker = AssertCollectionExistsAlg(
        "hepmc-events", name="check_alg", level=acts.logging.INFO
    )
    s.addAlgorithm(checker)

    s.run()
def test_root_material_track_reader(material_recording):
    """Read recorded Geant4 material tracks back from a ROOT file."""
    # recreate sequencer
    s = Sequencer(numThreads=1)

    s.addReader(
        RootMaterialTrackReader(
            level=acts.logging.INFO,
            fileList=[str(material_recording / "geant4_material_tracks.root")],
        )
    )

    checker = AssertCollectionExistsAlg(
        "material-tracks", "check_alg", acts.logging.WARNING
    )
    s.addAlgorithm(checker)

    s.run()

    assert checker.events_seen == 2
def test_fatras(trk_geo, tmp_path, field, assert_root_hash):
    """Run the Fatras fast simulation and validate CSV and ROOT output."""
    from fatras import runFatras

    csv = tmp_path / "csv"
    csv.mkdir()

    nevents = 10

    # (file name, tree name, expected number of entries)
    root_files = [
        ("fatras_particles_final.root", "particles", nevents),
        ("fatras_particles_initial.root", "particles", nevents),
        ("hits.root", "hits", 115),
    ]

    assert len(list(csv.iterdir())) == 0
    for name, _, _ in root_files:
        assert not (tmp_path / name).exists()

    seq = Sequencer(events=nevents)
    runFatras(trk_geo, field, str(tmp_path), s=seq).run()
    # ROOT files are finalized in the writer destructors
    del seq

    assert_csv_output(csv, "particles_final")
    assert_csv_output(csv, "particles_initial")
    assert_csv_output(csv, "hits")
    for name, tree, n_exp in root_files:
        path = tmp_path / name
        assert path.exists()
        assert path.stat().st_size > 2**10 * 10
        assert_entries(path, tree, n_exp)
        assert_root_hash(name, path)
def test_event_recording(tmp_path):
    """Run the event_recording example script and read back its HepMC3 output."""
    script = (
        Path(__file__).parent.parent.parent.parent
        / "Examples"
        / "Scripts"
        / "Python"
        / "event_recording.py"
    )
    assert script.exists()

    # keep the run short: one event via the script's env knob
    env = os.environ.copy()
    env["NEVENTS"] = "1"
    subprocess.check_call([str(script)], cwd=tmp_path, env=env)

    from acts.examples.hepmc3 import HepMC3AsciiReader

    # the script itself creates this output directory
    out_path = tmp_path / "hepmc3"

    # generators instead of throwaway lists (ruff C419); any() short-circuits
    assert any(f.name.endswith("events.hepmc3") for f in out_path.iterdir())
    assert all(f.stat().st_size > 100 for f in out_path.iterdir())

    s = Sequencer(numThreads=1)

    s.addReader(
        HepMC3AsciiReader(
            level=acts.logging.INFO,
            inputDir=str(out_path),
            inputStem="events",
            outputEvents="hepmc-events",
        )
    )

    alg = AssertCollectionExistsAlg(
        "hepmc-events", name="check_alg", level=acts.logging.INFO
    )
    s.addAlgorithm(alg)

    s.run()

    assert alg.events_seen == 1
def runMaterialValidation(
    trackingGeometry,
    decorators,
    field,
    outputDir,
    outputName="propagation-material",
    s=None,
):
    """Propagate straight-line test tracks and record material interactions."""
    s = s or Sequencer(events=1000, numThreads=-1)

    for decorator in decorators:
        s.addContextDecorator(decorator)

    nav = acts.Navigator(trackingGeometry=trackingGeometry)

    stepper = acts.StraightLineStepper()
    # stepper = acts.EigenStepper(field)

    prop = acts.examples.ConcretePropagator(acts.Propagator(stepper, nav))

    rnd = acts.examples.RandomNumbers(seed=42)

    alg = acts.examples.PropagationAlgorithm(
        propagatorImpl=prop,
        level=acts.logging.INFO,
        randomNumberSvc=rnd,
        ntests=1000,
        sterileLogger=True,
        propagationStepCollection="propagation-steps",
        recordMaterialInteractions=True,
    )
    s.addAlgorithm(alg)

    s.addWriter(
        RootMaterialTrackWriter(
            level=acts.logging.INFO,
            collection=alg.config.propagationMaterialCollection,
            filePath=os.path.join(outputDir, outputName + ".root"),
            storeSurface=True,
            storeVolume=True,
        )
    )

    return s
def test_particle_gun(tmp_path, assert_root_hash):
    """Run the particle gun example and verify CSV and ROOT outputs."""
    from particle_gun import runParticleGun

    s = Sequencer(events=20, numThreads=-1)

    csv_dir = tmp_path / "csv"
    root_file = tmp_path / "particles.root"

    assert not csv_dir.exists()
    assert not root_file.exists()

    runParticleGun(str(tmp_path), s=s).run()

    assert csv_dir.exists()
    assert root_file.exists()

    # generators instead of throwaway lists (ruff C419); any() short-circuits
    assert any(f.name.endswith("particles.csv") for f in csv_dir.iterdir())
    assert all(f.stat().st_size > 100 for f in csv_dir.iterdir())

    assert root_file.stat().st_size > 200
    assert_entries(root_file, "particles", 20)
    assert_root_hash(root_file.name, root_file)
def test_csv_simhits_writer(tmp_path, fatras, conf_const):
    """Write simulated hits to CSV, one file per event."""
    s = Sequencer(numThreads=1, events=10)
    evGen, simAlg, digiAlg = fatras(s)

    out = tmp_path / "csv"
    out.mkdir()

    s.addWriter(
        conf_const(
            CsvSimHitWriter,
            level=acts.logging.INFO,
            inputSimHits=simAlg.config.outputSimHits,
            outputDir=str(out),
            outputStem="hits",
        )
    )

    s.run()

    written = [f for f in out.iterdir() if f.is_file()]
    assert len(written) == s.config.events
    assert all(f.stat().st_size > 200 for f in out.iterdir())
def test_root_simhits_writer(tmp_path, fatras, conf_const, assert_root_hash):
    """Write simulated hits to a ROOT file and check its hash."""
    s = Sequencer(numThreads=1, events=10)
    evGen, simAlg, digiAlg = fatras(s)

    target = tmp_path / "meas.root"
    assert not target.exists()

    s.addWriter(
        conf_const(
            RootSimHitWriter,
            level=acts.logging.INFO,
            inputSimHits=simAlg.config.outputSimHits,
            filePath=str(target),
        )
    )

    s.run()

    assert target.exists()
    assert target.stat().st_size > 2e4
    assert_root_hash(target.name, target)
def test_vertex_fitting(tmp_path):
    """Run truth vertex fitting on the ODD and check vertices are produced."""
    detector, trackingGeometry, decorators = getOpenDataDetector()

    field = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T))

    from vertex_fitting import runVertexFitting, VertexFinder

    s = Sequencer(events=100)

    runVertexFitting(
        field,
        vertexFinder=VertexFinder.Truth,
        outputDir=tmp_path,
        s=s,
    )

    checker = AssertCollectionExistsAlg(["fittedVertices"], name="check_alg")
    s.addAlgorithm(checker)

    s.run()

    assert checker.events_seen == s.config.events
def test_csv_particle_writer(tmp_path, conf_const, ptcl_gun):
    """Write generated particles to CSV, one file per event."""
    s = Sequencer(numThreads=1, events=10)
    evGen = ptcl_gun(s)

    out = tmp_path / "csv"
    out.mkdir()

    s.addWriter(
        conf_const(
            CsvParticleWriter,
            acts.logging.INFO,
            inputParticles=evGen.config.outputParticles,
            outputStem="particle",
            outputDir=str(out),
        )
    )

    s.run()

    per_event = [f for f in out.iterdir() if f.is_file()]
    assert len(per_event) == s.config.events
    assert all(f.stat().st_size > 200 for f in out.iterdir())
def test_root_particle_writer(tmp_path, conf_const, ptcl_gun, assert_root_hash):
    """Write generated particles to a ROOT file and check its hash."""
    s = Sequencer(numThreads=1, events=10)
    evGen = ptcl_gun(s)

    target = tmp_path / "particles.root"
    assert not target.exists()

    s.addWriter(
        conf_const(
            RootParticleWriter,
            acts.logging.INFO,
            inputParticles=evGen.config.outputParticles,
            filePath=str(target),
        )
    )

    s.run()

    assert target.exists()
    assert target.stat().st_size > 1024 * 10
    assert_root_hash(target.name, target)
def test_root_meas_writer(tmp_path, fatras, trk_geo, assert_root_hash):
    """Digitize hits and write the measurements to a ROOT file."""
    s = Sequencer(numThreads=1, events=10)
    evGen, simAlg, digiAlg = fatras(s)

    out = tmp_path / "meas.root"
    assert not out.exists()

    cfg = RootMeasurementWriter.Config(
        inputMeasurements=digiAlg.config.outputMeasurements,
        inputClusters=digiAlg.config.outputClusters,
        inputSimHits=simAlg.config.outputSimHits,
        inputMeasurementSimHitsMap=digiAlg.config.outputMeasurementSimHitsMap,
        filePath=str(out),
        trackingGeometry=trk_geo,
    )
    # carry the bound-parameter indices over from the digitization setup
    cfg.addBoundIndicesFromDigiConfig(digiAlg.config)
    s.addWriter(RootMeasurementWriter(level=acts.logging.INFO, config=cfg))

    s.run()

    assert out.exists()
    assert out.stat().st_size > 40000
    assert_root_hash(out.name, out)
def test_csv_meas_writer(tmp_path, fatras, trk_geo, conf_const):
    """Digitize hits and write the measurements to per-event CSV files."""
    s = Sequencer(numThreads=1, events=10)
    evGen, simAlg, digiAlg = fatras(s)

    out = tmp_path / "csv"
    out.mkdir()

    s.addWriter(
        conf_const(
            CsvMeasurementWriter,
            level=acts.logging.INFO,
            inputMeasurements=digiAlg.config.outputMeasurements,
            inputClusters=digiAlg.config.outputClusters,
            inputSimHits=simAlg.config.outputSimHits,
            inputMeasurementSimHitsMap=digiAlg.config.outputMeasurementSimHitsMap,
            outputDir=str(out),
        )
    )

    s.run()

    # three CSV files are written per event
    written = [f for f in out.iterdir() if f.is_file()]
    assert len(written) == s.config.events * 3
    assert all(f.stat().st_size > 10 for f in out.iterdir())
def runMaterialMapping(
    trackingGeometry,
    decorators,
    outputDir,
    inputDir,
    mapName="material-map",
    mapSurface=True,
    mapVolume=True,
    s=None,
):
    """Configure a material-mapping job on the given sequencer.

    Reads Geant4 material tracks from ``inputDir``, maps them onto the
    tracking geometry (surfaces and/or volumes), and writes a JSON
    material map plus a ROOT file of mapped tracks to ``outputDir``.

    Parameters:
        trackingGeometry: geometry the material is mapped onto.
        decorators: context decorators, applied both to the sequencer
            and once eagerly to build the geometry/field contexts.
        outputDir: directory for the JSON map and the tracks ROOT file.
        inputDir: directory containing "geant4_material_tracks.root".
        mapName: base name of the output files.
        mapSurface: enable the surface material mapper.
        mapVolume: enable the volume material mapper.
        s: optional Sequencer to extend; a default one is created
            when omitted.

    Returns:
        The configured Sequencer.
    """
    s = s or Sequencer(numThreads=1)

    for decorator in decorators:
        s.addContextDecorator(decorator)

    # Decorate a throwaway context up front so geoContext/magFieldContext
    # are valid when the mapping config is built below.
    wb = WhiteBoard(acts.logging.INFO)

    context = AlgorithmContext(0, 0, wb)

    for decorator in decorators:
        assert decorator.decorate(context) == ProcessCode.SUCCESS

    # Read material step information from a ROOT TTree
    s.addReader(
        RootMaterialTrackReader(
            level=acts.logging.INFO,
            collection="material-tracks",
            fileList=[os.path.join(inputDir, "geant4_material_tracks.root")],
        )
    )

    stepper = StraightLineStepper()

    mmAlgCfg = MaterialMapping.Config(context.geoContext, context.magFieldContext)
    mmAlgCfg.trackingGeometry = trackingGeometry
    mmAlgCfg.collection = "material-tracks"

    if mapSurface:
        # Surface mapper needs full navigation (sensitive/material/passive)
        navigator = Navigator(
            trackingGeometry=trackingGeometry,
            resolveSensitive=True,
            resolveMaterial=True,
            resolvePassive=True,
        )
        propagator = Propagator(stepper, navigator)
        mapper = SurfaceMaterialMapper(level=acts.logging.INFO, propagator=propagator)
        mmAlgCfg.materialSurfaceMapper = mapper

    if mapVolume:
        # Volume mapper only needs plain navigation
        navigator = Navigator(
            trackingGeometry=trackingGeometry,
        )
        propagator = Propagator(stepper, navigator)
        mapper = VolumeMaterialMapper(
            level=acts.logging.INFO, propagator=propagator, mappingStep=999
        )
        mmAlgCfg.materialVolumeMapper = mapper

    jmConverterCfg = MaterialMapJsonConverter.Config(
        processSensitives=True,
        processApproaches=True,
        processRepresenting=True,
        processBoundaries=True,
        processVolumes=True,
        context=context.geoContext,
    )

    # JSON writer for the resulting material map
    jmw = JsonMaterialWriter(
        level=acts.logging.VERBOSE,
        converterCfg=jmConverterCfg,
        fileName=os.path.join(outputDir, mapName),
        writeFormat=JsonFormat.Json,
    )

    # ROOT writer for the mapped material tracks
    s.addWriter(
        RootMaterialTrackWriter(
            level=acts.logging.INFO,
            collection=mmAlgCfg.mappingMaterialCollection,
            filePath=os.path.join(
                outputDir,
                mapName + "_tracks.root",
            ),
            storeSurface=True,
            storeVolume=True,
        )
    )

    mmAlgCfg.materialWriters = [jmw]

    s.addAlgorithm(MaterialMapping(level=acts.logging.INFO, config=mmAlgCfg))

    return s
def test_volume_material_mapping(material_recording, tmp_path, assert_root_hash):
    """Map volume material onto the ODD, then validate the produced map.

    Two stages: (1) run material mapping with a volume-map geometry
    configuration and check the JSON map and mapped-tracks ROOT file,
    (2) reload the detector with the produced map and run the material
    validation propagation.
    """
    map_file = tmp_path / "material-map-volume_tracks.root"
    assert not map_file.exists()

    s = Sequencer(numThreads=1)

    geo_map = Path(__file__).parent / "geometry-volume-map.json"
    assert geo_map.exists()
    assert geo_map.stat().st_size > 10
    # sanity check that the geometry configuration is valid JSON
    with geo_map.open() as fh:
        assert json.load(fh)

    detector, trackingGeometry, decorators = getOpenDataDetector(
        mdecorator=acts.IMaterialDecorator.fromFile(geo_map)
    )

    from material_mapping import runMaterialMapping

    runMaterialMapping(
        trackingGeometry,
        decorators,
        mapName="material-map-volume",
        outputDir=str(tmp_path),
        inputDir=material_recording,
        s=s,
    )

    s.run()

    # MaterialMapping alg only writes on destruct.
    # See https://github.com/acts-project/acts/issues/881
    del s

    mat_file = tmp_path / "material-map-volume.json"
    assert mat_file.exists()
    assert mat_file.stat().st_size > 10

    with mat_file.open() as fh:
        assert json.load(fh)

    assert map_file.exists()
    assert_entries(map_file, "material-tracks", 200)
    assert_root_hash(map_file.name, map_file)

    val_file = tmp_path / "propagation-volume-material.root"
    assert not val_file.exists()

    # test the validation as well

    # we need to destroy the ODD to reload with material
    # del trackingGeometry
    # del detector

    detector, trackingGeometry, decorators = getOpenDataDetector(
        mdecorator=acts.IMaterialDecorator.fromFile(mat_file)
    )

    from material_validation import runMaterialValidation

    s = Sequencer(events=10, numThreads=1)

    # null field: only the material along the path matters here
    field = acts.NullBField()

    runMaterialValidation(
        trackingGeometry,
        decorators,
        field,
        outputDir=str(tmp_path),
        outputName="propagation-volume-material",
        s=s,
    )

    s.run()

    assert val_file.exists()

    assert_root_hash(val_file.name, val_file)
def test_csv_meas_reader(tmp_path, fatras, trk_geo, conf_const):
    """Round-trip measurements through the CSV writer and reader."""
    s = Sequencer(numThreads=1, events=10)
    evGen, simAlg, digiAlg = fatras(s)

    out = tmp_path / "csv"
    out.mkdir()

    writer_cfg = CsvMeasurementWriter.Config(
        inputMeasurements=digiAlg.config.outputMeasurements,
        inputClusters=digiAlg.config.outputClusters,
        inputSimHits=simAlg.config.outputSimHits,
        inputMeasurementSimHitsMap=digiAlg.config.outputMeasurementSimHitsMap,
        outputDir=str(out),
    )
    s.addWriter(CsvMeasurementWriter(level=acts.logging.INFO, config=writer_cfg))
    s.run()

    # read back in with a fresh sequencer
    s = Sequencer(numThreads=1)

    s.addReader(
        conf_const(
            CsvMeasurementReader,
            level=acts.logging.WARNING,
            outputMeasurements="measurements",
            outputMeasurementSimHitsMap="simhitsmap",
            outputSourceLinks="sourcelinks",
            inputDir=str(out),
        )
    )

    checkers = [
        AssertCollectionExistsAlg(key, f"check_alg_{key}", acts.logging.WARNING)
        for key in ("measurements", "simhitsmap", "sourcelinks")
    ]
    for checker in checkers:
        s.addAlgorithm(checker)

    s.run()

    for checker in checkers:
        assert checker.events_seen == 10
def seq():
    """Provide a fresh single-threaded sequencer configured for 10 events."""
    sequencer = Sequencer(events=10, numThreads=1)
    return sequencer
def test_csv_simhits_reader(tmp_path, fatras, conf_const):
    """Round-trip simulated hits through the CSV writer and reader."""
    s = Sequencer(numThreads=1, events=10)
    evGen, simAlg, digiAlg = fatras(s)

    out = tmp_path / "csv"
    out.mkdir()

    s.addWriter(
        CsvSimHitWriter(
            level=acts.logging.INFO,
            inputSimHits=simAlg.config.outputSimHits,
            outputDir=str(out),
            outputStem="hits",
        )
    )

    s.run()

    # read the hits back with a fresh sequencer
    s = Sequencer(numThreads=1)

    s.addReader(
        conf_const(
            CsvSimHitReader,
            level=acts.logging.INFO,
            inputDir=str(out),
            inputStem="hits",
            outputSimHits="simhits",
        )
    )

    checker = AssertCollectionExistsAlg("simhits", "check_alg", acts.logging.WARNING)
    s.addAlgorithm(checker)

    s.run()

    assert checker.events_seen == 10
def test_csv_particle_reader(tmp_path, conf_const, ptcl_gun):
    """Round-trip particles through the CSV writer and reader."""
    s = Sequencer(numThreads=1, events=10, logLevel=acts.logging.WARNING)
    evGen = ptcl_gun(s)

    out = tmp_path / "csv"
    out.mkdir()

    s.addWriter(
        conf_const(
            CsvParticleWriter,
            acts.logging.WARNING,
            inputParticles=evGen.config.outputParticles,
            outputStem="particle",
            outputDir=str(out),
        )
    )

    s.run()

    # reset the seeder
    s = Sequencer(numThreads=1, logLevel=acts.logging.WARNING)

    s.addReader(
        conf_const(
            CsvParticleReader,
            acts.logging.WARNING,
            inputDir=str(out),
            inputStem="particle",
            outputParticles="input_particles",
        )
    )

    checker = AssertCollectionExistsAlg(
        "input_particles", "check_alg", acts.logging.WARNING
    )
    s.addAlgorithm(checker)

    s.run()

    assert checker.events_seen == 10
def test_ckf_tracks_example(
    tmp_path, assert_root_hash, truthSmeared, truthEstimated, detector
):
    """Run the CKF track-finding example for a given detector/seeding mode.

    Parametrized over the detector ("generic" or "odd") and over
    truth-smeared vs. truth-estimated seeding; checks the CSV track
    output and the ROOT file hashes.
    """
    csv = tmp_path / "csv"
    assert not csv.exists()

    # repository root, resolved relative to this test file
    srcdir = Path(__file__).resolve().parent.parent.parent.parent
    if detector == "generic":
        detector, trackingGeometry, decorators = GenericDetector.create()
        geometrySelection = (
            srcdir
            / "Examples/Algorithms/TrackFinding/share/geoSelection-genericDetector.json"
        )
        digiConfigFile = (
            srcdir
            / "Examples/Algorithms/Digitization/share/default-smearing-config-generic.json"
        )
    elif detector == "odd":
        matDeco = acts.IMaterialDecorator.fromFile(
            srcdir / "thirdparty/OpenDataDetector/data/odd-material-maps.root",
            level=acts.logging.INFO,
        )
        detector, trackingGeometry, decorators = getOpenDataDetector(matDeco)
        digiConfigFile = (
            srcdir / "thirdparty/OpenDataDetector/config/odd-digi-smearing-config.json"
        )
        geometrySelection = (
            srcdir / "thirdparty/OpenDataDetector/config/odd-seeding-config.json"
        )
    else:
        raise ValueError(f"Invalid detector {detector}")

    field = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T))
    events = 100
    s = Sequencer(events=events, numThreads=1)  # Digitization is not thread-safe

    # (file name, tree name or None when only existence is checked)
    root_files = [
        (
            "performance_ckf.root",
            None,
        ),
        (
            "trackstates_ckf.root",
            "trackstates",
        ),
        (
            "tracksummary_ckf.root",
            "tracksummary",
        ),
    ]

    # the seeding performance file only exists when seeds are estimated
    if not truthSmeared:
        root_files += [
            (
                "performance_seeding_trees.root",
                "track_finder_tracks",
            ),
        ]

    for rf, _ in root_files:
        assert not (tmp_path / rf).exists()

    from ckf_tracks import runCKFTracks

    runCKFTracks(
        trackingGeometry,
        decorators,
        field=field,
        outputCsv=True,
        outputDir=tmp_path,
        geometrySelection=geometrySelection,
        digiConfigFile=digiConfigFile,
        truthSmearedSeeded=truthSmeared,
        truthEstimatedSeeded=truthEstimated,
        s=s,
    )

    s.run()

    del s  # files are closed in destructors, not great

    assert csv.exists()
    for rf, tn in root_files:
        rp = tmp_path / rf
        assert rp.exists()
        if tn is not None:
            assert_root_hash(rf, rp)

    # one CSV track file per event
    assert len([f for f in csv.iterdir() if f.name.endswith("CKFtracks.csv")]) == events
    assert all([f.stat().st_size > 300 for f in csv.iterdir()])
def test_csv_clusters_reader(tmp_path, fatras, conf_const, trk_geo, rng): s = Sequencer(numThreads=1, events=10) # we're not going to use this one evGen, simAlg, _ = fatras(s) s = Sequencer(numThreads=1, events=10) s.addReader(evGen) s.addAlgorithm(simAlg) digiAlg = PlanarSteppingAlgorithm( level=acts.logging.WARNING, inputSimHits=simAlg.config.outputSimHits, outputClusters="clusters", outputSourceLinks="sourcelinks", outputDigiSourceLinks="digiSourceLink", outputMeasurements="measurements", outputMeasurementParticlesMap="meas_ptcl_map", outputMeasurementSimHitsMap="meas_sh_map", trackingGeometry=trk_geo, randomNumbers=rng, planarModuleStepper=PlanarModuleStepper(), ) s.addAlgorithm(digiAlg) out = tmp_path / "csv" out.mkdir() s.addWriter( CsvPlanarClusterWriter( level=acts.logging.WARNING, outputDir=str(out), inputSimHits=simAlg.config.outputSimHits, inputClusters=digiAlg.config.outputClusters, trackingGeometry=trk_geo, )) s.run() s = Sequencer(numThreads=1) s.addReader( conf_const( CsvPlanarClusterReader, level=acts.logging.WARNING, outputClusters="clusters", inputDir=str(out), outputHitIds="hits", outputMeasurementParticlesMap="meas_ptcl_map", outputSimHits="simhits", trackingGeometry=trk_geo, )) algs = [ AssertCollectionExistsAlg(k, f"check_alg_{k}", acts.logging.WARNING) for k in ("clusters", "simhits", "meas_ptcl_map") ] for alg in algs: s.addAlgorithm(alg) s.run() for alg in algs: assert alg.events_seen == 10
def test_seeding_orthogonal(tmp_path, trk_geo, field, assert_root_hash):
    """Run the seeding example with the orthogonal finder and validate outputs."""
    from seeding import runSeeding, SeedingAlgorithm

    field = acts.ConstantBField(acts.Vector3(0, 0, 2 * acts.UnitConstants.T))

    csv = tmp_path / "csv"
    csv.mkdir()

    seq = Sequencer(events=10, numThreads=1)

    # (file name, tree name or None for existence-only, expected entries)
    root_files = [
        (
            "estimatedparams.root",
            "estimatedparams",
            309,
        ),
        (
            "performance_seeding_trees.root",
            "track_finder_tracks",
            309,
        ),
        (
            "performance_seeding_hists.root",
            None,
            0,
        ),
        (
            "particles.root",
            "particles",
            seq.config.events,
        ),
        (
            "fatras_particles_final.root",
            "particles",
            seq.config.events,
        ),
        (
            "fatras_particles_initial.root",
            "particles",
            seq.config.events,
        ),
    ]

    for fn, _, _ in root_files:
        fp = tmp_path / fn
        assert not fp.exists()

    assert len(list(csv.iterdir())) == 0

    runSeeding(
        trk_geo,
        field,
        outputDir=str(tmp_path),
        s=seq,
        seedingAlgorithm=SeedingAlgorithm.Orthogonal,
    ).run()

    # ROOT files are finalized in the writer destructors
    del seq

    for fn, tn, exp_entries in root_files:
        fp = tmp_path / fn
        assert fp.exists()
        assert fp.stat().st_size > 100

        if tn is not None:
            assert_entries(fp, tn, exp_entries)
            assert_root_hash(fn, fp)

    assert_csv_output(csv, "particles")
    assert_csv_output(csv, "particles_final")
    assert_csv_output(csv, "particles_initial")
def test_vertex_fitting_reading(
    tmp_path, ptcl_gun, rng, finder, inputTracks, entries, assert_root_hash
):
    """Run vertex fitting from pre-written input files.

    Parametrized over the vertex finder and over whether fitted-track
    summaries (``inputTracks``) or raw generated particles are used as
    input; ``entries`` is the expected number of vertexing entries.
    """
    ptcl_file = tmp_path / "particles.root"

    detector, trackingGeometry, decorators = GenericDetector.create()
    field = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T))

    from vertex_fitting import runVertexFitting, VertexFinder

    inputTrackSummary = None
    if inputTracks:
        # produce the particle file and a track summary via truth tracking
        from truth_tracking import runTruthTracking

        s2 = Sequencer(numThreads=1, events=100)
        runTruthTracking(
            trackingGeometry,
            field,
            digiConfigFile=Path(
                Path(__file__).parent.parent.parent.parent
                / "Examples/Algorithms/Digitization/share/default-smearing-config-generic.json"
            ),
            outputDir=tmp_path,
            s=s2,
        )
        s2.run()
        # output files are closed by the writer destructors
        del s2

        inputTrackSummary = tmp_path / "tracksummary_fitter.root"
        assert inputTrackSummary.exists()
        assert ptcl_file.exists()
    else:
        # only write out generated particles
        s0 = Sequencer(events=100, numThreads=1)
        evGen = ptcl_gun(s0)
        s0.addWriter(
            RootParticleWriter(
                level=acts.logging.INFO,
                inputParticles=evGen.config.outputParticles,
                filePath=str(ptcl_file),
            )
        )
        s0.run()
        del s0

        assert ptcl_file.exists()

    # map the parametrized finder name onto the enum
    finder = VertexFinder[finder]

    s3 = Sequencer(numThreads=1)

    runVertexFitting(
        field,
        inputParticlePath=ptcl_file,
        inputTrackSummary=inputTrackSummary,
        outputDir=tmp_path,
        vertexFinder=finder,
        s=s3,
    )

    alg = AssertCollectionExistsAlg(["fittedVertices"], name="check_alg")
    s3.addAlgorithm(alg)

    s3.run()

    vertexing_file = tmp_path / "performance_vertexing.root"
    assert vertexing_file.exists()

    assert_entries(vertexing_file, "vertexing", entries)
    assert_root_hash(vertexing_file.name, vertexing_file)
def test_root_particle_reader(tmp_path, conf_const, ptcl_gun):
    """Round-trip particles through the ROOT writer and reader."""
    # need to write out some particles first
    writer_seq = Sequencer(numThreads=1, events=10, logLevel=acts.logging.WARNING)
    evGen = ptcl_gun(writer_seq)

    particle_file = tmp_path / "particles.root"
    writer_seq.addWriter(
        conf_const(
            RootParticleWriter,
            acts.logging.WARNING,
            inputParticles=evGen.config.outputParticles,
            filePath=str(particle_file),
        )
    )

    writer_seq.run()
    # to properly close the root file
    del writer_seq

    # fresh sequencer for reading back
    s2 = Sequencer(numThreads=1, logLevel=acts.logging.WARNING)

    s2.addReader(
        conf_const(
            RootParticleReader,
            acts.logging.WARNING,
            particleCollection="input_particles",
            filePath=str(particle_file),
        )
    )

    checker = AssertCollectionExistsAlg(
        "input_particles", "check_alg", acts.logging.WARNING
    )
    s2.addAlgorithm(checker)

    s2.run()

    assert checker.events_seen == 10