def test_digitization_example(trk_geo, tmp_path, assert_root_hash):
    """Run the digitization example and validate its CSV and ROOT outputs."""
    from digitization import configureDigitization

    seq = Sequencer(events=10, numThreads=-1)

    csv_path = tmp_path / "csv"
    root_path = tmp_path / "measurements.root"
    assert not root_path.exists()
    assert not csv_path.exists()

    bfield = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T))
    configureDigitization(trk_geo, bfield, outputDir=tmp_path, s=seq)

    seq.run()

    assert root_path.exists()
    assert csv_path.exists()

    # Three CSV files are produced per event, none of them (near-)empty.
    assert len(list(csv_path.iterdir())) == 3 * seq.config.events
    assert all(f.stat().st_size > 50 for f in csv_path.iterdir())

    # Expected entry counts per tracking volume in the ROOT output.
    expected = {
        8: 407,
        9: 0,
        12: 11,
        13: 375,
        14: 2,
        16: 25,
        17: 146,
        18: 9,
    }
    for vol, entries in expected.items():
        assert_entries(root_path, f"vol{vol}", entries)
    assert_root_hash(root_path.name, root_path)
def test_csv_multitrajectory_writer(tmp_path):
    """Run truth tracking and check the CsvMultiTrajectoryWriter output."""
    detector, trackingGeometry, decorators = GenericDetector.create()
    field = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T))

    from truth_tracking import runTruthTracking

    s = Sequencer(numThreads=1, events=10)
    runTruthTracking(
        trackingGeometry,
        field,
        # Fix: the original wrapped this in Path(str(Path(...))) — a redundant
        # Path -> str -> Path round-trip; the / operator already yields a Path.
        digiConfigFile=Path(__file__).parent.parent.parent.parent
        / "Examples/Algorithms/Digitization/share/default-smearing-config-generic.json",
        outputDir=tmp_path,
        s=s,
    )

    csv_dir = tmp_path / "csv"
    csv_dir.mkdir()
    s.addWriter(
        CsvMultiTrajectoryWriter(
            level=acts.logging.INFO,
            inputTrajectories="trajectories",
            inputMeasurementParticlesMap="measurement_particles_map",
            outputDir=str(csv_dir),
        )
    )
    s.run()
    # Output files are flushed in the writer destructors, so drop the
    # sequencer before inspecting the directory.
    del s

    # One CSV file per event, each with actual content.
    assert len([f for f in csv_dir.iterdir() if f.is_file()]) == 10
    assert all(f.stat().st_size > 20 for f in csv_dir.iterdir())
def test_digitization_example_input(trk_geo, tmp_path, assert_root_hash):
    """Digitization example driven by a pre-generated particle-gun input file."""
    from particle_gun import runParticleGun
    from digitization import configureDigitization

    # Stage 1: generate particles into their own directory.
    ptcl_dir = tmp_path / "ptcl"
    ptcl_dir.mkdir()
    gun_seq = Sequencer(events=20, numThreads=-1)
    runParticleGun(str(ptcl_dir), s=gun_seq)
    gun_seq.run()

    # Stage 2: digitize, reading the particles written above.
    digi_seq = Sequencer(numThreads=-1)
    csv_out = tmp_path / "csv"
    root_out = tmp_path / "measurements.root"
    assert not root_out.exists()
    assert not csv_out.exists()

    assert_root_hash(
        "particles.root",
        ptcl_dir / "particles.root",
    )

    bfield = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T))
    configureDigitization(
        trk_geo,
        bfield,
        outputDir=tmp_path,
        particlesInput=ptcl_dir / "particles.root",
        s=digi_seq,
    )
    digi_seq.run()

    assert root_out.exists()
    assert csv_out.exists()

    # Three CSV files per generated event, none trivially small.
    assert len(list(csv_out.iterdir())) == 3 * gun_seq.config.events
    assert all(f.stat().st_size > 50 for f in csv_out.iterdir())

    # Expected measurement counts per tracking volume.
    for vol, entries in (
        (7, 0),
        (8, 193),
        (9, 0),
        (12, 1),
        (13, 183),
        (14, 6),
        (16, 3),
        (17, 76),
        (18, 10),
    ):
        assert_entries(root_out, f"vol{vol}", entries)
    assert_root_hash(root_out.name, root_out)
def test_truth_tracking(tmp_path, assert_root_hash, revFiltMomThresh):
    """Truth tracking on the OpenDataDetector; checks all writer outputs."""
    from truth_tracking import runTruthTracking

    detector, trackingGeometry, _ = getOpenDataDetector()
    field = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T))
    seq = Sequencer(events=10, numThreads=1)

    # (filename, tree name or None, expected entries; -1 when not checked)
    root_files = [
        ("trackstates_fitter.root", "trackstates", 19),
        ("tracksummary_fitter.root", "tracksummary", 10),
        ("performance_track_finder.root", "track_finder_tracks", 19),
        ("performance_track_fitter.root", None, -1),
    ]
    for name, _, _ in root_files:
        assert not (tmp_path / name).exists()

    runTruthTracking(
        trackingGeometry,
        field,
        digiConfigFile=Path(
            "thirdparty/OpenDataDetector/config/odd-digi-smearing-config.json",
        ),
        outputDir=tmp_path,
        reverseFilteringMomThreshold=revFiltMomThresh,
        s=seq,
    )
    seq.run()
    # Files are only flushed when the sequencer (and its writers) is destroyed.
    del seq

    for name, tree, expected in root_files:
        path = tmp_path / name
        assert path.exists()
        assert path.stat().st_size > 1024
        if tree is not None:
            assert_entries(path, tree, expected)
            assert_root_hash(name, path)
def test_vertex_fitting(tmp_path):
    """Truth vertex fitting on the OpenDataDetector produces fitted vertices."""
    detector, trackingGeometry, decorators = getOpenDataDetector()
    bfield = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T))

    from vertex_fitting import runVertexFitting, VertexFinder

    seq = Sequencer(events=100)
    runVertexFitting(
        bfield,
        vertexFinder=VertexFinder.Truth,
        outputDir=tmp_path,
        s=seq,
    )

    # Verify the fitted-vertices collection exists for every processed event.
    checker = AssertCollectionExistsAlg(["fittedVertices"], name="check_alg")
    seq.addAlgorithm(checker)
    seq.run()
    assert checker.events_seen == seq.config.events
def _factory(s):
    """Attach particle gun, Fatras simulation and digitization to sequencer *s*.

    Uses the enclosing scope's ``ptcl_gun``, ``rng`` and ``trk_geo``.
    Returns the (event generator, simulation, digitization) algorithms.
    """
    evGen = ptcl_gun(s)

    bfield = acts.ConstantBField(acts.Vector3(0, 0, 2 * acts.UnitConstants.T))
    fatras = acts.examples.FatrasSimulation(
        level=acts.logging.INFO,
        inputParticles=evGen.config.outputParticles,
        outputParticlesInitial="particles_initial",
        outputParticlesFinal="particles_final",
        outputSimHits="simhits",
        randomNumbers=rng,
        trackingGeometry=trk_geo,
        magneticField=bfield,
        generateHitsOnSensitive=True,
    )
    s.addAlgorithm(fatras)

    # Digitization: smearing configuration shipped with the generic detector.
    digi_config_path = (
        Path(__file__).parent.parent.parent.parent
        / "Examples/Algorithms/Digitization/share/default-smearing-config-generic.json"
    )
    digiCfg = acts.examples.DigitizationConfig(
        acts.examples.readDigiConfigFromJson(str(digi_config_path)),
        trackingGeometry=trk_geo,
        randomNumbers=rng,
        inputSimHits=fatras.config.outputSimHits,
    )
    digiAlg = acts.examples.DigitizationAlgorithm(digiCfg, acts.logging.INFO)
    s.addAlgorithm(digiAlg)

    return evGen, fatras, digiAlg
def test_algebra():
    """Constructor arity checks for acts.Vector3 and acts.Vector4."""
    # Valid constructions: exactly N scalars or an N-element sequence.
    acts.Vector3(1, 2, 3)
    acts.Vector3([1, 2, 3])
    acts.Vector4(1, 2, 3, 4)
    acts.Vector4([1, 2, 3, 4])

    # Wrong scalar arity must raise TypeError.
    for args in ((1, 2, 3, 4), (1, 2)):
        with pytest.raises(TypeError):
            acts.Vector3(*args)
    with pytest.raises(TypeError):
        acts.Vector4(1, 2, 3)

    # Wrong sequence length — and the empty constructor — must raise too.
    for seq in ([1, 2, 3, 4], [1, 2]):
        with pytest.raises(TypeError):
            acts.Vector3(seq)
    with pytest.raises(TypeError):
        acts.Vector3()
    with pytest.raises(TypeError):
        acts.Vector4([1, 2, 3])
    with pytest.raises(TypeError):
        acts.Vector4()
def test_vertex_fitting_reading(
    tmp_path, ptcl_gun, rng, finder, inputTracks, entries, assert_root_hash
):
    """Vertex fitting reading its input from truth tracking or a particle file.

    Parametrized over the vertex finder, whether a track summary is used as
    input, and the expected entry count of the vertexing performance tree.
    """
    ptcl_file = tmp_path / "particles.root"

    detector, trackingGeometry, decorators = GenericDetector.create()
    field = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T))

    from vertex_fitting import runVertexFitting, VertexFinder

    inputTrackSummary = None
    if inputTracks:
        # Produce a track summary (and particles file) via truth tracking.
        from truth_tracking import runTruthTracking

        s2 = Sequencer(numThreads=1, events=100)
        runTruthTracking(
            trackingGeometry,
            field,
            # Fix: the expression already yields a Path object; the original
            # wrapped it in a redundant Path(...) call.
            digiConfigFile=Path(__file__).parent.parent.parent.parent
            / "Examples/Algorithms/Digitization/share/default-smearing-config-generic.json",
            outputDir=tmp_path,
            s=s2,
        )
        s2.run()
        del s2  # flush output files

        inputTrackSummary = tmp_path / "tracksummary_fitter.root"
        assert inputTrackSummary.exists()
        assert ptcl_file.exists()
    else:
        # No tracks requested: just write particles with the particle gun.
        s0 = Sequencer(events=100, numThreads=1)
        evGen = ptcl_gun(s0)
        s0.addWriter(
            RootParticleWriter(
                level=acts.logging.INFO,
                inputParticles=evGen.config.outputParticles,
                filePath=str(ptcl_file),
            )
        )
        s0.run()
        del s0  # flush output files

        assert ptcl_file.exists()

    finder = VertexFinder[finder]

    s3 = Sequencer(numThreads=1)
    runVertexFitting(
        field,
        inputParticlePath=ptcl_file,
        inputTrackSummary=inputTrackSummary,
        outputDir=tmp_path,
        vertexFinder=finder,
        s=s3,
    )

    alg = AssertCollectionExistsAlg(["fittedVertices"], name="check_alg")
    s3.addAlgorithm(alg)
    s3.run()

    vertexing_file = tmp_path / "performance_vertexing.root"
    assert vertexing_file.exists()
    assert_entries(vertexing_file, "vertexing", entries)
    assert_root_hash(vertexing_file.name, vertexing_file)
def test_ckf_tracks_example(
    tmp_path, assert_root_hash, truthSmeared, truthEstimated, detector
):
    """Run the CKF track-finding example on the generic or ODD detector.

    Parametrized over the seeding mode (truth-smeared / truth-estimated) and
    the detector geometry; validates the ROOT and CSV outputs.
    """
    csv = tmp_path / "csv"
    assert not csv.exists()

    srcdir = Path(__file__).resolve().parent.parent.parent.parent
    if detector == "generic":
        detector, trackingGeometry, decorators = GenericDetector.create()
        geometrySelection = (
            srcdir
            / "Examples/Algorithms/TrackFinding/share/geoSelection-genericDetector.json"
        )
        digiConfigFile = (
            srcdir
            / "Examples/Algorithms/Digitization/share/default-smearing-config-generic.json"
        )
    elif detector == "odd":
        matDeco = acts.IMaterialDecorator.fromFile(
            srcdir / "thirdparty/OpenDataDetector/data/odd-material-maps.root",
            level=acts.logging.INFO,
        )
        detector, trackingGeometry, decorators = getOpenDataDetector(matDeco)
        digiConfigFile = (
            srcdir / "thirdparty/OpenDataDetector/config/odd-digi-smearing-config.json"
        )
        geometrySelection = (
            srcdir / "thirdparty/OpenDataDetector/config/odd-seeding-config.json"
        )
    else:
        raise ValueError(f"Invalid detector {detector}")

    field = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T))
    events = 100
    s = Sequencer(events=events, numThreads=1)  # Digitization is not thread-safe

    # (filename, tree name or None when only existence is checked)
    root_files = [
        ("performance_ckf.root", None),
        ("trackstates_ckf.root", "trackstates"),
        ("tracksummary_ckf.root", "tracksummary"),
    ]
    if not truthSmeared:
        root_files += [
            ("performance_seeding_trees.root", "track_finder_tracks"),
        ]

    for rf, _ in root_files:
        assert not (tmp_path / rf).exists()

    from ckf_tracks import runCKFTracks

    runCKFTracks(
        trackingGeometry,
        decorators,
        field=field,
        outputCsv=True,
        outputDir=tmp_path,
        geometrySelection=geometrySelection,
        digiConfigFile=digiConfigFile,
        truthSmearedSeeded=truthSmeared,
        truthEstimatedSeeded=truthEstimated,
        s=s,
    )

    s.run()
    del s  # files are closed in destructors, not great

    assert csv.exists()
    for rf, tn in root_files:
        rp = tmp_path / rf
        assert rp.exists()
        if tn is not None:
            assert_root_hash(rf, rp)

    assert len([f for f in csv.iterdir() if f.name.endswith("CKFtracks.csv")]) == events
    # Fix: the final assert was dangling, split from its expression; also use
    # a generator instead of materializing a list inside all().
    assert all(f.stat().st_size > 300 for f in csv.iterdir())
def field():
    """Fixture providing a constant 2 T magnetic field along the z axis."""
    bvec = acts.Vector3(0, 0, 2 * u.T)
    return acts.ConstantBField(bvec)
def test_seeding_orthogonal(tmp_path, trk_geo, field, assert_root_hash):
    """Run orthogonal seeding and validate its CSV and ROOT outputs."""
    from seeding import runSeeding, SeedingAlgorithm

    # Fix: the original immediately shadowed the injected `field` fixture with
    # an identical locally-built ConstantBField (0, 0, 2 T); use the fixture
    # value directly instead.

    csv = tmp_path / "csv"
    csv.mkdir()

    seq = Sequencer(events=10, numThreads=1)

    # (filename, tree name or None, expected entries)
    root_files = [
        ("estimatedparams.root", "estimatedparams", 309),
        ("performance_seeding_trees.root", "track_finder_tracks", 309),
        ("performance_seeding_hists.root", None, 0),
        ("particles.root", "particles", seq.config.events),
        ("fatras_particles_final.root", "particles", seq.config.events),
        ("fatras_particles_initial.root", "particles", seq.config.events),
    ]
    for fn, _, _ in root_files:
        fp = tmp_path / fn
        assert not fp.exists()

    assert len(list(csv.iterdir())) == 0

    runSeeding(
        trk_geo,
        field,
        outputDir=str(tmp_path),
        s=seq,
        seedingAlgorithm=SeedingAlgorithm.Orthogonal,
    ).run()
    # Output files are only flushed when the sequencer is destroyed.
    del seq

    for fn, tn, exp_entries in root_files:
        fp = tmp_path / fn
        assert fp.exists()
        assert fp.stat().st_size > 100
        if tn is not None:
            assert_entries(fp, tn, exp_entries)
            assert_root_hash(fn, fp)

    assert_csv_output(csv, "particles")
    assert_csv_output(csv, "particles_final")
    assert_csv_output(csv, "particles_initial")
logger.info("Using vertex finder: %s", vertexFinder.name) return addVertexFitting( s, field, outputDirRoot=outputDir if outputRoot else None, associatedParticles=associatedParticles, vertexFinder=vertexFinder, ) if "__main__" == __name__: detector, trackingGeometry, decorators = GenericDetector.create() field = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T)) inputParticlePath = Path("particles.root") if not inputParticlePath.exists(): inputParticlePath = None inputTrackSummary = None for p in ("tracksummary_fitter.root", "tracksummary_ckf.root"): p = Path(p) if p.exists(): inputTrackSummary = p break runVertexFitting( field, vertexFinder=VertexFinder.Truth,
propagationStepCollection="propagation-steps", recordMaterialInteractions=True, ) s.addAlgorithm(alg) s.addWriter( RootMaterialTrackWriter( level=acts.logging.INFO, collection=alg.config.propagationMaterialCollection, filePath=os.path.join(outputDir, (outputName + ".root")), storeSurface=True, storeVolume=True, )) return s if "__main__" == __name__: matDeco = acts.IMaterialDecorator.fromFile("material-map.json") detector, trackingGeometry, decorators = getOpenDataDetector( mdecorator=matDeco) field = acts.ConstantBField(acts.Vector3(0, 0, 2 * acts.UnitConstants.T)) runMaterialValidation(trackingGeometry, decorators, field, outputDir=os.getcwd()).run()
#!/usr/bin/env python3 import pathlib, acts, acts.examples, itk u = acts.UnitConstants geo_dir = pathlib.Path("acts-detector-examples") outputDir = pathlib.Path.cwd() detector, trackingGeometry, decorators = itk.buildITkGeometry(geo_dir) field = acts.ConstantBField(acts.Vector3(0.0, 0.0, 2.0 * u.T)) rnd = acts.examples.RandomNumbers(seed=42) from particle_gun import addParticleGun, MomentumConfig, EtaConfig, ParticleConfig from fatras import addFatras from digitization import addDigitization from seeding import addSeeding, SeedingAlgorithm, TruthSeedRanges from ckf_tracks import addCKFTracks s = acts.examples.Sequencer(events=100, numThreads=-1) s = addParticleGun( s, MomentumConfig(1.0 * u.GeV, 10.0 * u.GeV, True), EtaConfig(-4.0, 4.0, True), ParticleConfig(1, acts.PdgParticle.eMuon, True), rnd=rnd, ) s = addFatras( s, trackingGeometry, field, outputDirRoot=outputDir, rnd=rnd,
def test_ckf_tracks_example_truth_smeared(tmp_path, assert_root_hash):
    """CKF track finding with truth-smeared seeds on the generic detector."""
    # The example as written is only compatible with the generic detector.
    detector, trackingGeometry, decorators = GenericDetector.create()
    field = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T))

    events = 10
    s = Sequencer(events=events, numThreads=1)  # Digitization is not thread-safe

    # (filename, tree name or None, expected entries or None)
    root_files = [
        ("performance_ckf.root", None, None),
        ("trackstates_ckf.root", "trackstates", 80),
        ("tracksummary_ckf.root", "tracksummary", 10),
    ]

    csv = tmp_path / "csv"
    assert not csv.exists()
    for rf, _, _ in root_files:
        assert not (tmp_path / rf).exists()

    from ckf_tracks import runCKFTracks

    srcdir = Path(__file__).parent.parent.parent.parent
    runCKFTracks(
        trackingGeometry,
        decorators,
        field=field,
        # Fix: the / operator on a Path already yields a Path object, so the
        # original Path(...) wrappers around these expressions were redundant.
        geometrySelection=srcdir
        / "Examples/Algorithms/TrackFinding/share/geoSelection-genericDetector.json",
        digiConfigFile=srcdir
        / "Examples/Algorithms/Digitization/share/default-smearing-config-generic.json",
        outputCsv=True,
        outputDir=tmp_path,
        truthSmearedSeeded=True,
        truthEstimatedSeeded=False,
        s=s,
    )

    s.run()
    del s  # files are closed in destructors, not great

    assert csv.exists()
    for rf, tn, nume in root_files:
        rp = tmp_path / rf
        assert rp.exists()
        if tn is not None and nume is not None:
            assert_entries(rp, tn, nume)
            assert_root_hash(rf, rp)

    assert len([f for f in csv.iterdir() if f.name.endswith("CKFtracks.csv")]) == events
    # Fix: no need to materialize a list inside all().
    assert all(f.stat().st_size > 300 for f in csv.iterdir())
def test_constant_bfield():
    """ConstantBField requires a field vector and is truthy once built."""
    # Constructing without a vector is rejected.
    with pytest.raises(TypeError):
        acts.ConstantBField()
    # With a Vector3 the field constructs and evaluates truthy.
    bfield = acts.ConstantBField(acts.Vector3(1, 2, 3))
    assert bfield