def test_csv_multitrajectory_writer(tmp_path):
    """Run truth tracking, attach a CSV multi-trajectory writer, and verify
    that one non-trivial CSV file per event is produced.

    Fix: the digitization config path was built as ``Path(str(Path(...) / ...))``;
    the ``str``/``Path`` round-trip is redundant since ``/`` already yields a
    ``Path``.
    """
    detector, trackingGeometry, decorators = GenericDetector.create()
    field = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T))

    from truth_tracking import runTruthTracking

    # Digitization is not thread-safe, hence a single-threaded sequencer
    s = Sequencer(numThreads=1, events=10)
    runTruthTracking(
        trackingGeometry,
        field,
        digiConfigFile=Path(__file__).parent.parent.parent.parent
        / "Examples/Algorithms/Digitization/share/default-smearing-config-generic.json",
        outputDir=tmp_path,
        s=s,
    )

    csv_dir = tmp_path / "csv"
    csv_dir.mkdir()
    s.addWriter(
        CsvMultiTrajectoryWriter(
            level=acts.logging.INFO,
            inputTrajectories="trajectories",
            inputMeasurementParticlesMap="measurement_particles_map",
            outputDir=str(csv_dir),
        )
    )
    s.run()
    del s  # files are closed in destructors, not great

    # one CSV per event, each with actual content beyond a bare header
    assert len([f for f in csv_dir.iterdir() if f.is_file()]) == 10
    assert all(f.stat().st_size > 20 for f in csv_dir.iterdir())
def test_vertex_fitting_reading(
    tmp_path, ptcl_gun, rng, finder, inputTracks, entries, assert_root_hash
):
    """Run vertex fitting either from fitted track summaries or directly from
    generated particles, and validate the performance output.

    Fix: ``digiConfigFile`` was wrapped as ``Path(Path(__file__)... / ...)`` —
    the outer ``Path(...)`` is redundant since the inner expression is already
    a ``Path``.

    Parameters (pytest fixtures / parametrization):
        tmp_path: per-test output directory.
        ptcl_gun: fixture adding a particle gun to a sequencer.
        rng, finder, inputTracks, entries: parametrized inputs; ``inputTracks``
            selects the track-summary path, ``entries`` is the expected number
            of entries in the vertexing tree.
        assert_root_hash: fixture checking a ROOT file against a known hash.
    """
    ptcl_file = tmp_path / "particles.root"

    detector, trackingGeometry, decorators = GenericDetector.create()
    field = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T))

    from vertex_fitting import runVertexFitting, VertexFinder

    inputTrackSummary = None

    if inputTracks:
        # Produce a track summary via truth tracking first; vertexing will
        # read tracks from it.
        from truth_tracking import runTruthTracking

        s2 = Sequencer(numThreads=1, events=100)
        runTruthTracking(
            trackingGeometry,
            field,
            digiConfigFile=Path(__file__).parent.parent.parent.parent
            / "Examples/Algorithms/Digitization/share/default-smearing-config-generic.json",
            outputDir=tmp_path,
            s=s2,
        )
        s2.run()
        del s2  # files are closed in destructors, not great

        inputTrackSummary = tmp_path / "tracksummary_fitter.root"
        assert inputTrackSummary.exists()
        assert ptcl_file.exists()
    else:
        # Otherwise only generate particles and write them to a ROOT file.
        s0 = Sequencer(events=100, numThreads=1)
        evGen = ptcl_gun(s0)
        s0.addWriter(
            RootParticleWriter(
                level=acts.logging.INFO,
                inputParticles=evGen.config.outputParticles,
                filePath=str(ptcl_file),
            )
        )
        s0.run()
        del s0

        assert ptcl_file.exists()

    finder = VertexFinder[finder]

    s3 = Sequencer(numThreads=1)

    runVertexFitting(
        field,
        inputParticlePath=ptcl_file,
        inputTrackSummary=inputTrackSummary,
        outputDir=tmp_path,
        vertexFinder=finder,
        s=s3,
    )

    # Ensure the fitted vertices collection actually appears in the event store
    alg = AssertCollectionExistsAlg(["fittedVertices"], name="check_alg")
    s3.addAlgorithm(alg)

    s3.run()

    vertexing_file = tmp_path / "performance_vertexing.root"
    assert vertexing_file.exists()

    assert_entries(vertexing_file, "vertexing", entries)
    assert_root_hash(vertexing_file.name, vertexing_file)
def test_ckf_tracks_example(
    tmp_path, assert_root_hash, truthSmeared, truthEstimated, detector
):
    """Run the CKF track-finding example for the selected detector and seeding
    mode, then check the produced ROOT hashes and per-event CSV output.

    Fix: the final size check used ``all([...])`` with a throwaway list;
    a generator expression avoids materializing it.

    Raises:
        ValueError: if ``detector`` is neither ``"generic"`` nor ``"odd"``.
    """
    csv = tmp_path / "csv"
    assert not csv.exists()

    srcdir = Path(__file__).resolve().parent.parent.parent.parent

    if detector == "generic":
        detector, trackingGeometry, decorators = GenericDetector.create()
        geometrySelection = (
            srcdir
            / "Examples/Algorithms/TrackFinding/share/geoSelection-genericDetector.json"
        )
        digiConfigFile = (
            srcdir
            / "Examples/Algorithms/Digitization/share/default-smearing-config-generic.json"
        )
    elif detector == "odd":
        matDeco = acts.IMaterialDecorator.fromFile(
            srcdir / "thirdparty/OpenDataDetector/data/odd-material-maps.root",
            level=acts.logging.INFO,
        )
        detector, trackingGeometry, decorators = getOpenDataDetector(matDeco)
        digiConfigFile = (
            srcdir / "thirdparty/OpenDataDetector/config/odd-digi-smearing-config.json"
        )
        geometrySelection = (
            srcdir / "thirdparty/OpenDataDetector/config/odd-seeding-config.json"
        )
    else:
        raise ValueError(f"Invalid detector {detector}")

    field = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T))
    events = 100
    s = Sequencer(events=events, numThreads=1)  # Digitization is not thread-safe

    # (filename, tree-name-or-None); only files with a tree name get hashed
    root_files = [
        (
            "performance_ckf.root",
            None,
        ),
        (
            "trackstates_ckf.root",
            "trackstates",
        ),
        (
            "tracksummary_ckf.root",
            "tracksummary",
        ),
    ]

    if not truthSmeared:
        # seeding performance output only exists when seeds are estimated
        root_files += [
            (
                "performance_seeding_trees.root",
                "track_finder_tracks",
            ),
        ]

    for rf, _ in root_files:
        assert not (tmp_path / rf).exists()

    from ckf_tracks import runCKFTracks

    runCKFTracks(
        trackingGeometry,
        decorators,
        field=field,
        outputCsv=True,
        outputDir=tmp_path,
        geometrySelection=geometrySelection,
        digiConfigFile=digiConfigFile,
        truthSmearedSeeded=truthSmeared,
        truthEstimatedSeeded=truthEstimated,
        s=s,
    )

    s.run()
    del s  # files are closed in destructors, not great

    assert csv.exists()
    for rf, tn in root_files:
        rp = tmp_path / rf
        assert rp.exists()
        if tn is not None:
            assert_root_hash(rf, rp)

    assert len([f for f in csv.iterdir() if f.name.endswith("CKFtracks.csv")]) == events
    assert all(f.stat().st_size > 300 for f in csv.iterdir())
ptMin=500 * u.MeV, )) logger.info("Using vertex finder: %s", vertexFinder.name) return addVertexFitting( s, field, outputDirRoot=outputDir if outputRoot else None, associatedParticles=associatedParticles, vertexFinder=vertexFinder, ) if "__main__" == __name__: detector, trackingGeometry, decorators = GenericDetector.create() field = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T)) inputParticlePath = Path("particles.root") if not inputParticlePath.exists(): inputParticlePath = None inputTrackSummary = None for p in ("tracksummary_fitter.root", "tracksummary_ckf.root"): p = Path(p) if p.exists(): inputTrackSummary = p break runVertexFitting(
)) return s if "__main__" == __name__: matDeco = None # matDeco = acts.IMaterialDecorator.fromFile("material.json") # matDeco = acts.IMaterialDecorator.fromFile("material.root") ## Generic detector: Default ( detector, trackingGeometry, contextDecorators, ) = GenericDetector.create(mdecorator=matDeco) ## Alternative: Aligned detector in a couple of modes # detector, trackingGeometry, contextDecorators = AlignedDetector.create( # decoratorLogLevel=acts.logging.INFO, # # These parameters need to be tuned so that GC doesn't break # # with multiple threads # iovSize=10, # flushSize=10, # # External alignment store # mode=AlignedDetector.Config.Mode.External, # # OR: Internal alignment storage # # mode=AlignedDetector.Config.Mode.Internal, # ) ## Alternative: DD4hep detector
def test_ckf_tracks_example_truth_smeared(tmp_path, assert_root_hash):
    """Run the CKF tracks example with truth-smeared seeding on the generic
    detector and check ROOT entry counts, hashes, and CSV output.

    Fixes:
        * ``digiConfigFile``/``geometrySelection`` were built as
          ``Path(Path(__file__)... / ...)`` — the outer ``Path(...)`` is
          redundant since the inner expression is already a ``Path``.
        * ``all([...])`` built a throwaway list; a generator suffices.
    """
    # the example as written is only compatible with the generic detector
    detector, trackingGeometry, decorators = GenericDetector.create()

    field = acts.ConstantBField(acts.Vector3(0, 0, 2 * u.T))
    events = 10
    s = Sequencer(events=events, numThreads=1)  # Digitization is not thread-safe

    # (filename, tree name, expected entry count); None entries skip the check
    root_files = [
        ("performance_ckf.root", None, None),
        (
            "trackstates_ckf.root",
            "trackstates",
            80,
        ),
        (
            "tracksummary_ckf.root",
            "tracksummary",
            10,
        ),
    ]

    csv = tmp_path / "csv"
    assert not csv.exists()

    for rf, _, _ in root_files:
        assert not (tmp_path / rf).exists()

    from ckf_tracks import runCKFTracks

    runCKFTracks(
        trackingGeometry,
        decorators,
        field=field,
        geometrySelection=Path(__file__).parent.parent.parent.parent
        / "Examples/Algorithms/TrackFinding/share/geoSelection-genericDetector.json",
        digiConfigFile=Path(__file__).parent.parent.parent.parent
        / "Examples/Algorithms/Digitization/share/default-smearing-config-generic.json",
        outputCsv=True,
        outputDir=tmp_path,
        truthSmearedSeeded=True,
        truthEstimatedSeeded=False,
        s=s,
    )

    s.run()
    del s  # files are closed in destructors, not great

    assert csv.exists()
    for rf, tn, nume in root_files:
        rp = tmp_path / rf
        assert rp.exists()
        if tn is not None and nume is not None:
            assert_entries(rp, tn, nume)
            assert_root_hash(rf, rp)

    assert len([f for f in csv.iterdir() if f.name.endswith("CKFtracks.csv")]) == events
    assert all(f.stat().st_size > 300 for f in csv.iterdir())
def runDigitizationConfig(
    trackingGeometry,
    input: Path,
    output: Path,
):
    """Read a digitization smearing config, compactify it against the given
    tracking geometry, and write the result back out as JSON.

    Args:
        trackingGeometry: geometry whose surfaces are visited to compact the
            per-component digitization configuration.
        input: path to the JSON file holding the input digi configuration.
        output: path the compacted JSON configuration is written to.
    """
    digiConfigurator = DigitizationConfigurator()
    digiConfigurator.compactify = True
    digiConfigurator.inputDigiComponents = readDigiConfigFromJson(str(input))

    # walking the surfaces populates the configurator's output components
    trackingGeometry.visitSurfaces(digiConfigurator)

    writeDigiConfigToJson(
        GeometryHierarchyMap_DigiComponentsConfig(
            digiConfigurator.outputDigiComponents
        ),
        str(output),
    )


if "__main__" == __name__:
    detector, trackingGeometry, _ = GenericDetector.create()

    runDigitizationConfig(
        trackingGeometry=trackingGeometry,
        input=Path(__file__).parent
        / "../../Algorithms/Digitization/share/default-smearing-config-generic.json",
        output=Path.cwd() / "digi-config-out.json",
    )