def test_04(self):
    """Iteration smoke test: __iter__ chains, rewind, and a full pass."""
    f = dataio.I3FrameSequence([self.name])
    # renamed from `iter`/`iter2`: do not shadow the builtin iter()
    it = f.__iter__()
    it2 = it.__iter__()  # an iterator's __iter__ must also work
    f.rewind()
    # consume the whole sequence; any frame error would raise here
    for fr in f:
        pass
    f.close()
def test_05(self):
    """Direct indexing must return the G, C, D frames in order."""
    f = dataio.I3FrameSequence([self.name])
    expected = (
        (0, icetray.I3Frame.Geometry, 'no Geometry frame'),
        (1, icetray.I3Frame.Calibration, 'no Calibration frame'),
        (2, icetray.I3Frame.DetectorStatus, 'no DetectorStatus frame'),
    )
    for index, stop, message in expected:
        if f[index].Stop != stop:
            raise Exception(message)
    f.close()
def test_02(self):
    """pop_frame() on a closed (non-reading) file must raise."""
    f = dataio.I3FrameSequence([self.name])
    f.close()
    pop_succeeded = True
    try:
        f.pop_frame()
    except Exception:
        # expected: a closed file cannot pop frames
        pop_succeeded = False
    if pop_succeeded:
        raise Exception(
            'should not be able to pop_frame() on a non-reading file')
def test_03(self):
    """Popping past the GCD frames must leave them as 3 mixed frames."""
    f = dataio.I3FrameSequence([self.name])
    # pop four frames (presumably G, C, D plus the first event frame,
    # matching the 3 mixed frames asserted below — TODO confirm fixture)
    for _ in range(4):
        f.pop_frame()
    fr = f.get_mixed_frames()
    # use unittest assertions consistently (the original mixed assertEqual
    # with bare `raise Exception`, reporting errors instead of failures)
    self.assertEqual(len(fr), 3, 'not 3 mixed frames')
    self.assertEqual(fr[0].Stop, icetray.I3Frame.Geometry,
                     'no mixed Geometry frame')
    self.assertEqual(fr[1].Stop, icetray.I3Frame.Calibration,
                     'no mixed Calibration frame')
    self.assertEqual(fr[2].Stop, icetray.I3Frame.DetectorStatus,
                     'no mixed DetectorStatus frame')
    f.close()
            # "uniform" branch of the perturbation-type dispatch; the head
            # of the enclosing for-loop / if-chain lies outside this chunk
            print("-> adding {} of type {}".format(name, params["type"]))
            params = params["uniform"]
            perturber.add(
                name, all_parametrizations[name],
                UniformDistribution([
                    dataclasses.make_pair(*limits)
                    for limits in params["limits"]
                ]))
        else:
            # unknown perturbation type: fail loudly rather than skip
            raise NotImplementedError(
                "Perturbation '{}' of type '{}' not implemented.".format(
                    name, params["type"]))
    print("done")

    # Setting up some other things
    # NOTE(review): GCDFile and InputFiles are defined outside this chunk —
    # presumably script/segment parameters; verify against the full file.
    gcdFrames = list(dataio.I3File(GCDFile))
    inputStream = dataio.I3FrameSequence([InputFiles])
    summary = dataclasses.I3MapStringDouble()
    intermediateOutputFiles = []

    # --------------
    # Run PhotonProp
    # --------------

    # start a model counter
    model_counter = 0

    # Execute photon propagation
    print("Executing photon propagation...", end="")
    while inputStream.more():
        # loop body continues beyond this chunk
def run_snowstorm_propagation(cfg, infile, outfile):
    """Run SnowStorm Propagation.

    Adopted from:
    https://code.icecube.wisc.edu/projects/icecube/browser/IceCube/
    meta-projects/combo/stable/simprod-scripts/resources/scripts/
    SnowSuite/3-Snowstorm.py

    Parameters
    ----------
    cfg : dict
        Dictionary with configuration settings.
    infile : str
        Path to input file.
    outfile : str
        Path to output file.
    """
    start_time = time.time()

    # --------
    # Settings
    # --------
    default_args = {
        # required
        'NumEventsPerModel': 100,
        'DOMOversizeFactor': 1.,
        'UseI3PropagatorService': True,

        # optional
        'UseGPUs': True,
        'SummaryFile': 'summary_snowstorm.yaml',
        'UseOnlyDeviceNumber': None,
        'MCTreeName': 'I3MCTree',
        'OutputMCTreeName': None,
        'FlasherInfoVectName': None,
        'FlasherPulseSeriesName': None,
        'PhotonSeriesName': None,
        'MCPESeriesName': "I3MCPESeriesMap",
        'DisableTilt': False,
        'UnWeightedPhotons': False,
        'UnWeightedPhotonsScalingFactor': None,
        'UseGeant4': False,
        'ParticleHistory': True,
        'ParticleHistoryGranularity': 1*icetray.I3Units.m,
        'CrossoverEnergyEM': None,
        'CrossoverEnergyHadron': None,
        'UseCascadeExtension': True,
        'StopDetectedPhotons': True,
        'PhotonHistoryEntries': 0,
        'DoNotParallelize': False,
        'UnshadowedFraction': 1.0,
        'WavelengthAcceptance': None,
        'DOMRadius': 0.16510*icetray.I3Units.m,
        'CableOrientation': None,
        'OverrideApproximateNumberOfWorkItems': None,
        'IgnoreSubdetectors': ["IceTop"],
        'ExtraArgumentsToI3CLSimClientModule': dict(),
    }

    # overwrite default settings with user-supplied config
    default_args.update(cfg)
    cfg = default_args

    snowstorm_config = cfg['snowstorm_config']
    if cfg['SummaryFile'] is not None:
        cfg['SummaryFile'] = cfg['SummaryFile'].format(**cfg)
    ice_model_location = \
        os.path.expandvars(snowstorm_config["IceModelLocation"])
    hole_ice_parameterization = \
        os.path.expandvars(snowstorm_config["HoleIceParameterization"])

    # set units to meter
    # NOTE(review): the defaults above already carry icetray.I3Units.m, so
    # when a default is used this multiplies by I3Units.m twice — harmless
    # only if I3Units.m == 1.0; confirm intended behavior.
    cfg['ParticleHistoryGranularity'] *= icetray.I3Units.m
    cfg['DOMRadius'] *= icetray.I3Units.m

    # Print out most important settings
    click.echo('\n---------------')
    # fixed typo in user-facing banner (was: 'Script Settigns')
    click.echo('Script Settings')
    click.echo('---------------')
    click.echo('\tInput: {}'.format(infile))
    click.echo('\tGCDFile: {}'.format(cfg['gcd']))
    click.echo('\tOutput: {}'.format(outfile))
    for key in ['DOMOversizeFactor', 'UseI3PropagatorService',
                'UseGPUs', 'SummaryFile']:
        click.echo('\t{}: {}'.format(key, cfg[key]))
    click.echo('---------------\n')

    # get random service
    random_services, _ = create_random_services(
        dataset_number=cfg['dataset_number'],
        run_number=cfg['run_number'],
        seed=cfg['seed'],
        n_services=1,
        use_gslrng=cfg['random_service_use_gslrng'])
    random_service = random_services[0]

    # Setup and run Snowstorm (aka MultiSim) by running a series of short
    # trays, each with a different ice model. This works by front-loading
    # as much of the expensive initialization (reading the GCD file,
    # setting up PROPOSAL/Geant4, etc) as possible, so that only the
    # propagation kernel needs to be recompiled for every tray.

    # instantiate baseline detector setup.
    # this will help construct the baseline characteristics before applying
    # the perturbers
    print("Setting up detector... ", end="")
    clsimParams = setupDetector(
        GCDFile=cfg['gcd'],
        SimulateFlashers=bool(cfg['FlasherInfoVectName'] or
                              cfg['FlasherPulseSeriesName']),
        IceModelLocation=ice_model_location,
        DisableTilt=cfg['DisableTilt'],
        UnWeightedPhotons=cfg['UnWeightedPhotons'],
        UnWeightedPhotonsScalingFactor=cfg['UnWeightedPhotonsScalingFactor'],
        UseI3PropagatorService=cfg['UseI3PropagatorService'],
        UseGeant4=cfg['UseGeant4'],
        CrossoverEnergyEM=cfg['CrossoverEnergyEM'],
        CrossoverEnergyHadron=cfg['CrossoverEnergyHadron'],
        UseCascadeExtension=cfg['UseCascadeExtension'],
        StopDetectedPhotons=cfg['StopDetectedPhotons'],
        DOMOversizeFactor=cfg['DOMOversizeFactor'],
        UnshadowedFraction=cfg['UnshadowedFraction'],
        HoleIceParameterization=hole_ice_parameterization,
        WavelengthAcceptance=cfg['WavelengthAcceptance'],
        DOMRadius=cfg['DOMRadius'],
        CableOrientation=cfg['CableOrientation'],
        IgnoreSubdetectors=cfg['IgnoreSubdetectors'],
    )
    print("done")

    print("Setting up OpenCLDevices... ", end="")
    openCLDevices = configureOpenCLDevices(
        UseGPUs=cfg['UseGPUs'],
        UseCPUs=not cfg['UseGPUs'],
        OverrideApproximateNumberOfWorkItems=cfg[
            'OverrideApproximateNumberOfWorkItems'],
        DoNotParallelize=cfg['DoNotParallelize'],
        UseOnlyDeviceNumber=cfg['UseOnlyDeviceNumber'])
    print("done")

    # -------------------
    # Setup perturbations
    # -------------------

    # create empty "perturber" object
    perturber = Perturber()
    # get perturbation_cfg dict to simplify calls
    perturbation_cfg = snowstorm_config["Perturbations"]
    # loop over all perturbations in the perturbation_cfg
    print("Setting up perturbers... ")
    for name, params in perturbation_cfg.items():
        # catch special case of IceWavePlusModes
        if name == "IceWavePlusModes":
            if not params["apply"]:
                continue
            if params["type"] == "default":
                print("-> adding {} of type {}".format(name, params["type"]))
                perturber.add('IceWavePlusModes',
                              *icewave.get_default_perturbation())
                continue
            elif hasattr(snowstorm_perturbers, params["type"]):
                print("-> adding {} of type {}".format(name, params["type"]))
                get_perturber = getattr(snowstorm_perturbers, params["type"])
                perturber.add('IceWavePlusModes',
                              *get_perturber(**params['settings']))
                continue
            else:
                msg = "IceWavePlusModes of type '{}' are not implemented(yet)."
                raise NotImplementedError(msg.format(params["type"]))

        # all other cases: dispatch on the distribution type
        if params["type"] == "delta":
            print("-> adding {} of type {}".format(name, params["type"]))
            params = params["delta"]
            perturber.add(name, all_parametrizations[name],
                          DeltaDistribution(params["x0"]))
        elif params["type"] == "gauss":
            print("-> adding {} of type {}".format(name, params["type"]))
            params = params["gauss"]
            # Caution: MultivariateNormal expect the covariance matrix as
            # first argument, so we need to use sigma**2
            perturber.add(name, all_parametrizations[name],
                          MultivariateNormal(
                              dataclasses.I3Matrix(
                                  np.diag(params["sigma"])**2),
                              params["mu"]))
        elif params["type"] == "uniform":
            print("-> adding {} of type {}".format(name, params["type"]))
            params = params["uniform"]
            perturber.add(name, all_parametrizations[name],
                          UniformDistribution(
                              [dataclasses.make_pair(*limits)
                               for limits in params["limits"]]))
        else:
            msg = "Perturbation '{}' of type '{}' not implemented."
            raise NotImplementedError(msg.format(name, params["type"]))
    print("done")

    # Setting up some other things
    gcdFrames = list(dataio.I3File(cfg['gcd']))
    inputStream = dataio.I3FrameSequence([infile])
    summary = dataclasses.I3MapStringDouble()
    intermediateOutputFiles = []

    # --------------
    # Run PhotonProp
    # --------------

    # start a model counter
    model_counter = 0

    # Execute photon propagation
    print("Executing photon propagation...", end="")
    while inputStream.more():
        # measure CLSimInit time
        time_CLSimInit_start = time.time()

        tray = I3Tray()
        tray.context['I3RandomService'] = random_service
        tray.context['I3SummaryService'] = summary
        # make a mutable copy of the config dict
        config = dict(clsimParams)
        # populate the M frame with I3FrameObjects from clsimParams
        model = icetray.I3Frame('M')
        for k, v in config.items():
            if isinstance(v, icetray.I3FrameObject):
                model[k] = v
        # apply perturbations in the order they were configured
        perturber.perturb(random_service, model)
        # check for items in the M-frame that were changed/added
        # by the perturbers
        for k in model.keys():
            if k.startswith('Snowstorm'):
                # keep all Snowstorm keys
                continue
            if k not in config:
                msg = "\n {} was put in the M frame, but does not appear in "
                msg += "the CLSim configuration dict"
                raise KeyError(msg.format(k))
            if config[k] != model[k]:
                # if an items was changed, copy it back to clsimParams
                config[k] = model[k]
            else:
                # remove unmodified items from the M frame
                del model[k]

        # add "persistent" I3Reader
        tray.Add(FrameSequenceReader,
                 Sequence=itertools.chain(gcdFrames, [model], inputStream))
        # inject an S frame if it doesn't exist
        tray.Add(EnsureSFrame, Enable=len(intermediateOutputFiles) == 0)

        # write pertubations to frame
        def populate_s_frame(frame):
            perturber.to_frame(frame)
        tray.Add(populate_s_frame, Streams=[icetray.I3Frame.Stream('S')])

        # Add Bumper to stop the tray after NumEventsPerModel Q-frames
        tray.Add(Bumper, NumFrames=cfg['NumEventsPerModel'])

        # initialize CLSim server and setup the propagators
        # mkstemp returns an OPEN fd; close it immediately — only the path
        # is needed (the original discarded the fd, leaking one per model)
        server_fd, server_location = tempfile.mkstemp(prefix='clsim-server-')
        os.close(server_fd)
        address = 'ipc://'+server_location
        converters = setupPropagators(
            random_service, config,
            UseGPUs=cfg['UseGPUs'],
            UseCPUs=not cfg['UseGPUs'],
            OverrideApproximateNumberOfWorkItems=cfg[
                'OverrideApproximateNumberOfWorkItems'],
            DoNotParallelize=cfg['DoNotParallelize'],
            UseOnlyDeviceNumber=cfg['UseOnlyDeviceNumber']
        )
        server = clsim.I3CLSimServer(
            address, clsim.I3CLSimStepToPhotonConverterSeries(converters))
        # stash server instance in the context to keep it alive
        tray.context['CLSimServer'] = server

        # recycle StepGenerator to prevent repeated, expensive initialization
        if 'StepGenerator' in cfg['ExtraArgumentsToI3CLSimClientModule']:
            stepGenerator = \
                cfg['ExtraArgumentsToI3CLSimClientModule']['StepGenerator']
            stepGenerator.SetMediumProperties(config['MediumProperties'])
            stepGenerator.SetWlenBias(config['WavelengthGenerationBias'])

        # add CLSim server to tray
        module_config = \
            tray.Add(
                I3CLSimMakePhotonsWithServer,
                ServerAddress=address,
                DetectorSettings=config,
                MCTreeName=cfg['MCTreeName'],
                OutputMCTreeName=cfg['OutputMCTreeName'],
                FlasherInfoVectName=cfg['FlasherInfoVectName'],
                FlasherPulseSeriesName=cfg['FlasherPulseSeriesName'],
                PhotonSeriesName=cfg['PhotonSeriesName'],
                MCPESeriesName=cfg['MCPESeriesName'],
                RandomService=random_service,
                ParticleHistory=cfg['ParticleHistory'],
                ParticleHistoryGranularity=cfg['ParticleHistoryGranularity'],
                ExtraArgumentsToI3CLSimClientModule=cfg[
                    'ExtraArgumentsToI3CLSimClientModule'],
            )
        # recycle StepGenerator to prevent repeated, expensive initialization
        cfg['ExtraArgumentsToI3CLSimClientModule']['StepGenerator'] = \
            module_config['StepGenerator']

        # write to temporary output file (close the mkstemp fd here too)
        tmp_fd, tmp_path = tempfile.mkstemp(suffix=(outfile.split("/"))[-1])
        os.close(tmp_fd)
        intermediateOutputFiles.append(tmp_path)
        tray.Add("I3Writer",
                 Filename=intermediateOutputFiles[-1],
                 DropOrphanStreams=[icetray.I3Frame.TrayInfo],
                 Streams=[icetray.I3Frame.TrayInfo,
                          icetray.I3Frame.Simulation,
                          icetray.I3Frame.Stream('M'),
                          icetray.I3Frame.Stream('m'),
                          icetray.I3Frame.DAQ,
                          icetray.I3Frame.Physics])
        # gather statistics in the "I3SummaryService"
        tray.Add(GatherStatistics)

        # measure CLSimInit time
        time_CLSimInit = time.time() - time_CLSimInit_start
        summary["CLSimInitTime_{:03d}".format(model_counter)] = \
            time_CLSimInit
        if "TotalCLSimInitTime" not in summary:
            summary["TotalCLSimInitTime"] = time_CLSimInit
        else:
            summary["TotalCLSimInitTime"] += time_CLSimInit
        # measure CLSimTray time
        time_CLSimTray_start = time.time()

        # Execute Tray
        tray.Execute()

        # measure CLSimTray time
        time_CLSimTray = time.time() - time_CLSimTray_start
        summary["CLSimTrayTime_{:03d}".format(model_counter)] = \
            time_CLSimTray
        if "TotalCLSimTrayTime" not in summary:
            summary["TotalCLSimTrayTime"] = time_CLSimTray
        else:
            summary["TotalCLSimTrayTime"] += time_CLSimTray
        # remove the temp file made by the server location thingy
        os.unlink(server_location)
        # increase model counter
        model_counter += 1
    print("done")

    # Add number of models to summary
    summary["TotalNumberOfModels"] = model_counter

    # Concatenate intermediate files
    print("Concatenating temporary files... ", end='')
    tray = I3Tray()
    tray.Add(dataio.I3Reader, "I3Reader",
             FilenameList=intermediateOutputFiles)
    tray.Add("I3Writer",
             Filename=outfile,
             DropOrphanStreams=[icetray.I3Frame.TrayInfo],
             Streams=[icetray.I3Frame.TrayInfo,
                      icetray.I3Frame.Simulation,
                      icetray.I3Frame.Stream('M'),
                      icetray.I3Frame.Stream('m'),
                      icetray.I3Frame.DAQ,
                      icetray.I3Frame.Physics])
    tray.Execute()
    tray.Finish()
    print("done")
    print("Cleaning up Temporary files... ")
    for fname in intermediateOutputFiles:
        os.unlink(fname)
    print("done")

    # Recalculate averages
    print("Writing summary file... ", end='')
    if cfg['UseGPUs']:
        if summary['TotalHostTime'] > 0.0:
            summary['DeviceUtilization'] = \
                summary['TotalDeviceTime']/summary['TotalHostTime']
        if summary['TotalNumPhotonsGenerated'] > 0.0:
            summary['AverageDeviceTimePerPhoton'] = \
                summary['TotalDeviceTime']/summary['TotalNumPhotonsGenerated']
        if summary['TotalNumPhotonsGenerated'] > 0.0:
            summary['AverageHostTimePerPhoton'] = \
                summary['TotalHostTime']/summary['TotalNumPhotonsGenerated']
    if cfg['SummaryFile']:
        with open(cfg['SummaryFile'], 'w') as f:
            yaml.dump(dict(summary), f)
    print("done")

    print('--------')
    print('Summary:')
    print('--------')
    for key, value in summary.items():
        print('\t{}: {}'.format(key, value))
    print('--------\n')

    # Hurray!
    print("All finished!")

    # say something about the runtime
    end_time = time.time()
    print("That took "+str(end_time - start_time)+" seconds.")
return y def take_ratios(numerator, denominator): ratio = list() for i in range(len(numerator)): if (denominator[i] == 0): n_ratio = 0 else: n_ratio = numerator[i] / denominator[i] ratio.append(n_ratio) return ratio #load a long list of files file_name = 'file_list.txt' file_list = open(file_name).read().splitlines() infile = dataio.I3FrameSequence(file_list) normalization = len(file_list) print "loaded your MC in " + str(normalization) + " files!" #initialize values muon_1 = [] muon_1.append([]) #[0] energy muon_1.append([]) #[1] z position muon_1.append([]) #[2] all weights muon_1.append([]) #[3] passes PE >3 veto muon_2 = [] muon_2.append([])
import matplotlib.ticker as ticker
import numpy as np


def take_ratios(numerator, denominator):
    """Element-wise numerator/denominator, with 0 where denominator is 0.

    Raises IndexError if `denominator` is shorter than `numerator`
    (same as the original index-based loop).
    """
    ratio = []
    for i, num in enumerate(numerator):
        den = denominator[i]
        ratio.append(0 if den == 0 else num / den)
    return ratio


# import files
# NOTE(review): `args` is defined outside this chunk — presumably an
# argparse namespace; verify against the full file.
file_name = args.infiles
infile = dataio.I3FrameSequence(file_name)
normalization = len(file_name)
# converted Python-2 print statement to a print() call (Py2/Py3 compatible)
print("loaded your MC")

# initialize values
m_zen_1 = []
m_zen_1.append([])
m_zen_1.append([])

m_zen_2 = []
m_zen_2.append([])
m_zen_2.append([])

m_zen_3 = []
m_zen_3.append([])
m_zen_3.append([])

m_a_1 = []
m_a_1.append([])
def test_01(self):
    """A closed file must not report that more frames are available."""
    frames = dataio.I3FrameSequence([self.name])
    frames.close()
    if frames.more():
        raise Exception('should not be able to more() a closed file')