def find_comparisons(pickle_path, model="", orders={}, energy=0, proc_list=[]): """Find a stored comparison object which corresponds to the parameters given. If file given in pickle_path, simply return pickled PickleRunner.""" if os.path.isfile(pickle_path): # File given. Simply return pickle object from file pickle_runner = save_load_object.load_from_file(pickle_path) if isinstance(pickle_runner, PickleRunner): return [pickle_runner] else: return [] if os.path.isdir(pickle_path): # Directory given. Return list of comparisons which # correspond to the parameters given object_list = [] # NOT YET FINISHED for pickle_file in glob.glob(os.path.join(pickle_path, "*")): # Ignore directories if os.path.isdir(pickle_file): continue # try loading a PickleRunner from the file try: pickle_runner = save_load_object.load_from_file( pickle_file) if isinstance(pickle_runner, PickleRunner): object_list.append(pickle_runner) logging.info("Loaded comparison runner from file %s" % \ pickle_file) except: pass object_list = filter(lambda runner:\ (not model or runner.model == model) and \ (not orders or runner.orders == orders) and \ (not energy or runner.energy == energy) and \ (not proc_list or \ runner.proc_list == proc_list), object_list) return object_list raise IOError, "Path %s is not valid pickle directory" % \ str(pickle_path)
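# ---------------------------------------------------------------------------
# Every snippet in this section goes through save_load_object to (de)serialize
# objects. As a point of reference, here is a minimal sketch of what such a
# module provides; this is an assumption about its shape, not the actual
# MadGraph implementation (which may add locking, logging or custom picklers).
try:
    import cPickle as pickle  # Python 2
except ImportError:
    import pickle             # Python 3

def save_to_file(filepath, obj):
    """Pickle obj to filepath (sketch)."""
    fsock = open(filepath, 'wb')
    try:
        pickle.dump(obj, fsock, protocol=2)
    finally:
        fsock.close()

def load_from_file(filepath):
    """Return the object unpickled from filepath (sketch)."""
    fsock = open(filepath, 'rb')
    try:
        return pickle.load(fsock)
    finally:
        fsock.close()
# ---------------------------------------------------------------------------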
def load_result(self, run_name):
    import madgraph.iolibs.save_load_object as save_load_object
    # gen_crossxhtml must be imported so that the result classes stored in
    # results.pkl can be unpickled.
    import madgraph.various.gen_crossxhtml as gen_crossxhtml

    result = save_load_object.load_from_file('/tmp/MGPROCESS/HTML/results.pkl')
    return result[run_name]
def load_result(self, run_name):
    import madgraph.iolibs.save_load_object as save_load_object
    import madgraph.various.gen_crossxhtml as gen_crossxhtml

    result = save_load_object.load_from_file(pjoin(self.run_dir, 'HTML/results.pkl'))
    return result[run_name]
def load_result(self, run_name):
    import madgraph.iolibs.save_load_object as save_load_object
    import madgraph.madevent.gen_crossxhtml as gen_crossxhtml

    result = save_load_object.load_from_file('%s/HTML/results.pkl' % self.path)
    return result[run_name]
def import_full_model(model_path):
    """A practical and efficient way to import one of those models
    (no restriction file used)."""

    assert model_path == find_ufo_path(model_path)

    # Check the validity of the model
    files_list_prov = ['couplings.py', 'lorentz.py', 'parameters.py',
                       'particles.py', 'vertices.py']
    files_list = []
    for filename in files_list_prov:
        filepath = os.path.join(model_path, filename)
        if not os.path.isfile(filepath):
            raise UFOImportError("%s directory is not a valid UFO model: \n %s is missing" % \
                                 (model_path, filename))
        files_list.append(filepath)

    # Use the pickle file if defined and up to date
    if files.is_uptodate(os.path.join(model_path, 'model.pkl'), files_list):
        try:
            model = save_load_object.load_from_file( \
                os.path.join(model_path, 'model.pkl'))
        except Exception as error:
            logger.info('failed to load model from pickle file. Try importing UFO from File')
        else:
            # Check that the pickle was made from this very path and MG5 version
            if 'version_tag' in model and model.get('version_tag') == \
                    os.path.realpath(model_path) + str(misc.get_pkg_info()):
                _import_once.append(model_path)
                return model
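# ---------------------------------------------------------------------------
# The pickle caching above hinges on files.is_uptodate, which compares the
# cache against its source files. A hypothetical mtime-based re-implementation
# is sketched below; the real helper may use a different rule.
import os

def is_uptodate(pickle_path, dependency_paths):
    """True if pickle_path exists and is newer than every dependency (sketch)."""
    if not os.path.isfile(pickle_path):
        return False
    pickle_time = os.path.getmtime(pickle_path)
    return all(os.path.getmtime(path) < pickle_time
               for path in dependency_paths)
# ---------------------------------------------------------------------------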
def load_result(self, run_name):
    import madgraph.iolibs.save_load_object as save_load_object
    import madgraph.madevent.gen_crossxhtml as gen_crossxhtml

    result = save_load_object.load_from_file(pjoin(self.run_dir, 'HTML/results.pkl'))
    return result[run_name]
def run_from_pickle(self):
    import madgraph.iolibs.save_load_object as save_load_object

    generate_all = save_load_object.load_from_file(
        pjoin(self.options['ms_dir'], 'madspin.pkl'))

    # Re-create the information which is not saved in the pickle.
    generate_all.evtfile = self.events_file
    generate_all.curr_event = madspin.Event(self.events_file, self.banner)
    generate_all.mgcmd = self.mg5cmd
    generate_all.mscmd = self
    generate_all.pid2width = lambda pid: generate_all.banner.get(
        'param_card', 'decay', abs(pid)).value
    generate_all.pid2mass = lambda pid: generate_all.banner.get(
        'param_card', 'mass', abs(pid)).value
    if generate_all.path_me != self.options['ms_dir']:
        for decay in generate_all.all_ME.values():
            decay['path'] = decay['path'].replace(generate_all.path_me,
                                                  self.options['ms_dir'])
            for decay2 in decay['decays']:
                decay2['path'] = decay2['path'].replace(generate_all.path_me,
                                                        self.options['ms_dir'])
        generate_all.path_me = self.options['ms_dir']  # the directory may have been moved
        generate_all.ms_dir = generate_all.path_me

    if not hasattr(self.banner, 'param_card'):
        self.banner.charge_card('slha')
    for name, block in self.banner.param_card.items():
        if name.startswith('decay'):
            continue
        orig_block = generate_all.banner.param_card[name]
        if block != orig_block:
            raise Exception("""The directory %s is specific to a mass spectrum.
Your event file is not compatible with this one. (Different param_card: %s different)
orig block:
%s
new block:
%s""" % (self.options['ms_dir'], name, orig_block, block))

    # Replace the init information
    generate_all.banner['init'] = self.banner['init']

    # NOW we have all the information available for RUNNING
    if self.seed:
        # A seed is specified: need to use that one
        open(pjoin(self.options['ms_dir'], 'seeds.dat'), 'w').write('%s\n' % self.seed)
        # Remove all ranmar_state files
        for name in glob.glob(pjoin(self.options['ms_dir'], '*',
                                    'SubProcesses', '*', 'ranmar_state.dat')):
            os.remove(name)

    generate_all.ending_run()
    self.branching_ratio = generate_all.branching_ratio
    evt_path = self.events_file.name
    try:
        self.events_file.close()
    except Exception:
        pass
    misc.gzip(evt_path)
    decayed_evt_file = evt_path.replace('.lhe', '_decayed.lhe')
    misc.gzip(pjoin(self.options['curr_dir'], 'decayed_events.lhe'),
              stdout=decayed_evt_file)
    if not self.mother:
        logger.info("Decayed events have been written in %s.gz" % decayed_evt_file)
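# ---------------------------------------------------------------------------
# misc.gzip above compresses a file in place or, with 'stdout', writes the
# compressed copy to a chosen target. A rough standard-library equivalent
# (sketch only; the real misc.gzip may behave differently, e.g. for error
# handling or pre-existing targets):
import gzip
import os
import shutil

def gzip_file(path, stdout=None):
    """Compress 'path' and remove the original, mirroring 'gzip -f' (sketch)."""
    target = stdout if stdout else path
    if not target.endswith('.gz'):
        target += '.gz'
    fin = open(path, 'rb')
    fout = gzip.open(target, 'wb')
    try:
        shutil.copyfileobj(fin, fout)
    finally:
        fin.close()
        fout.close()
    os.remove(path)
# ---------------------------------------------------------------------------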
def find_comparisons(pickle_path, model = "", orders={}, energy=0, proc_list = []): """Find a stored comparison object which corresponds to the parameters given. If file given in pickle_path, simply return pickled PickleRunner.""" if os.path.isfile(pickle_path): # File given. Simply return pickle object from file pickle_runner = save_load_object.load_from_file(pickle_path) if isinstance(pickle_runner, PickleRunner): return [pickle_runner] else: return [] if os.path.isdir(pickle_path): # Directory given. Return list of comparisons which # correspond to the parameters given object_list = [] # NOT YET FINISHED for pickle_file in glob.glob(os.path.join(pickle_path,"*")): # Ignore directories if os.path.isdir(pickle_file): continue # try loading a PickleRunner from the file try: pickle_runner = save_load_object.load_from_file(pickle_file) if isinstance(pickle_runner, PickleRunner): object_list.append(pickle_runner) logging.info("Loaded comparison runner from file %s" % \ pickle_file) except: pass object_list = filter(lambda runner:\ (not model or runner.model == model) and \ (not orders or runner.orders == orders) and \ (not energy or runner.energy == energy) and \ (not proc_list or \ runner.proc_list == proc_list), object_list) return object_list raise IOError, "Path %s is not valid pickle directory" % \ str(pickle_path)
def import_model(model_path, mgme_dir=MG4DIR, absolute=True):
    """create a model from a MG4 model directory."""

    # Check for a valid directory
    model_path_old = model_path
    model_path = find_model_path(model_path, mgme_dir, absolute)

    files_list = [os.path.join(model_path, 'particles.dat'),
                  os.path.join(model_path, 'interactions.dat')]

    for filepath in files_list:
        if not os.path.isfile(filepath):
            if not absolute:
                raise InvalidCmd("%s directory is not a valid v4 model" %
                                 (model_path))
            else:
                return import_model(model_path_old, mgme_dir, False)

    # Use the pickle file if defined and up to date
    if files.is_uptodate(os.path.join(model_path, 'model.pkl'), files_list):
        model = save_load_object.load_from_file(
            os.path.join(model_path, 'model.pkl'))
        if 'version_tag' in model and model.get('version_tag') == \
                os.path.realpath(model_path) + str(misc.get_pkg_info()):
            return model, model_path

    model = base_objects.Model()
    model.set('particles', files.read_from_file(
        os.path.join(model_path, 'particles.dat'),
        read_particles_v4))
    model.set('interactions', files.read_from_file(
        os.path.join(model_path, 'interactions.dat'),
        read_interactions_v4,
        model['particles']))
    model.set('name', os.path.split(model_path)[-1])

    # Save in a pickle file to speed up future loads
    if ReadWrite:
        try:
            save_load_object.save_to_file(
                os.path.join(model_path, 'model.pkl'), model)
        except Exception:
            logger.warning("Failed to write %s. This is perfectly fine; it will just prevent a speed boost when this model is next loaded." %
                           os.path.join(model_path, 'model.pkl'))

    return model, model_path
def import_model(model_path, mgme_dir=MG4DIR):
    """create a model from a MG4 model directory."""

    # Check for a valid directory
    model_path = find_model_path(model_path, mgme_dir)

    files_list = [os.path.join(model_path, 'particles.dat'),
                  os.path.join(model_path, 'interactions.dat')]

    for filepath in files_list:
        if not os.path.isfile(filepath):
            raise InvalidCmd("%s directory is not a valid v4 model" %
                             (model_path))

    # Use the pickle file if defined and up to date
    if files.is_uptodate(os.path.join(model_path, 'model.pkl'), files_list):
        model = save_load_object.load_from_file(
            os.path.join(model_path, 'model.pkl'))
        if 'version_tag' in model and model.get('version_tag') == \
                os.path.realpath(model_path) + str(misc.get_pkg_info()):
            return model, model_path

    model = base_objects.Model()
    model.set('particles', files.read_from_file(
        os.path.join(model_path, 'particles.dat'),
        read_particles_v4))
    model.set('interactions', files.read_from_file(
        os.path.join(model_path, 'interactions.dat'),
        read_interactions_v4,
        model['particles']))
    model.set('name', os.path.split(model_path)[-1])

    # Save in a pickle file to speed up future loads
    save_load_object.save_to_file(os.path.join(model_path, 'model.pkl'), model)

    return model, model_path
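# ---------------------------------------------------------------------------
# In both import_model variants above, the cached model.pkl is only trusted
# when its 'version_tag' matches the current absolute model path plus the MG5
# package info, so the cache is invalidated whenever the model directory is
# moved/copied or the MG5 version changes. An illustrative sketch of that
# check (make_version_tag and cache_is_valid are hypothetical names):
import os

def make_version_tag(model_path, pkg_info):
    # Tie the cache to both the model's absolute location and the code version
    return os.path.realpath(model_path) + str(pkg_info)

def cache_is_valid(model, model_path, pkg_info):
    return ('version_tag' in model and
            model.get('version_tag') == make_version_tag(model_path, pkg_info))
# ---------------------------------------------------------------------------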
def setUp(self):
    """ Setup a toy-model with gluon and down-quark only """

    # A gluon
    self.mypartlist.append(base_objects.Particle({'name': 'g',
                                                  'antiname': 'g',
                                                  'spin': 3,
                                                  'color': 8,
                                                  'mass': 'zero',
                                                  'width': 'zero',
                                                  'texname': 'g',
                                                  'antitexname': 'g',
                                                  'line': 'curly',
                                                  'charge': 0.,
                                                  'pdg_code': 21,
                                                  'propagating': True,
                                                  'is_part': True,
                                                  'self_antipart': True}))

    # A quark D and its antiparticle
    self.mypartlist.append(base_objects.Particle({'name': 'd',
                                                  'antiname': 'd~',
                                                  'spin': 2,
                                                  'color': 3,
                                                  'mass': 'dmass',
                                                  'width': 'zero',
                                                  'texname': 'd',
                                                  # raw string: '\b' would otherwise be a backspace escape
                                                  'antitexname': r'\bar d',
                                                  'line': 'straight',
                                                  'charge': -1. / 3.,
                                                  'pdg_code': 1,
                                                  'propagating': True,
                                                  'is_part': True,
                                                  'self_antipart': False}))
    antid = copy.copy(self.mypartlist[1])
    antid.set('is_part', False)

    # 3 gluon vertex
    self.myinterlist.append(base_objects.Interaction({
        'id': 1,
        'particles': base_objects.ParticleList([self.mypartlist[0]] * 3),
        'color': [],
        'lorentz': ['L1'],
        'couplings': {(0, 0): 'G'},
        'orders': {'QCD': 1}}))

    # 4 gluon vertex
    self.myinterlist.append(base_objects.Interaction({
        'id': 2,
        'particles': base_objects.ParticleList([self.mypartlist[0]] * 4),
        'color': [],
        'lorentz': ['L1'],
        'couplings': {(0, 0): 'G^2'},
        'orders': {'QCD': 2}}))

    # Gluon coupling to the down-quark
    self.myinterlist.append(base_objects.Interaction({
        'id': 3,
        'particles': base_objects.ParticleList([self.mypartlist[1],
                                                antid,
                                                self.mypartlist[0]]),
        'color': [],
        'lorentz': ['L1'],
        'couplings': {(0, 0): 'GQQ'},
        'orders': {'QCD': 1}}))

    self.mymodel.set('particles', self.mypartlist)
    self.mymodel.set('interactions', self.myinterlist)
    self.myproc.set('model', self.mymodel)

    self.myloopmodel = save_load_object.load_from_file(
        os.path.join(_input_file_path, 'test_toyLoopModel.pkl'))

    box_diagram, box_struct = self.def_box()
    pent_diagram, pent_struct = self.def_pent()

    self.box_drawing = draw_lib.LoopFeynmanDiagram(
        box_diagram, box_struct, self.myloopmodel)
def run_from_pickle(self):
    import madgraph.iolibs.save_load_object as save_load_object

    generate_all = save_load_object.load_from_file(
        pjoin(self.options['ms_dir'], 'madspin.pkl'))

    # Re-create the information which is not saved in the pickle.
    generate_all.evtfile = self.events_file
    generate_all.curr_event = madspin.Event(self.events_file)
    generate_all.mgcmd = self.mg5cmd
    generate_all.mscmd = self
    generate_all.pid2width = lambda pid: generate_all.banner.get(
        'param_card', 'decay', abs(pid)).value
    generate_all.pid2mass = lambda pid: generate_all.banner.get(
        'param_card', 'mass', abs(pid)).value
    if generate_all.path_me != self.options['ms_dir']:
        for decay in generate_all.all_ME.values():
            decay['path'] = decay['path'].replace(generate_all.path_me,
                                                  self.options['ms_dir'])
            for decay2 in decay['decays']:
                decay2['path'] = decay2['path'].replace(
                    generate_all.path_me, self.options['ms_dir'])
        generate_all.path_me = self.options['ms_dir']  # the directory may have been moved
        generate_all.ms_dir = generate_all.path_me

    if not hasattr(self.banner, 'param_card'):
        self.banner.charge_card('slha')
    for name, block in self.banner.param_card.items():
        if name.startswith('decay'):
            continue
        orig_block = generate_all.banner.param_card[name]
        if block != orig_block:
            raise Exception("""The directory %s is specific to a mass spectrum.
Your event file is not compatible with this one. (Different param_card: %s different)
orig block:
%s
new block:
%s""" % (self.options['ms_dir'], name, orig_block, block))

    # NOW we have all the information available for RUNNING
    if self.seed:
        # A seed is specified: need to use that one
        open(pjoin(self.options['ms_dir'], 'seeds.dat'), 'w').write('%s\n' % self.seed)
        # Remove all ranmar_state files
        for name in glob.glob(pjoin(self.options['ms_dir'], '*',
                                    'SubProcesses', '*', 'ranmar_state.dat')):
            os.remove(name)

    generate_all.ending_run()
    self.branching_ratio = generate_all.branching_ratio
    evt_path = self.events_file.name
    try:
        self.events_file.close()
    except Exception:
        pass
    misc.call(['gzip -f %s' % evt_path], shell=True)
    decayed_evt_file = evt_path.replace('.lhe', '_decayed.lhe')
    shutil.move(pjoin(self.options['curr_dir'], 'decayed_events.lhe'),
                decayed_evt_file)
    misc.call(['gzip -f %s' % decayed_evt_file], shell=True)
    if not self.mother:
        logger.info("Decayed events have been written in %s.gz" % decayed_evt_file)
def finalize(self, flaglist, interface_history):
    """Distribute and organize the finalization of all contributions."""

    # Make sure contributions are sorted at this stage.
    # It is important to act on LO contributions first, then NLO, etc...,
    # because MEs and currents must be added to the ME_accessor in order,
    # since there are look-up operations on it in-between.
    self.contributions.sort_contributions()

    # Save all the global couplings to write out afterwards
    global_wanted_couplings = []

    # Forward the finalize request to each contribution
    for contrib in self.contributions:
        # Must clean the aloha Kernel before each aloha export for each contribution
        aloha.aloha_lib.KERNEL.clean()
        wanted_couplings_to_add_to_global = contrib.finalize(
            flaglist=flaglist, interface_history=interface_history)
        global_wanted_couplings.extend(wanted_couplings_to_add_to_global)

    # Generate the global ME7 MODEL
    if global_wanted_couplings:
        output_dir = pjoin(self.export_dir, 'Source', 'MODEL')
        # Writing out the model common to all the contributions that can share it
        model_export_options = {
            'complex_mass': self.options['complex_mass_scheme'],
            'export_format': 'madloop',  # So as to have access to lha_read_mp.f
            'mp': True,
            'loop_induced': False}
        model_builder = export_v4.UFO_model_to_mg4(self.model, output_dir,
                                                   model_export_options)
        model_builder.build(global_wanted_couplings)

    # Now possibly add content to the pool of global ME7 resources before
    # removing superfluous files and linking to necessary global ME7 resources
    for contrib in self.contributions:
        contrib.add_content_to_global_ME7_resources(self.export_dir)
        contrib.remove_superfluous_content()
        contrib.link_global_ME7_resources(self.export_dir)

    # Create the run_card
    self.create_run_card()

    # Add the cards generated in MODEL to the Cards directory
    self.copy_model_resources()

    # Now link the Source files within each contribution
    for contrib in self.contributions:
        contrib.make_model_symbolic_link()

    # Copy the UFO model to the global ME7 resources Source directory
    ME7_ufo_path = pjoin(self.export_dir, 'Source', 'ME7_UFO_model_%s' %
                         os.path.basename(self.model.get('modelpath')))
    shutil.copytree(self.model.get('modelpath'), ME7_ufo_path)
    # And clear compiled files in it
    for path in misc.glob(pjoin(ME7_ufo_path, '*.pkl')) + \
                misc.glob(pjoin(ME7_ufo_path, '*.pyc')):
        os.remove(path)

    # Now generate all the ME accessors and integrands.
    # Notice that some of the information provided here (RunCard, ModelReader,
    # root_path, etc...) can and will be overwritten by the actualized values
    # when the ME7Interface is launched. We provide it here just to be complete.

    # Obtain all the accessors to the matrix elements and currents made
    # available in this process output
    all_MEAccessors = accessors.MEAccessorDict()
    for contrib in self.contributions:
        contrib.add_ME_accessors(all_MEAccessors, self.export_dir)

    # Now make sure that the integrated counterterms without any contribution
    # host indeed have a non-existent reduced process.
    contributions.Contribution_V.remove_counterterms_with_no_reduced_process(
        all_MEAccessors,
        self.integrated_counterterms_refused_from_all_contribs)

    # Check that none are left over after this filtering
    if len(self.integrated_counterterms_refused_from_all_contribs) > 0:
        counterterm_list = (ct['integrated_counterterm'].nice_string()
            for ct in self.integrated_counterterms_refused_from_all_contribs)
        # These integrated counterterms should in principle have been added
        msg = "The following list of integrated counterterms are in principle non-zero"
        msg += " but could not be included in any of the contributions generated:\n"
        msg += '\n'.join(counterterm_list)
        msg += "\nResults generated from this point on are likely to be physically wrong."
        if __debug__:
            logger.critical(msg)
        else:
            raise MadGraph5Error(msg)

    # Now generate all the integrands from the contributions exported
    all_integrands = []
    run_card = banner_mod.RunCardME7(
        pjoin(self.export_dir, 'Cards', 'run_card.dat'))

    # We might want to recover whether a prefix was used when importing the
    # model and whether the MG5 name conventions were used. But this is a
    # detail that can easily be fixed later.
    modelReader_instance = import_ufo.import_model(
        pjoin(self.export_dir, 'Source', 'ME7_UFO_model_') + self.model.get('name'),
        prefix=True,
        complex_mass_scheme=self.options['complex_mass_scheme'])
    modelReader_instance.pass_particles_name_in_mg_default()
    modelReader_instance.set_parameters_and_couplings(
        param_card=pjoin(self.export_dir, 'Cards', 'param_card.dat'),
        scale=run_card['scale'],
        complex_mass_scheme=self.options['complex_mass_scheme'])

    ME7_options = dict(self.options)
    ME7_options['me_dir'] = self.export_dir
    for contrib in self.contributions:
        all_integrands.extend(contrib.get_integrands(
            modelReader_instance, run_card, all_MEAccessors, ME7_options))

    # And finally dump the ME7 output information so that all relevant objects
    # can be reconstructed for a future launch with ME7Interface.
    # Normally all the relevant information should simply be encoded in only
    # 'all_MEAccessors' and 'all_integrands'.
    self.dump_ME7(all_MEAccessors, all_integrands)

    # Finally, for future convenience it may sometimes be desirable to already
    # compile all contributions and global ME7 resources (e.g. MODEL) as
    # follows. By default however, we don't do that, and this is instead done
    # at launch time.
    #logger.info('Compilation of the process output.')
    #logger.info('It can be interrupted at any time,'+
    #            ' in which case it would be automatically resumed when launched.')
    #self.compile()

    return

    ###############################################################################
    ###
    ### WARNING: THE CODE BELOW IS JUST FOR TESTING PURPOSES AND CORRESPONDS TO
    ### RUNNING THE INTEGRATION RIGHT AWAY, NOT WITHIN THE ME7 INTERFACE.
    ###
    ###############################################################################
    import madgraph.interface.ME7_interface as ME7_interface
    # Test the reconstruction of the ME7 output instances
    ME7_dump = save_load_object.load_from_file(
        pjoin(self.export_dir, 'MadEvent7.db'))
    all_MEAccessors = ME7_dump['all_MEAccessors']['class'].initialize_from_dump(
        ME7_dump['all_MEAccessors'], root_path=self.export_dir)
    all_integrands = [
        integrand_dump['class'].initialize_from_dump(
            integrand_dump, modelReader_instance, run_card,
            all_MEAccessors, self.options)
        for integrand_dump in ME7_dump['all_integrands']]
    model_name = ME7_dump['model_name']
    model_with_CMS = ME7_dump['model_with_CMS']

    # This is now just for gigs. Integrate that beast!
    # Of course, what should really happen is that the user starts a
    # ME7_interface that bootstraps from the dump above and starts the
    # integration below with 'launch'. So this is really just for testing.
    import madgraph.integrator.integrators as integrators
    integrator_naive = integrators.SimpleMonteCarloIntegrator(
        all_integrands,
        **{'n_iterations': 10,
           'n_points_per_iterations': 100,
           'accuracy_target': None,
           'verbosity': 1})
    import madgraph.integrator.pyCubaIntegrator as pyCubaIntegrator
    integrator_vegas = pyCubaIntegrator.pyCubaIntegrator(
        all_integrands,
        **{'algorithm': 'Vegas',
           'verbosity': 1,
           'seed': 3,
           'target_accuracy': 1.0e-3,
           'n_start': 1000,
           'n_increase': 500,
           'n_batch': 1000,
           'max_eval': 100000,
           'min_eval': 0})

    # Now run them all!
    for integrator in [integrator_naive, integrator_vegas]:
        xsec, error = integrator.integrate()
        logger.info("=" * 100)
        logger.info('{:^100}'.format(
            "\033[92mCross-section for process output '%s' with integrator '%s':\033[0m" %
            (self.export_dir, integrator.get_name())))
        logger.info('{:^100}'.format("\033[94m%.5e +/- %.2e [pb]\033[0m" %
                                     (xsec, error)))
        logger.info("=" * 100 + "\n")
def run_from_pickle(self):
    import madgraph.iolibs.save_load_object as save_load_object

    generate_all = save_load_object.load_from_file(
        pjoin(self.options['ms_dir'], 'madspin.pkl'))

    # Re-create the information which is not saved in the pickle.
    generate_all.evtfile = self.events_file
    generate_all.curr_event = madspin.Event(self.events_file, self.banner)
    generate_all.mgcmd = self.mg5cmd
    generate_all.mscmd = self
    generate_all.pid2width = lambda pid: generate_all.banner.get(
        'param_card', 'decay', abs(pid)).value
    generate_all.pid2mass = lambda pid: generate_all.banner.get(
        'param_card', 'mass', abs(pid)).value
    if generate_all.path_me != self.options['ms_dir']:
        for decay in generate_all.all_ME.values():
            decay['path'] = decay['path'].replace(generate_all.path_me,
                                                  self.options['ms_dir'])
            for decay2 in decay['decays']:
                decay2['path'] = decay2['path'].replace(
                    generate_all.path_me, self.options['ms_dir'])
        generate_all.path_me = self.options['ms_dir']  # the directory may have been moved
        generate_all.ms_dir = generate_all.path_me

    if not hasattr(self.banner, 'param_card'):
        self.banner.charge_card('slha')

    # Special treatment for the mssm: convert the param_card to the correct format
    if self.banner.get('model').startswith('mssm-') or \
            self.banner.get('model') == 'mssm':
        self.banner.param_card = check_param_card.convert_to_mg5card(
            self.banner.param_card, writting=False)

    for name, block in self.banner.param_card.items():
        if name.startswith('decay'):
            continue
        orig_block = generate_all.banner.param_card[name]
        if block != orig_block:
            raise Exception("""The directory %s is specific to a mass spectrum.
Your event file is not compatible with this one. (Different param_card: %s different)
orig block:
%s
new block:
%s""" % (self.options['ms_dir'], name, orig_block, block))

    # Replace the init information
    generate_all.banner['init'] = self.banner['init']
    # Replace the run card if present in the header (to make sure the correct
    # random seed is recorded in the output file)
    if 'mgruncard' in self.banner:
        generate_all.banner['mgruncard'] = self.banner['mgruncard']

    # NOW we have all the information available for RUNNING
    if self.seed:
        # A seed is specified: need to use that one
        open(pjoin(self.options['ms_dir'], 'seeds.dat'), 'w').write('%s\n' % self.seed)
        # Remove all ranmar_state files
        for name in misc.glob(pjoin('*', 'SubProcesses', '*', 'ranmar_state.dat'),
                              self.options['ms_dir']):
            os.remove(name)

    generate_all.ending_run()
    self.branching_ratio = generate_all.branching_ratio
    try:
        self.err_branching_ratio = generate_all.err_branching_ratio
    except Exception:
        # might not be defined in some gridpack modes
        self.err_branching_ratio = 0
    evt_path = self.events_file.name
    try:
        self.events_file.close()
    except Exception:
        pass
    misc.gzip(evt_path)
    decayed_evt_file = evt_path.replace('.lhe', '_decayed.lhe')
    misc.gzip(pjoin(self.options['curr_dir'], 'decayed_events.lhe'),
              stdout=decayed_evt_file)
    if not self.mother:
        logger.info("Decayed events have been written in %s.gz" % decayed_evt_file)