def test_simple_import(self):
    """Check that the basic model quantities are defined after import."""
    # Remove any stale pickle so the first import below rebuilds from the
    # UFO source files.
    try:
        model_path = os.path.join(MG5DIR, 'models', 'sm')
        os.remove(os.path.join(model_path, 'model.pkl'))
    except OSError:
        # No cached pickle present -- nothing to clean up.
        # (Was a bare `except:`; os.remove only raises OSError here.)
        pass
    import_ufo._import_once = []
    sm_path = import_ufo.find_ufo_path('sm')
    model = import_ufo.import_model(sm_path)
    self.assertNotEqual(model.get('particles'), None)
    self.assertNotEqual(model.get('particles'), [], "empty particles list")
    self.assertNotEqual(model.get('interactions'), None)
    self.assertNotEqual(model.get('interactions'), [])
    # Import a second time: this path should go through the freshly
    # written pickle.
    sm_path = import_ufo.find_ufo_path('sm')
    model = import_ufo.import_model(sm_path)
    self.assertNotEqual(model.get('particles'), None)
    self.assertNotEqual(model.get('particles'), [], "empty particles list")
    self.assertNotEqual(model.get('interactions'), None)
    self.assertNotEqual(model.get('interactions'), [])
def test_model_name(self):
    """Verify that the model name is set correctly for each restriction."""
    self.assertEqual(self.base_model["name"], "sm")
    # The restricted variants must carry the restriction in their name.
    for restricted in ("sm-full", "sm-no_b_mass"):
        model = import_ufo.import_model(restricted)
        self.assertEqual(model["name"], restricted)
def test_simple_import(self):
    """Check that the basic model quantities are defined after import."""
    # Remove any stale pickle so the first import below rebuilds from the
    # UFO source files.
    try:
        model_path = os.path.join(MG5DIR, 'models', 'sm')
        os.remove(os.path.join(model_path, 'model.pkl'))
    except OSError:
        # No cached pickle present -- nothing to clean up.
        # (Was a bare `except:`; os.remove only raises OSError here.)
        pass
    import_ufo._import_once = []
    sm_path = import_ufo.find_ufo_path('sm')
    model = import_ufo.import_model(sm_path)
    self.assertNotEqual(model.get('particles'), None)
    self.assertNotEqual(model.get('particles'), [], "empty particles list")
    self.assertNotEqual(model.get('interactions'), None)
    self.assertNotEqual(model.get('interactions'), [])
    # Import a second time: this path should go through the freshly
    # written pickle.
    sm_path = import_ufo.find_ufo_path('sm')
    model = import_ufo.import_model(sm_path)
    self.assertNotEqual(model.get('particles'), None)
    self.assertNotEqual(model.get('particles'), [], "empty particles list")
    self.assertNotEqual(model.get('interactions'), None)
    self.assertNotEqual(model.get('interactions'), [])
def setUp(self):
    """Load the scalar and four-fermion t-channel models with parameters."""
    # Scalar exchange model.
    self.base_model_scalar = import_ufo.import_model('uutt_tch_scalar')
    self.full_model_scalar = model_reader.ModelReader(self.base_model_scalar)
    self.full_model_scalar.set_parameters_and_couplings()
    # Four-fermion contact-interaction model.
    self.base_model_4ferm = import_ufo.import_model('uutt_tch_4fermion')
    self.full_model_4ferm = model_reader.ModelReader(self.base_model_4ferm)
    self.full_model_4ferm.set_parameters_and_couplings()
def setUp(self):
    """Load the sextet diquark and s-channel four-fermion models."""
    # Sextet diquark model.
    self.base_model_scalar = import_ufo.import_model('sextet_diquarks')
    self.full_model_scalar = model_reader.ModelReader(self.base_model_scalar)
    self.full_model_scalar.set_parameters_and_couplings()
    # Raise the sextet mass to 1e5 -- presumably to decouple the resonance;
    # TODO(review): confirm against the tests using this fixture.
    self.full_model_scalar.get('parameter_dict')['mdl_MSIX'] = 1.e5
    # Four-fermion contact-interaction model.
    self.base_model_4ferm = import_ufo.import_model('uutt_sch_4fermion')
    self.full_model_4ferm = model_reader.ModelReader(self.base_model_4ferm)
    self.full_model_4ferm.set_parameters_and_couplings()
def load_model(self, name, use_mg_default, complex_mass=False):
    """Load the model *name* and install it on the embedded MG5 command.

    Args:
        name: model name (or path) to import.
        use_mg_default: if True, rename particles to the MG default
            convention.
        complex_mass: if True, switch the model to the complex-mass scheme.
    """
    # (Removed an unused `loop` flag, an unused `model_path` alias and a
    # block of commented-out loop_-prefix handling.)
    logger.info('detected model: %s. Loading...' % name)
    # decay=True keeps the decay-width information in the imported model.
    base_model = import_ufo.import_model(name, decay=True)
    if use_mg_default:
        base_model.pass_particles_name_in_mg_default()
    if complex_mass:
        base_model.change_mass_to_complex_scheme()
    self.model = base_model
    self.mg5cmd._curr_model = self.model
    self.mg5cmd.process_model()
def test_mssm_equivalence(self):
    """Test the UFO and MG4 MSSM model correspond to the same model."""
    # Import the UFO model.
    sm_path = import_ufo.find_ufo_path('mssm')
    ufo_model = import_ufo.import_model(sm_path)
    ufo_model.pass_particles_name_in_mg_default()
    # Import the MG4 model.
    model = base_objects.Model()
    if not MG4DIR:
        # Fixed: was Python-2-only `raise MadGraph5Error, "..."` syntax,
        # which is a SyntaxError under Python 3.
        raise MadGraph5Error("Please provide a valid MG/ME path with -d")
    v4_path = os.path.join(MG4DIR, 'models', 'mssm_v4')
    if not os.path.isdir(v4_path):
        # Fall back to the legacy MG/ME layout.
        v4_path = os.path.join(MG4DIR, 'Models', 'mssm')
        if not os.path.isdir(v4_path):
            raise MadGraph5Error(
                "Please provide a valid MG/ME path with -d")
    model.set('particles', files.read_from_file(
        os.path.join(v4_path, 'particles.dat'),
        import_v4.read_particles_v4))
    model.set('interactions', files.read_from_file(
        os.path.join(v4_path, 'interactions.dat'),
        import_v4.read_interactions_v4,
        model['particles']))
    model.pass_particles_name_in_mg_default()
    # Checking the particles.
    for particle in model['particles']:
        ufo_particle = ufo_model.get("particle_dict")[particle['pdg_code']]
        self.check_particles(particle, ufo_particle)
    # Skip test below until equivalence has been created by Benj and Claude
    return
    # --- unreachable below: interaction comparison, kept for re-enabling ---
    nb_vertex = 0
    ufo_vertices = []
    for ufo_vertex in ufo_model['interactions']:
        pdg_code_ufo = [abs(part['pdg_code'])
                        for part in ufo_vertex['particles']]
        int_name = [part['name'] for part in ufo_vertex['particles']]
        rep = (pdg_code_ufo, int_name)
        pdg_code_ufo.sort()
        ufo_vertices.append(pdg_code_ufo)
    mg4_vertices = []
    for vertex in model['interactions']:
        pdg_code_mg4 = [abs(part['pdg_code'])
                        for part in vertex['particles']]
        pdg_code_mg4.sort()
        try:
            # Each MG4 vertex must match exactly one UFO vertex.
            ufo_vertices.remove(pdg_code_mg4)
        except ValueError:
            mg4_vertices.append(pdg_code_mg4)
    self.assertEqual(ufo_vertices, [])
    self.assertEqual(mg4_vertices, [])
def load_model(self, name, use_mg_default, complex_mass=False):
    """Load the model *name* and install it on the embedded MG5 command.

    Args:
        name: model name (or path) to import.
        use_mg_default: if True, rename particles to the MG default
            convention.
        complex_mass: forwarded as ``complex_mass_scheme`` to the import.
    """
    # (Removed an unused `loop` flag, an unused `model_path` alias and a
    # block of commented-out loop_-prefix handling.)
    logger.info('detected model: %s. Loading...' % name)
    # decay=True keeps the decay widths; the complex-mass scheme is applied
    # directly at import time in this variant.
    base_model = import_ufo.import_model(name, decay=True,
                                         complex_mass_scheme=complex_mass)
    if use_mg_default:
        base_model.pass_particles_name_in_mg_default()
    self.model = base_model
    self.mg5cmd._curr_model = self.model
    self.mg5cmd.process_model()
def test_mssm_equivalence(self): """Test the UFO and MG4 MSSM model correspond to the same model """ # import UFO model mssm_path = import_ufo.find_ufo_path('MSSM_SLHA2') ufo_model = import_ufo.import_model(mssm_path) #converter = import_ufo.UFOMG5Converter(model) #ufo_model = converter.load_model() ufo_model.pass_particles_name_in_mg_default() # import MG4 model model = base_objects.Model() if not MG4DIR: raise MadGraph5Error("Please provide a valid MG/ME path with -d") v4_path = os.path.join(MG4DIR, 'models', 'mssm_v4') if not os.path.isdir(v4_path): import_ufo.import_model_from_db('mssm_v4', local_dir=True) model.set('particles', files.read_from_file( os.path.join(v4_path,'particles.dat'), import_v4.read_particles_v4)) model.set('interactions', files.read_from_file( os.path.join(v4_path,'interactions.dat'), import_v4.read_interactions_v4, model['particles'])) #model.pass_particles_name_in_mg_default() # Checking the particles for particle in model['particles']: ufo_particle = ufo_model.get("particle_dict")[particle['pdg_code']] self.check_particles(particle, ufo_particle) # Skip test below until equivalence has been created by Benj and Claude return # Checking the interactions nb_vertex = 0 ufo_vertices = [] for ufo_vertex in ufo_model['interactions']: pdg_code_ufo = [abs(part['pdg_code']) for part in ufo_vertex['particles']] int_name = [part['name'] for part in ufo_vertex['particles']] rep = (pdg_code_ufo, int_name) pdg_code_ufo.sort() ufo_vertices.append(pdg_code_ufo) mg4_vertices = [] for vertex in model['interactions']: pdg_code_mg4 = [abs(part['pdg_code']) for part in vertex['particles']] pdg_code_mg4.sort() try: ufo_vertices.remove(pdg_code_mg4) except ValueError: mg4_vertices.append(pdg_code_mg4) self.assertEqual(ufo_vertices, []) self.assertEqual(mg4_vertices, [])
def setUp(self):
    """Import the triplet diquark model and zero out the top mass/width."""
    model_dir = import_ufo.find_ufo_path('triplet_diquarks')
    self.base_model = import_ufo.import_model(model_dir)
    self.full_model = model_reader.ModelReader(self.base_model)
    self.full_model.set_parameters_and_couplings()
    # Literature expressions assume a massless (zero-width) top quark.
    params = self.full_model.get('parameter_dict')
    params['mdl_MT'] = 0.
    params['mdl_WT'] = 0.
def setUp(self):
    """Import the triplet diquark model and zero out the top mass/width."""
    model_dir = import_ufo.find_ufo_path("triplet_diquarks")
    self.base_model = import_ufo.import_model(model_dir)
    self.full_model = model_reader.ModelReader(self.base_model)
    self.full_model.set_parameters_and_couplings()
    # Literature expressions assume a massless (zero-width) top quark.
    params = self.full_model.get("parameter_dict")
    params["MT"] = 0.0
    params["WT"] = 0.0
def test_sm_equivalence(self):
    """Test the UFO and MG4 SM model correspond to the same model."""
    # Import the UFO model.
    sm_path = import_ufo.find_ufo_path('sm')
    ufo_model = import_ufo.import_model(sm_path)
    ufo_model.pass_particles_name_in_mg_default()
    # Import the MG4 model.
    model = base_objects.Model()
    v4_path = os.path.join(MG4DIR, 'models', 'sm_v4')
    if not os.path.isdir(v4_path):
        # Fall back to the legacy MG/ME layout.
        v4_path = os.path.join(MG4DIR, 'Models', 'sm')
        if not os.path.isdir(v4_path):
            # Fixed: was Python-2-only `raise MadGraph5Error, "..."`
            # syntax, which is a SyntaxError under Python 3.
            raise MadGraph5Error(
                "Please provide a valid MG/ME path with -d")
    model.set(
        'particles',
        files.read_from_file(os.path.join(v4_path, 'particles.dat'),
                             import_v4.read_particles_v4))
    model.set(
        'interactions',
        files.read_from_file(os.path.join(v4_path, 'interactions.dat'),
                             import_v4.read_interactions_v4,
                             model['particles']))
    model.pass_particles_name_in_mg_default()
    # Checking the particles.
    for particle in model['particles']:
        ufo_particle = ufo_model.get("particle_dict")[particle['pdg_code']]
        self.check_particles(particle, ufo_particle)
    # Checking the interactions.
    nb_vertex = 0
    ufo_vertices = []
    for ufo_vertex in ufo_model['interactions']:
        pdg_code_ufo = [abs(part['pdg_code'])
                        for part in ufo_vertex['particles']]
        int_name = [part['name'] for part in ufo_vertex['particles']]
        rep = (pdg_code_ufo, int_name)
        pdg_code_ufo.sort()
        ufo_vertices.append(pdg_code_ufo)
    mg4_vertices = []
    for vertex in model['interactions']:
        pdg_code_mg4 = [abs(part['pdg_code'])
                        for part in vertex['particles']]
        pdg_code_mg4.sort()
        try:
            # Each MG4 vertex must match exactly one UFO vertex.
            ufo_vertices.remove(pdg_code_mg4)
        except ValueError:
            mg4_vertices.append(pdg_code_mg4)
    # Only the quartic Higgs vertex (hhhh) exists in UFO but not MG4.
    self.assertEqual(ufo_vertices, [[25, 25, 25, 25]])
    self.assertEqual(mg4_vertices, [])
def load_IOTestsUnit(self):
    """Load the models and exporters if necessary, then register IOTests."""
    # Only build the (expensive) model/exporter fixtures once per class.
    if not hasattr(self, 'models') or \
       not hasattr(self, 'fortran_models') or \
       not hasattr(self, 'loop_exporters'):
        self.models = {
            'loop_sm': import_ufo.import_model('loop_sm')}
        self.fortran_models = {
            'fortran_model': helas_call_writers.FortranUFOHelasCallWriter(
                self.models['loop_sm'])}
        # Two Fortran standalone MadLoop exporters: default and optimized.
        self.loop_exporters = {
            'default': loop_exporters.LoopProcessExporterFortranSA(
                _mgme_file_path, _proc_file_path,
                {'clean': False, 'complex_mass': False,
                 'export_format': 'madloop', 'mp': True,
                 'loop_dir': _loop_file_path,
                 'cuttools_dir': _cuttools_file_path,
                 'fortran_compiler': 'gfortran',
                 'output_dependencies': 'external'}),
            'optimized': loop_exporters.LoopProcessOptimizedExporterFortranSA(
                _mgme_file_path, _proc_file_path,
                {'clean': False, 'complex_mass': False,
                 'export_format': 'madloop', 'mp': True,
                 'loop_dir': _loop_file_path,
                 'cuttools_dir': _cuttools_file_path,
                 'fortran_compiler': 'gfortran',
                 'output_dependencies': 'external'})}
    # g g > t t~
    self.addIOTestsForProcess(testName='gg_ttx',
                              testFolder='short_ML_SMQCD',
                              particles_ids=[21, 21, 6, -6],
                              exporters=self.loop_exporters,
                              orders={'QCD': 2, 'QED': 0})
    # d d > t t~ (only the proc files for this one)
    self.addIOTestsForProcess(testName='ddx_ttx',
                              testFolder='short_ML_SMQCD',
                              particles_ids=[1, -1, 6, -6],
                              exporters=self.loop_exporters,
                              orders={'QCD': 2, 'QED': 0},
                              files_to_check=IOTests.IOTest.proc_files)
    # And the loop induced g g > h h for good measure
    # Use only one exporter only here
    self.addIOTestsForProcess(testName='gg_hh',
                              testFolder='short_ML_SMQCD_LoopInduced',
                              particles_ids=[21, 21, 25, 25],
                              exporters=self.loop_exporters['default'],
                              orders={'QCD': 2, 'QED': 2})
def load_IOTestsAcceptance(self):
    """Load the models and exporters if necessary, then register IOTests."""
    # Only build the (expensive) model/exporter fixtures once per class.
    if not hasattr(self, 'models') or \
       not hasattr(self, 'fortran_models') or \
       not hasattr(self, 'loop_exporters'):
        self.models = {
            'loop_sm': import_ufo.import_model('loop_sm')}
        self.fortran_models = {
            'fortran_model': helas_call_writers.FortranUFOHelasCallWriter(
                self.models['loop_sm'])}
        # Two Fortran standalone MadLoop exporters: default and optimized.
        self.loop_exporters = {
            'default': loop_exporters.LoopProcessExporterFortranSA(
                _mgme_file_path, _proc_file_path,
                {'clean': False, 'complex_mass': False,
                 'export_format': 'madloop', 'mp': True,
                 'loop_dir': _loop_file_path,
                 'cuttools_dir': _cuttools_file_path,
                 'fortran_compiler': 'gfortran',
                 'output_dependencies': 'external',
                 'SubProc_prefix': '',
                 'compute_color_flows': False}),
            'optimized': loop_exporters.LoopProcessOptimizedExporterFortranSA(
                _mgme_file_path, _proc_file_path,
                {'clean': False, 'complex_mass': False,
                 'export_format': 'madloop', 'mp': True,
                 'loop_dir': _loop_file_path,
                 'cuttools_dir': _cuttools_file_path,
                 'fortran_compiler': 'gfortran',
                 'output_dependencies': 'external',
                 'SubProc_prefix': '',
                 'compute_color_flows': False})}
    # d u~ > mu- vmx g
    self.addIOTestsForProcess(testName='dux_mumvmxg',
                              testFolder='long_ML_SMQCD',
                              particles_ids=[1, -2, 13, -14, 21],
                              exporters=['default', 'optimized'],
                              orders={'QCD': 1, 'QED': 2})
    # g g > w- t b~ Single top (long but really includes everything)
    self.addIOTestsForProcess(testName='gg_wmtbx',
                              testFolder='long_ML_SMQCD',
                              particles_ids=[21, 21, -24, 6, -5],
                              exporters=['default', 'optimized'],
                              orders={'QCD': 2, 'QED': 1})
def __init__(self, model):
    """Set up a master command interface for *model* and cache PDG codes."""
    self.model = model
    self.cmd = cmd_interface.MasterCmd()
    self.cmd.exec_cmd('set automatic_html_opening False')
    self.cmd.exec_cmd('import model %s --modelname' % model)
    # Re-import with decay information so widths are available.
    self.cmd._curr_model = import_ufo.import_model(model, decay=True)
    # Map particle name -> pdg code for quick lookups.
    self.particles_id = dict(
        (particle.get('name'), particle.get('pdg_code'))
        for particle in self.cmd._curr_model.get('particles'))
def __init__(self, model):
    """Set up a master command interface for *model* and cache PDG codes."""
    self.model = model
    self.cmd = cmd_interface.MasterCmd()
    self.cmd.exec_cmd('set automatic_html_opening False')
    self.cmd.exec_cmd('import model %s --modelname' % model, precmd=True)
    # Re-import with decay information so widths are available.
    self.cmd._curr_model = import_ufo.import_model(model, decay=True)
    # Map particle name -> pdg code for quick lookups.
    self.particles_id = dict(
        (particle.get('name'), particle.get('pdg_code'))
        for particle in self.cmd._curr_model.get('particles'))
def setUp(self):
    """Prepare the SM model and a ParamCardWriter writing into a buffer."""
    # Load the Standard Model.
    self.model = import_ufo.import_model('sm')
    # Initialise the writer under test, pointed at an in-memory buffer.
    self.writter = writter.ParamCardWriter(self.model)
    self.content = StringIO.StringIO()
    self.writter.define_output_file(self.content)
    # Drop the header so tests only see the card body.
    self.content.truncate(0)
def setUp(self):
    """Build the shared model, leg lists and processes once per test class."""
    if not hasattr(self, 'mymodel') or \
       not hasattr(self, 'myleglist3') or \
       not hasattr(self, 'myproc1') or \
       not hasattr(self, 'myproc3'):
        # PROCESS: u g > u g
        myleglist1 = MG.LegList()
        mylegs = [{'id': 2, 'number': 1, 'state': False},
                  {'id': 21, 'number': 2, 'state': False},
                  {'id': 2, 'number': 3, 'state': True},
                  {'id': 21, 'number': 4, 'state': True}]
        for i in mylegs:
            myleglist1.append(MG.Leg(i))
        # PROCESS: d d~ > u u~
        myleglist3 = MG.LegList()
        mylegs = [{'id': 1, 'number': 1, 'state': False},
                  {'id': -1, 'number': 2, 'state': False},
                  {'id': 2, 'number': 3, 'state': True},
                  {'id': -2, 'number': 4, 'state': True}]
        for i in mylegs:
            myleglist3.append(MG.Leg(i))
        mymodel = import_ufo.import_model('sm')
        # NOTE: the original dict literals specified 'orders' twice; in a
        # dict literal the last occurrence wins, so only
        # {'QED': 0, 'WEIGHTED': 2} ever took effect. The dead first
        # occurrence ({'QCD': 10, 'QED': 0}) has been removed.
        dict1 = {'legs': myleglist1,
                 'model': mymodel,
                 'id': 1,
                 'required_s_channels': [],
                 'forbidden_s_channels': [],
                 'forbidden_particles': [],
                 'is_decay_chain': False,
                 'orders': {'QED': 0, 'WEIGHTED': 2},
                 'perturbation_couplings': ['QCD'],
                 'decay_chains': MG.ProcessList(),
                 'overall_orders': {}}
        dict3 = {'legs': myleglist3,
                 'model': mymodel,
                 'id': 1,
                 'required_s_channels': [],
                 'forbidden_s_channels': [],
                 'forbidden_particles': [],
                 'is_decay_chain': False,
                 'orders': {'QED': 0, 'WEIGHTED': 2},
                 'perturbation_couplings': ['QCD'],
                 'decay_chains': MG.ProcessList(),
                 'overall_orders': {}}
        # Cache on the class so later tests reuse the same objects.
        testFKSHelasObjects.mymodel = mymodel
        testFKSHelasObjects.myleglist3 = myleglist3
        testFKSHelasObjects.myproc1 = MG.Process(dict1)
        testFKSHelasObjects.myproc3 = MG.Process(dict3)
def test_fks_ppzz_in_RS(self):
    """Check born/real diagram counts for p p > z z in the RS model."""
    # Proton constituents (gluon plus light (anti)quarks).
    p = [21, 1, 2, 3, -1, -2, -3]
    z_leg = MG.MultiLeg({'ids': [23], 'state': True})
    p_leg = MG.MultiLeg({'ids': p, 'state': False});
    # Two proton multi-legs in, two Z legs out.
    my_multi_leglist = MG.MultiLegList(
        [copy.copy(leg) for leg in [p_leg] * 2]
        + MG.MultiLegList([z_leg, z_leg]))
    mymodel = import_ufo.import_model('RS')
    my_process_definition = MG.ProcessDefinition({
        'orders': {'WEIGHTED': 4},
        'legs': my_multi_leglist,
        'perturbation_couplings': ['QCD'],
        'NLO_mode': 'real',
        'model': mymodel})
    my_process_definitions = MG.ProcessDefinitionList(
        [my_process_definition])
    my_multi_process = fks_base.FKSMultiProcess(
        {'process_definitions': my_process_definitions})
    # Check diagram counts at the amplitude level.
    for born in my_multi_process['born_processes']:
        born_pdg_list = [l['id'] for l in born.born_proc['legs']]
        if born_pdg_list[0] == 21:
            # gg initiated
            self.assertEqual(len(born.born_amp['diagrams']), 1)
            for amp in born.real_amps:
                if amp.pdgs[0] != 21 or amp.pdgs[1] != 21:
                    self.assertEqual(len(amp.amplitude['diagrams']), 12)
                else:
                    self.assertEqual(len(amp.amplitude['diagrams']), 4)
        else:
            # qq initiated
            self.assertEqual(len(born.born_amp['diagrams']), 4)
            for amp in born.real_amps:
                self.assertEqual(len(amp.amplitude['diagrams']), 12)
    # Same counts must survive the helas matrix-element construction.
    my_helas_mp = fks_helas.FKSHelasMultiProcess(my_multi_process,
                                                 gen_color=False)
    for born in my_helas_mp['matrix_elements']:
        born_pdg_list = [
            l['id'] for l in
            born.born_matrix_element['base_amplitude']['process']['legs']]
        if born_pdg_list[0] == 21:
            # gg initiated
            self.assertEqual(len(born.born_matrix_element['diagrams']), 1)
            for real in born.real_processes:
                pdgs = [
                    l['id'] for l in
                    real.matrix_element['base_amplitude']['process']['legs']]
                if pdgs[0] != 21 or pdgs[1] != 21:
                    self.assertEqual(len(real.matrix_element['diagrams']), 12)
                else:
                    self.assertEqual(len(real.matrix_element['diagrams']), 4)
        else:
            # qq initiated
            self.assertEqual(len(born.born_matrix_element['diagrams']), 4)
            for real in born.real_processes:
                self.assertEqual(len(real.matrix_element['diagrams']), 12)
def setUp(self):
    """Create the full model from scratch and build the MG4 model files."""
    CheckFileCreate.clean_files(self)
    # Always rebuild from the UFO files (no pickle shortcut).
    sm_model = import_ufo.import_model('sm')
    export_v4.UFO_model_to_mg4(sm_model, self.output_path).build()
def setUp(self):
    """Instantiate a parameter-initialised SM model, used by the
    non-flat phase-space generator test."""
    sm_dir = pjoin(MG5DIR, 'models', 'sm')
    # Real-mass scheme throughout; prefixed parameter names.
    model = import_ufo.import_model(sm_dir, prefix=True,
                                    complex_mass_scheme=False)
    model.pass_particles_name_in_mg_default()
    model.set_parameters_and_couplings(
        param_card=pjoin(sm_dir, 'restrict_default.dat'),
        complex_mass_scheme=False)
    self.model = model
def load_IOTestsAcceptance(self):
    """Load the models and exporters if necessary, then register IOTests."""
    # Only build the (expensive) model/exporter fixtures once per class.
    if not hasattr(self, 'models') or \
       not hasattr(self, 'fortran_models') or \
       not hasattr(self, 'loop_exporters'):
        self.models = {
            'loop_sm': import_ufo.import_model('loop_sm')}
        self.fortran_models = {
            'fortran_model': helas_call_writers.FortranUFOHelasCallWriter(
                self.models['loop_sm'])}
        # Two Fortran standalone MadLoop exporters: default and optimized.
        self.loop_exporters = {
            'default': loop_exporters.LoopProcessExporterFortranSA(
                _mgme_file_path, _proc_file_path,
                {'clean': False, 'complex_mass': False,
                 'export_format': 'madloop', 'mp': True,
                 'loop_dir': _loop_file_path,
                 'cuttools_dir': _cuttools_file_path,
                 'fortran_compiler': 'gfortran',
                 'output_dependencies': 'external',
                 'SubProc_prefix': '',
                 'compute_color_flows': False}),
            'optimized': loop_exporters.LoopProcessOptimizedExporterFortranSA(
                _mgme_file_path, _proc_file_path,
                {'clean': False, 'complex_mass': False,
                 'export_format': 'madloop', 'mp': True,
                 'loop_dir': _loop_file_path,
                 'cuttools_dir': _cuttools_file_path,
                 'fortran_compiler': 'gfortran',
                 'output_dependencies': 'external',
                 'SubProc_prefix': '',
                 'compute_color_flows': False})}
    # d u~ > mu- vmx g
    self.addIOTestsForProcess(testName='dux_mumvmxg',
                              testFolder='long_ML_SMQCD',
                              particles_ids=[1, -2, 13, -14, 21],
                              exporters=['default', 'optimized'],
                              orders={'QCD': 1, 'QED': 2})
    # g g > w- t b~ Single top (long but really includes everything)
    self.addIOTestsForProcess(testName='gg_wmtbx',
                              testFolder='long_ML_SMQCD',
                              particles_ids=[21, 21, -24, 6, -5],
                              exporters=['default', 'optimized'],
                              orders={'QCD': 2, 'QED': 1})
def setUp(self):
    """Prepare a restricted SM model and the ParamCardWriter under test."""
    # Load the full SM, then apply the restriction card on top of it.
    restricted = import_ufo.RestrictModel(
        import_ufo.import_model('sm-full'))
    self.restrict_file = os.path.join(_file_path, os.path.pardir,
                                      'input_files', 'restrict_sm.dat')
    restricted.restrict_model(self.restrict_file)
    self.model = restricted
    # Initialise the writer on the restricted model, writing to a buffer.
    self.writter = writter.ParamCardWriter(self.model)
    self.content = StringIO.StringIO()
    self.writter.define_output_file(self.content)
    # Drop the header so tests only see the card body.
    self.content.truncate(0)
def setUp(self):
    """Create the full model from scratch and build the MG4 model files."""
    CheckFileCreate.setUp(self)
    # Keep a copy of the Template make_opts around for the test run.
    os.system('cp %s %s' % (pjoin(MG5DIR, 'Template', 'LO',
                                  'Source', 'make_opts'), '/tmp'))
    CheckFileCreate.clean_files(self)
    # Always rebuild from the UFO files (no pickle shortcut).
    sm_model = import_ufo.import_model('sm')
    export_v4.UFO_model_to_mg4(sm_model, self.output_path).build()
def setUp(self):
    """Load the model and the optimized loop exporter shared by these
    IOTests."""
    if not hasattr(self, 'model'):
        self.model = import_ufo.import_model('loop_qcd_qed_sm-full')
    if not hasattr(self, 'exporter'):
        # Exporter configuration for standalone MadLoop output.
        exporter_options = {'clean': False,
                            'complex_mass': False,
                            'export_format': 'madloop',
                            'mp': True,
                            'loop_dir': _loop_file_path,
                            'cuttools_dir': _cuttools_file_path,
                            'fortran_compiler': 'gfortran',
                            'output_dependencies': 'external'}
        self.exporter = \
            loop_exporters.LoopProcessOptimizedExporterFortranSA(
                _mgme_file_path, _proc_file_path, exporter_options)
def setUp(self):
    """Load the model and the optimized loop exporter shared by these
    IOTests."""
    if not hasattr(self, 'model'):
        self.model = import_ufo.import_model('loop_qcd_qed_sm-full')
    if not hasattr(self, 'exporter'):
        # Exporter configuration for standalone MadLoop output.
        opts = {'clean': False, 'complex_mass': False,
                'export_format': 'madloop', 'mp': True,
                'loop_dir': _loop_file_path,
                'cuttools_dir': _cuttools_file_path,
                'fortran_compiler': 'gfortran',
                'output_dependencies': 'external'}
        self.exporter = \
            loop_exporters.LoopProcessOptimizedExporterFortranSA(
                _mgme_file_path, _proc_file_path, opts)
def load_IOTestsUnit(self):
    """Load the models and exporters if necessary, then register IOTests."""
    # Only build the (expensive) model fixtures once per class.
    if not hasattr(self, 'models') or \
       not hasattr(self, 'fortran_models') or \
       not hasattr(self, 'loop_exporters'):
        self.models = {
            'loop_sm': import_ufo.import_model('loop_sm')}
        self.fortran_models = {
            'fortran_model': helas_call_writers.FortranUFOHelasCallWriter(
                self.models['loop_sm'])}
    # g g > t t~
    self.addIOTestsForProcess(testName='gg_ttx',
                              testFolder='short_ML_SMQCD',
                              particles_ids=[21, 21, 6, -6],
                              exporters=['default', 'optimized'],
                              orders={'QCD': 2, 'QED': 0})
    # d d > t t~ (only the proc files for this one)
    self.addIOTestsForProcess(testName='ddx_ttx',
                              testFolder='short_ML_SMQCD',
                              particles_ids=[1, -1, 6, -6],
                              exporters=['default', 'optimized'],
                              orders={'QCD': 2, 'QED': 0},
                              files_to_check=IOTests.IOTest.proc_files)
    # And the loop induced g g > h h for good measure
    # Use only one exporter only here
    self.addIOTestsForProcess(testName='gg_hh',
                              testFolder='short_ML_SMQCD_LoopInduced',
                              particles_ids=[21, 21, 25, 25],
                              exporters='default',
                              orders={'QCD': 2, 'QED': 2})
def test_sort_fks_proc(self):
    """Tests that two FKSProcesses with different legs order in the input
    process/amplitude are returned as equal.
    Check also that born_proc has
    'legs_with_decays' = madgraph.base_objects.LegList()"""
    model = import_ufo.import_model("sm")

    def build_leglist(final_ids):
        # e+ e- initial state followed by the given final-state ids.
        leglist = MG.LegList()
        leglist.append(MG.Leg({"id": -11, "state": False}))
        leglist.append(MG.Leg({"id": 11, "state": False}))
        for pid in final_ids:
            leglist.append(MG.Leg({"id": pid, "state": True}))
        return leglist

    # Same process, sorted (u u~ g) and unsorted (u g u~) final state.
    myleglist_s = build_leglist([2, -2, 21])
    myleglist_u = build_leglist([2, 21, -2])
    # Define (un)sorted processes:
    proc_s = MG.Process({"model": model, "legs": myleglist_s,
                         "orders": {"QED": 2, "QCD": 1}})
    proc_u = MG.Process({"model": model, "legs": myleglist_u,
                         "orders": {"QED": 2, "QCD": 1}})
    # Define (un)sorted amplitudes:
    amp_s = diagram_generation.Amplitude(proc_s)
    amp_u = diagram_generation.Amplitude(proc_u)
    # FKSProcess built from a Process must sort the legs.
    fks_p_s = fks_base.FKSProcess(proc_s)
    fks_p_u = fks_base.FKSProcess(proc_u)
    self.assertEqual(fks_p_s.born_proc, fks_p_u.born_proc)
    self.assertEqual(fks_p_s.born_amp, fks_p_u.born_amp)
    # Same when built from an Amplitude.
    fks_a_s = fks_base.FKSProcess(amp_s)
    fks_a_u = fks_base.FKSProcess(amp_u)
    self.assertEqual(fks_a_s.born_proc, fks_a_u.born_proc)
    self.assertEqual(fks_a_s.born_amp, fks_a_u.born_amp)
    self.assertEqual(fks_a_s.born_proc["legs_with_decays"], MG.LegList())
    self.assertEqual(fks_a_u.born_proc["legs_with_decays"], MG.LegList())
def load_model(self, name, use_mg_default, complex_mass=False):
    """Load the model *name* and install it on the embedded MG5 command.

    Args:
        name: model name (or path) to import.
        use_mg_default: if True, rename particles to the MG default
            convention.
        complex_mass: if True, switch the model to the complex-mass scheme.
    """
    # (Removed the unused `loop` flag and `model_path` alias.)
    logger.info("detected model: %s. Loading..." % name)
    # decay=False: no decay-width information is needed here.
    base_model = import_ufo.import_model(name, decay=False)
    if use_mg_default:
        base_model.pass_particles_name_in_mg_default()
    if complex_mass:
        base_model.change_mass_to_complex_scheme()
    self.model = base_model
    self.mg5cmd._curr_model = self.model
    self.mg5cmd.process_model()
def do_generate_param_card(modelname, output):
    """Do the work for generating param_card.dat to <output> given the
    model name.

    Kept in its own function so it can run in a separate process: the
    imports below are sensitive to the model changing, so each call needs
    a fresh Python interpreter.
    """
    # Make the MG5 directory importable.
    # See: http://stackoverflow.com/questions/279237/import-a-module-from-a-relative-path/6098238#6098238
    mg5_root = os.path.realpath(os.path.abspath(options.mg5_dir))
    if mg5_root not in sys.path:
        sys.path.insert(0, mg5_root)
    import madgraph.core.base_objects as base_objects
    import models.import_ufo as import_ufo
    import models.write_param_card as write_param_card
    # Import the model, then write its parameter card to <output>.
    writer = write_param_card.ParamCardWriter(
        import_ufo.import_model(modelname))
    writer.define_output_file(output)
    writer.write_card()
def load_model(self, name, use_mg_default, complex_mass=False):
    """Load the model *name* and install it on the embedded MG5 command.

    Args:
        name: model name (or path) to import.
        use_mg_default: if True, rename particles to the MG default
            convention.
        complex_mass: if True, switch the model to the complex-mass scheme.
    """
    # (Removed the unused `loop` flag and `model_path` alias.)
    logger.info('detected model: %s. Loading...' % name)
    # decay=False: no decay-width information is needed here.
    base_model = import_ufo.import_model(name, decay=False)
    if use_mg_default:
        base_model.pass_particles_name_in_mg_default()
    if complex_mass:
        base_model.change_mass_to_complex_scheme()
    self.model = base_model
    self.mg5cmd._curr_model = self.model
    self.mg5cmd.process_model()
def setUp(self):
    """Setup the model and the overhead common to all tests."""
    # Loop SM with explicit parameter/coupling values taken from the
    # default restriction card; real-mass scheme throughout.
    model_with_params_set = import_ufo.import_model(
        pjoin(MG5DIR, 'models', 'loop_sm'),
        prefix=True, complex_mass_scheme=False)
    model_with_params_set.pass_particles_name_in_mg_default()
    model_with_params_set.set_parameters_and_couplings(
        param_card=pjoin(MG5DIR, 'models', 'loop_sm',
                         'restrict_default.dat'),
        complex_mass_scheme=False)
    self.model = model_with_params_set
    # Exporter for the 'colorful' subtraction-current set.
    self.current_exporter = subtraction.SubtractionCurrentExporter(
        self.model, export_dir=None, current_set='colorful')
    # NOTE(review): the walker CLASS (not an instance) is stored here --
    # confirm callers instantiate or use it as a class.
    self.walker = walkers.FinalRescalingNLOWalker
    # Reduced process legs: d d~ (initial) > a d d~ g g (final).
    legs = base_objects.LegList([
        base_objects.Leg(
            {'id': 1, 'state': base_objects.Leg.INITIAL, 'number': 1}),
        base_objects.Leg(
            {'id': -1, 'state': base_objects.Leg.INITIAL, 'number': 2}),
        base_objects.Leg(
            {'id': 22, 'state': base_objects.Leg.FINAL, 'number': 3}),
        base_objects.Leg(
            {'id': 1, 'state': base_objects.Leg.FINAL, 'number': 4}),
        base_objects.Leg(
            {'id': -1, 'state': base_objects.Leg.FINAL, 'number': 5}),
        base_objects.Leg(
            {'id': 21, 'state': base_objects.Leg.FINAL, 'number': 6}),
        base_objects.Leg(
            {'id': 21, 'state': base_objects.Leg.FINAL, 'number': 7}),
    ])
    self.reduced_process = base_objects.Process({
        'legs': legs,
        'model': self.model})
def load_IOTestsUnit(self):
    """Load the models and exporters if necessary, then register IOTests."""
    # Only build the (expensive) model fixtures once per class.
    if not hasattr(self, 'models') or \
       not hasattr(self, 'fortran_models') or \
       not hasattr(self, 'loop_exporters'):
        self.models = {
            'loop_sm': import_ufo.import_model('loop_sm')}
        self.fortran_models = {
            'fortran_model': helas_call_writers.FortranUFOHelasCallWriter(
                self.models['loop_sm'])}
    # g g > t t~
    self.addIOTestsForProcess(testName='gg_ttx',
                              testFolder='short_ML_SMQCD',
                              particles_ids=[21, 21, 6, -6],
                              exporters=['default', 'optimized'],
                              orders={'QCD': 2, 'QED': 0})
    # d d > t t~ (only the proc files for this one)
    self.addIOTestsForProcess(testName='ddx_ttx',
                              testFolder='short_ML_SMQCD',
                              particles_ids=[1, -1, 6, -6],
                              exporters=['default', 'optimized'],
                              orders={'QCD': 2, 'QED': 0},
                              files_to_check=IOTests.IOTest.proc_files)
    # And the loop induced g g > h h for good measure
    # Use only one exporter only here
    self.addIOTestsForProcess(testName='gg_hh',
                              testFolder='short_ML_SMQCD_LoopInduced',
                              particles_ids=[21, 21, 25, 25],
                              exporters='default',
                              orders={'QCD': 2, 'QED': 2})
def setUp(self):
    """Set up decay model: load the full SM and wrap it in a reader."""
    full_sm_path = import_ufo.find_ufo_path('sm')
    self.base_model = import_ufo.import_model(full_sm_path)
    self.model_reader = model_reader.ModelReader(self.base_model)
def setUp(self):
    """Import the sextet diquark model used by these tests."""
    self.base_model = import_ufo.import_model(
        import_ufo.find_ufo_path('sextet_diquarks'))
def test_get_nflav_sm_nomasses(self):
    """Tests the get_nflav function for the SM with the no_masses
    restriction."""
    restricted_path = import_ufo.find_ufo_path('sm') + '-no_masses'
    model = import_ufo.import_model(restricted_path)
    # With all light-quark masses removed, five flavours are massless.
    self.assertEqual(model.get_nflav(), 5)
def setUp(self):
    """Build, once and cache as class attributes, the u g > u g and
    d d~ > u u~ processes with QCD and QED perturbation couplings.

    Fixes applied: the original process dict literals spelled the
    'orders' key twice ({'QCD': 10, 'QED': 0} followed later by
    {'QED': 0, 'WEIGHTED': 2}); in a Python dict literal only the last
    occurrence survives, so the first was dead code and has been
    removed. The four near-identical dicts are now produced by one
    helper.
    """
    if not hasattr(self, 'mymodel') or \
       not hasattr(self, 'myleglist3') or \
       not hasattr(self, 'myproc1') or \
       not hasattr(self, 'myproc3'):

        def make_leglist(specs):
            # Build a LegList from (pdg id, leg number, is_final) tuples.
            leglist = MG.LegList()
            for pdg, number, state in specs:
                leglist.append(MG.Leg(
                    {'id': pdg, 'number': number, 'state': state}))
            return leglist

        # PROCESS: u g > u g
        myleglist1 = make_leglist(
            [(2, 1, False), (21, 2, False), (2, 3, True), (21, 4, True)])
        # PROCESS: d d~ > u u~
        myleglist3 = make_leglist(
            [(1, 1, False), (-1, 2, False), (2, 3, True), (-2, 4, True)])

        mymodel = import_ufo.import_model('sm')

        def make_proc_dict(leglist, pert_couplings):
            # One process-definition dict; only the perturbation
            # couplings and the leg list differ between the four cases.
            return {
                'legs': leglist,
                'orders': {'QED': 0, 'WEIGHTED': 2},
                'model': mymodel,
                'id': 1,
                'required_s_channels': [],
                'forbidden_s_channels': [],
                'forbidden_particles': [],
                'is_decay_chain': False,
                'perturbation_couplings': pert_couplings,
                'decay_chains': MG.ProcessList(),
                'overall_orders': {}}

        # Cache on the class so the (expensive) model import runs once.
        testFKSHelasObjects.mymodel = mymodel
        testFKSHelasObjects.myleglist3 = myleglist3
        testFKSHelasObjects.myproc1 = MG.Process(
            make_proc_dict(myleglist1, ['QCD']))
        testFKSHelasObjects.myproc3 = MG.Process(
            make_proc_dict(myleglist3, ['QCD']))
        testFKSHelasObjects.myproc1_qed = MG.Process(
            make_proc_dict(myleglist1, ['QED']))
        testFKSHelasObjects.myproc3_qed = MG.Process(
            make_proc_dict(myleglist3, ['QED']))
def setUp(self):
    """Import the default 'sm' UFO model shared by the tests."""
    model = import_ufo.import_model('sm')
    self.base_model = model
def setUp(self):
    """Load the MSSM model used by all tests of this case."""
    # Renamed the local from 'sm_path': it holds the path to the MSSM
    # model, not the SM, and the old name was misleading.
    mssm_path = import_ufo.find_ufo_path('mssm')
    self.base_model = import_ufo.import_model(mssm_path)
def test_run_python_matrix_element(self):
    """Test a complete running of a Python matrix element without
    writing any files.

    Generates e+ e- > a a a in the SM, exports the aloha routines and
    the matrix element as in-memory Python source, exec's them into this
    process, and checks |M|^2 at one fixed phase-space point.
    """
    # Import the SM
    sm_path = import_ufo.find_ufo_path('sm')
    model = import_ufo.import_model(sm_path)

    # External legs for e+ e- > a a a ('state' False = initial, True = final).
    myleglist = base_objects.LegList()
    myleglist.append(base_objects.Leg({'id':-11, 'state':False, 'number': 1}))
    myleglist.append(base_objects.Leg({'id':11, 'state':False, 'number': 2}))
    myleglist.append(base_objects.Leg({'id':22, 'state':True, 'number': 3}))
    myleglist.append(base_objects.Leg({'id':22, 'state':True, 'number': 4}))
    myleglist.append(base_objects.Leg({'id':22, 'state':True, 'number': 5}))

    myproc = base_objects.Process({'legs':myleglist, 'model':model})
    myamplitude = diagram_generation.Amplitude({'process': myproc})
    mymatrixelement = helas_objects.HelasMatrixElement(myamplitude)

    # Create only the needed aloha routines
    wanted_lorentz = mymatrixelement.get_used_lorentz()
    aloha_model = create_aloha.AbstractALOHAModel(model.get('name'))
    aloha_model.compute_subset(wanted_lorentz)

    # Write out the routines in Python; the generated source's local
    # 'import wavefunctions' is rewritten to the installed module path.
    aloha_routines = []
    for routine in aloha_model.values():
        aloha_routines.append(routine.write(output_dir=None,
                                            language='Python').
                              replace('import wavefunctions',
                                      'import aloha.template_files.wavefunctions as wavefunctions'))

    # Define the routines to be available globally (the generated matrix
    # element source below refers to them by bare name).
    for routine in aloha_routines:
        exec(routine, globals())

    # Write the matrix element(s) in Python
    mypythonmodel = helas_call_writers.PythonUFOHelasCallWriter(model)
    exporter = export_python.ProcessExporterPython(mymatrixelement,
                                                   mypythonmodel)
    matrix_methods = exporter.get_python_matrix_methods()

    # Calculate parameters and couplings
    full_model = model_reader.ModelReader(model)
    full_model.set_parameters_and_couplings()

    # Define a momentum: one (E, px, py, pz, m) row per external leg.
    p = [[0.5000000e+03, 0.0000000e+00, 0.0000000e+00, 0.5000000e+03,
          0.0000000e+00],
         [0.5000000e+03, 0.0000000e+00, 0.0000000e+00, -0.5000000e+03,
          0.0000000e+00],
         [0.4585788e+03, 0.1694532e+03, 0.3796537e+03, -0.1935025e+03,
          0.6607249e-05],
         [0.3640666e+03, -0.1832987e+02, -0.3477043e+03, 0.1063496e+03,
          0.7979012e-05],
         [0.1773546e+03, -0.1511234e+03, -0.3194936e+02, 0.8715287e+02,
          0.1348699e-05]]

    # Evaluate the matrix element for the given momenta
    answer = 1.39189717257175028e-007
    for process in matrix_methods.keys():
        # Define Python matrix element for process.  The exec/eval pair
        # deliberately shares this frame's locals ('p', 'full_model').
        exec(matrix_methods[process])
        # Calculate the matrix element for the momentum p
        # (relative tolerance 1e-6 against the reference value).
        value = eval("Matrix_0_epem_aaa().smatrix(p, full_model)")
        self.assertTrue(abs(value-answer)/answer < 1e-6,
                        "Value is: %.9e should be %.9e" % \
                        (abs(value), answer))
def setUp(self):
    """Cache the loop_sm model on the class the first time it is needed."""
    if hasattr(self, 'mymodel'):
        return
    TestGenerateLoopFKS.mymodel = import_ufo.import_model('loop_sm')
def test_run_python_matrix_element(self):
    """Test a complete running of a Python matrix element without
    writing any files: generate e+ e- > a a a, export it as in-memory
    Python source and check |M|^2 at one fixed phase-space point."""
    model = import_ufo.import_model(import_ufo.find_ufo_path('sm'))

    # External legs for e+ e- > a a a as (pdg, state, number) triples.
    leg_specs = ((-11, False, 1), (11, False, 2),
                 (22, True, 3), (22, True, 4), (22, True, 5))
    myleglist = base_objects.LegList()
    for pdg, state, number in leg_specs:
        myleglist.append(base_objects.Leg(
            {'id': pdg, 'state': state, 'number': number}))

    myproc = base_objects.Process({'legs': myleglist, 'model': model})
    myamplitude = diagram_generation.Amplitude({'process': myproc})
    mymatrixelement = helas_objects.HelasMatrixElement(myamplitude)

    # Produce only the aloha routines this matrix element actually uses.
    aloha_model = create_aloha.AbstractALOHAModel(model.get('name'))
    aloha_model.compute_subset(mymatrixelement.get_used_lorentz())

    # Render the routines as Python source, redirecting the generated
    # 'import wavefunctions' to the installed helper module.
    aloha_routines = [
        routine.write(output_dir=None, language='Python').replace(
            'import wavefunctions',
            'import aloha.template_files.wavefunctions as wavefunctions')
        for routine in aloha_model.values()]

    # The generated matrix element refers to these helpers by bare name,
    # so exec them into the module globals.
    for routine in aloha_routines:
        exec(routine, globals())

    # Emit the matrix element itself as Python source.
    mypythonmodel = helas_call_writers.PythonUFOHelasCallWriter(model)
    exporter = export_python.ProcessExporterPython(mymatrixelement,
                                                   mypythonmodel)
    matrix_methods = exporter.get_python_matrix_methods()

    # Numerical values for all parameters and couplings.
    full_model = model_reader.ModelReader(model)
    full_model.set_parameters_and_couplings()

    # Fixed phase-space point: one (E, px, py, pz, m) row per leg.
    p = [[0.5000000e+03, 0.0000000e+00, 0.0000000e+00, 0.5000000e+03,
          0.0000000e+00],
         [0.5000000e+03, 0.0000000e+00, 0.0000000e+00, -0.5000000e+03,
          0.0000000e+00],
         [0.4585788e+03, 0.1694532e+03, 0.3796537e+03, -0.1935025e+03,
          0.6607249e-05],
         [0.3640666e+03, -0.1832987e+02, -0.3477043e+03, 0.1063496e+03,
          0.7979012e-05],
         [0.1773546e+03, -0.1511234e+03, -0.3194936e+02, 0.8715287e+02,
          0.1348699e-05]]

    answer = 1.39189717257175028e-007
    for process in matrix_methods:
        # Define the generated Matrix class in this frame, then evaluate
        # it at the point above (eval reads 'p' and 'full_model' here).
        exec(matrix_methods[process])
        value = eval("Matrix_0_epem_aaa().smatrix(p, full_model)")
        self.assertTrue(abs(value-answer)/answer < 1e-6,
                        "Value is: %.9e should be %.9e" % \
                        (abs(value), answer))
def setUp(self):
    """Import the LoopSMTest model, define the p p > a j process, and
    generate both the LO and the NLO(QCD) contributions in temporary
    directories, keeping the contributions and the Born ME accessors
    for use by the tests."""
    self.mymodel = import_ufo.import_model(
        pjoin(MG5DIR,'tests','input_files','LoopSMTest'),
        prefix=True, complex_mass_scheme = False )
    self.mymodel.pass_particles_name_in_mg_default()
    # Setting up the process p p > h j j and its subtraction
    # NOTE(review): the legs below actually spell p p > a j (one photon,
    # one jet) — confirm which of the comment or the legs is intended.
    self.mylegs = base_objects.MultiLegList([
        base_objects.MultiLeg(
            {'ids': [1,2,-1,-2,21], 'state': base_objects.Leg.INITIAL}),
        base_objects.MultiLeg(
            {'ids': [1,2,-1,-2,21], 'state': base_objects.Leg.INITIAL}),
        base_objects.MultiLeg(
            {'ids': [22], 'state': base_objects.Leg.FINAL}),
        base_objects.MultiLeg(
            {'ids': [21,1,-1,2,-2], 'state': base_objects.Leg.FINAL})
    ])
    self.myprocdef = base_objects.ProcessDefinition({
        'legs': self.mylegs,
        'model': self.mymodel,
        'split_orders': ['QCD','QED']
    })
    # The general accessor with the Born ME registered
    self.all_born_MEs_accessor = accessors.MEAccessorDict()
    # Generate only Born LO contributions
    with misc.TMP_directory(debug=False) as tmp_path:
        # Generate the output for this.
        self.madgraph_cmd = cmd.MasterCmd(main='MadGraph')
        self.madgraph_cmd._curr_model = self.mymodel
        self.madgraph_cmd.reset_interface_before_new_generation()
        self.madgraph_cmd._export_dir = pjoin(tmp_path,'ME7ContributionTest_LO')
        # Generate contributions
        generation_options = {'ME7_definition': True,
                              'diagram_filter': False,
                              'LO': True,
                              'NNLO': [],
                              'NNNLO': [],
                              'optimize': False,
                              'NLO': [],
                              'loop_induced': [],
                              'ignore_contributions' : [],
                              'beam_types': ['auto', 'auto'],
                              'loop_filter' : None,
                              'process_definitions' : {}}
        self.madgraph_cmd.add_contributions(self.myprocdef, generation_options)
        LO_contributions = self.madgraph_cmd._curr_contribs
        LO_contributions.apply_method_to_all_contribs(
            'generate_amplitudes', log='Generate diagrams for')
        self.exporter = export_ME7.ME7Exporter(
            self.madgraph_cmd, False, group_subprocesses=True )
        self.exporter.pass_information_from_cmd(self.madgraph_cmd)
        self.exporter.copy_template(self.madgraph_cmd._curr_model)
        self.exporter.export(True, args=[])
        # We want to finalize and output the model for the Born, because need to
        # register its MEs in the accessor.
        self.exporter.finalize(['nojpeg'], self.madgraph_cmd.history)
        self.LO_contributions = self.madgraph_cmd._curr_contribs
        # Add the Born ME accessors to the dictionary
        self.LO_contributions[0].add_ME_accessors(
            self.all_born_MEs_accessor, pjoin(tmp_path,'ME7ContributionTest_LO'))
    # Generate all NLO contributions (same pipeline, with NLO=['QCD'];
    # a fresh interface instance is created on purpose).
    with misc.TMP_directory(debug=False) as tmp_path:
        # Generate the output for this.
        self.madgraph_cmd = cmd.MasterCmd(main='MadGraph')
        self.madgraph_cmd._curr_model = self.mymodel
        self.madgraph_cmd.reset_interface_before_new_generation()
        # NOTE(review): the export dir is still named ..._LO here —
        # presumably harmless since it lives in a throwaway tmp dir,
        # but confirm it is not a copy-paste slip.
        self.madgraph_cmd._export_dir = pjoin(tmp_path,'ME7ContributionTest_LO')
        # Generate contributions
        generation_options = {'ME7_definition': True,
                              'diagram_filter': False,
                              'LO': True,
                              'NNLO': [],
                              'NNNLO': [],
                              'optimize': False,
                              'NLO': ['QCD'],
                              'loop_induced': [],
                              'ignore_contributions' : [],
                              'beam_types': ['auto', 'auto'],
                              'loop_filter' : None,
                              'process_definitions' : {},
                              }
        self.madgraph_cmd.add_contributions(self.myprocdef, generation_options)
        self.madgraph_cmd._curr_contribs.apply_method_to_all_contribs(
            'generate_amplitudes', log='Generate diagrams for')
        self.exporter = export_ME7.ME7Exporter(
            self.madgraph_cmd, False, group_subprocesses=True )
        self.exporter.pass_information_from_cmd(self.madgraph_cmd)
        self.exporter.copy_template(self.madgraph_cmd._curr_model)
        self.exporter.export(True, args=[])
        # The export above was enough to have fully functional contributions to test
        # self.exporter.finalize(['nojpeg'], self.madgraph_cmd.history)
        self.NLO_contributions = self.madgraph_cmd._curr_contribs
def setUp(self):
    """Load the sextet diquarks model used by this test case."""
    model_dir = import_ufo.find_ufo_path("sextet_diquarks")
    self.base_model = import_ufo.import_model(model_dir)
def setUp(self):
    """Load the MSSM (SLHA2 conventions) model used by all tests."""
    # Renamed the local from 'sm_path': it points at the MSSM_SLHA2
    # model, not the SM, and the old name was misleading.
    mssm_path = import_ufo.find_ufo_path('MSSM_SLHA2')
    self.base_model = import_ufo.import_model(mssm_path)
def finalize(self, flaglist, interface_history):
    """Distribute and organize the finalization of all contributions.

    Finalizes each contribution in sorted order, writes the shared ME7
    MODEL, copies cards and the UFO model into the process output,
    builds the ME accessors and integrands, and dumps everything via
    dump_ME7 so a later ME7Interface launch can reconstruct the run.
    Everything after the bare `return` is deliberately unreachable test
    scaffolding (see the WARNING banner below).

    :param flaglist: finalization flags forwarded to each contribution
                     (e.g. ['nojpeg']).
    :param interface_history: command history forwarded to each
                              contribution's finalize.
    """
    # Make sure contributions are sorted at this stage
    # It is important to act on LO contributions first, then NLO, then etc...
    # because ME and currents must be added to the ME_accessor in order since there
    # are look-up operations on it in-between
    self.contributions.sort_contributions()

    # Save all the global couplings to write out afterwards
    global_wanted_couplings = []

    # Forward the finalize request to each contribution
    for contrib in self.contributions:
        # Must clean the aloha Kernel before each aloha export for each contribution
        aloha.aloha_lib.KERNEL.clean()
        wanted_couplings_to_add_to_global = contrib.finalize(
            flaglist=flaglist, interface_history=interface_history)
        global_wanted_couplings.extend(wanted_couplings_to_add_to_global)

    # Generate the global ME7 MODEL (skipped when no contribution
    # requested any coupling).
    if global_wanted_couplings:
        output_dir = pjoin(self.export_dir, 'Source', 'MODEL')
        # Writing out the model common to all the contributions that can share it
        model_export_options = {
            'complex_mass': self.options['complex_mass_scheme'],
            'export_format': 'madloop',  # So as to have access to lha_read_mp.f
            'mp': True,
            'loop_induced': False}
        model_builder = export_v4.UFO_model_to_mg4(self.model, output_dir,
                                                   model_export_options)
        model_builder.build(global_wanted_couplings)

    # Now possibly add content to the pool of global ME7 resources before removing
    # superfluous files and linking to necessary global ME7 resources
    for contrib in self.contributions:
        contrib.add_content_to_global_ME7_resources(self.export_dir)
        contrib.remove_superfluous_content()
        contrib.link_global_ME7_resources(self.export_dir)

    # Create the run_card
    self.create_run_card()

    # Add the cards generated in MODEL to the Cards directory
    self.copy_model_resources()

    # Now link the Sources files within each contribution
    for contrib in self.contributions:
        contrib.make_model_symbolic_link()

    # Copy the UFO model to the global ME7 resources Source directory
    ME7_ufo_path = pjoin(
        self.export_dir, 'Source',
        'ME7_UFO_model_%s' % os.path.basename(self.model.get('modelpath')))
    shutil.copytree(self.model.get('modelpath'), ME7_ufo_path)
    # And clear compiled files in it
    for path in misc.glob(pjoin(ME7_ufo_path, '*.pkl')) + misc.glob(
            pjoin(ME7_ufo_path, '*.pyc')):
        os.remove(path)

    # Now generate all the ME accessors and integrand.
    # Notice that some of the information provided here (RunCard, ModelReader,
    # root_path, etc...) can and will be overwritten by the actualized values
    # when the ME7Interface will be launched.
    # We provide it here just so as to be complete.

    # Obtain all the Accessors to the Matrix Element and currents made
    # available in this process output
    all_MEAccessors = accessors.MEAccessorDict()
    for contrib in self.contributions:
        contrib.add_ME_accessors(all_MEAccessors, self.export_dir)

    # Now make sure that the integrated counterterms without any contribution host
    # indeed have a non-existent reduced process.
    contributions.Contribution_V.remove_counterterms_with_no_reduced_process(
        all_MEAccessors,
        self.integrated_counterterms_refused_from_all_contribs)

    # Check there is none left over after this filtering
    if len(self.integrated_counterterms_refused_from_all_contribs) > 0:
        # Generator is fine here: consumed once by the '\n'.join below.
        counterterm_list = (
            ct['integrated_counterterm'].nice_string()
            for ct in self.integrated_counterterms_refused_from_all_contribs)
        # These integrated counterterms should in principle been added
        msg = "The following list of integrated counterterm are in principle non-zero"
        msg += " but could not be included in any contributions generated:\n"
        msg += '\n'.join(counterterm_list)
        msg += "\nResults generated from that point on are likely to be physically wrong."
        # Under 'python -O' this becomes a hard error; in debug runs it
        # only logs so development outputs can still be inspected.
        if __debug__:
            logger.critical(msg)
        else:
            raise MadGraph5Error(msg)

    # Now generate all the integrands from the contributions exported
    all_integrands = []
    run_card = banner_mod.RunCardME7(
        pjoin(self.export_dir, 'Cards', 'run_card.dat'))

    # We might want to recover whether prefix was used when importing the model
    # and whether the MG5 name conventions was used. But this is a detail that
    # can easily be fixed later.
    modelReader_instance = import_ufo.import_model(
        pjoin(self.export_dir, 'Source', 'ME7_UFO_model_') + self.model.get('name'),
        prefix=True,
        complex_mass_scheme=self.options['complex_mass_scheme'])
    modelReader_instance.pass_particles_name_in_mg_default()
    modelReader_instance.set_parameters_and_couplings(
        param_card=pjoin(self.export_dir, 'Cards', 'param_card.dat'),
        scale=run_card['scale'],
        complex_mass_scheme=self.options['complex_mass_scheme'])

    ME7_options = dict(self.options)
    ME7_options['me_dir'] = self.export_dir
    for contrib in self.contributions:
        all_integrands.extend(
            contrib.get_integrands(modelReader_instance, run_card,
                                   all_MEAccessors, ME7_options))

    # And finally dump ME7 output information so that all relevant objects
    # can be reconstructed for a future launch with ME7Interface.
    # Normally all the relevant information should simply be encoded in only:
    # 'all_MEAccessors' and 'all_integrands'.
    self.dump_ME7(all_MEAccessors, all_integrands)

    # Finally, for future convenience it may sometimes be desirable to already
    # compile all contributions and global ME7 resources (e.g. MODEL) as followed.
    # By default however, we don't do that and this will instead be done at the
    # launch time.
    #logger.info('Compilation of the process output.')
    #logger.info('It can be interrupted at any time,'+
    #            ' in which case it would be automatically resumed when launched.')
    #self.compile()

    # NOTE(review): everything below this return is dead code, kept on
    # purpose as a manual-testing harness (see banner). Remove the
    # return to exercise it.
    return

    ###############################################################################################
    ###
    ### WARNING THE CODE BELOW IS JUST FOR TESTING PURPOSES AND CORRESPONDS TO RUNNING THE
    ### INTEGRATION RIGHT AWAY AND NOT WITHIN THE ME7 INTERFACE>
    ###
    ###############################################################################################
    import madgraph.interface.ME7_interface as ME7_interface
    # Test the reconstruction of the ME7 output instances
    ME7_dump = save_load_object.load_from_file(
        pjoin(self.export_dir, 'MadEvent7.db'))
    all_MEAccessors = ME7_dump['all_MEAccessors']['class'].initialize_from_dump(
        ME7_dump['all_MEAccessors'], root_path=self.export_dir)
    all_integrands = [
        integrand_dump['class'].initialize_from_dump(
            integrand_dump, modelReader_instance, run_card, all_MEAccessors,
            self.options)
        for integrand_dump in ME7_dump['all_integrands']]
    model_name = ME7_dump['model_name']
    model_with_CMS = ME7_dump['model_with_CMS']

    # This is now just for gigs. Integrate that beast!
    # Of course, what should really happen is that the users starts a
    # ME7_interface, that bootstraps from the dump above and starts the
    # integration below with lunch. So this is really just for testing purposes.
    import madgraph.integrator.integrators as integrators
    integrator_naive = integrators.SimpleMonteCarloIntegrator(
        all_integrands,
        **{
            'n_iterations': 10,
            'n_points_per_iterations': 100,
            'accuracy_target': None,
            'verbosity': 1
        })
    import madgraph.integrator.pyCubaIntegrator as pyCubaIntegrator
    integrator_vegas = pyCubaIntegrator.pyCubaIntegrator(
        all_integrands,
        **{
            'algorithm': 'Vegas',
            'verbosity': 1,
            'seed': 3,
            'target_accuracy': 1.0e-3,
            'n_start': 1000,
            'n_increase': 500,
            'n_batch': 1000,
            'max_eval': 100000,
            'min_eval': 0
        })

    # Now run them all!
    for integrator in [integrator_naive, integrator_vegas]:
        xsec, error = integrator.integrate()
        logger.info("=" * 100)
        logger.info('{:^100}'.format(
            "\033[92mCross-section for process output '%s' with integrator '%s':\033[0m"
            % (self.export_dir, integrator.get_name())))
        logger.info('{:^100}'.format("\033[94m%.5e +/- %.2e [pb]\033[0m"
                                     % (xsec, error)))
        logger.info("=" * 100 + "\n")