def test_PY8Card_basic(self):
    """ Basic consistency check of a read-write of the default card."""
    # Write the default PY8 card to an in-memory buffer...
    pythia8_card_out = bannermod.PY8Card()
    out = StringIO.StringIO()
    pythia8_card_out.write(out, self.basic_PY8_template)
    # misc.sprint('WRITTEN:',out.getvalue())
    # ...then read it back into a fresh card and check round-trip equality.
    pythia8_card_read = bannermod.PY8Card()
    # Rewind
    out.seek(0)
    pythia8_card_read.read(out)
    self.assertEqual(pythia8_card_out, pythia8_card_read)
    return
    # Below are some debug lines, comment the above return to run them
    # (everything past this point is intentionally unreachable in normal runs)
    # ==========
    # Keep the following if you want to print out all parameters with
    # print_only_visible=False
    pythia8_card_read.system_set = set(
        [k.lower() for k in pythia8_card_read.keys()])
    for subrunID in pythia8_card_read.subruns.keys():
        pythia8_card_read.subruns[subrunID].system_set = \
            set([k.lower() for k in pythia8_card_read.subruns[subrunID].keys()])
    # ==========
    out = StringIO.StringIO()
    pythia8_card_read.write(out, self.basic_PY8_template)
    misc.sprint('READ:', out.getvalue())
    out = StringIO.StringIO()
    pythia8_card_read.write(out, self.basic_PY8_template,
                            print_only_visible=True)
    misc.sprint('Only visible:', out.getvalue())
def p_expression_function4(self, p):
    "expression : FUNCTION '(' expression ',' expression ',' expression ',' expression ')'"
    # NOTE: the docstring above is a PLY grammar rule -- it must not be edited.
    # Translate a 4-argument function call into its quadruple-precision form.
    p1 = p[1]
    re_groups = self.re_cmath_function.match(p1)
    if re_groups:
        # Known cmath-style function: keep its canonical name unchanged.
        p1 = re_groups.group("name")
        p[0] = p1 + '(' + p[3] + ',' + p[5] + ' , ' + p[7] + ' , ' + p[9] + ')'
    else:
        # Model-defined function: emit the 'MP_'-prefixed variant.
        if not hasattr(self, 'modelfct'):
            self.create_modelfct()
        if p1 in self.modelfct:
            if not hasattr(self.modelfct[p1], 'argstype') or not self.modelfct[p1].argstype:
                # No declared argument types: pass the expressions through as-is.
                p[0] = 'MP_' + p1 + '(' + p[3] + ',' + p[5] + ' , ' + p[7] + \
                       ' , ' + p[9] + ')'
            else:
                # Wrap each argument in the converter matching its declared type.
                # (removed leftover debug printout of `types`)
                types = [self.types_def[t] for t in self.modelfct[p1].argstype]
                p[0] = 'MP_' + p1 + '(' + types[0](p[3]) + ',' + types[1](p[5]) + \
                       ' , ' + types[2](p[7]) + ' , ' + types[3](p[9]) + ')'
        else:
            p[0] = 'MP_' + p1 + '(' + p[3] + ',' + p[5] + ' , ' + p[7] + \
                   ' , ' + p[9] + ')'
def finalize(self, matrix_element, cmdhistory, MG5options, outputflag):
    """Typically creating jpeg/HTML output/ compilation/...
    cmdhistory is the list of command used so far.
    MG5options are all the options of the main interface
    outputflags is a list of options provided when doing the output command"""
    misc.sprint('Entering PLUGIN_ProcessExporter.finalize')
    # Delegate the actual finalization work to the parent exporter.
    result = super().finalize(matrix_element, cmdhistory, MG5options, outputflag)
    return result
def __init__(self, *args, **opts):
    """Initialise the integrated soft-gluon current, printing a prominent
    warning banner about the known mismatch with the local counterterms."""
    misc.sprint("")
    misc.sprint("=======================================================================================")
    misc.sprint("Warning: the integrated Final-final soft no longer matches the definition of the locals")
    misc.sprint("=======================================================================================")
    misc.sprint("")
    super(integrated_NLO_QCD_soft_gluon, self).__init__(*args, **opts)
    # This current does not support the assignment of fixed helicities.
    self.supports_helicity_assignment = False
def generate_subprocess_directory(self, subproc_group, fortran_model, me=None):
    """Trace the call, then let the parent exporter build the subprocess
    directory for *subproc_group* using *fortran_model* (and optional *me*)."""
    misc.sprint('create the directory')
    parent = super(MY_CPP_Standalone, self)
    return parent.generate_subprocess_directory(subproc_group, fortran_model, me)
def modify_grouping(self, matrix_element):
    """allow to modify the grouping (if grouping is in place)
    return two value:
    - True/False if the matrix_element was modified
    - the new(or old) matrix element"""
    # Irrelevant here since group_mode=False so this function is never called
    misc.sprint('Entering PLUGIN_ProcessExporter.modify_grouping')
    modified = False
    return modified, matrix_element
def __init__(self, mgme_dir='', *completekey, **stdin):
    """ Special init tasks for the Loop Interface """
    # NOTE(review): the mgme_dir argument is accepted but a literal '' is
    # always forwarded to the parent -- presumably intentional; confirm.
    mg_interface.MadGraphCmd.__init__(self, mgme_dir='', *completekey, **stdin)
    # Removed leftover debug printout of type(self.history).
    self.setup()
def setUp(self): self.tmp_process_dir = pjoin(_file_path, 'TMP_TestME7_colorful_epem_jjj_output') # Generate the process output if it does not exist yet or if we # are not in debug mode. if os.path.isdir(self.tmp_process_dir): if not self.is_process_generated and not self.debugging: shutil.rmtree(self.tmp_process_dir) else: TestME7_NLO_colorful_epem_jjj.is_process_generated = True if not self.is_process_generated: self.cmd = Cmd.MasterCmd() if os.path.isdir(self.tmp_process_dir): shutil.rmtree(self.tmp_process_dir) # Now generate and output a process, so as to run ME7 commands on it self.do('import model loop_sm') self.do('set subtraction_scheme colorful') self.do('generate e+ e- > j j j --NLO=QCD') self.do('output %s' % self.tmp_process_dir) TestME7_NLO_colorful_epem_jjj.is_process_generated = True if self.debugging: misc.sprint(debugging_warning) misc.sprint( debugging_written % (self.__class__.__name__, self.tmp_process_dir)) else: if self.debugging: misc.sprint(debugging_warning) misc.sprint( debugging_reused % (self.__class__.__name__, self.tmp_process_dir)) # Now initialize an ME7 interface on the above process output self.cmd = ME7_interface.MadEvent7Cmd(me_dir=self.tmp_process_dir) self.cmd.no_notification()
def launch_program(self):
    """launch the main program"""
    # Check for number of cores if multicore mode
    mode = str(self.cluster)
    nb_node = 1
    if mode == "2":
        import multiprocessing
        max_node = multiprocessing.cpu_count()
        if max_node == 1:
            logger.warning(
                'Only one core is detected on your computer! Pass in single machine'
            )
            # Fall back to single-machine mode and restart.
            self.cluster = 0
            self.launch_program()
            return
        elif max_node == 2:
            nb_node = 2
        elif not self.force:
            nb_node = self.ask('How many cores do you want to use?', max_node,
                               range(2, max_node + 1))
        else:
            nb_node = max_node
    import madgraph.interface.amcatnlo_run_interface as run_int
    if hasattr(self, 'shell') and self.shell:
        usecmd = run_int.aMCatNLOCmdShell(me_dir=self.running_dir,
                                          options=self.cmd_int.options)
    else:
        usecmd = run_int.aMCatNLOCmd(me_dir=self.running_dir,
                                     options=self.cmd_int.options)
    #Check if some configuration were overwritten by a command. If so use it
    set_cmd = [
        l for l in self.cmd_int.history if l.strip().startswith('set')
    ]
    # Wrap keys() in list() so the concatenation also works on Python 3,
    # where dict.keys() returns a view that does not support '+'.
    all_options = list(usecmd.options_configuration.keys()) + \
                  list(usecmd.options_madgraph.keys()) + \
                  list(usecmd.options_madevent.keys())
    for line in set_cmd:
        arg = line.split()
        if arg[1] not in all_options:
            continue
        misc.sprint(line)
        try:
            usecmd.exec_cmd(line)
        # 'except Exception as' replaces the Python-2-only comma syntax.
        except Exception as error:
            misc.sprint('Command %s fails with msg: %s'%(str(line), \
                                                    str(error)))
            pass
def PLUGIN_make_unique(input, keepordering=None):
    "remove duplicate in a list "
    # When no explicit flag is given, fall back to the global madgraph setting
    # and announce it once; an explicit argument is announced on every call.
    global printordering
    if keepordering is not None:
        misc.sprint('keepordering (argument): %s'%keepordering) # AV - add a printout at every call only if it is an argument
    else:
        keepordering = misc.madgraph.ordering
        if printordering:
            printordering = False
            misc.sprint('keepordering (default): %s'%keepordering) # AV - add a printout only in the first call
    # dict.fromkeys preserves first-seen order; set() does not.
    return list(dict.fromkeys(input)) if keepordering else list(set(input))
def mod_file(self, inputpath, outputpath):
    """Copy *inputpath* to *outputpath*, renaming every identifier listed in
    self.translate / self.old_new along the way."""
    to_change = {}
    to_change.update(self.translate)
    to_change.update(self.old_new)
    for particle in self.particles:
        if hasattr(particle, 'replace') and particle.replace:
            misc.sprint(particle.get('name'), particle.replace.get('name'))
    # Whole-word match against any of the names to be replaced.
    pattern = re.compile(r'\b(%s)\b' % ('|'.join(to_change)))
    # 'with' guarantees both handles are closed (the original leaked them).
    with open(outputpath, 'w') as fsock:
        with open(inputpath) as fin:
            for line in fin:
                line = pattern.sub(lambda mo: to_change[mo.group()], line)
                fsock.write(line)
def __init__(self, path, mode='r', *args, **opt):
    """open file and read the banner [if in read mode]"""
    # Normalise binary read mode to plain text read.
    if mode in ['r', 'rb']:
        mode = 'r'
    self.mode = mode
    if not path.endswith(".gz"):
        self.file = open(path, mode, *args, **opt)
    elif mode == 'r' and not os.path.exists(path) and os.path.exists(
            path[:-3]):
        # A .gz path was requested but only the uncompressed file exists.
        self.file = open(path[:-3], mode, *args, **opt)
        path = path[:-3]
    else:
        try:
            self.file = gzip.GzipFile(path, mode, *args, **opt)
            self.zip_mode = True
        except IOError as error:
            raise
        except Exception as error:
            # gzip module unusable: fall back to external (de)compression.
            misc.sprint(error)
            if mode == 'r':
                misc.gunzip(path)
            else:
                self.to_zip = True  # remember to re-compress on close
            self.file = open(path[:-3], mode, *args, **opt)
            path = path[:-3]
    self.parsing = True  # check if/when we need to parse the event.
    self.eventgroup = False
    self.header = ''
    if mode == 'r':
        # Accumulate the banner: everything up to the HepMC start marker.
        line = ''
        while 'HepMC::IO_GenEvent-START_EVENT_LISTING' not in line:
            line = self.file.readline()
            if not line:
                # Marker never found: rewind and treat as banner-less file.
                self.seek(0)
                self.banner = ''
                break
            # NOTE(review): self.zip_mode is only assigned in the gzip branch
            # above -- presumably a class-level default exists; confirm.
            if 'b' in mode or self.zip_mode:
                line = str(line.decode())
            self.header += line
    self.start_event = ''
def cp(path1, path2, log=True, error=False):
    """ simple cp taking linux or mix entry"""
    path1 = format_path(path1)
    path2 = format_path(path2)
    try:
        shutil.copy(path1, path2)
    # 'except ... as' replaces the Python-2-only comma syntax.
    except IOError as why:
        import madgraph.various.misc as misc
        try:
            # path1 may be a directory: retry with copytree, targeting a
            # same-named subdirectory when path2 already exists.
            if os.path.exists(path2):
                path2 = os.path.join(path2, os.path.split(path1)[1])
            shutil.copytree(path1, path2)
        except IOError as why:
            if error:
                raise
            if log:
                logger.warning(why)
            else:
                misc.sprint("fail to cp", why)
def generate_PS_point(self, n_initial, n_final, initial_masses=None, final_masses=None):
    """ Generate a PS point with specified number of initial and final legs,
    with possibly a masses specified."""
    # Default to massless legs on either side when masses are not provided.
    if initial_masses is None:
        initial_masses = (0.0,) * n_initial
    if final_masses is None:
        final_masses = (0.0,) * n_final
    PS_generator = phase_space_generators.FlatInvertiblePhasespace(
        initial_masses, final_masses, (500.0, 500.0), (0, 0))
    PS_point, wgt, x1s, x2s = PS_generator.get_PS_point(None)
    misc.sprint('PS point:\n\n%s\n\n'%PS_point.__str__(n_initial=len(initial_masses)))
    # Index each momentum by its leg position.
    return {idx: mom for idx, mom in enumerate(PS_point)}
def launch_program(self):
    """launch the main program"""
    # Check for number of cores if multicore mode
    mode = str(self.cluster)
    nb_node = 1
    if mode == "2":
        import multiprocessing
        max_node = multiprocessing.cpu_count()
        if max_node == 1:
            logger.warning('Only one core is detected on your computer! Pass in single machine')
            # Fall back to single-machine mode and restart.
            self.cluster = 0
            self.launch_program()
            return
        elif max_node == 2:
            nb_node = 2
        elif not self.force:
            nb_node = self.ask('How many cores do you want to use?', max_node, range(2,max_node+1))
        else:
            nb_node=max_node
    import madgraph.interface.amcatnlo_run_interface as run_int
    if hasattr(self, 'shell'):
        usecmd = run_int.aMCatNLOCmdShell(me_dir=self.running_dir, options = self.cmd_int.options)
    else:
        usecmd = run_int.aMCatNLOCmd(me_dir=self.running_dir, options = self.cmd_int.options)
    #Check if some configuration were overwritten by a command. If so use it
    set_cmd = [l for l in self.cmd_int.history if l.strip().startswith('set')]
    # Wrap keys() in list() so the concatenation also works on Python 3,
    # where dict.keys() returns a view that does not support '+'.
    all_options = list(usecmd.options_configuration.keys()) + \
                  list(usecmd.options_madgraph.keys()) + \
                  list(usecmd.options_madevent.keys())
    for line in set_cmd:
        arg = line.split()
        if arg[1] not in all_options:
            continue
        misc.sprint(line)
        try:
            usecmd.exec_cmd(line)
        # 'except Exception as' replaces the Python-2-only comma syntax.
        except Exception as error:
            misc.sprint('Command %s fails with msg: %s'%(str(line), \
                                                    str(error)))
            pass
def copy_template(self, model):
    """Create the plugin output directory layout and write the cards and
    makefiles derived from *model* into it."""
    misc.sprint(
        'Entering PLUGIN_ProcessExporter.copy_template (initialise the directory)'
    )
    try:
        os.mkdir(self.dir_path)
    except os.error as error:
        logger.warning(error.strerror + ' ' + self.dir_path)
    with misc.chdir(self.dir_path):
        logger.info('Creating subdirectories in directory %s' % self.dir_path)
        for d in ['src', 'Cards', 'SubProcesses', 'CMake']:  # AV - added CMake, removed lib
            try:
                os.mkdir(d)
            except os.error as error:
                logger.warning(error.strerror + ' ' +
                               os.path.join(self.dir_path, d))
        # Write param_card ('with' closes the handle; the original leaked it)
        with open(os.path.join('Cards', 'param_card.dat'), 'w') as fsock:
            fsock.write(model.write_param_card())
        # Copy files in various subdirectories
        for key in self.from_template:
            for f in self.from_template[key]:
                PLUGIN_export_cpp.cp(
                    f, key)  # NB this assumes directory key exists...
        # Copy src makefile
        if self.template_src_make:
            makefile_src = self.read_template_file(
                self.template_src_make) % {
                    'model': self.get_model_name(model.get('name'))
                }
            with open(os.path.join('src', 'cudacpp_src.mk'), 'w') as fsock:
                fsock.write(makefile_src)
        # Copy SubProcesses makefile
        if self.template_Sub_make:
            makefile = self.read_template_file(self.template_Sub_make) % {
                'model': self.get_model_name(model.get('name'))
            }
            with open(os.path.join('SubProcesses', 'cudacpp.mk'), 'w') as fsock:
                fsock.write(makefile)
def check_mass_width_of_particle(self, p_base, p_plugin):
    """Verify that *p_base* (original model) and *p_plugin* (add-on model)
    agree on their mass and width parameters, resolving renamed or 'zero'
    parameters; raise USRMODERROR on an unresolvable conflict."""
    # Check the mass
    if p_base.mass.name != p_plugin.mass.name:
        #different name but actually the same
        if p_plugin.mass.name in self.old_new:
            if self.old_new[p_plugin.mass.name] != p_base.mass.name:
                raise USRMODERROR('Some inconsistency in the mass assignment in the model: equivalent of %s is %s != %s ' % (
                    p_plugin.mass.name, self.old_new[p_plugin.mass.name], p_base.mass.name))
        elif p_base.mass.name.lower() == 'zero':
            # Base particle was massless: adopt the plugin's mass.
            p_base.mass = p_plugin.mass
        elif p_plugin.mass.name.lower() == 'zero':
            pass
        else:
            misc.sprint(p_base.mass.value, p_plugin.mass.value, dir(p_base.mass))
            misc.sprint(p_base.mass.nature, p_plugin.mass.nature)
            misc.sprint(self.old_new)
            raise USRMODERROR('Some inconsistency in the mass assignment in the model\n' + \
                ' Mass: %s and %s\n' %(p_base.mass.name, p_plugin.mass.name) + \
                ' conflict name %s\n' % self.old_new + \
                ' pdg_code: %s %s' % (p_base.pdg_code, p_plugin.pdg_code))
    # Check the width
    if p_base.width.name != p_plugin.width.name:
        #different name but actually the same
        if p_plugin.width.name in self.old_new:
            if self.old_new[p_plugin.width.name] != p_base.width.name:
                # Bugfix: message used to say "mass" in the width branch.
                raise USRMODERROR('Some inconsistency in the width assignment in the model')
        elif p_base.width.name.lower() == 'zero':
            p_base.width = p_plugin.width
        elif p_plugin.width.name.lower() == 'zero':
            pass
        else:
            # Bugfix: message used to say "mass" in the width branch.
            raise USRMODERROR('Some inconsistency in the width assignment in the model')
    return
def evaluate_kernel(self, zs, kTs, parent):
    """Evaluate the triple-collinear kernel for the momentum fractions *zs*
    and transverse momenta *kTs*; returns a SubtractionCurrentEvaluation."""
    # Retrieve the collinear variables and compute basic quantities
    z1, z2, z3 = zs
    s12 = sij(1, 2, zs, kTs)
    s13 = sij(1, 3, zs, kTs)
    s23 = sij(2, 3, zs, kTs)
    # Removed leftover per-point debug printout of the invariants.
    s123 = s12 + s13 + s23
    # Assemble kernel
    # Instantiate the structure of the result
    evaluation = utils.SubtractionCurrentEvaluation({
        'spin_correlations': [None],
        'color_correlations': [None],
        'values': {}
    })
    # NOTE(review): the actual kernel contribution is disabled below, so the
    # current evaluates to zero -- presumably work in progress; confirm.
    ker = 0
    # ker += 2*C123(z1, z2, z3, s12, s13, s23, s123)
    evaluation['values'][(0, 0)] = {
        'finite': 0.5 * self.CF * self.TR * ker
    }
    return evaluation
def generate_subprocess_directory(self, subproc_group, fortran_model, me=None):
    """Trace entry with the argument types, then delegate the actual
    subprocess-directory creation to the parent exporter."""
    misc.sprint(
        'Entering PLUGIN_ProcessExporter.generate_subprocess_directory (create the directory)'
    )
    misc.sprint(' type(subproc_group)=%s' % type(subproc_group)
                )  # e.g. madgraph.core.helas_objects.HelasMatrixElement
    misc.sprint(
        ' type(fortran_model)=%s' % type(fortran_model)
    )  # e.g. madgraph.iolibs.helas_call_writers.GPUFOHelasCallWriter
    misc.sprint(' type(me)=%s me=%s' %
                (type(me) if me is not None else None, me))  # e.g. int
    return super().generate_subprocess_directory(subproc_group, fortran_model, me)
def mod_file(self, inputpath, outputpath):
    """Copy *inputpath* to *outputpath*, renaming identifiers from
    self.translate / self.old_new and fixing particle-name case mismatches
    in 'P.<name>' references."""
    to_change = {}
    to_change.update(self.translate)
    to_change.update(self.old_new)
    for particle in self.particles:
        if hasattr(particle, 'replace') and particle.replace:
            misc.sprint(particle.get('name'), particle.replace.get('name'))
    pattern = re.compile(r'\b(%s)\b' % ('|'.join(to_change)))
    #need to check that all particle are written correctly <- Fix potential issue
    # of lower/upper case in FR
    all_particles_name = [self.format_param(P)[2:] for P in self.particles]
    all_lower = [p.lower() for p in all_particles_name]
    pat2 = re.compile(r'\bP\.(\w+)\b')
    # 'with' guarantees both handles are closed (the original leaked them).
    with open(outputpath, 'w') as fsock:
        with open(inputpath) as fin:
            for line in fin:
                line = pattern.sub(lambda mo: to_change[mo.group()], line)
                part_in_line = set(pat2.findall(line))
                #handle the case of lower/upper case particle
                to_replace = {}
                for p in part_in_line:
                    if p in all_particles_name:
                        continue
                    else:
                        # Case-insensitive match: map to the canonical spelling.
                        ind = all_lower.index(p.lower())
                        to_replace[p] = all_particles_name[ind]
                if to_replace:
                    pat3 = re.compile(r'\bP\.(%s)\b' %
                                      '|'.join(p for p in to_replace))
                    line = pat3.sub(
                        lambda mo: 'P.%s' % to_replace[mo.groups(0)[0]], line)
                fsock.write(line)
def test_phase_space_volume(self):
    """ Test the Singlechannel phase-space that is aligned along specific
    s- and t-channels."""
    import madgraph.integrator.vegas3_integrator as vegas3
    import madgraph.integrator.integrands as integrands

    class IntegrandForTest(integrands.VirtualIntegrand):
        """An integrand for this phase-space volume test."""

        def __init__(self, phase_space_generator):
            super(IntegrandForTest, self).__init__(phase_space_generator.get_dimensions())
            self.phase_space_generator = phase_space_generator
            self.counter = 0
            #if type(self.phase_space_generator).__name__ == 'SingleChannelPhasespace':
            #    self.my_random_path = self.phase_space_generator.generate_random_path()

        def __call__(self, continuous_inputs, discrete_inputs, **opts):
            """Return only the PS weight: integrating 1 over the phase space
            yields its volume."""
            #if type(self.phase_space_generator).__name__ == 'SingleChannelPhasespace':
            #    PS_point, wgt, x1, x2 = self.phase_space_generator.get_PS_point(continuous_inputs,self.my_random_path)
            #else:
            self.counter += 1
            PS_point, wgt, x1, x2 = self.phase_space_generator.get_PS_point(continuous_inputs)
            return wgt

    def analytical_phase_space_vol(E_cm, n):
        # Closed-form massless n-body phase-space volume used as reference.
        return math.pow((math.pi/2.0), n-1)*(math.pow((E_cm**2), n-2)/(math.factorial(n-1)*math.factorial(n-2)))

    verbose = False
    E_cm = 5000
    nr_final = 6
    #Analytical
    if verbose:
        misc.sprint('\n%d-body phase space analytical: %.4e'%(nr_final, analytical_phase_space_vol(E_cm, nr_final)))
    analytical_PS_volume = analytical_phase_space_vol(E_cm, nr_final)
    #SCPS
    my_epem_PS_generator = PS.SingleChannelPhasespace(
        [0.]*2, [0.]*nr_final,
        beam_Es=(E_cm/2., E_cm/2.), beam_types=(0, 0),
        model=self.model,
        topology=self.vbf_topology_s_and_t_channel_specifier)
    my_integrand = IntegrandForTest(my_epem_PS_generator)
    my_integrator = vegas3.Vegas3Integrator(my_integrand,
        n_points_survey=400, n_points_refine=400, accuracy_target=None)
    # Finally integrate
    if verbose:
        misc.sprint('\nSCPS %d-body phase space '%nr_final +
                    'SCPS: Final result: %.4e +/- %.2e'%my_integrator.integrate())
    SCPS_volume = my_integrator.integrate()
    # Accept the result when it lies within 5 sigma of the analytical volume.
    #misc.sprint(abs(SCPS_volume[0]-analytical_PS_volume)/abs(SCPS_volume[1]), 5.0)
    self.assertTrue(abs(SCPS_volume[0]-analytical_PS_volume)/abs(SCPS_volume[1])<5.0)
    #FLATPS
    my_PS_generator = PS.FlatInvertiblePhasespace(
        [0.]*2, [0.]*nr_final,
        beam_Es=(E_cm/2., E_cm/2.), beam_types=(0, 0))
    my_integrand = IntegrandForTest(my_PS_generator)
    my_integrator = vegas3.Vegas3Integrator(my_integrand,
        n_points_survey=100, n_points_refine=100, accuracy_target=None)
    # Finally integrate
    if verbose:
        misc.sprint('\n FLATPS %d-body phase space '%nr_final +
                    'FLATPS: Final result: %.4e +/- %.2e'%my_integrator.integrate())
    FLATPS_volume = my_integrator.integrate()
    self.assertTrue(abs(FLATPS_volume[0]-analytical_PS_volume)/abs(FLATPS_volume[1])<5.0)
    return 0
def complete_decay(self, *args):
    """Tab-completion for the decay command: reuse the generate completer."""
    try:
        return self.mg5cmd.complete_generate(*args)
    # 'except ... as' replaces the Python-2-only comma syntax.
    except Exception as error:
        # Completion is best-effort: report and return nothing.
        misc.sprint(error)
def __init__(self, mgme_dir = '', *completekey, **stdin):
    """ Special init tasks for the Loop Interface """
    # NOTE(review): the mgme_dir argument is accepted but a literal '' is
    # always forwarded to the parent -- presumably intentional; confirm.
    mg_interface.MadGraphCmd.__init__(self, mgme_dir = '', *completekey, **stdin)
    # Removed leftover debug printout of type(self.history).
    self.setup()
def add_model(self, model=None, path=None, identify_particles=None):
    """add another model in the current one"""
    self.new_external = []
    if path:
        model = ufomodels.load_model(path)
    if not model:
        raise USRMODERROR('Need a valid Model')
    else:
        path = model.__path__[0]
    # Check the validity of the model. Too old UFO (before UFO 1.0)
    if not hasattr(model, 'all_orders'):
        raise USRMODERROR('Add-on Model doesn\'t follows UFO convention (no couplings_order information)\n' +
                          'MG5 is able to load such model but NOT to the add model feature.')
    if isinstance(model.all_particles[0].mass, basestring):
        raise USRMODERROR('Add-on Model doesn\'t follows UFO convention (Mass/Width of particles are string name, not object)\n' +
                          'MG5 is able to load such model but NOT to the add model feature.')
    for order in model.all_orders:
        if hasattr(order, 'perturbative_expansion') and order.perturbative_expansion:
            raise USRMODERROR('Add-on model can not be loop model.')
    for order in model.all_orders:
        self.add_coupling_order(order)
    # Adding automatically identification for anti-particle if needed
    # + define identify_pid which keep tracks of the pdg_code identified
    # Bugfix: identify_pid is always defined, so add_parameter below does not
    # hit a NameError when no identification is requested.
    identify_pid = {}
    if identify_particles:
        # Iterate over a snapshot: the loop may add anti-particle entries.
        for new, old in list(identify_particles.items()):
            new_part = next((p for p in model.all_particles if p.name == new), None)
            old_part = next((p for p in self.particles if p.name == old), None)
            # Bugfix: validate BEFORE dereferencing -- the original indexed
            # new_part/old_part first, so a missing particle raised
            # AttributeError instead of the intended USRMODERROR.
            if new_part is None:
                raise USRMODERROR("particle %s not in added model" % new)
            if old_part is None:
                raise USRMODERROR("particle %s not in original model" % old)
            identify_pid[new_part.pdg_code] = old_part.pdg_code
            if new_part.antiname not in identify_particles:
                new_anti = new_part.antiname
                old_anti = old_part.antiname
                misc.sprint(old, new, new_anti, old_anti, old_part.antiname)
                if old_anti == old:
                    raise USRMODERROR("failed identification (one particle is self-conjugate and not the other)")
                logger.info("adding identification for anti-particle: %s=%s" % (new_anti, old_anti))
                identify_particles[new_anti] = old_anti
    for parameter in model.all_parameters:
        self.add_parameter(parameter, identify_pid)
    for coupling in model.all_couplings:
        self.add_coupling(coupling)
    for lorentz in model.all_lorentz:
        self.add_lorentz(lorentz)
    for particle in model.all_particles:
        if particle.name in identify_particles:
            self.add_particle(particle, identify=identify_particles[particle.name])
        else:
            self.add_particle(particle)
    for vertex in model.all_vertices:
        self.add_interaction(vertex)
    self.all_path.append(path)
    return
def add_model(self, model=None, path=None, identify_particles=None):
    """add another model in the current one"""
    self.new_external = []
    if path:
        model = ufomodels.load_model(path)
    if not model:
        raise USRMODERROR('Need a valid Model')
    else:
        path = model.__path__[0]
    # Check the validity of the model. Too old UFO (before UFO 1.0)
    if not hasattr(model, 'all_orders'):
        raise USRMODERROR('Add-on Model doesn\'t follows UFO convention (no couplings_order information)\n' +
                          'MG5 is able to load such model but NOT to the add model feature.')
    if isinstance(model.all_particles[0].mass, basestring):
        raise USRMODERROR('Add-on Model doesn\'t follows UFO convention (Mass/Width of particles are string name, not object)\n' +
                          'MG5 is able to load such model but NOT to the add model feature.')
    for order in model.all_orders:
        if hasattr(order, 'perturbative_expansion') and order.perturbative_expansion:
            raise USRMODERROR('Add-on model can not be loop model.')
    for order in model.all_orders:
        self.add_coupling_order(order)
    # Adding automatically identification for anti-particle if needed
    # + define identify_pid which keep tracks of the pdg_code identified
    # Bugfix: identify_pid is always defined, so add_parameter below does not
    # hit a NameError when no identification is requested.
    identify_pid = {}
    if identify_particles:
        # Iterate over a snapshot: the loop may add anti-particle entries.
        for new, old in list(identify_particles.items()):
            new_part = next(
                (p for p in model.all_particles if p.name == new), None)
            old_part = next((p for p in self.particles if p.name == old), None)
            # Bugfix: validate BEFORE dereferencing -- the original indexed
            # new_part/old_part first, so a missing particle raised
            # AttributeError instead of the intended USRMODERROR.
            if new_part is None:
                raise USRMODERROR("particle %s not in added model" % new)
            if old_part is None:
                raise USRMODERROR("particle %s not in original model" % old)
            identify_pid[new_part.pdg_code] = old_part.pdg_code
            if new_part.antiname not in identify_particles:
                new_anti = new_part.antiname
                old_anti = old_part.antiname
                misc.sprint(old, new, new_anti, old_anti, old_part.antiname)
                if old_anti == old:
                    raise USRMODERROR("failed identification (one particle is self-conjugate and not the other)")
                logger.info(
                    "adding identification for anti-particle: %s=%s" %
                    (new_anti, old_anti))
                identify_particles[new_anti] = old_anti
    for parameter in model.all_parameters:
        self.add_parameter(parameter, identify_pid)
    for coupling in model.all_couplings:
        self.add_coupling(coupling)
    for lorentz in model.all_lorentz:
        self.add_lorentz(lorentz)
    for particle in model.all_particles:
        if particle.name in identify_particles:
            self.add_particle(particle, identify=identify_particles[particle.name])
        else:
            self.add_particle(particle)
    for vertex in model.all_vertices:
        self.add_interaction(vertex)
    self.all_path.append(path)
    return
def create_standalone_directory(self):
    """generate the various directory for the weight evaluation"""
    # 0. clean previous run ------------------------------------------------
    path_me = self.me_dir
    try:
        shutil.rmtree(pjoin(path_me, 'rw_me'))
    except Exception:
        # Best effort: the directory may simply not exist yet.
        pass
    # 1. Load model---------------------------------------------------------
    complex_mass = False
    has_cms = re.compile(r'''set\s+complex_mass_scheme\s*(True|T|1|true|$|;)''')
    # Replay the 'set'/'define' commands of the original proc_card so the
    # reweighting interface matches the generation-time configuration.
    for line in self.banner.proc_card:
        if line.startswith('set'):
            self.mg5cmd.exec_cmd(line, printcmd=False, precmd=False, postcmd=False)
            if has_cms.search(line):
                complex_mass = True
        elif line.startswith('define'):
            try:
                self.mg5cmd.exec_cmd(line, printcmd=False, precmd=False, postcmd=False)
            except Exception:
                # Multiparticle definitions may fail harmlessly here.
                pass
    info = self.banner.get('proc_card', 'full_model_line')
    if '-modelname' in info:
        mg_names = False
    else:
        mg_names = True
    model_name = self.banner.get('proc_card', 'model')
    if model_name:
        self.load_model(model_name, mg_names, complex_mass)
    else:
        raise self.InvalidCmd('Only UFO model can be loaded in this module.')
    mgcmd = self.mg5cmd
    modelpath = self.model.get('modelpath')
    # Prefer the restricted model directory when it exists on disk.
    if os.path.basename(modelpath) != mgcmd._curr_model['name']:
        name, restrict = mgcmd._curr_model['name'].rsplit('-', 1)
        if os.path.exists(pjoin(os.path.dirname(modelpath), name,
                                'restrict_%s.dat' % restrict)):
            modelpath = pjoin(os.path.dirname(modelpath), mgcmd._curr_model['name'])
    commandline = "import model %s " % modelpath
    mgcmd.exec_cmd(commandline)
    # 2. compute the production matrix element -----------------------------
    processes = [line[9:].strip() for line in self.banner.proc_card
                 if line.startswith('generate')]
    processes += [' '.join(line.split()[2:]) for line in self.banner.proc_card
                  if re.search('^\s*add\s+process', line)]
    mgcmd.exec_cmd("set group_subprocesses False")
    logger.info('generating the square matrix element for reweighting')
    start = time.time()
    commandline = ''
    for proc in processes:
        if '[' not in proc:
            commandline += "add process %s ;" % proc
        else:
            raise self.InvalidCmd('NLO processes can\'t be reweight')
    # First process becomes 'generate', the rest stay as 'add process'.
    commandline = commandline.replace('add process', 'generate', 1)
    logger.info(commandline)
    mgcmd.exec_cmd(commandline, precmd=True)
    commandline = 'output standalone_rw %s' % pjoin(path_me, 'rw_me')
    mgcmd.exec_cmd(commandline, precmd=True)
    logger.info('Done %.4g' % (time.time()-start))
    self.has_standalone_dir = True
    # 3. Store id to directory information ---------------------------------
    matrix_elements = mgcmd._curr_matrix_elements.get_matrix_elements()
    self.id_to_path = {}
    for me in matrix_elements:
        for proc in me.get('processes'):
            initial = []  # filled in the next line
            # Side-effect trick: list.append returns None (falsy), so
            # initial-state ids land in `initial` while the comprehension
            # collects only final-state ids.
            final = [l.get('id') for l in proc.get('legs')\
                  if l.get('state') or initial.append(l.get('id'))]
            order = (initial, final)
            tag = proc.get_initial_final_ids()
            decay_finals = proc.get_final_ids_after_decay()
            if tag[1] != decay_finals:
                order = (initial, list(decay_finals))
                decay_finals.sort()
                tag = (tag[0], tuple(decay_finals))
            Pdir = pjoin(path_me, 'rw_me', 'SubProcesses',
                         'P%s' % me.get('processes')[0].shell_string())
            assert os.path.exists(Pdir), "Pdir %s do not exists" % Pdir
            if tag in self.id_to_path:
                if not Pdir == self.id_to_path[tag][1]:
                    misc.sprint(tag, Pdir, self.id_to_path[tag][1])
                    raise self.InvalidCmd, '2 different process have the same final states. This module can not handle such situation'
                else:
                    continue
            self.id_to_path[tag] = [order, Pdir]
def test_madspin_LOonly(self):
    """Run a p p > w+ [LOonly] generation through MadSpin in both the default
    and onshell spinmode, then validate the decayed event samples."""
    # NOTE(review): the newlines of this command script were lost in the
    # source formatting; reconstructed one command per line -- confirm
    # against the repository version.
    text = """ set crash_on_error True
generate p p > w+ [LOonly]
output %s
launch
madspin=ON
set nevents 10
decay w+ > e+ ve
launch -i
decay_events run_01
add madspin --replace_line="set spinmode.*" --after_line="banner" set spinmode=onshell
""" % self.path
    interface = MGCmd.MasterCmd()
    interface.no_notification()
    open(pjoin(self.tmpdir, 'cmd'), 'w').write(text)
    interface.exec_cmd('import command %s' % pjoin(self.tmpdir, 'cmd'))
    # perform some basic check
    lhe_on = pjoin(self.path, 'Events', 'run_01_decayed_1', 'events.lhe.gz')
    lhe_onshell = pjoin(self.path, 'Events', 'run_01_decayed_2', 'events.lhe.gz')
    self.assertTrue(lhe_on)
    # check that each events has the decay ON mode
    nb_event = 0
    for event in lhe_parser.EventFile(lhe_on):
        nb_event += 1
        nb_final = 0
        m_inv_w = 0
        for p in event:
            if p.status == -1:
                continue
            elif p.status == 1:
                nb_final += 1
            else:
                # Intermediate particle must be the (off-shell) W+.
                if p.pdg == 24:
                    if m_inv_w != 0:
                        self.assertTrue(False, 'two W')
                    m_inv_w = p.mass
                    # Default spinmode: broad invariant-mass window.
                    self.assertTrue(30 < m_inv_w < 150)
                else:
                    misc.sprint(p.pdg, p.status)
                    self.assertTrue(False, 'not W decaying')
        self.assertTrue(nb_final in [2])
    self.assertEqual(nb_event, 10)
    self.assertTrue(lhe_onshell)
    # check that each events has the decay ON mode
    nb_event = 0
    for event in lhe_parser.EventFile(lhe_onshell):
        nb_event += 1
        nb_final = 0
        m_inv_w = 0
        for p in event:
            if p.status == -1:
                continue
            elif p.status == 1:
                nb_final += 1
            else:
                if p.pdg == 24:
                    if m_inv_w != 0:
                        self.assertTrue(False, 'two W')
                    m_inv_w = p.mass
                    # Onshell spinmode: mass pinned to the W pole.
                    self.assertTrue(80 < m_inv_w < 81)
                else:
                    self.assertTrue(False, 'not W decaying')
        self.assertTrue(nb_final in [2])
    self.assertEqual(nb_event, 10)
def __init__(self, *args, **kwargs):
    """Trace entry, then defer all initialisation to the parent exporter."""
    misc.sprint('Entering PLUGIN_ProcessExporter.__init__ (initialise the exporter)')
    result = super().__init__(*args, **kwargs)
    return result
def copy_template(self, model):
    """Trace entry, then let the parent exporter set up the output directory."""
    misc.sprint('Entering PLUGIN_ProcessExporter.copy_template (initialise the directory)')
    result = super().copy_template(model)
    return result
def generate_events(pdg, nb_event, mg5, restrict_file=None, cumul=False):
    """generate new events for this particle
    restrict_file allow to only generate a subset of the definition
    cumul allow to merge all the definition in one run (add process)
    to generate events according to cross-section"""
    part = self.model.get_particle(pdg)
    name = part.get_name()
    out = {}
    logger.info("generate %s decay event for particle %s" % (nb_event, name))
    if name not in self.list_branches:
        return out
    for i, proc in enumerate(self.list_branches[name]):
        if restrict_file and i not in restrict_file:
            continue
        # One decay directory per (pdg, branch-index); '-' is not filesystem
        # friendly, so it is mapped to 'x'.
        decay_dir = pjoin(
            self.path_me,
            "decay_%s_%s" % (str(pdg).replace("-", "x"), i))
        if not os.path.exists(decay_dir):
            if cumul:
                # Merge all branchings into a single process directory.
                mg5.exec_cmd("generate %s" % proc)
                for j, proc2 in enumerate(
                        self.list_branches[name][1:]):
                    if restrict_file and j not in restrict_file:
                        raise Exception  # Do not see how this can happen
                    mg5.exec_cmd("add process %s" % proc2)
                mg5.exec_cmd("output %s -f" % decay_dir)
            else:
                mg5.exec_cmd("generate %s" % proc)
                mg5.exec_cmd("output %s -f" % decay_dir)
            options = dict(mg5.options)
            if self.options['ms_dir']:
                misc.sprint("start gridpack!")
                # we are in gridpack mode -> create it
                me5_cmd = madevent_interface.MadEventCmdShell(me_dir=os.path.realpath(\
                                        decay_dir), options=options)
                me5_cmd.options["automatic_html_opening"] = False
                if self.options["run_card"]:
                    run_card = self.options["run_card"]
                else:
                    run_card = banner.RunCard(
                        pjoin(decay_dir, "Cards", "run_card.dat"))
                run_card["iseed"] = self.seed
                run_card['gridpack'] = True
                run_card.write(
                    pjoin(decay_dir, "Cards", "run_card.dat"))
                param_card = self.banner['slha']
                open(pjoin(decay_dir, "Cards", "param_card.dat"),
                     "w").write(param_card)
                # Bump the seed so each gridpack/run is statistically independent.
                self.seed += 1
                # actually creation
                me5_cmd.exec_cmd("generate_events run_01 -f")
                me5_cmd.exec_cmd("exit")
                #remove pointless informat
                # NOTE(review): plain 'rm' (no -r) on directories is expected
                # to fail silently here -- confirm this is intentional.
                misc.call(
                    ["rm", "Cards", "bin", 'Source', 'SubProcesses'],
                    cwd=decay_dir)
                misc.call(['tar', '-xzpvf', 'run_01_gridpack.tar.gz'],
                          cwd=decay_dir)
        # Now generate the events
        if not self.options['ms_dir']:
            # Standard mode: run MadEvent directly in the decay directory.
            me5_cmd = madevent_interface.MadEventCmdShell(me_dir=os.path.realpath(\
                                    decay_dir), options=mg5.options)
            me5_cmd.options["automatic_html_opening"] = False
            if self.options["run_card"]:
                run_card = self.options["run_card"]
            else:
                run_card = banner.RunCard(
                    pjoin(decay_dir, "Cards", "run_card.dat"))
            # 20% safety margin on the requested number of events.
            run_card["nevents"] = int(1.2 * nb_event)
            run_card["iseed"] = self.seed
            run_card.write(pjoin(decay_dir, "Cards", "run_card.dat"))
            param_card = self.banner['slha']
            open(pjoin(decay_dir, "Cards", "param_card.dat"),
                 "w").write(param_card)
            self.seed += 1
            me5_cmd.exec_cmd("generate_events run_01 -f")
            me5_cmd.exec_cmd("exit")
            out[i] = lhe_parser.EventFile(
                pjoin(decay_dir, "Events", 'run_01',
                      'unweighted_events.lhe.gz'))
        else:
            # Gridpack mode: run the pre-built gridpack script.
            misc.call(
                ['run.sh', str(int(1.2 * nb_event)), str(self.seed)],
                cwd=decay_dir)
            out[i] = lhe_parser.EventFile(
                pjoin(decay_dir, 'events.lhe.gz'))
        if cumul:
            # All branchings were merged into run 0: nothing more to do.
            break
    return out
def test_short_ppgogo_amcatnlo_nlo(self):
    """tests if the p p > go go (in the mssm) process works"""
    start = time.time()
    self.generate(['p p > go go [real=QCD]'], 'MSSM_SLHA2')
    misc.sprint('ppgg[real=QCD] generated in', time.time() - start)
    ####NLO
    # shrink the run card so the fixed-order run stays short
    with open('%s/Cards/run_card_default.dat' % self.path) as fsock:
        card = fsock.read()
    self.assertIn('10000 = npoints_FO', card)
    card = card.replace('10000 = npoints_FO', '100 = npoints_FO')
    self.assertIn('5000 = npoints_FO_grid', card)
    card = card.replace('5000 = npoints_FO_grid', '100 = npoints_FO_grid')
    self.assertIn('0.01 = req_acc_FO', card)
    card = card.replace('0.01 = req_acc_FO', '-1 = req_acc_FO')
    with open('%s/Cards/run_card.dat' % self.path, 'w') as fsock:
        fsock.write(card)
    start = time.time()
    self.do('launch NLO -f')
    misc.sprint('launch in NLO in ', time.time() - start)
    # test the plot file exists
    self.assertTrue(os.path.exists('%s/Events/run_01/MADatNLO.HwU' % self.path))
    self.assertTrue(os.path.exists('%s/Events/run_01/summary.txt' % self.path))
    self.assertTrue(os.path.exists('%s/Events/run_01/run_01_tag_1_banner.txt' % self.path))
    self.assertTrue(os.path.exists('%s/Events/run_01/res_0.txt' % self.path))
    self.assertTrue(os.path.exists('%s/Events/run_01/res_1.txt' % self.path))
    self.assertTrue(os.path.exists('%s/Events/run_01/alllogs_0.html' % self.path))
    self.assertTrue(os.path.exists('%s/Events/run_01/alllogs_1.html' % self.path))
    # shrink the event sample for the aMC@NLO run
    with open('%s/Cards/run_card_default.dat' % self.path) as fsock:
        card = fsock.read()
    self.assertIn('10000 = nevents', card)
    card = card.replace('10000 = nevents', '100 = nevents')
    with open('%s/Cards/run_card.dat' % self.path, 'w') as fsock:
        fsock.write(card)
    start = time.time()
    self.do('launch aMC@NLO -fp')
    misc.sprint('launch in aMC@NLO in ', time.time() - start)
    # test the lhe event file exists
    self.assertTrue(os.path.exists('%s/Events/run_02/events.lhe.gz' % self.path))
    self.assertTrue(os.path.exists('%s/Events/run_02/summary.txt' % self.path))
    self.assertTrue(os.path.exists('%s/Events/run_02/run_02_tag_1_banner.txt' % self.path))
    self.assertTrue(os.path.exists('%s/Events/run_02/res_0.txt' % self.path))
    self.assertTrue(os.path.exists('%s/Events/run_02/res_1.txt' % self.path))
    self.assertTrue(os.path.exists('%s/Events/run_02/alllogs_0.html' % self.path))
    self.assertTrue(os.path.exists('%s/Events/run_02/alllogs_1.html' % self.path))
    self.assertTrue(os.path.exists('%s/Events/run_02/alllogs_2.html' % self.path))
    self.assertFalse(self.debugging)
def generate_events(pdg, nb_event, mg5, restrict_file=None, cumul=False):
    """generate new events for this particle
    restrict_file allow to only generate a subset of the definition
    cumul allow to merge all the definition in one run (add process)
    to generate events according to cross-section
    """
    # NOTE(review): the body reads `self` although `self` is not a parameter;
    # presumably this runs as a method/closure with `self` in scope -- confirm.
    part = self.model.get_particle(pdg)
    name = part.get_name()
    out = {}
    logger.info("generate %s decay event for particle %s" % (nb_event, name))
    if name not in self.list_branches:
        return out
    for i, proc in enumerate(self.list_branches[name]):
        if restrict_file and i not in restrict_file:
            continue
        decay_dir = pjoin(self.path_me,
                          "decay_%s_%s" % (str(pdg).replace("-", "x"), i))
        if not os.path.exists(decay_dir):
            # first time for this decay channel: generate the process dir
            if cumul:
                mg5.exec_cmd("generate %s" % proc)
                for j, proc2 in enumerate(self.list_branches[name][1:]):
                    if restrict_file and j not in restrict_file:
                        raise Exception # Do not see how this can happen
                    mg5.exec_cmd("add process %s" % proc2)
                mg5.exec_cmd("output %s -f" % decay_dir)
            else:
                mg5.exec_cmd("generate %s" % proc)
                mg5.exec_cmd("output %s -f" % decay_dir)
            options = dict(mg5.options)
            if self.options['ms_dir']:
                misc.sprint("start gridpack!")
                # we are in gridpack mode -> create it
                me5_cmd = madevent_interface.MadEventCmdShell(me_dir=os.path.realpath(\
                                            decay_dir), options=options)
                me5_cmd.options["automatic_html_opening"] = False
                if self.options["run_card"]:
                    run_card = self.options["run_card"]
                else:
                    run_card = banner.RunCard(pjoin(decay_dir, "Cards", "run_card.dat"))
                run_card["iseed"] = self.seed
                run_card['gridpack'] = True
                run_card.write(pjoin(decay_dir, "Cards", "run_card.dat"))
                param_card = self.banner['slha']
                # context manager ensures the card is flushed/closed before running
                with open(pjoin(decay_dir, "Cards", "param_card.dat"), "w") as fsock:
                    fsock.write(param_card)
                self.seed += 1
                # actually creation
                me5_cmd.exec_cmd("generate_events run_01 -f")
                me5_cmd.exec_cmd("exit")
                # remove pointless information
                misc.call(["rm", "Cards", "bin", 'Source', 'SubProcesses'],
                          cwd=decay_dir)
                misc.call(['tar', '-xzpvf', 'run_01_gridpack.tar.gz'],
                          cwd=decay_dir)
        # Now generate the events
        if not self.options['ms_dir']:
            me5_cmd = madevent_interface.MadEventCmdShell(me_dir=os.path.realpath(\
                                        decay_dir), options=mg5.options)
            me5_cmd.options["automatic_html_opening"] = False
            if self.options["run_card"]:
                run_card = self.options["run_card"]
            else:
                run_card = banner.RunCard(pjoin(decay_dir, "Cards", "run_card.dat"))
            # 20% head-room so enough decayed events survive downstream cuts
            run_card["nevents"] = int(1.2 * nb_event)
            run_card["iseed"] = self.seed
            run_card.write(pjoin(decay_dir, "Cards", "run_card.dat"))
            param_card = self.banner['slha']
            with open(pjoin(decay_dir, "Cards", "param_card.dat"), "w") as fsock:
                fsock.write(param_card)
            self.seed += 1
            me5_cmd.exec_cmd("generate_events run_01 -f")
            me5_cmd.exec_cmd("exit")
            out[i] = lhe_parser.EventFile(pjoin(decay_dir, "Events", 'run_01',
                                                'unweighted_events.lhe.gz'))
        else:
            # gridpack mode: run the pre-built gridpack script instead
            misc.call(['run.sh', str(int(1.2 * nb_event)), str(self.seed)],
                      cwd=decay_dir)
            out[i] = lhe_parser.EventFile(pjoin(decay_dir, 'events.lhe.gz'))
        if cumul:
            break
    return out
def complete_decay(self, *args):
    """Tab-completion for the decay command: delegate to the underlying
    MG5 `generate` completer so both commands complete identically."""
    try:
        return self.mg5cmd.complete_generate(*args)
    # Python 3 syntax: the original `except Exception,error:` is a SyntaxError
    except Exception as error:
        # completion must never crash the interface: print (debug) and
        # implicitly complete to nothing
        misc.sprint(error)
def convert_model(self, model, wanted_lorentz=None, wanted_coupling=None):
    """Pass the model conversion through to the parent exporter.

    `None` sentinels replace the original mutable `=[]` defaults (shared
    list pitfall); callers passing nothing still get empty lists.
    """
    misc.sprint('Entering PLUGIN_ProcessExporter.convert_model (create the model)')
    return super().convert_model(model,
                                 [] if wanted_lorentz is None else wanted_lorentz,
                                 [] if wanted_coupling is None else wanted_coupling)
def compare_PS_point(a, b, threshold=1.0e-10):
    """Return True when the two phase-space points agree component-wise.

    Each point is a sequence of Lorentz vectors.  Two components count as
    equal when their relative difference (normalised by |a| with a 1e-10
    floor) does not exceed `threshold`.  On the first mismatching vector a
    debug line is printed and False is returned.
    """
    for index, (vec_a, vec_b) in enumerate(zip(a, b)):
        rel_diffs = []
        for comp_a, comp_b in zip(vec_a, vec_b):
            denom = max(abs(comp_a), 1.0e-10)
            rel_diffs.append(abs(comp_a - comp_b) / denom)
        if max(rel_diffs) > threshold:
            misc.sprint('Lorentz vector #%d differ: %s vs %s' % (
                index + 1, str(vec_a), str(vec_b)))
            return False
    return True
def create_standalone_directory(self):
    """generate the various directory for the weight evaluation"""
    # 0. clean previous run ------------------------------------------------
    path_me = self.me_dir
    try:
        shutil.rmtree(pjoin(path_me, "rw_me"))
    except Exception:
        pass
    # 1. Load model---------------------------------------------------------
    complex_mass = False
    has_cms = re.compile(r"""set\s+complex_mass_scheme\s*(True|T|1|true|$|;)""")
    for line in self.banner.proc_card:
        if line.startswith("set"):
            # replay the `set` commands of the original generation
            self.mg5cmd.exec_cmd(line, printcmd=False, precmd=False, postcmd=False)
            if has_cms.search(line):
                complex_mass = True
        elif line.startswith("define"):
            # multiparticle definitions may fail for a different model: best effort
            try:
                self.mg5cmd.exec_cmd(line, printcmd=False, precmd=False, postcmd=False)
            except Exception:
                pass
    info = self.banner.get("proc_card", "full_model_line")
    # `-modelname` means the generation kept the UFO particle names
    if "-modelname" in info:
        mg_names = False
    else:
        mg_names = True
    model_name = self.banner.get("proc_card", "model")
    if model_name:
        self.load_model(model_name, mg_names, complex_mass)
    else:
        raise self.InvalidCmd("Only UFO model can be loaded in this module.")
    mgcmd = self.mg5cmd
    modelpath = self.model.get("modelpath")
    # prefer the restricted model directory when it exists
    if os.path.basename(modelpath) != mgcmd._curr_model["name"]:
        name, restrict = mgcmd._curr_model["name"].rsplit("-", 1)
        if os.path.exists(pjoin(os.path.dirname(modelpath), name,
                                "restrict_%s.dat" % restrict)):
            modelpath = pjoin(os.path.dirname(modelpath), mgcmd._curr_model["name"])
    commandline = "import model %s " % modelpath
    mgcmd.exec_cmd(commandline)
    # 2. compute the production matrix element -----------------------------
    processes = [line[9:].strip() for line in self.banner.proc_card
                 if line.startswith("generate")]
    # raw string: "\s" in a non-raw literal is an invalid escape
    processes += [" ".join(line.split()[2:]) for line in self.banner.proc_card
                  if re.search(r"^\s*add\s+process", line)]
    mgcmd.exec_cmd("set group_subprocesses False")
    logger.info("generating the square matrix element for reweighting")
    start = time.time()
    commandline = ""
    for proc in processes:
        if "[" not in proc:
            commandline += "add process %s ;" % proc
        else:
            raise self.InvalidCmd("NLO processes can't be reweight")
    commandline = commandline.replace("add process", "generate", 1)
    logger.info(commandline)
    mgcmd.exec_cmd(commandline, precmd=True)
    commandline = "output standalone_rw %s" % pjoin(path_me, "rw_me")
    mgcmd.exec_cmd(commandline, precmd=True)
    logger.info("Done %.4g" % (time.time() - start))
    self.has_standalone_dir = True
    # 3. Store id to directory information ---------------------------------
    matrix_elements = mgcmd._curr_matrix_elements.get_matrix_elements()
    self.id_to_path = {}
    for me in matrix_elements:
        for proc in me.get("processes"):
            initial = []  # filled in the next line
            final = [l.get("id") for l in proc.get("legs")
                     if l.get("state") or initial.append(l.get("id"))]
            order = (initial, final)
            tag = proc.get_initial_final_ids()
            decay_finals = proc.get_final_ids_after_decay()
            if tag[1] != decay_finals:
                order = (initial, list(decay_finals))
                decay_finals.sort()
                tag = (tag[0], tuple(decay_finals))
            Pdir = pjoin(path_me, "rw_me", "SubProcesses",
                         "P%s" % me.get("processes")[0].shell_string())
            assert os.path.exists(Pdir)
            if tag in self.id_to_path:
                if not Pdir == self.id_to_path[tag][1]:
                    misc.sprint(tag, Pdir, self.id_to_path[tag][1])
                    # Python 3 syntax: the original Python 2 raise statement
                    # (`raise Exc, "msg"`) is a SyntaxError
                    raise self.InvalidCmd("2 different process have the same final states. This module can not handle such situation")
                else:
                    continue
            self.id_to_path[tag] = [order, Pdir]