def __len__(self):
    """Return the number of events in the file (cached after the first scan)."""
    # A closed file contains nothing to count.
    if self.file.closed:
        return 0
    # Reuse the cached count from a previous full scan, if any.
    if hasattr(self, "len"):
        return self.len
    self.seek(0)
    count = 0
    # Disable parsing while counting: we only need to know how many events
    # there are, not their content.
    with misc.TMP_variable(self, 'parsing', False):
        for _event in self:
            count += 1
    self.len = count
    self.seek(0)
    return count
def load_model(name, decay=False):
    """Import the UFO model *name* and return its python module.

    *name* is either a bare model name (resolved inside the ``models``
    package, then on PYTHONPATH) or a path to a model directory.
    When *decay* is True, the model's ``decays`` submodule is imported as
    well and attached to the returned module as ``all_decays``.

    Raises Exception if the model name collides with an already-imported
    module living at a different path.
    """
    # avoid final '/' in the path
    if name.endswith('/'):
        name = name[:-1]
    path_split = name.split(os.sep)
    if len(path_split) == 1:
        # Bare model name: first try the bundled ``models`` package.
        try:
            model_pos = 'models.%s' % name
            __import__(model_pos)
            return sys.modules[model_pos]
        except Exception:
            pass
        # BUGFIX: the original iterated over the *characters* of the
        # PYTHONPATH string; split it into directory entries instead.
        for p in os.environ.get('PYTHONPATH', '').split(os.pathsep):
            if not p:
                continue
            new_name = os.path.join(p, name)
            try:
                return load_model(new_name, decay)
            except Exception:
                pass
    elif path_split[-1] in sys.modules:
        # A module of that name is already loaded: it must come from the
        # same directory, otherwise re-importing would silently pick the
        # wrong model.
        model_path = os.path.realpath(os.sep.join(path_split))
        sys_path = os.path.realpath(
            os.path.dirname(sys.modules[path_split[-1]].__file__))
        if sys_path != model_path:
            # BUGFIX: replaced the Python-2-only ``raise Exception, msg``
            # statement with the call form (valid on both python2 and 3).
            raise Exception('name %s already consider as a python library cann\'t be reassigned(%s!=%s)' % \
                (path_split[-1], model_path, sys_path))
    # Import the model package with only its parent directory on sys.path.
    with misc.TMP_variable(sys, 'path', [os.sep.join(path_split[:-1])]):
        __import__(path_split[-1])
    output = sys.modules[path_split[-1]]
    if decay:
        # Optionally load the decay tables shipped with the model.
        dec_name = '%s.decays' % path_split[-1]
        try:
            __import__(dec_name)
        except ImportError:
            pass
        else:
            output.all_decays = sys.modules[dec_name].all_decays
    return sys.modules[path_split[-1]]
def find_class(self, module, name):
    """Find the correct path for the given function.

    Due to ME call via MG some libraries might be messed up on the pickle
    This routine helps to find back which one we need.
    """
    # A bit of an ugly hack, but it works and has no side effect.
    if module == 'loop_me_comparator':
        module = 'tests.parallel_tests.loop_me_comparator'
    import sys
    try:
        import madgraph.various.misc as misc
    except ImportError:
        import internal.misc as misc
    # Retry the lookup with the base module directory added to sys.path.
    with misc.TMP_variable(sys, 'path', sys.path + [self.basemod]):
        try:
            return pickle.Unpickler.find_class(self, module, name)
        except ImportError:
            pass
        # The plain module path failed: probe the known package prefixes
        # under which the class may actually live.
        last_error = None
        prefixes = ['internal.%s', 'madgraph.iolibs.%s',
                    'madgraph.madevent.%s', 'madgraph.various.%s',
                    'internal.ufomodel.%s']
        for prefix in prefixes:
            # Keep only the last component of a dotted module path.
            short = module.rsplit('.', 1)[1] if '.' in module else module
            candidate = prefix % short
            try:
                return pickle.Unpickler.find_class(self, candidate, name)
            except Exception as exc:
                last_error = exc
        # Every candidate failed: surface the most recent failure.
        raise last_error
def load_model(name, decay=False):
    """Import the UFO model *name* and return its python module.

    *name* is either a bare model name (resolved inside MG5DIR/models, then
    on PYTHONPATH) or a path to a model directory.  When *decay* is True,
    the model's ``decays`` submodule is imported as well and attached to
    the returned module as ``all_decays``.

    Raises Exception if the model name collides with an already-imported
    module living at a different path; re-raises any import failure of the
    model itself (after hinting at python2/3 conversion issues).
    """
    # avoid final '/' in the path
    if name.endswith('/'):
        name = name[:-1]
    path_split = name.split(os.sep)
    if len(path_split) == 1:
        # Bare model name: look it up inside MG5DIR/models first.
        try:
            with misc.TMP_variable(sys, 'path', [
                    pjoin(MG5DIR, 'models'),
                    pjoin(MG5DIR, 'models', name), MG5DIR]):
                model_pos = 'models.%s' % name
                __import__(model_pos)
            return sys.modules[model_pos]
        except Exception:
            pass
        # Fall back on every PYTHONPATH entry.
        # BUGFIX: dropped the unreachable ``except ImportError`` clause that
        # followed ``except Exception`` on the same try (Exception already
        # catches ImportError); split on os.pathsep for portability.
        if 'PYTHONPATH' in os.environ:
            for p in os.environ['PYTHONPATH'].split(os.pathsep):
                new_name = os.path.join(p, name)
                try:
                    return load_model(new_name, decay)
                except Exception:
                    pass
    elif path_split[-1] in sys.modules:
        # A module of that name is already loaded: it must come from the
        # same directory, otherwise re-importing would silently pick the
        # wrong model.
        model_path = os.path.realpath(os.sep.join(path_split))
        sys_path = os.path.realpath(
            os.path.dirname(sys.modules[path_split[-1]].__file__))
        if sys_path != model_path:
            raise Exception('name %s already consider as a python library cann\'t be reassigned(%s!=%s)' % \
                (path_split[-1], model_path, sys_path))
    # remove any link to previous model
    # BUGFIX: the loop variable used to be ``name``, clobbering the function
    # argument of the same name.
    for submod in ['particles', 'object_library', 'couplings',
                   'function_library', 'lorentz', 'parameters', 'vertices',
                   'coupling_orders', 'write_param_card', 'CT_couplings',
                   'CT_vertices', 'CT_parameters']:
        try:
            del sys.modules[submod]
        except Exception:
            continue
    # Import the model package with its parent directory (and the model
    # directory itself) on sys.path.
    with misc.TMP_variable(
            sys, 'path',
            [os.sep.join(path_split[:-1]), os.sep.join(path_split)]):
        try:
            __import__(path_split[-1])
        except Exception:
            if six.PY3:
                # Most import failures on python3 come from python2-only
                # UFO models; point the user at the automatic converter.
                logger.critical(
                    'It is likely that your UFO model is NOT python3 compatible.\n Most common issue with python2/3 compatibility can be solve with the "convert model" command of MG5aMC.'
                )
                logger.warning(
                    'If you want to try that automatic conversion please run:')
                logger.warning('convert model %s' % '/'.join(path_split))
            raise
    output = sys.modules[path_split[-1]]
    if decay:
        # Optionally load the decay tables shipped with the model.
        dec_name = '%s.decays' % path_split[-1]
        try:
            __import__(dec_name)
        except ImportError:
            pass
        else:
            output.all_decays = sys.modules[dec_name].all_decays
    return sys.modules[path_split[-1]]
def __init__(self, fksmulti, loop_optimized=False, gen_color=True,
             decay_ids=None):
    """Initialization from a FKSMultiProcess.

    Builds the born/real/virtual matrix elements either sequentially
    (``ncores_for_proc_gen`` falsy) or in parallel through a
    multiprocessing pool, and fills the dict-like keys
    ``matrix_elements``, ``processes``, ``initial_states``,
    ``used_lorentz``, ``used_couplings``, ``max_particles``,
    ``max_configs``, ``has_loops``/``has_virtuals``, ``has_isr``,
    ``has_fsr``.
    """
    # BUGFIX: ``decay_ids=[]`` was a mutable default argument; use a None
    # sentinel instead (behaviour unchanged for callers).
    if decay_ids is None:
        decay_ids = []
    # switch the other loggers off while generating
    loggers_off = [logging.getLogger('madgraph.diagram_generation'),
                   logging.getLogger('madgraph.helas_objects')]
    old_levels = [logg.level for logg in loggers_off]
    for logg in loggers_off:
        logg.setLevel(logging.WARNING)
    self.loop_optimized = loop_optimized
    self['used_lorentz'] = []
    self['used_couplings'] = []
    self['processes'] = []
    self['max_particles'] = -1
    self['max_configs'] = -1
    if not fksmulti['ncores_for_proc_gen']:
        # generate the real ME's if they are needed.
        # note that it may not be always the case, e.g. it the NLO_mode is LOonly
        if fksmulti['real_amplitudes']:
            logger.info('Generating real emission matrix-elements...')
            self['real_matrix_elements'] = self.generate_matrix_elements(
                copy.copy(fksmulti['real_amplitudes']),
                combine_matrix_elements=False)
        else:
            self['real_matrix_elements'] = \
                helas_objects.HelasMatrixElementList()
        self['matrix_elements'] = self.generate_matrix_elements_fks(
            fksmulti, gen_color, decay_ids)
        self['initial_states'] = []
        self['has_loops'] = len(self.get_virt_matrix_elements()) > 0
    else:
        self['has_loops'] = False
        # more efficient generation
        born_procs = fksmulti.get('born_processes')
        born_pdg_list = [[l['id'] for l in born.born_proc['legs']]
                         for born in born_procs]
        # Collect the maximal coupling order of each type over all borns.
        loop_orders = {}
        for born in born_procs:
            for coup, val in fks_common.find_orders(born.born_amp).items():
                try:
                    loop_orders[coup] = max([loop_orders[coup], val])
                except KeyError:
                    loop_orders[coup] = val
        # Keep one real amplitude per distinct pdg configuration.
        pdg_list = []
        real_amp_list = []
        for born in born_procs:
            for amp in born.real_amps:
                if not pdg_list.count(amp.pdgs):
                    pdg_list.append(amp.pdgs)
                    real_amp_list.append(amp)
        # generating and store in tmp files all output corresponding to
        # each real_amplitude
        realmapin = []
        for i, real_amp in enumerate(real_amp_list):
            realmapin.append([i, real_amp])
        # start the pool instance with a signal instance to catch ctr+c
        original_sigint_handler = signal.signal(signal.SIGINT,
                                                signal.SIG_IGN)
        if fksmulti['ncores_for_proc_gen'] < 0:
            # use all cores
            pool = multiprocessing.Pool(maxtasksperchild=1)
        else:
            pool = multiprocessing.Pool(
                processes=fksmulti['ncores_for_proc_gen'],
                maxtasksperchild=1)
        signal.signal(signal.SIGINT, original_sigint_handler)
        logger.info('Generating real matrix elements...')
        try:
            # the very large timeout passed to get is to be able to catch
            # KeyboardInterrupts
            modelpath = born_procs[0].born_proc['model'].get('modelpath')
            with misc.TMP_variable(
                    sys, 'path',
                    sys.path + [pjoin(MG5DIR, 'models'), modelpath]):
                realmapout = pool.map_async(async_generate_real,
                                            realmapin).get(9999999)
        except KeyboardInterrupt:
            pool.terminate()
            raise
        realmapfiles = []
        for realout in realmapout:
            realmapfiles.append(realout[0])
        logger.info('Generating born and virtual matrix elements...')
        # now loop over born and consume reals, generate virtuals
        bornmapin = []
        OLP = fksmulti['OLP']
        for i, born in enumerate(born_procs):
            bornmapin.append([i, born, born_pdg_list, loop_orders,
                              pdg_list, loop_optimized, OLP, realmapfiles])
        try:
            bornmapout = pool.map_async(async_generate_born,
                                        bornmapin).get(9999999)
        except KeyboardInterrupt:
            pool.terminate()
            raise
        # remove real temp files
        for realtmp in realmapout:
            os.remove(realtmp[0])
        logger.info('Collecting infos and finalizing matrix elements...')
        # Group born outputs by identical tag so duplicates are finalized
        # only once.
        unique_me_list = []
        duplicate_me_lists = []
        for bornout in bornmapout:
            mefile = bornout[0]
            metag = bornout[1]
            has_loops = bornout[2]
            self['has_loops'] = self['has_loops'] or has_loops
            processes = bornout[3]
            self['processes'].extend(processes)
            # BUGFIX: the particle maximum was seeded with
            # self['max_configs'] instead of self['max_particles'].
            self['max_particles'] = max(
                [self['max_particles']] +
                [len(p['legs']) + 1 for p in bornout[3]])
            self['max_configs'] = max(self['max_configs'], bornout[4])
            unique = True
            for ime2, bornout2 in enumerate(unique_me_list):
                metag2 = bornout2[1]
                if metag == metag2:
                    duplicate_me_lists[ime2].append(mefile)
                    unique = False
                    break
            if unique:
                unique_me_list.append(bornout)
                duplicate_me_lists.append([])
        memapin = []
        for i, bornout in enumerate(unique_me_list):
            mefile = bornout[0]
            memapin.append([i, mefile, duplicate_me_lists[i]])
        try:
            memapout = pool.map_async(async_finalize_matrix_elements,
                                      memapin).get(9999999)
        except KeyboardInterrupt:
            pool.terminate()
            raise
        # remove born+virtual temp files
        for bornout in bornmapout:
            os.remove(bornout[0])
        pool.close()
        pool.join()
        # set final list of matrix elements (paths to temp files)
        matrix_elements = []
        for meout in memapout:
            matrix_elements.append(meout[0])
        self['matrix_elements'] = matrix_elements
        # cache information needed for output which will not be available
        # from the matrix elements later
        initial_states = []
        for meout in memapout:
            for state in meout[1]:
                initial_states.append(state)
        # remove doubles from the list
        checked = []
        for e in initial_states:
            if e not in checked:
                checked.append(e)
        initial_states = checked
        self['initial_states'] = initial_states
        helas_list = []
        for meout in memapout:
            helas_list.extend(meout[2])
        self['used_lorentz'] = list(set(helas_list))
        coupling_list = []
        for meout in memapout:
            coupling_list.extend([c for l in meout[3] for c in l])
        self['used_couplings'] = list(set(coupling_list))
        has_virtuals = False
        for meout in memapout:
            if meout[4]:
                has_virtuals = True
                break
        self['has_virtuals'] = has_virtuals
        # Fold the real-emission counts into the global maxima.
        configs_list = [self['max_configs']]
        for meout in realmapout:
            configs_list.append(meout[1])
        self['max_configs'] = max(configs_list)
        nparticles_list = [self['max_particles']]
        for meout in realmapout:
            nparticles_list.append(meout[2])
        self['max_particles'] = max(nparticles_list)
    self['has_isr'] = fksmulti['has_isr']
    self['has_fsr'] = fksmulti['has_fsr']
    logger.info('... Done')
    # Restore the silenced loggers to their original levels.
    for i, logg in enumerate(loggers_off):
        logg.setLevel(old_levels[i])