def prepare_run(self):
        """ ask for pythia-pgs/delphes run """

        # Find all main_model_process.cc files
        date_file_list = []
        for file in misc.glob('main_*_*.cc', self.running_dir):
            # retrieves the stats for the current file as a tuple
            # (mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime)
            # the tuple element mtime at index 8 is the last-modified-date
            stats = os.stat(file)
            # create tuple (year yyyy, month(1-12), day(1-31), hour(0-23), minute(0-59), second(0-59),
            # weekday(0-6, 0 is monday), Julian day(1-366), daylight flag(-1,0 or 1)) from seconds since epoch
            # note:  this tuple can be sorted properly by date and time
            lastmod_date = time.localtime(stats[8])
            date_file_list.append((lastmod_date, os.path.split(file)[-1]))

        if not date_file_list:
            raise MadGraph5Error('No Pythia output found')
        # Sort files by date, newest first
        date_file_list.sort(reverse=True)
        files = [d[1] for d in date_file_list]
        
        answer = self.ask('Select a main file to run:', files[0], files)

        self.cards.append(answer)
    
        self.executable = self.cards[-1].replace(".cc","")

        # Assign a valid run name if not set in the options
        if self.name == '':
            for i in range(1000):
                path = os.path.join(self.running_dir,
                                    '%s_%02i.log' % (self.executable, i))
                if not os.path.exists(path):
                    self.name = '%s_%02i.log' % (self.executable, i)
                    break
        
        if self.name == '':
            raise MadGraph5Error('too many runs in this directory')

        # Find all exported models
        models = misc.glob("Processes_*", pjoin(self.running_dir,os.path.pardir))
        models = [os.path.split(m)[-1].replace("Processes_","") for m in models]
        # Extract the model name from the executable: try the longest model
        # names first so that a model whose name is a prefix of another one
        # is not matched too early
        models.sort(key=len, reverse=True)
        model_dir = ""
        for model in models:
            if self.executable.replace("main_", "").startswith(model):
                model_dir = "Processes_%s" % model
                break
        if model_dir:
            self.model = model
            self.model_dir = os.path.realpath(os.path.join(self.running_dir,
                                                           os.path.pardir,
                                                           model_dir))
            self.cards.append(os.path.join(self.model_dir,
                                           "param_card_%s.dat" % model))
Example #2
    def test_short_OfflineHEPToolsInstaller(self):
        """ Test whether the current OfflineHEPToolsInstaller is up to date."""

        with misc.TMP_directory() as tmp_path:
            subprocess.call(
                'bzr branch lp:~maddevelopers/mg5amcnlo/HEPToolsInstallers BZR_VERSION',
                cwd=tmp_path,
                shell=True)
            # shutil.copy(pjoin(MG5DIR, 'vendor', 'OfflineHEPToolsInstaller.tar.gz'),
            #             pjoin(tmp_path, 'OfflineHEPToolsInstaller.tar.gz'))
            # subprocess.call('tar -xzf OfflineHEPToolsInstaller.tar.gz', cwd=tmp_path, shell=True)
            # shutil.move(pjoin(tmp_path, 'HEPToolsInstallers'), pjoin(tmp_path, 'OFFLINE_VERSION'))
            online_path = dict(
                tuple(line.decode().split()[:2])
                for line in six.moves.urllib.request.urlopen(
                    'http://madgraph.phys.ucl.ac.be/package_info.dat')
            )['HEPToolsInstaller']
            subprocess.call(
                'tar -xzf %s' %
                TestMG5aMCDistribution.get_data(online_path, tmp_path),
                cwd=tmp_path,
                shell=True)
            shutil.move(pjoin(tmp_path, 'HEPToolsInstallers'),
                        pjoin(tmp_path, 'ONLINE_VERSION_UCL'))
            online_path = dict(
                tuple(line.decode().split()[:2])
                for line in six.moves.urllib.request.urlopen(
                    'http://madgraph.physics.illinois.edu/package_info.dat')
            )['HEPToolsInstaller']
            subprocess.call(
                'tar -xzf %s' %
                TestMG5aMCDistribution.get_data(online_path, tmp_path),
                cwd=tmp_path,
                shell=True)
            shutil.move(pjoin(tmp_path, 'HEPToolsInstallers'),
                        pjoin(tmp_path, 'ONLINE_VERSION_UIUC'))
            for path in misc.glob(pjoin('BZR_VERSION', '*'), tmp_path):
                if os.path.basename(path) == '.bzr':
                    continue
                file_name = os.path.basename(path)
                # for comparison in ['OFFLINE_VERSION', 'ONLINE_VERSION_UCL', 'ONLINE_VERSION_UIUC']:
                for comparison in ['ONLINE_VERSION_UCL', 'ONLINE_VERSION_UIUC']:
                    # misc.sprint('Testing %s in %s vs %s.' % (file_name, 'BZR_VERSION', comparison))
                    diff = subprocess.Popen(
                        'diff %s %s' %
                        (path, pjoin(tmp_path, comparison, file_name)),
                        cwd=tmp_path,
                        shell=True,
                        stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE)
                    diff = diff.communicate()[0].decode()
                    self.assertEqual(
                        diff, '',
                        'Comparison of HEPToolsInstallers | %s vs %s | %s failed.\n'
                        % ('BZR_VERSION', comparison, file_name) +
                        "Consider updating MG servers and '%s'." %
                        pjoin(MG5DIR, 'vendor',
                              'OfflineHEPToolsInstaller.tar.gz'))
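
The test above builds the same package_info.dat lookup inline twice. The file is
read as a two-column "name path" listing; a small hedged helper capturing that
parsing (hypothetical, shown only as a sketch of the format the test relies on):

# Sketch: look up a package path in a package_info.dat listing.
from six.moves.urllib.request import urlopen

def get_package_path(server, package):
    """Return the path registered for `package` in server/package_info.dat."""
    info = dict(tuple(line.decode().split()[:2])
                for line in urlopen(server + '/package_info.dat'))
    return info[package]

# e.g. get_package_path('http://madgraph.phys.ucl.ac.be', 'HEPToolsInstaller')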
Example #3
# Detect the total number of events in an efficient way
n_evts = int(
    subprocess.Popen('grep -rin "<event>" %s | wc -l' %
                     os.path.realpath(eventFile),
                     stdout=subprocess.PIPE,
                     shell=True).communicate()[0])

logging.info("Writing out the reweighted event file on\n   %s" % outPath)
# Write out the output event file
outputEvtFile = open(outPath, 'w')
outputEvtFile.write(evtFile.banner)

# List the channels present in the process output
channel_list = [ os.path.basename(chan) for chan in \
                 misc.glob('P*', pjoin(proc_path, 'SubProcesses')) ]

# Now scan over events
# For each encountered channel, store the corresponding process runner
encountered_channels = {}

t_before = time.time()
i_evt = 0
wgt_summed = 0.0
squared_wgt_summed = 0.0
running_spread = 0.0
max_wgt = 0.0
n_percent_monitor = 1
for event in evtFile:
    i_evt = i_evt + 1
    t_evt_start = time.time()
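
The grep pipeline near the top of this example shells out to count <event> tags.
A portable pure-Python equivalent, as a sketch (it streams the file, so memory
use stays flat even for large event files; gzip-compressed input would need
gzip.open instead):

# Sketch: count LHE events without a shell dependency.
def count_events(event_file_path):
    n = 0
    with open(event_file_path) as stream:
        for line in stream:
            if '<event>' in line:
                n += 1
    return n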
Example #4
    def do_collect(self, line):
        """MadWeight Function: making the collect of the results"""
        
        self.configure()
        args = self.split_arg(line)
        self.check_collect(args)
        xml_reader = MWParserXML()
        
        name = self.MWparam.name
        # 1. Concatenate the files. ############################################
        out_dir = pjoin(self.me_dir, 'Events', name)
        if '-refine' in args:
            out_path = pjoin(out_dir, 'refine.xml') 
        else:
            out_path = pjoin(out_dir, 'output.xml')
            if os.path.exists(out_path):
                logger.warning('Output file already exists. The current one will be tagged with the _old suffix.')
                logger.warning('Run "collect -refine" to instead update your current results.')
                files.mv(pjoin(out_dir, 'output.xml'), pjoin(out_dir, 'output_old.xml'))
                files.mv(pjoin(out_dir, 'weights.out'), pjoin(out_dir, 'weights_old.out'))
                for MWdir in self.MWparam.MW_listdir:
                    out_dir = pjoin(self.me_dir, 'Events', name, MWdir)
                    files.mv(pjoin(out_dir, 'output.xml'), pjoin(out_dir, 'output_old.xml'))
                out_dir = pjoin(self.me_dir, 'Events', name)
                    
        fsock = open(out_path, 'w')
        fsock.write('<madweight>\n<banner>\n')
        # BANNER
        for card in ['proc_card_mg5.dat','MadWeight_card.dat','transfer_card.dat','param_card.dat','run_card.dat']:
            cname = card[:-4]
            fsock.write('<%s>\n' % cname)
            fsock.write(open(pjoin(self.me_dir,'Cards',card)).read().replace('<','!>'))
            fsock.write('</%s>\n' % cname)
        fsock.write('</banner>\n')
        at_least_one = False
        for MWdir in self.MWparam.MW_listdir:
            out_dir = pjoin(self.me_dir, 'Events', name, MWdir)
            input_dir = pjoin(self.me_dir, 'SubProcesses', MWdir, name)
            if not os.path.exists(out_dir):
                os.mkdir(out_dir)
            if '-refine' in args:
                out_path = pjoin(out_dir, 'refine.xml') 
            else:
                out_path = pjoin(out_dir, 'output.xml')  
            fsock2 = open(out_path,'w')
            fsock.write('<subprocess id=\'%s\'>\n' % MWdir)
            fsock2.write('<subprocess id=\'%s\'>\n' % MWdir)
            for output in misc.glob('output_*_*.xml', input_dir):
                at_least_one = True
                text = open(output).read()
                fsock2.write(text)
                fsock.write(text)
                os.remove(output)
            fsock.write('</subprocess>\n')
            fsock2.write('</subprocess>\n')
            fsock2.close()
        fsock.write('\n</madweight>\n')          
        fsock.close()
        # 2. Special treatment for refine mode
        if '-refine' in args:
            xml_reader2 = MWParserXML(self.MWparam['mw_run']['log_level'])
            for MWdir in self.MWparam.MW_listdir:
                out_dir = pjoin(self.me_dir, 'Events',name, MWdir)
                ref_output = xml_reader2.read_file(pjoin(out_dir, 'refine.xml'))
                xml_reader2 = MWParserXML(self.MWparam['mw_run']['log_level'])
                base_output = xml_reader2.read_file(pjoin(out_dir, 'output.xml'))

                base_output.refine(ref_output)
                files.mv(pjoin(out_dir, 'output.xml'), pjoin(out_dir, 'output_old.xml'))
                base_output.write(pjoin(out_dir, 'output.xml'), MWdir)
        elif not at_least_one:
            logger.warning("Nothing to collect restore _old file as current.")
            out_dir = pjoin(self.me_dir, 'Events', name)
            files.mv(pjoin(out_dir, 'output_old.xml'), pjoin(out_dir, 'output.xml'))
            files.mv(pjoin(out_dir, 'weights_old.out'), pjoin(out_dir, 'weights.out'))
            for MWdir in self.MWparam.MW_listdir:
                out_dir = pjoin(self.me_dir, 'Events', name, MWdir)
                files.mv(pjoin(out_dir, 'output.xml'), pjoin(out_dir, 'output_old.xml'))
            
            
            
        # 3. Read the (final) log file for extracting data
        total = {}
        likelihood = {}
        err_likelihood = {}
        cards = set()
        events = set()
        tf_sets = set()
        for MW_dir in self.MWparam.MW_listdir:
            out_dir = pjoin(self.me_dir, 'Events', name, MW_dir)
            xml_reader = MWParserXML()
            data = xml_reader.read_file(pjoin(out_dir, 'output.xml'))
            #
            log_level = self.MWparam['mw_run']['log_level']            
            generator = ((int(i), j, int(k), data[i][j][k]) for i in data
                                                            for j in data[i]
                                                            for k in data[i][j])
            for card, event, tf_set, obj in generator:
                # update the full list of events/cards
                cards.add(card)
                events.add(event)
                tf_sets.add(tf_set)
                # now compute the associated value and squared error
                if (card,event, tf_set) in total:
                    value, error = total[(card, event, tf_set)]                    
                else:
                    value, error = 0, 0
                obj.calculate_total()
                value, error = (value + obj.value, error + obj.error**2) 
                total[(card, event, tf_set)] = (value, error)
                if tf_set == 1:
                    if value:
                        if card not in likelihood:
                            likelihood[card], err_likelihood[card] = 0, 0
                        likelihood[card] -= math.log(value)
                        err_likelihood[card] += error / value
                    else:
                        likelihood[card] = float('Inf')
                        err_likelihood[card] = float('nan')

                
        # write the weights file:
        fsock = open(pjoin(self.me_dir, 'Events', name, 'weights.out'), 'w')
        logger.info('Write output file with weight information: %s' % fsock.name)
        fsock.write('# Weight (un-normalized) for each card/event/set of transfer functions\n')
        fsock.write('# format: LHCO_event_number card_id tf_set_id value integration_error\n')
        events = list(events)
        events.sort()
        cards = list(cards)
        cards.sort()
        tf_sets = list(tf_sets)
        tf_sets.sort()
        for event in events:
            for card in cards:
                for tf_set in tf_sets:
                    try:
                        value, error = total[(card, event,tf_set)]
                    except KeyError:
                        continue
                    error = math.sqrt(error)
                    fsock.write('%s %s %s %s %s \n' % (event.replace('@', ' '), card, tf_set, value, error))
    
        # write the likelihood file:
        fsock = open(pjoin(self.me_dir, 'Events', name, 'un-normalized_likelihood.out'), 'w')
        fsock.write('# Warning: this likelihood needs a bin-by-bin normalization!\n')
        fsock.write('# If more than one set of transfer functions is defined, ONLY the first one is\n')
        fsock.write('# included in this file.\n')
        fsock.write('# format: card_id value integration_error\n')
        for card in cards:
            value, error = likelihood[card], err_likelihood[card]
            error = math.sqrt(error)
            fsock.write('%s %s %s \n' % (card, value, error))
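
The weights.out format written above is one record per line: the LHCO event
number (with any '@' expanded to a space, so it may span several columns), then
card_id, tf_set_id, value and integration_error. A hedged reader sketch for that
format (hypothetical helper, not part of MadWeight):

# Sketch: parse weights.out into a {(event, card, tf_set): (value, error)} map.
def read_weights(path):
    weights = {}
    for line in open(path):
        line = line.strip()
        if not line or line.startswith('#'):
            continue
        fields = line.split()
        # parse from the right: the event identifier may contain spaces
        event = ' '.join(fields[:-4])
        card, tf_set = int(fields[-4]), int(fields[-3])
        value, error = float(fields[-2]), float(fields[-1])
        weights[(event, card, tf_set)] = (value, error)
    return weights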
Example #5
    def run_bridge(self, line):
        """Run the Bridge Algorithm"""

        # 1. Read the event file to check which decays to perform and the number
        #    of events to generate for each type of particle.
        # 2. Generate the events requested.
        # 3. Perform the merge of the events;
        #    if there are not enough events, re-generate the missing ones.

        # First define a utility function for generating events when needed
        def generate_events(pdg,
                            nb_event,
                            mg5,
                            restrict_file=None,
                            cumul=False):
            """generate new events for this particle
               restrict_file allow to only generate a subset of the definition
               cumul allow to merge all the definition in one run (add process)
                     to generate events according to cross-section
            """

            part = self.model.get_particle(pdg)
            name = part.get_name()
            out = {}
            logger.info("generate %s decay event for particle %s" %
                        (nb_event, name))
            if name not in self.list_branches:
                return out
            for i, proc in enumerate(self.list_branches[name]):
                if restrict_file and i not in restrict_file:
                    continue
                decay_dir = pjoin(
                    self.path_me,
                    "decay_%s_%s" % (str(pdg).replace("-", "x"), i))
                if not os.path.exists(decay_dir):
                    if cumul:
                        mg5.exec_cmd("generate %s" % proc)
                        for j, proc2 in enumerate(
                                self.list_branches[name][1:]):
                            if restrict_file and j not in restrict_file:
                                raise Exception  # Do not see how this can happen
                            mg5.exec_cmd("add process %s" % proc2)
                        mg5.exec_cmd("output %s -f" % decay_dir)
                    else:
                        mg5.exec_cmd("generate %s" % proc)
                        mg5.exec_cmd("output %s -f" % decay_dir)

                    options = dict(mg5.options)
                    if self.options['ms_dir']:
                        misc.sprint("start gridpack!")
                        # we are in gridpack mode -> create it
                        me5_cmd = madevent_interface.MadEventCmdShell(me_dir=os.path.realpath(\
                                                decay_dir), options=options)
                        me5_cmd.options["automatic_html_opening"] = False
                        if self.options["run_card"]:
                            run_card = self.options["run_card"]
                        else:
                            run_card = banner.RunCard(
                                pjoin(decay_dir, "Cards", "run_card.dat"))

                        run_card["iseed"] = self.seed
                        run_card['gridpack'] = True
                        run_card.write(
                            pjoin(decay_dir, "Cards", "run_card.dat"))
                        param_card = self.banner['slha']
                        open(pjoin(decay_dir, "Cards", "param_card.dat"),
                             "w").write(param_card)
                        self.seed += 1
                        # actually creation
                        me5_cmd.exec_cmd("generate_events run_01 -f")
                        me5_cmd.exec_cmd("exit")
                        # remove pointless information
                        misc.call(
                            ["rm", "Cards", "bin", 'Source', 'SubProcesses'],
                            cwd=decay_dir)
                        misc.call(['tar', '-xzpvf', 'run_01_gridpack.tar.gz'],
                                  cwd=decay_dir)

                # Now generate the events

                if not self.options['ms_dir']:
                    me5_cmd = madevent_interface.MadEventCmdShell(me_dir=os.path.realpath(\
                                                    decay_dir), options=mg5.options)
                    me5_cmd.options["automatic_html_opening"] = False
                    if self.options["run_card"]:
                        run_card = self.options["run_card"]
                    else:
                        run_card = banner.RunCard(
                            pjoin(decay_dir, "Cards", "run_card.dat"))
                    run_card["nevents"] = int(1.2 * nb_event)
                    run_card["iseed"] = self.seed
                    run_card.write(pjoin(decay_dir, "Cards", "run_card.dat"))
                    param_card = self.banner['slha']
                    open(pjoin(decay_dir, "Cards", "param_card.dat"),
                         "w").write(param_card)
                    self.seed += 1
                    me5_cmd.exec_cmd("generate_events run_01 -f")
                    me5_cmd.exec_cmd("exit")
                    out[i] = lhe_parser.EventFile(
                        pjoin(decay_dir, "Events", 'run_01',
                              'unweighted_events.lhe.gz'))
                else:
                    misc.call(
                        ['run.sh',
                         str(int(1.2 * nb_event)),
                         str(self.seed)],
                        cwd=decay_dir)
                    out[i] = lhe_parser.EventFile(
                        pjoin(decay_dir, 'events.lhe.gz'))
                if cumul:
                    break

            return out

        args = self.split_arg(line)

        #0. Define the path where to write the file
        self.path_me = os.path.realpath(self.options['curr_dir'])
        if self.options['ms_dir']:
            self.path_me = os.path.realpath(self.options['ms_dir'])
            if not os.path.exists(self.path_me):
                os.mkdir(self.path_me)
        else:
            # cleaning
            for name in misc.glob("decay_*_*", self.path_me):
                shutil.rmtree(name)

        self.events_file.close()
        orig_lhe = lhe_parser.EventFile(self.events_file.name)

        to_decay = collections.defaultdict(int)
        nb_event = 0
        for event in orig_lhe:
            nb_event += 1
            for particle in event:
                if particle.status == 1 and particle.pdg in self.final_state:
                    # final state and tag as to decay
                    to_decay[particle.pdg] += 1

        # Handle the banner of the output file
        if not self.seed:
            self.seed = random.randint(0, int(30081 * 30081))
            self.do_set('seed %s' % self.seed)
            logger.info('Will use seed %s' % self.seed)
            self.history.insert(0, 'set seed %s' % self.seed)

        if self.seed > 30081 * 30081:  # can't use too large a random number
            msg = 'Random seed too large: %s > 30081*30081' % self.seed
            raise Exception(msg)

        self.options['seed'] = self.seed

        text = '%s\n' % '\n'.join([line for line in self.history if line])
        self.banner.add_text('madspin', text)

        # 2. Generate the events requested
        with misc.MuteLogger(["madgraph", "madevent", "ALOHA", "cmdprint"],
                             [50, 50, 50, 50]):
            mg5 = self.mg5cmd
            modelpath = self.model.get('modelpath+restriction')
            mg5.exec_cmd("import model %s" % modelpath)
            to_event = {}
            for pdg, nb_needed in to_decay.items():
                #check if a splitting is needed
                if nb_needed == nb_event:
                    to_event[pdg] = generate_events(pdg, nb_needed, mg5)
                elif nb_needed % nb_event == 0:
                    nb_mult = nb_needed // nb_event
                    part = self.model.get_particle(pdg)
                    name = part.get_name()
                    if name not in self.list_branches:
                        continue
                    elif len(self.list_branches[name]) == nb_mult:
                        to_event[pdg] = generate_events(pdg, nb_event, mg5)
                    else:
                        to_event[pdg] = generate_events(pdg,
                                                        nb_needed,
                                                        mg5,
                                                        cumul=True)
                else:
                    part = self.model.get_particle(pdg)
                    name = part.get_name()
                    if name not in self.list_branches or len(
                            self.list_branches[name]) == 0:
                        continue
                    raise self.InvalidCmd(
                        "The bridge mode of MadSpin does not support event files where events do not *all* share the same set of final state particles to be decayed."
                    )

        # Compute the branching ratio.
        br = 1
        for (pdg, event_files) in to_event.items():
            if not event_files:
                continue
            totwidth = float(self.banner.get('param', 'decay', abs(pdg)).value)
            if to_decay[pdg] == nb_event:
                # Exactly one particle of this type to decay per event
                pwidth = sum([event_files[k].cross for k in event_files])
                if pwidth > 1.01 * totwidth:
                    logger.critical("Branching ratio larger than one for %s " %
                                    pdg)
                br *= pwidth / totwidth
            elif to_decay[pdg] % nb_event == 0:
                # More than one particle of this type to decay per event.
                # Need to check the number of event files to decide whether we
                # have to make separate types of decay or not.
                nb_mult = to_decay[pdg] // nb_event
                if nb_mult == len(event_files):
                    for k in event_files:
                        pwidth = event_files[k].cross
                        if pwidth > 1.01 * totwidth:
                            logger.critical(
                                "Branching ratio larger than one for %s " %
                                pdg)
                        br *= pwidth / totwidth
                    br *= math.factorial(nb_mult)
                else:
                    pwidth = sum(event_files[k].cross for k in event_files)
                    if pwidth > 1.01 * totwidth:
                        logger.critical(
                            "Branching ratio larger than one for %s " % pdg)
                    br *= (pwidth / totwidth)**nb_mult
            else:
                raise self.InvalidCmd(
                    "The bridge mode of MadSpin does not support event files where events do not *all* share the same set of final state particles to be decayed."
                )
        self.branching_ratio = br
        # modify the cross-section in the init block of the banner
        self.banner.scale_init_cross(self.branching_ratio)

        # 3. Merge the various file together.
        output_lhe = lhe_parser.EventFile(
            orig_lhe.name.replace('.lhe', '_decayed.lhe.gz'), 'w')
        self.banner.write(output_lhe, close_tag=False)

        # initialise the buffers that store events not used due to wrong helicity
        bufferedEvents_decay = {}
        for pdg in to_event:
            # one independent buffer per decay file ([{}] * n would share a
            # single dict between all entries)
            bufferedEvents_decay[pdg] = [{} for _ in range(len(to_event[pdg]))]

        import time
        start = time.time()
        counter = 0
        orig_lhe.seek(0)
        for event in orig_lhe:
            # print progress at 1000, 2000, ..., 10000, 20000, ... events
            if counter and counter % 1000 == 0 and float(str(counter)[1:]) == 0:
                print("decaying event number %s [%s s]" % (counter,
                                                           time.time() - start))
            counter += 1

            # use a random order for the particles to avoid systematics when
            # more than one type of decay is requested.
            particles = [p for p in event if int(p.status) == 1]
            random.shuffle(particles)
            ids = [particle.pid for particle in particles]
            for i, particle in enumerate(particles):
                # check if we need to decay the particle
                if particle.pdg not in self.final_state or particle.pdg not in to_event:
                    continue  # nothing to do for this particle
                # check how the decay need to be done
                nb_decay = len(to_event[particle.pdg])
                if nb_decay == 0:
                    continue  #nothing to do for this particle
                if nb_decay == 1:
                    decay_file = to_event[particle.pdg][0]
                    decay_file_nb = 0
                elif ids.count(particle.pdg) == nb_decay:
                    decay_file = to_event[particle.pdg][ids[:i].count(
                        particle.pdg)]
                    decay_file_nb = ids[:i].count(particle.pdg)
                else:
                    # need to select the file according to the associated cross-section
                    r = random.random()
                    tot = sum(to_event[particle.pdg][key].cross
                              for key in to_event[particle.pdg])
                    r = r * tot
                    cumul = 0
                    for j, events in to_event[particle.pdg].items():
                        cumul += events.cross
                        if r < cumul:
                            decay_file = events
                            decay_file_nb = j
                            break

                # ok start the procedure
                helicity = particle.helicity
                bufferedEvents = bufferedEvents_decay[
                    particle.pdg][decay_file_nb]

                # now that we have the file to read, find the associated event:
                # check whether we have one event in memory
                if helicity in bufferedEvents and bufferedEvents[helicity]:
                    decay = bufferedEvents[helicity].pop()
                else:
                    # read the event file up to completion
                    while True:
                        try:
                            decay = next(decay_file)
                        except StopIteration:
                            # check how far we are
                            ratio = float(counter) / nb_event
                            needed = 1.05 * to_decay[
                                particle.pdg] / ratio - counter
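                            # NOTE: min(1000, max(needed, 1000)) always evaluates
                            # to 1000, so each top-up generates a fixed batch of
                            # 1000 events regardless of the estimate above.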
                            needed = min(1000, max(needed, 1000))
                            with misc.MuteLogger(
                                ["madgraph", "madevent", "ALOHA", "cmdprint"],
                                [50, 50, 50, 50]):
                                new_file = generate_events(
                                    particle.pdg, needed, mg5, [decay_file_nb])
                            to_event[particle.pdg].update(new_file)
                            decay_file = to_event[particle.pdg][decay_file_nb]
                            continue
                        if helicity == decay[0].helicity or helicity==9 or \
                                            self.options["spinmode"] == "none":
                            break  # use that event
                        # not a valid event; store it for later
                        if helicity not in bufferedEvents:
                            bufferedEvents[helicity] = [decay]
                        elif len(bufferedEvents[helicity]) < 200:
                            # only add to the buffering if the buffer is not too large
                            bufferedEvents[helicity].append(decay)
                # now that we have the event, perform the merge
                particle.add_decay(decay)
            # change the weight associated with the event
            event.wgt *= self.branching_ratio
            wgts = event.parse_reweight()
            for key in wgts:
                wgts[key] *= self.branching_ratio
            # all particles have been decayed if needed
            output_lhe.write(str(event))
        output_lhe.write('</LesHouchesEvents>\n')  # standard LHE closing tag
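
The decay-file choice inside the loop above is cumulative-sum sampling: draw r
uniformly in [0, total cross-section) and take the first file whose running sum
exceeds r. As a standalone sketch (assumes a non-empty dict of objects exposing
a `cross` attribute, like the lhe_parser.EventFile instances above):

# Sketch: pick a key/item pair with probability proportional to item.cross.
import random

def pick_proportional(files):
    r = random.random() * sum(f.cross for f in files.values())
    cumul = 0.0
    for key, item in files.items():
        cumul += item.cross
        if r < cumul:
            return key, item
    return key, item  # guard against floating-point round-off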
Example #6
    def run_from_pickle(self):
        import madgraph.iolibs.save_load_object as save_load_object

        generate_all = save_load_object.load_from_file(
            pjoin(self.options['ms_dir'], 'madspin.pkl'))
        # Re-create the information which is not saved in the pickle.
        generate_all.evtfile = self.events_file
        generate_all.curr_event = madspin.Event(self.events_file, self.banner)
        generate_all.mgcmd = self.mg5cmd
        generate_all.mscmd = self
        generate_all.pid2width = lambda pid: generate_all.banner.get(
            'param_card', 'decay', abs(pid)).value
        generate_all.pid2mass = lambda pid: generate_all.banner.get(
            'param_card', 'mass', abs(pid)).value
        if generate_all.path_me != self.options['ms_dir']:
            for decay in generate_all.all_ME.values():
                decay['path'] = decay['path'].replace(generate_all.path_me,
                                                      self.options['ms_dir'])
                for decay2 in decay['decays']:
                    decay2['path'] = decay2['path'].replace(
                        generate_all.path_me, self.options['ms_dir'])
            generate_all.path_me = self.options[
                'ms_dir']  # the directory may have been moved
            generate_all.ms_dir = generate_all.path_me

        if not hasattr(self.banner, 'param_card'):
            self.banner.charge_card('slha')

        # Special treatment for the mssm. Convert the param_card to the correct
        # format
        if self.banner.get('model').startswith('mssm-') or self.banner.get(
                'model') == 'mssm':
            self.banner.param_card = check_param_card.convert_to_mg5card(\
                    self.banner.param_card, writting=False)

        for name, block in self.banner.param_card.items():
            if name.startswith('decay'):
                continue

            orig_block = generate_all.banner.param_card[name]
            if block != orig_block:
                raise Exception, """The directory %s is specific to a mass spectrum. 
                Your event file is not compatible with this one. (Different param_card: %s different)
                orig block:
                %s
                new block:
                %s""" \
                % (self.options['ms_dir'], name, orig_block, block)

        #replace init information
        generate_all.banner['init'] = self.banner['init']

        #replace run card if present in header (to make sure correct random seed is recorded in output file)
        if 'mgruncard' in self.banner:
            generate_all.banner['mgruncard'] = self.banner['mgruncard']

        # NOW we have all the information available for RUNNING

        if self.seed:
            # a seed was specified; use it
            with open(pjoin(self.options['ms_dir'], 'seeds.dat'), 'w') as fsock:
                fsock.write('%s\n' % self.seed)
            # remove all ranmar_state files
            for name in misc.glob(
                    pjoin('*', 'SubProcesses', '*', 'ranmar_state.dat'),
                    self.options['ms_dir']):
                os.remove(name)

        generate_all.ending_run()
        self.branching_ratio = generate_all.branching_ratio
        try:
            self.err_branching_ratio = generate_all.err_branching_ratio
        except Exception:
            # might not be defined in some gridpack modes
            self.err_branching_ratio = 0
        evt_path = self.events_file.name
        try:
            self.events_file.close()
        except Exception:
            pass
        misc.gzip(evt_path)
        decayed_evt_file = evt_path.replace('.lhe', '_decayed.lhe')
        misc.gzip(pjoin(self.options['curr_dir'], 'decayed_events.lhe'),
                  stdout=decayed_evt_file)
        if not self.mother:
            logger.info("Decayed events have been written in %s.gz" %
                        decayed_evt_file)
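
When the MadSpin directory has been moved since the pickle was written,
run_from_pickle patches every stored path in place. The same relocation pattern
as a small standalone sketch (assumes the nested layout of generate_all.all_ME
shown above):

# Sketch: rewrite 'path' entries after a directory move.
def relocate_paths(all_ME, old_root, new_root):
    for decay in all_ME.values():
        decay['path'] = decay['path'].replace(old_root, new_root)
        for sub in decay['decays']:
            sub['path'] = sub['path'].replace(old_root, new_root)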
    def do_collect(self, line):
        """MadWeight Function: making the collect of the results"""
        
        self.configure()
        args = self.split_arg(line)
        self.check_collect(args)
        xml_reader = MWParserXML()
        
        name = self.MWparam.name
        # 1. Concatanate the file. #############################################
        out_dir = pjoin(self.me_dir, 'Events', name)
        if '-refine' in args:
            out_path = pjoin(out_dir, 'refine.xml') 
        else:
            out_path = pjoin(out_dir, 'output.xml')
            if os.path.exists(out_path):
                logger.warning('Output file already exists. Current one will be tagged with _old suffix')
                logger.warning('Run "collect -refine to instead update your current results."')
                files.mv(pjoin(out_dir, 'output.xml'), pjoin(out_dir, 'output_old.xml'))
                files.mv(pjoin(out_dir, 'weights.out'), pjoin(out_dir, 'weights.out'))
                for MWdir in self.MWparam.MW_listdir:
                    out_dir = pjoin(self.me_dir, 'Events', name, MWdir)
                    files.mv(pjoin(out_dir, 'output.xml'), pjoin(out_dir, 'output_old.xml'))
                out_dir = pjoin(self.me_dir, 'Events', name)
                    
        fsock = open(out_path, 'w')
        fsock.write('<madweight>\n<banner>\n')
        # BANNER
        for card in ['proc_card_mg5.dat','MadWeight_card.dat','transfer_card.dat','param_card.dat','run_card.dat']:
            cname = card[:-4]
            fsock.write('<%s>\n' % cname)
            fsock.write(open(pjoin(self.me_dir,'Cards',card)).read().replace('<','!>'))
            fsock.write('</%s>\n' % cname)
        fsock.write('</banner>\n')
        at_least_one = False
        for MWdir in self.MWparam.MW_listdir:
            out_dir = pjoin(self.me_dir, 'Events', name, MWdir)
            input_dir = pjoin(self.me_dir, 'SubProcesses', MWdir, name)
            if not os.path.exists(out_dir):
                os.mkdir(out_dir)
            if '-refine' in args:
                out_path = pjoin(out_dir, 'refine.xml') 
            else:
                out_path = pjoin(out_dir, 'output.xml')  
            fsock2 = open(out_path,'w')
            fsock.write('<subprocess id=\'%s\'>\n' % MWdir)
            fsock2.write('<subprocess id=\'%s\'>\n' % MWdir)
            for output in misc.glob('output_*_*.xml', input_dir):
                at_least_one = True
                text = open(output).read()
                fsock2.write(text)
                fsock.write(text)
                os.remove(output)
            fsock.write('</subprocess>\n')
            fsock2.write('</subprocess>\n')
            fsock2.close()
        fsock.write('\n</madweight>\n')          
        fsock.close()
        # 2. Special treatment for refine mode
        if '-refine' in args:
            xml_reader2 = MWParserXML(self.MWparam['mw_run']['log_level'])
            for MWdir in self.MWparam.MW_listdir:
                out_dir = pjoin(self.me_dir, 'Events',name, MWdir)
                ref_output = xml_reader2.read_file(pjoin(out_dir, 'refine.xml'))
                xml_reader2 = MWParserXML(self.MWparam['mw_run']['log_level'])
                base_output = xml_reader2.read_file(pjoin(out_dir, 'output.xml'))

                base_output.refine(ref_output)
                files.mv(pjoin(out_dir, 'output.xml'), pjoin(out_dir, 'output_old.xml'))
                base_output.write(pjoin(out_dir, 'output.xml'), MWdir)
        elif not at_least_one:
            logger.warning("Nothing to collect restore _old file as current.")
            out_dir = pjoin(self.me_dir, 'Events', name)
            files.mv(pjoin(out_dir, 'output_old.xml'), pjoin(out_dir, 'output.xml'))
            files.mv(pjoin(out_dir, 'weights_old.out'), pjoin(out_dir, 'weights.out'))
            for MWdir in self.MWparam.MW_listdir:
                out_dir = pjoin(self.me_dir, 'Events', name, MWdir)
                files.mv(pjoin(out_dir, 'output.xml'), pjoin(out_dir, 'output_old.xml'))
            
            
            
        # 3. Read the (final) log file for extracting data
        total = {}
        likelihood = {}
        err_likelihood = {}
        cards = set()
        events = set()
        tf_sets = set()
        for MW_dir in self.MWparam.MW_listdir:
            out_dir = pjoin(self.me_dir, 'Events', name, MW_dir)
            xml_reader = MWParserXML()
            data = xml_reader.read_file(pjoin(out_dir, 'output.xml'))
            #
            log_level = self.MWparam['mw_run']['log_level']            
            generator =  ((int(i),j,int(k),data[i][j][k]) for i in data 
                                                               for j in data[i] 
                                                               for k in data[i][j])
            for card, event, tf_set, obj in generator:
                # update the full list of events/cards
                cards.add(card)
                events.add(event)
                tf_sets.add(tf_set)
                # now compute the associate value, error[square]
                if (card,event, tf_set) in total:
                    value, error = total[(card, event, tf_set)]                    
                else:
                    value, error = 0, 0
                obj.calculate_total()
                value, error = (value + obj.value, error + obj.error**2) 
                total[(card, event, tf_set)] = (value, error)
                if tf_set == 1:
                    if value:
                        if card not in likelihood:
                            likelihood[card], err_likelihood[card] = 0, 0
                        likelihood[card] -= math.log(value)
                        err_likelihood[card] += error / value
                    else:
                        likelihood[card] = float('Inf')
                        err_likelihood[card] = float('nan')

                
        # write the weights file:
        fsock = open(pjoin(self.me_dir, 'Events', name, 'weights.out'), 'w')
        logger.info('Write output file with weight information: %s' % fsock.name)
        fsock.write('# Weight (un-normalize) for each card/event/set of transfer fct\n')
        fsock.write('# format: LHCO_event_number card_id tf_set_id value integration_error\n')
        events = list(events)
        events.sort()
        cards = list(cards)
        cards.sort()
        tf_sets = list(tf_sets)
        tf_sets.sort()
        for event in events:
            for card in cards:
                for tf_set in tf_sets:
                    try:
                        value, error = total[(card, event,tf_set)]
                    except KeyError:
                        continue
                    error = math.sqrt(error)
                    fsock.write('%s %s %s %s %s \n' % (event.replace('@', ' '), card, tf_set, value, error))
    
        # write the likelihood file:
        fsock = open(pjoin(self.me_dir, 'Events', name, 'un-normalized_likelihood.out'), 'w')
        fsock.write('# Warning:  this Likelihood needs a bin by bin normalization !\n')
        fsock.write('# IF more than one set of transfer function are define. ONLY the first one is ')
        fsock.write('# include in this file.')
        fsock.write('# format: card_id value integration_error\n')
        for card in cards:
            value, error = likelihood[card], err_likelihood[card]
            error = math.sqrt(error)
            fsock.write('%s %s %s \n' % (card, value, error))
    def run_bridge(self, line):
        """Run the Bridge Algorithm"""
        
        # 1. Read the event file to check which decay to perform and the number
        #   of event to generate for each type of particle.
        # 2. Generate the events requested
        # 3. perform the merge of the events.
        #    if not enough events. re-generate the missing one.
                
        # First define an utility function for generating events when needed
        def generate_events(pdg, nb_event, mg5, restrict_file=None, cumul=False):
            """generate new events for this particle
               restrict_file allow to only generate a subset of the definition
               cumul allow to merge all the definition in one run (add process)
                     to generate events according to cross-section
            """
                        
            part = self.model.get_particle(pdg)
            name = part.get_name()
            out = {}
            logger.info("generate %s decay event for particle %s" % (nb_event, name))
            if name not in self.list_branches:
                return out
            for i,proc in enumerate(self.list_branches[name]):
                if restrict_file and i not in restrict_file:
                    continue
                decay_dir = pjoin(self.path_me, "decay_%s_%s" %(str(pdg).replace("-","x"),i))
                if not os.path.exists(decay_dir):
                    if cumul:
                        mg5.exec_cmd("generate %s" % proc)
                        for j,proc2 in enumerate(self.list_branches[name][1:]):
                            if restrict_file and j not in restrict_file:
                                raise Exception # Do not see how this can happen
                            mg5.exec_cmd("add process %s" % proc2)
                        mg5.exec_cmd("output %s -f" % decay_dir)
                    else:
                        mg5.exec_cmd("generate %s" % proc)
                        mg5.exec_cmd("output %s -f" % decay_dir)
                    
                    options = dict(mg5.options)
                    if self.options['ms_dir']:
                        misc.sprint("start gridpack!")
                        # we are in gridpack mode -> create it
                        me5_cmd = madevent_interface.MadEventCmdShell(me_dir=os.path.realpath(\
                                                decay_dir), options=options)
                        me5_cmd.options["automatic_html_opening"] = False
                        if self.options["run_card"]:
                            run_card = self.options["run_card"]
                        else:
                            run_card = banner.RunCard(pjoin(decay_dir, "Cards", "run_card.dat"))                        
                        
                        run_card["iseed"] = self.seed
                        run_card['gridpack'] = True
                        run_card.write(pjoin(decay_dir, "Cards", "run_card.dat"))
                        param_card = self.banner['slha']
                        open(pjoin(decay_dir, "Cards", "param_card.dat"),"w").write(param_card)
                        self.seed += 1
                        # actually creation
                        me5_cmd.exec_cmd("generate_events run_01 -f")
                        me5_cmd.exec_cmd("exit")                        
                        #remove pointless informat
                        misc.call(["rm", "Cards", "bin", 'Source', 'SubProcesses'], cwd=decay_dir)
                        misc.call(['tar', '-xzpvf', 'run_01_gridpack.tar.gz'], cwd=decay_dir)
                
                # Now generate the events

                if not self.options['ms_dir']:
                    me5_cmd = madevent_interface.MadEventCmdShell(me_dir=os.path.realpath(\
                                                    decay_dir), options=mg5.options)
                    me5_cmd.options["automatic_html_opening"] = False
                    if self.options["run_card"]:
                        run_card = self.options["run_card"]
                    else:
                        run_card = banner.RunCard(pjoin(decay_dir, "Cards", "run_card.dat"))
                    run_card["nevents"] = int(1.2*nb_event)
                    run_card["iseed"] = self.seed
                    run_card.write(pjoin(decay_dir, "Cards", "run_card.dat"))
                    param_card = self.banner['slha']
                    open(pjoin(decay_dir, "Cards", "param_card.dat"),"w").write(param_card)
                    self.seed += 1
                    me5_cmd.exec_cmd("generate_events run_01 -f")
                    me5_cmd.exec_cmd("exit")
                    out[i] = lhe_parser.EventFile(pjoin(decay_dir, "Events", 'run_01', 'unweighted_events.lhe.gz'))            
                else:
                    misc.call(['run.sh', str(int(1.2*nb_event)), str(self.seed)], cwd=decay_dir)     
                    out[i] = lhe_parser.EventFile(pjoin(decay_dir, 'events.lhe.gz'))            
                if cumul:
                    break
            
            return out

        
        args = self.split_arg(line)

        #0. Define the path where to write the file
        self.path_me = os.path.realpath(self.options['curr_dir']) 
        if self.options['ms_dir']:
            self.path_me = os.path.realpath(self.options['ms_dir'])
            if not os.path.exists(self.path_me):
                os.mkdir(self.path_me) 
        else:
            # cleaning
            for name in misc.glob("decay_*_*", self.path_me):
                shutil.rmtree(name)

        self.events_file.close()
        orig_lhe = lhe_parser.EventFile(self.events_file.name)
        
        to_decay = collections.defaultdict(int)
        nb_event = 0
        for event in orig_lhe:
            nb_event +=1
            for particle in event:
                if particle.status == 1 and particle.pdg in self.final_state:
                    # final state and tag as to decay
                    to_decay[particle.pdg] += 1

        # Handle the banner of the output file
        if not self.seed:
            self.seed = random.randint(0, int(30081*30081))
            self.do_set('seed %s' % self.seed)
            logger.info('Will use seed %s' % self.seed)
            self.history.insert(0, 'set seed %s' % self.seed)

        if self.seed > 30081*30081: # can't use too big random number
            msg = 'Random seed too large ' + str(self.seed) + ' > 30081*30081'
            raise Exception, msg

        self.options['seed'] = self.seed
        
        text = '%s\n' % '\n'.join([ line for line in self.history if line])
        self.banner.add_text('madspin' , text)


        # 2. Generate the events requested
        with misc.MuteLogger(["madgraph", "madevent", "ALOHA", "cmdprint"], [50,50,50,50]):
            mg5 = self.mg5cmd
            modelpath = self.model.get('modelpath+restriction')
            mg5.exec_cmd("import model %s" % modelpath)      
            to_event = {}
            for pdg, nb_needed in to_decay.items():
                #check if a splitting is needed
                if nb_needed == nb_event:
                    to_event[pdg] = generate_events(pdg, nb_needed, mg5)
                elif nb_needed %  nb_event == 0:
                    nb_mult = nb_needed // nb_event
                    part = self.model.get_particle(pdg)
                    name = part.get_name()
                    if name not in self.list_branches:
                        continue
                    elif len(self.list_branches[name]) == nb_mult:
                        to_event[pdg] = generate_events(pdg, nb_event, mg5)
                    else:
                        to_event[pdg] = generate_events(pdg, nb_needed, mg5, cumul=True)
                else:
                    part = self.model.get_particle(pdg)
                    name = part.get_name()
                    if name not in self.list_branches or len(self.list_branches[name]) == 0:
                        continue
                    raise self.InvalidCmd("The bridge mode of MadSpin does not support event files where events do not *all* share the same set of final state particles to be decayed.")
                    
                     
                
        
        # Compute the branching ratio.
        br = 1
        for (pdg, event_files) in to_event.items():
            if not event_files:
                continue
            totwidth = float(self.banner.get('param', 'decay', abs(pdg)).value)
            if to_decay[pdg] == nb_event:
                # Exactly one particle of this type to decay by event
                pwidth = sum([event_files[k].cross for k in event_files])
                if pwidth > 1.01 * totwidth:
                    logger.critical("Branching ratio larger than one for %s " % pdg) 
                br *= pwidth / totwidth
            elif to_decay[pdg] % nb_event == 0:
                # More than one particle of this type to decay by event
                # Need to check the number of event file to check if we have to 
                # make separate type of decay or not.
                nb_mult = to_decay[pdg] // nb_event
                if nb_mult == len(event_files):
                    for k in event_files:
                        pwidth = event_files[k].cross
                        if pwidth > 1.01 * totwidth:
                            logger.critical("Branching ratio larger than one for %s " % pdg)                       
                        br *= pwidth / totwidth
                    br *= math.factorial(nb_mult)
                else:
                    pwidth = sum(event_files[k].cross for k in event_files)
                    if pwidth > 1.01 * totwidth:
                        logger.critical("Branching ratio larger than one for %s " % pdg) 
                    br *= (pwidth / totwidth)**nb_mult
            else:
                raise self.InvalidCmd("The bridge mode of MadSpin does not support event files where events do not *all* share the same set of final state particles to be decayed.")
        self.branching_ratio = br
        # modify the cross-section in the init block of the banner
        self.banner.scale_init_cross(self.branching_ratio)
                    
        
        # 3. Merge the various file together.
        output_lhe = lhe_parser.EventFile(orig_lhe.name.replace('.lhe', '_decayed.lhe.gz'), 'w')
        self.banner.write(output_lhe, close_tag=False)
        
        # initialise object which store not use event due to wrong helicity
        bufferedEvents_decay = {}
        for pdg in to_event:
            bufferedEvents_decay[pdg] = [{}] * len(to_event[pdg])
        
        import time
        start = time.time()
        counter = 0
        orig_lhe.seek(0)
        for event in orig_lhe:
            if counter and counter % 1000 == 0 and float(str(counter)[1:]) ==0:
                print "decaying event number %s [%s s]" % (counter, time.time()-start)
            counter +=1
            
            # use random order for particles to avoid systematics when more than 
            # one type of decay is asked.
            particles = [p for p in event if int(p.status) == 1.0]
            random.shuffle(particles)
            ids = [particle.pid for particle in particles]
            for i,particle in enumerate(particles):
                # check if we need to decay the particle 
                if particle.pdg not in self.final_state or particle.pdg not in to_event:
                    continue # nothing to do for this particle
                # check how the decay need to be done
                nb_decay = len(to_event[particle.pdg])
                if nb_decay == 0:
                    continue #nothing to do for this particle
                if nb_decay == 1:
                    decay_file = to_event[particle.pdg][0]
                    decay_file_nb = 0
                elif ids.count(particle.pdg) == nb_decay:
                    decay_file = to_event[particle.pdg][ids[:i].count(particle.pdg)]
                    decay_file_nb = ids[:i].count(particle.pdg)
                else:
                    #need to select the file according to the associate cross-section
                    r = random.random()
                    tot = sum(to_event[particle.pdg][key].cross for key in to_event[particle.pdg])
                    r = r * tot
                    cumul = 0
                    for j,events in to_event[particle.pdg].items():
                        cumul += events.cross
                        if r < cumul:
                            decay_file = events
                            decay_file_nb = j
                        else:
                            break
                        
                # ok start the procedure
                helicity = particle.helicity
                bufferedEvents = bufferedEvents_decay[particle.pdg][decay_file_nb]
                
                # now that we have the file to read. find the associate event
                # checks if we have one event in memory
                if helicity in bufferedEvents and bufferedEvents[helicity]:
                    decay = bufferedEvents[helicity].pop()
                else:
                    # read the event file up to completion
                    while 1:
                        try:
                            decay = decay_file.next()
                        except StopIteration:
                            # check how far we are
                            ratio = counter / nb_event 
                            needed = 1.05 * to_decay[particle.pdg]/ratio - counter
                            needed = min(1000, max(needed, 1000))
                            with misc.MuteLogger(["madgraph", "madevent", "ALOHA", "cmdprint"], [50,50,50,50]):
                                new_file = generate_events(particle.pdg, needed, mg5, [decay_file_nb])
                            to_event[particle.pdg].update(new_file)
                            decay_file = to_event[particle.pdg][decay_file_nb]
                            continue
                        if helicity == decay[0].helicity or helicity==9 or \
                                            self.options["spinmode"] == "none":
                            break # use that event
                        # not valid event store it for later
                        if helicity not in bufferedEvents:
                            bufferedEvents[helicity] = [decay]
                        elif len(bufferedEvents[helicity]) < 200:
                            # only add to the buffering if the buffer is not too large
                            bufferedEvents[helicity].append(decay)
                # now that we have the decay event, merge it into the mother event
                particle.add_decay(decay)
            # change the weight associated with the event
            event.wgt *= self.branching_ratio
            wgts = event.parse_reweight()
            for key in wgts:
                wgts[key] *= self.branching_ratio
            # all particles have been decayed where needed
            output_lhe.write(str(event))
        output_lhe.write('</LesHouchesEvents>\n')

    def run_from_pickle(self):
        import madgraph.iolibs.save_load_object as save_load_object
        
        generate_all = save_load_object.load_from_file(pjoin(self.options['ms_dir'], 'madspin.pkl'))
        # Re-create information which is not saved in the pickle.
        generate_all.evtfile = self.events_file
        generate_all.curr_event = madspin.Event(self.events_file, self.banner)
        generate_all.mgcmd = self.mg5cmd
        generate_all.mscmd = self 
        generate_all.pid2width = lambda pid: generate_all.banner.get('param_card', 'decay', abs(pid)).value
        generate_all.pid2mass = lambda pid: generate_all.banner.get('param_card', 'mass', abs(pid)).value
        if generate_all.path_me != self.options['ms_dir']:
            for decay in generate_all.all_ME.values():
                decay['path'] = decay['path'].replace(generate_all.path_me, self.options['ms_dir'])
                for decay2 in decay['decays']:
                    decay2['path'] = decay2['path'].replace(generate_all.path_me, self.options['ms_dir'])
            generate_all.path_me = self.options['ms_dir'] # the directory may have been moved
            generate_all.ms_dir = generate_all.path_me
        
        if not hasattr(self.banner, 'param_card'):
            self.banner.charge_card('slha')
        
        # Special treatment for the mssm. Convert the param_card to the correct
        # format
        if self.banner.get('model').startswith('mssm-') or self.banner.get('model')=='mssm':
            self.banner.param_card = check_param_card.convert_to_mg5card(\
                    self.banner.param_card, writting=False)
            
        for name, block in self.banner.param_card.items():
            if name.startswith('decay'):
                continue
                        
            orig_block = generate_all.banner.param_card[name]
            if block != orig_block:                
                raise Exception, """The directory %s is specific to a mass spectrum. 
                Your event file is not compatible with this one. (Different param_card: %s different)
                orig block:
                %s
                new block:
                %s""" \
                % (self.options['ms_dir'], name, orig_block, block)

        #replace init information
        generate_all.banner['init'] = self.banner['init']

        #replace run card if present in header (to make sure correct random seed is recorded in output file)
        if 'mgruncard' in self.banner:
            generate_all.banner['mgruncard'] = self.banner['mgruncard']   
        
        # NOW we have all the information available for RUNNING
        
        if self.seed:
            #seed is specified need to use that one:
            open(pjoin(self.options['ms_dir'],'seeds.dat'),'w').write('%s\n'%self.seed)
            #remove all ranmar_state
            for name in misc.glob(pjoin('*', 'SubProcesses','*','ranmar_state.dat'), 
                                                        self.options['ms_dir']):
                os.remove(name)    
        
        generate_all.ending_run()
        self.branching_ratio = generate_all.branching_ratio
        try:
            self.err_branching_ratio = generate_all.err_branching_ratio
        except Exception:
            # might not be defined in some gridpack modes
            self.err_branching_ratio = 0 
        evt_path = self.events_file.name
        try:
            self.events_file.close()
        except Exception:
            pass
        misc.gzip(evt_path)
        decayed_evt_file=evt_path.replace('.lhe', '_decayed.lhe')
        misc.gzip(pjoin(self.options['curr_dir'],'decayed_events.lhe'),
                  stdout=decayed_evt_file)
        if not self.mother:
            logger.info("Decayed events have been written in %s.gz" % decayed_evt_file)    
Example #10
    logging.error("Could not load the model %s used for generating the event file"\
                 %evtBanner.get_detail('model'))
    exit()

# Detect the total number of events in an efficient way
n_evts = int(subprocess.Popen('grep -rin "<event>" %s | wc -l'%os.path.realpath(eventFile),
                        stdout=subprocess.PIPE, shell=True).communicate()[0])

logging.info("Writing out the reweighted event file on\n   %s"%outPath)
# Write out the output event file
outputEvtFile = open(outPath,'w')
outputEvtFile.write(evtFile.banner)

# List the channel present in the process output
channel_list = [ os.path.basename(chan) for chan in \
                 misc.glob('P*', pjoin(proc_path, 'SubProcesses')) ]

# Now scan over events
# For each encountered channel, store the corresponding process runner
encountered_channels = dict([])

t_before = time.time()
i_evt = 0
wgt_summed = 0.0
squared_wgt_summed = 0.0
running_spread = 0.0
max_wgt = 0.0
n_percent_monitor = 1
for event in evtFile:
    i_evt += 1
    t_evt_start = time.time()
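The event counting above shells out to grep and wc. A pure-Python equivalent of the same count (the lowercasing mimics grep -i; this assumes an uncompressed LHE file):

def count_events(path):
    """Count lines containing an <event> tag, like grep -i '<event>' | wc -l."""
    n = 0
    with open(path) as lhe:
        for line in lhe:
            if '<event>' in line.lower():
                n += 1
    return n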
Example #11
    logging.error("Could not read event file %s." % eventFile)

# Detect the total number of events in an efficient way
n_evts = int(
    subprocess.Popen('grep -rin "<event>" %s | wc -l' %
                     os.path.realpath(eventFile),
                     stdout=subprocess.PIPE,
                     shell=True).communicate()[0].decode())

logging.info("Writing out the reweighted event file on\n   %s" % outPath)
# Write out the output event file
outputEvtFile = open(outPath, 'w')
outputEvtFile.write(evtFile.banner)
# List the channel present in the process output
channel_list = [ os.path.basename(chan) for chan in \
                 misc.glob('P*', pjoin(proc_path, 'SubProcesses')) ]

# Now scan over events
# For each encountered channel, store the corresponding process runner
encountered_channels = dict([])

ebeam1 = float(evtBanner.get('run_card', 'ebeam1'))
ebeam2 = float(evtBanner.get('run_card', 'ebeam2'))

t_before = time.time()
i_evt = 0
wgt_summed = 0.0
squared_wgt_summed = 0.0
running_spread = 0.0
max_wgt = 0.0
n_percent_monitor = 1
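The accumulators initialised above (wgt_summed, squared_wgt_summed, ...) are the standard ingredients of a Monte-Carlo estimate. A sketch of how they would typically be combined once the event loop finishes, using the textbook mean/variance formulas (not code taken from this script):

import math

def mc_estimate(wgt_summed, squared_wgt_summed, n_evts):
    """Return (mean weight, statistical error on the mean)."""
    mean = wgt_summed / n_evts
    variance = squared_wgt_summed / n_evts - mean ** 2
    error = math.sqrt(max(variance, 0.0) / n_evts)
    return mean, error

# e.g. mean, err = mc_estimate(wgt_summed, squared_wgt_summed, n_evts)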
Example #12
    def prepare_run(self):
        """ ask for pythia-pgs/delphes run """

        # Find all main_model_process.cc files
        date_file_list = []
        for file in misc.glob('main_*_*.cc', self.running_dir):
            # retrieves the stats for the current file as a tuple
            # (mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime)
            # the tuple element mtime at index 8 is the last-modified-date
            stats = os.stat(file)
            # create tuple (year yyyy, month(1-12), day(1-31), hour(0-23), minute(0-59), second(0-59),
            # weekday(0-6, 0 is monday), Julian day(1-366), daylight flag(-1,0 or 1)) from seconds since epoch
            # note:  this tuple can be sorted properly by date and time
            lastmod_date = time.localtime(stats[8])
            date_file_list.append((lastmod_date, os.path.split(file)[-1]))

        if not date_file_list:
            raise MadGraph5Error('No Pythia output found')
        # Sort files according to date with newest first
        date_file_list.sort()
        date_file_list.reverse()
        files = [d[1] for d in date_file_list]

        answer = self.ask('Select a main file to run:', files[0], files)

        self.cards.append(answer)

        self.executable = self.cards[-1].replace(".cc", "")

        # Assign a valid run name if not put in options
        if self.name == '':
            for i in range(1000):
                path = os.path.join(self.running_dir,
                                    '%s_%02i.log' % (self.executable, i))
                if not os.path.exists(path):
                    self.name = '%s_%02i.log' % (self.executable, i)
                    break

        if self.name == '':
            raise MadGraph5Error('too many runs in this directory')

        # Find all exported models
        models = misc.glob("Processes_*",
                           pjoin(self.running_dir, os.path.pardir))
        models = [
            os.path.split(m)[-1].replace("Processes_", "") for m in models
        ]
        # Extract model name from executable
        models.sort(key=len)
        models.reverse()
        model_dir = ""
        for model in models:
            if self.executable.replace("main_", "").startswith(model):
                model_dir = "Processes_%s" % model
                break
        if model_dir:
            self.model = model
            self.model_dir = os.path.realpath(
                os.path.join(self.running_dir, os.path.pardir, model_dir))
            self.cards.append(
                os.path.join(self.model_dir, "param_card_%s.dat" % model))
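prepare_run above builds (mtime, name) tuples by hand to order the candidate files. On any recent Python the same newest-first listing can be obtained directly from the file mtimes; a standard-library sketch (not the MadGraph misc.glob helper):

import glob
import os

def newest_first(pattern, directory):
    """Return basenames matching pattern, most recently modified first."""
    paths = glob.glob(os.path.join(directory, pattern))
    paths.sort(key=os.path.getmtime, reverse=True)
    return [os.path.basename(p) for p in paths]

# files = newest_first('main_*_*.cc', running_dir)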
Example #13
    def finalize(self, flaglist, interface_history):
        """Distribute and organize the finalization of all contributions. """

        # Make sure contributions are sorted at this stage
        # It is important to act on LO contributions first, then NLO, then etc...
        # because ME and currents must be added to the ME_accessor in order since there
        # are look-up operations on it in-between
        self.contributions.sort_contributions()

        # Save all the global couplings to write out afterwards
        global_wanted_couplings = []
        # Forward the finalize request to each contribution
        for contrib in self.contributions:
            # Must clean the aloha Kernel before each aloha export for each contribution
            aloha.aloha_lib.KERNEL.clean()
            wanted_couplings_to_add_to_global = contrib.finalize(
                flaglist=flaglist, interface_history=interface_history)
            global_wanted_couplings.extend(wanted_couplings_to_add_to_global)

        # Generate the global ME7 MODEL
        if global_wanted_couplings:
            output_dir = pjoin(self.export_dir, 'Source', 'MODEL')
            # Writing out the model common to all the contributions that can share it
            model_export_options = {
                'complex_mass': self.options['complex_mass_scheme'],
                'export_format': 'madloop',  # so as to have access to lha_read_mp.f
                'mp': True,
                'loop_induced': False
            }
            model_builder = export_v4.UFO_model_to_mg4(self.model, output_dir,
                                                       model_export_options)
            model_builder.build(global_wanted_couplings)

        # Now possibly add content to the pool of global ME7 resources before removing superfluous files
        # and linking to necessary global ME7 resources
        for contrib in self.contributions:
            contrib.add_content_to_global_ME7_resources(self.export_dir)
            contrib.remove_superfluous_content()
            contrib.link_global_ME7_resources(self.export_dir)

        # Create the run_card
        self.create_run_card()

        # Add the cards generated in MODEL to the Cards directory
        self.copy_model_resources()
        # Now link the Sources files within each contribution
        for contrib in self.contributions:
            contrib.make_model_symbolic_link()

        # Copy the UFO model to the global ME7 resources Source directory
        ME7_ufo_path = pjoin(
            self.export_dir, 'Source',
            'ME7_UFO_model_%s' % os.path.basename(self.model.get('modelpath')))
        shutil.copytree(self.model.get('modelpath'), ME7_ufo_path)
        # And clear compiled files in it
        for path in misc.glob(pjoin(ME7_ufo_path, '*.pkl')) + misc.glob(
                pjoin(ME7_ufo_path, '*.pyc')):
            os.remove(path)

        # Now generate all the ME accessors and integrand.
        # Notice that some of the information provided here (RunCard, ModelReader, root_path, etc...)
        # can and will be overwritten by the actualized values when the ME7Interface will be launched.
        # We provide it here just so as to be complete.

        # Obtain all the Accessors to the Matrix Element and currents made available in this process output
        all_MEAccessors = accessors.MEAccessorDict()
        for contrib in self.contributions:
            contrib.add_ME_accessors(all_MEAccessors, self.export_dir)

        # Now make sure that the integrated counterterms that no contribution hosts
        # indeed have a non-existent reduced process.
        contributions.Contribution_V.remove_counterterms_with_no_reduced_process(
            all_MEAccessors,
            self.integrated_counterterms_refused_from_all_contribs)

        # Check there is none left over after this filtering
        if len(self.integrated_counterterms_refused_from_all_contribs) > 0:
            counterterm_list = (ct['integrated_counterterm'].nice_string()
                for ct in self.integrated_counterterms_refused_from_all_contribs)
            # These integrated counterterms should in principle have been added
            msg = "The following integrated counterterms are in principle non-zero"
            msg += " but could not be included in any of the generated contributions:\n"
            msg += '\n'.join(counterterm_list)
            msg += "\nResults generated from that point on are likely to be physically wrong."
            if __debug__:
                logger.critical(msg)
            else:
                raise MadGraph5Error(msg)

        # Now generate all the integrands from the contributions exported
        all_integrands = []
        run_card = banner_mod.RunCardME7(
            pjoin(self.export_dir, 'Cards', 'run_card.dat'))

        # We might want to recover whether a prefix was used when importing the model and
        # whether the MG5 name conventions were used. But this is a detail that can easily be fixed later.
        modelReader_instance = import_ufo.import_model(
            pjoin(self.export_dir, 'Source',
                  'ME7_UFO_model_%s' % self.model.get('name')),
            prefix=True,
            complex_mass_scheme=self.options['complex_mass_scheme'])
        modelReader_instance.pass_particles_name_in_mg_default()
        modelReader_instance.set_parameters_and_couplings(
            param_card=pjoin(self.export_dir, 'Cards', 'param_card.dat'),
            scale=run_card['scale'],
            complex_mass_scheme=self.options['complex_mass_scheme'])

        ME7_options = dict(self.options)
        ME7_options['me_dir'] = self.export_dir
        for contrib in self.contributions:
            all_integrands.extend(
                contrib.get_integrands(modelReader_instance, run_card,
                                       all_MEAccessors, ME7_options))

        # And finally dump ME7 output information so that all relevant objects
        # can be reconstructed for a future launch with ME7Interface.
        # Normally all the relevant information should simply be encoded in only:
        #  'all_MEAccessors' and 'all_integrands'.
        self.dump_ME7(all_MEAccessors, all_integrands)

        # Finally, for future convenience it may sometimes be desirable to already compile
        # all contributions and global ME7 resources (e.g. MODEL) as followed.
        # By default however, we don't do that and this will instead be done at the launch time.
        #logger.info('Compilation of the process output.')
        #logger.info('It can be interrupted at any time,'+
        #                 ' in which case it would be automatically resumed when launched.')
        #self.compile()

        return
        ###################################################################################################
        ###
        ###  WARNING: THE CODE BELOW IS JUST FOR TESTING PURPOSES AND CORRESPONDS TO RUNNING THE INTEGRATION
        ###  RIGHT AWAY, NOT WITHIN THE ME7 INTERFACE.
        ###
        ###################################################################################################

        import madgraph.interface.ME7_interface as ME7_interface
        # Test the reconstruction of the ME7 output instances
        ME7_dump = save_load_object.load_from_file(
            pjoin(self.export_dir, 'MadEvent7.db'))
        all_MEAccessors = ME7_dump['all_MEAccessors']['class'].initialize_from_dump(
            ME7_dump['all_MEAccessors'], root_path=self.export_dir)
        all_integrands = [
            integrand_dump['class'].initialize_from_dump(
                integrand_dump, modelReader_instance, run_card,
                all_MEAccessors, self.options)
            for integrand_dump in ME7_dump['all_integrands']
        ]
        model_name = ME7_dump['model_name']
        model_with_CMS = ME7_dump['model_with_CMS']

        # This is now just for gigs. Integrate that beast!
        # Of course, what should really happen is that the user starts a ME7_interface, which
        # bootstraps from the dump above and starts the integration below with launch.
        # So this is really just for testing purposes.
        import madgraph.integrator.integrators as integrators
        integrator_naive = integrators.SimpleMonteCarloIntegrator(
            all_integrands, **{
                'n_iterations': 10,
                'n_points_per_iterations': 100,
                'accuracy_target': None,
                'verbosity': 1
            })
        import madgraph.integrator.pyCubaIntegrator as pyCubaIntegrator
        integrator_vegas = pyCubaIntegrator.pyCubaIntegrator(
            all_integrands, **{
                'algorithm': 'Vegas',
                'verbosity': 1,
                'seed': 3,
                'target_accuracy': 1.0e-3,
                'n_start': 1000,
                'n_increase': 500,
                'n_batch': 1000,
                'max_eval': 100000,
                'min_eval': 0
            })

        # Now run them all!
        for integrator in [integrator_naive, integrator_vegas]:
            xsec, error = integrator.integrate()
            logger.info("=" * 100)
            logger.info('{:^100}'.format(
                "\033[92mCross-section for process output '%s' with integrator '%s':\033[0m"
                % (self.export_dir, integrator.get_name())))
            logger.info('{:^100}'.format("\033[94m%.5e +/- %.2e [pb]\033[0m" %
                                         (xsec, error)))
            logger.info("=" * 100 + "\n")
Example #14
try:
    evtBanner = banner.Banner(eventFile)
    evtFile = lhe_parser.EventFile(eventFile)
except Exception:
    logging.error("Could not read event file %s." % eventFile)

# Detect the total number of events in an efficient way
n_evts = int(subprocess.Popen('grep -rin "<event>" %s | wc -l'%os.path.realpath(eventFile),
                        stdout=subprocess.PIPE, shell=True).communicate()[0])

logging.info("Writing out the reweighted event file on\n   %s"%outPath)
# Write out the output event file
outputEvtFile = open(outPath,'w')
outputEvtFile.write(evtFile.banner)
# List the channel present in the process output
channel_list = [ os.path.basename(chan) for chan in \
                 misc.glob('P*', pjoin(proc_path, 'SubProcesses')) ]

# Now scan over events
# For each encountered channel, store the corresponding process runner
encountered_channels = dict([])

ebeam1 = float(evtBanner.get('run_card', 'ebeam1'))
ebeam2 = float(evtBanner.get('run_card', 'ebeam2'))

t_before = time.time()
i_evt = 0
wgt_summed = 0.0
squared_wgt_summed = 0.0
running_spread = 0.0
max_wgt = 0.0
n_percent_monitor = 1
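t_before and n_percent_monitor suggest per-percent progress reporting inside the event loop that follows in the full script. One plausible use of them (an assumption about the intent, with a made-up event total):

import time

n_evts = 100000  # hypothetical total
t_before = time.time()
n_percent_monitor = 1
for i_evt in range(1, n_evts + 1):
    # report once per percent of processed events, with a crude ETA
    if (100 * i_evt) // n_evts >= n_percent_monitor:
        elapsed = time.time() - t_before
        eta = elapsed * (n_evts - i_evt) / max(i_evt, 1)
        print('%3d%% done, ETA %.0f s' % (n_percent_monitor, eta))
        n_percent_monitor += 1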