Example #1
    def testSLHACreator(self):

        parFile = 'test_lhiggs.ini'
        if os.path.isdir('outputDir'):
            shutil.rmtree('outputDir')
        if os.path.isdir('testOutput'):
            shutil.rmtree('testOutput')

        out = main(parFile, 'debug')

        self.assertTrue(len(out) == 4)

        outFiles = [
            'testOutput/test_F1000_8TeV.slha',
            'testOutput/test_F500_8TeV.slha',
            'testOutput/test_F1000_13TeV.slha',
            'testOutput/test_F500_13TeV.slha'
        ]

        for f in outFiles:
            self.assertTrue(os.path.isfile(f))

        for f in outFiles:
            old = os.path.basename(f).replace('.slha', '_default.slha')
            if not os.path.isfile(old):
                continue
            new = pyslha.readSLHAFile(f)
            old = pyslha.readSLHAFile(old)
            self.assertTrue(equalObjs(new, old, rtol=0.05))
Example #2
    def readslha(self, input_file):
        """ Read an SLHA file with PySLHA.
        Populates masses, mu and neutralino mixings.
        If the point is unphysical,
        populate the mass blocks with zeros.

        Arguments:
        input_file -- Name of input file.

        Returns:

        """
        try:
            # Read the blocks in the SLHA file.
            self.blocks, self.decays = pyslha.readSLHAFile(input_file)
        except Exception as e:
            # With expected running, shouldn't get any problems. But best
            # to be defensive. A missing mass block would cause an
            # exception, but e.g. stau LSP would not.
            self.physical = False
            print('Caught trouble in the SLHA file:', e)

            # Still need to return data of correct length.
            self.masses = [0] * 33
            self.mu = 0.
            self.neutralino = [0] * 16
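For comparison, a minimal defensive-read sketch in the same spirit, assuming a pyslha version whose readSLHAFile returns a Doc object; 'point.slha' is a placeholder file name:

import pyslha

try:
    doc = pyslha.readSLHAFile('point.slha')
    masses = dict(doc.blocks['MASS'].items())   # PDG id -> mass
except (pyslha.ParseError, IOError, KeyError) as e:
    # Malformed or unphysical point: fall back to an empty spectrum.
    print('Caught trouble in the SLHA file:', e)
    masses = {}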
Example #3
    def SLHADerive(self, name, block, key, epsilon=1E-2):
        """ Find the derivative of an input parameter wrt
        an output parameter.

        Arguments:
        name -- Name of input parameter, e.g. "tanbeta".
        block -- Block of output parameter in SLHA.
        key -- Key of output parameter in SLHA.
        epsilon -- Numerical infinitesimal for the numerical derivative.

        Return:
        Numerical derivative, d(input)/d(output).

        """
        try:
            # Find output parameter for the variation of input parameter.
            output = {}
            for i, e in enumerate([-epsilon, epsilon]):
                param = dict(self.param)  # copy, so the -/+ epsilon shifts do not accumulate
                param[name] += e
                SLHAIN = self.writeslha(param)
                SLHA = RunProgram('./pyspec', '../NMSSMTools_4.2.1/main',
                                  SLHAIN)
                blocks, decays = pyslha.readSLHAFile(SLHA)
                output[i] = blocks[block][key]
        except:
            # Unphysical point.
            self.physical = False
            return 999.
        else:
            # Return the numerical derivative.
            return 2. * epsilon / (output[1] - output[0])
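The return value is just a central difference read in reverse: the outputs are evaluated at name -/+ epsilon, so 2*epsilon/(output[1] - output[0]) approximates d(input)/d(output). A toy check with an ordinary function (nothing here depends on NMSSMTools):

epsilon = 1e-2
f = lambda x: x**2                                # stand-in for the input -> output map
outputs = [f(3.0 + e) for e in (-epsilon, epsilon)]
print(2. * epsilon / (outputs[1] - outputs[0]))   # ~0.1667, i.e. 1/f'(3)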
Example #4
    def SLHADerive(self, name, block, key, epsilon=1E-2):
        """ Find the derivative of an input parameter wrt
        an output parameter.

        Arguments:
        name -- Name of input parameter, e.g. "tanbeta".
        block -- Block of output parameter in SLHA.
        key -- Key of output parameter in SLHA.
        epsilon -- Numerical infinitesimal for the numerical derivative.

        Return:
        Numerical derivative, d(input)/d(output).

        """
        try:
            # Find output parameter for the variation of input parameter.
            output = {}
            for i, e in enumerate([-epsilon, epsilon]):
                param = dict(self.param)  # copy, so the -/+ epsilon shifts do not accumulate
                param[name] += e
                SLHAIN = self.writeslha(param)
                SLHA = RunProgram(
                    './pyspec',
                    '../NMSSMTools_4.2.1/main',
                    SLHAIN)
                blocks, decays = pyslha.readSLHAFile(SLHA)
                output[i] = blocks[block][key]
        except:
            # Unphysical point.
            self.physical = False
            return 999.
        else:
            # Return the numerical derivative.
            return 2. * epsilon / (output[1] - output[0])
Example #5
 def runSPheno(self,LHA=None,DEBUG=False):
     """
     Set self.config for PATHS.
     Uses the default SARAH Toolbox SPheno command.
     Returns a pyslha.Doc object for the LHA model.
     Designed to be changed easily for other MODELs.
     Set self.config['SPHENO_PATH'], and
     self.config['SPHENO_COMMAND'] if not using the
     default SARAH SPheno executable.
     """
     if not LHA:
         LHA=self.LHA
     if self.config['SPHENO_LHA_INPUT']=='':
         inputLHA='LesHouches.in.%s%s' %(self.MODEL,self.low)
     else:
         inputLHA=self.config['SPHENO_LHA_INPUT']
     if self.config['SPHENO_COMMAND']=='':
         SPheno_bin_command='SPheno'+self.MODEL #Default SARAH toolbox command
     else:
         SPheno_bin_command=self.config['SPHENO_COMMAND']
     #writeLHAinFile(xdict,inputLHA)
     pyslha.writeSLHAFile(inputLHA,LHA)
     a=commands.getoutput('%s/%s %s' %(self.config['SPHENO_PATH'],SPheno_bin_command,inputLHA))
     if DEBUG:
         print(a)
     assert os.path.isfile('SPheno.spc.%s' %self.MODEL)
     #print a
     if a.find('Problem')==-1:
         self.LHA_out=pyslha.readSLHAFile('SPheno.spc.%s' %self.MODEL)
     else:
         self.LHA_out=False
     return self.LHA_out
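The pyslha part of the round trip can be exercised on its own, independently of SPheno; the file names and the MINPAR entry below are placeholders:

import pyslha

doc = pyslha.readSLHAFile('LesHouches.in.MODEL')    # hypothetical input file
doc.blocks['MINPAR'][3] = 10.0                      # e.g. change tan(beta)
pyslha.writeSLHAFile('LesHouches.in.MODEL_new', doc)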
Example #6
    def readslha(self, input_file):
        """ Read an SLHA file with PySLHA.
        Populates masses, mu and neutralino mixings.
        If the point is unphysical,
        populate the mass blocks with zeros.

        Arguments:
        input_file -- Name of input file.

        Returns:

        """
        try:
            # Read the blocks in the SLHA file.
            self.blocks, self.decays = pyslha.readSLHAFile(input_file)
        except Exception as e:
            # With expected running, shouldn't get any problems. But best
            # to be defensive. A missing mass block would cause an
            # exception, but e.g. stau LSP would not.
            self.physical = False
            print('Caught trouble in the SLHA file:', e)

            # Still need to return data of correct length.
            self.masses = [0] * 34
            self.mu = 0.
            # NB neutralino mixing matrix is 5 by 5.
            self.neutralino = [0] * 25
Example #7
def _getDictionariesFromSLHA(slhafile):
    """
    Create mass and BR dictionaries from an SLHA file.
    Ignore decay blocks with R-parity violating or unknown decays

    """

    from smodels.particlesLoader import rEven, rOdd

    res = pyslha.readSLHAFile(slhafile)

    # Get mass and branching ratios for all particles
    brDic = {}
    writeIgnoreMessage(res.decays.keys(), rEven, rOdd)

    for pid in res.decays.keys():
        if not pid in rOdd:
            continue
        brs = []
        for decay in res.decays[pid].decays:
            nEven = nOdd = 0.
            for pidd in decay.ids:
                if pidd in rOdd: nOdd += 1
                elif pidd in rEven: nEven += 1
                else:
                    logger.warning(
                        "Particle %i not defined in particles.py,decay %i -> [%s] will be ignored"
                        % (pidd, pid, decay.ids))
                    break
            if nOdd + nEven == len(decay.ids) and nOdd == 1:
                brs.append(decay)
            else:
                logger.info("Ignoring decay: %i -> [%s]", pid, decay.ids)

        brsConj = copy.deepcopy(brs)
        for br in brsConj:
            br.ids = [-x for x in br.ids]
        brDic[pid] = brs
        brDic[-pid] = brsConj
    # Get mass list for all particles
    massDic = dict(res.blocks['MASS'].items())
    for pid in list(massDic.keys())[:]:
        massDic[pid] = round(abs(massDic[pid]), 1) * GeV
        if not -pid in massDic: massDic[-pid] = massDic[pid]

    #Include proxy for displaced decays
    if 0 in massDic or 0 in brDic:
        logger.error(
            "PDG = 0 is reserved for displaced decays and it can not be used for other particles. Please redefine the input model PDG assignments."
        )
        raise SModelSError()
    else:
        dispPid = 0
        massDic[dispPid] = 0. * GeV
        dispDec = pyslha.Decay(br=1., ids=[], nda=0)
        brDic[dispPid] = [dispDec]

    return brDic, massDic
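The charge-conjugation step above can be seen in isolation: the decays of the anti-particle are built by flipping the sign of every PID in the decay products (the PIDs below are illustrative):

import copy
import pyslha

brs = [pyslha.Decay(br=1.0, nda=2, ids=[1000022, 24])]   # e.g. chargino+ -> neutralino1 + W+
brsConj = copy.deepcopy(brs)
for br in brsConj:
    br.ids = [-x for x in br.ids]
print(brsConj[0].ids)                                    # [-1000022, -24]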
Example #8
 def _checkSLHA(self, slhafile):
     if not os.path.isfile(slhafile):
         logger.error("SLHA file %s not found.", slhafile)
         raise SModelSError()
     try:
         f = pyslha.readSLHAFile(slhafile)
     except pyslha.ParseError as e:
         logger.error("File cannot be parsed as SLHA file: %s" % e)
         raise SModelSError()
Example #9
def _getPromptDecays(slhafile,
                     brDic,
                     l_inner=1. * mm,
                     gb_inner=1.3,
                     l_outer=10. * m,
                     gb_outer=1.43):
    """
    Using the widths in the slhafile, reweights the BR dictionary by the fraction
    of prompt decays and adds the fraction of "long-lived decays".
    The fraction of prompt decays and "long-lived decays" are defined as:
    F_prompt = 1 - exp(-width*l_inner/gb_inner)
    F_long = exp(-width*l_outer/gb_outer)
    where l_inner is the inner radius of the detector, l_outer is the outer radius
    and gb_x is the estimate of the kinematical factor gamma*beta for each case.
    The default values are gb_inner = 1.3 and gb_outer = 1.43.
    :param slhafile: SLHA file with the decay widths of the particles
    :param brDic: Dictionary with the BRs of the particles
    :param l_inner: Radius of the inner tracker
    :param gb_inner: Effective gamma*beta factor to be used for prompt decays
    :param l_outer: Radius of the outer detector
    :param gb_outer: Effective gamma*beta factor to be used for long-lived decays
    
    :return: Dictionary = {pid : decay}
    """

    hc = 197.327 * MeV * fm  #hbar * c

    #Get the widths:
    res = pyslha.readSLHAFile(slhafile)
    decays = res.decays

    #PID for displaced decays:
    dispPid = 0

    for pid in brDic:
        if pid == dispPid:
            continue
        width = abs(decays[abs(pid)].totalwidth) * GeV
        Fprompt = 1. - math.exp(-width * l_inner / (gb_inner * hc))
        Flong = math.exp(-width * l_outer / (gb_outer * hc))
        for decay in brDic[pid]:
            decay.br *= Fprompt  #Reweight by prompt fraction

        #Add long-lived fraction:
        if Flong > 1e-50:
            stableFraction = pyslha.Decay(br=Flong, ids=[], nda=0)
            brDic[pid].append(stableFraction)
        if (Flong + Fprompt) > 1.:
            logger.error("Sum of decay fractions > 1 for " + str(pid))
            return False
        Fdisp = 1 - Flong - Fprompt
        #Add displaced decay:
        if Fdisp > 0.001:
            displacedFraction = pyslha.Decay(br=Fdisp, ids=[dispPid], nda=1)
            brDic[pid].append(displacedFraction)

    return brDic
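A quick back-of-the-envelope check of the two fractions with plain floats (no unum units; the width below is just an illustrative value):

import math

hc = 197.327e-18                # hbar*c = 197.327 MeV*fm expressed in GeV*m
width = 1e-16                   # GeV, a fairly long-lived particle
l_inner, gb_inner = 1e-3, 1.3   # 1 mm inner radius
l_outer, gb_outer = 10., 1.43   # 10 m outer radius

Fprompt = 1. - math.exp(-width * l_inner / (gb_inner * hc))
Flong = math.exp(-width * l_outer / (gb_outer * hc))
print(Fprompt, Flong)           # roughly 4e-4 and 3e-2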
Example #10
def _getDictionariesFromSLHA(slhafile):
    """
    Create mass and BR dictionaries from an SLHA file.
    Ignore decay blocks with R-parity violating or unknown decays

    """

    from smodels.particlesLoader import rEven, rOdd

    res = pyslha.readSLHAFile(slhafile)

   
    # Get mass and branching ratios for all particles
    brDic = {}
    writeIgnoreMessage(res.decays.keys(), rEven, rOdd)

    for pid in res.decays.keys():
        if not pid in rOdd:
            continue
        brs = []
        for decay in res.decays[pid].decays:
            nEven = nOdd = 0.
            for pidd in decay.ids:
                if pidd in rOdd: nOdd += 1
                elif pidd in rEven: nEven += 1
                else:
                    logger.warning("Particle %i not defined in particles.py,decay %i -> [%s] will be ignored" %(pidd,pid,decay.ids))
                    break
            if nOdd + nEven == len(decay.ids) and nOdd == 1:
                brs.append(decay)
            else:
                logger.info("Ignoring decay: %i -> [%s]",pid,decay.ids)

        brsConj = copy.deepcopy(brs)
        for br in brsConj:
            br.ids = [-x for x in br.ids]
        brDic[pid] = brs
        brDic[-pid] = brsConj
    # Get mass list for all particles
    massDic = dict(res.blocks['MASS'].items())
    for pid in list ( massDic.keys() )[:]:
        massDic[pid] = round(abs(massDic[pid]),1)*GeV
        if not -pid in massDic: massDic[-pid] = massDic[pid] 

    #Include proxy for displaced decays
    if 0 in massDic or 0 in brDic:
        logger.error("PDG = 0 is reserved for displaced decays and it can not be used for other particles. Please redefine the input model PDG assignments.")
        raise SModelSError()
    else:
        dispPid = 0
        massDic[dispPid] = 0. * GeV
        dispDec = pyslha.Decay(br=1., ids=[], nda=0)
        brDic[dispPid] = [dispDec]
   
 
    return brDic, massDic
Example #11
def debugFile(slhafile,nevts=50000,forceDegenerate=False):
    #Individual file debugging:
    
    if forceDegenerate:
        f = pyslha.readSLHAFile(slhafile)
        masses = f.blocks['MASS']
        squarksMasses = [abs(mass) for pid,mass in masses.items() if pid in squarks]
        avgmass = sum(squarksMasses)/len(squarksMasses)
        if abs(max(squarksMasses)-avgmass) > 0.1 or  abs(min(squarksMasses)-avgmass) > 0.1:
            for pid in squarks:
                f.blocks['MASS'][pid] = avgmass
                
            slhaF,slhafile_new = tempfile.mkstemp(suffix='.slha', dir='./')
            os.write(slhaF,f.write())            
            os.close(slhaF)
            logger.warning("Testing degenerate squarks for %s with average mass %s" %(slhafile,avgmass))
            # Recurse on the degenerate copy only when one was written; if the
            # squark masses are already degenerate, fall through to the comparison below.
            comp = debugFile(slhafile_new, nevts=nevts,forceDegenerate=False)
            os.remove(slhafile_new)
            return comp
    
    computer6 = xsecComputer.XSecComputer(LO, nevts, 6)
    computer8 = xsecComputer.XSecComputer(LO, nevts, 8)
    w6 = computer6.compute(8*TeV, slhafile, pythiacard = './my_pythia6.card').getDictionary()
    w8 = computer8.compute(8*TeV, slhafile, pythiacard = './my_pythia8.cfg').getDictionary()
    
#     print 'Pythia 6:'
#     for key,val in sorted(w6.items()):
#         print key,val.values()[0]
#              
#     print 'Pythia 8:'
#     for key,val in sorted(w8.items()):
#         print key,val.values()[0]
    
    
    #Remove the antisbottom-gluino xsec (seems to be missing in Pythia 8):
#    if (-1000005, 1000021) in w6:
#        w6.pop((-1000005, 1000021))
    #Remove the antisdown-gluino xsec (seems to be missing in Pythia 8):
#    if (-1000001, 1000021) in w6:
#        w6.pop((-1000001, 1000021))
#    if (-2000001, 1000021) in w6:
#        w6.pop((-2000001, 1000021))
#    if (-1000003, 1000021) in w6:
#        w6.pop((-1000003, 1000021))        
    #Remove the antisbottom-gluino xsec (seems to be missing in Pythia 8):
#    if (-1000024, 1000021) in w6:
#        totxsec = w6[(-1000024, 1000021)].values()[0]
#        if (1000021, 1000024) in w6:
#            totxsec += w6[(1000021, 1000024)].values()[0]
#        w6.pop((-1000024, 1000021))
#        w6[(1000021, 1000024)] = {'8 TeV (LO)' : totxsec}    
    
    comp = compareXSections(w6,w8,nevts,relError=0.1)

        
    return comp 
Example #12
    def runSPheno(self, LHA=None, DEBUG=False):
        """
        Set self.config for PATHS.
        Uses the default SARAH Toolbox SPheno command.
        Returns a pyslha.Doc object for the LHA model.
        Designed to be changed easily for other MODELs.
        Set self.config['SPHENO_PATH'], and
        self.config['SPHENO_COMMAND'] if not using the
        default SARAH SPheno executable.
        """
        if not LHA:
            LHA = self.LHA
        if self.config['SPHENO_LHA_INPUT'] == '':
            inputLHA = 'LesHouches.in.%s%s' % (self.MODEL, self.low)
        else:
            inputLHA = self.config['SPHENO_LHA_INPUT']
        if self.config['SPHENO_COMMAND'] == '':
            SPheno_bin_command = 'SPheno' + self.MODEL  #Default SARAH toolbox command
        else:
            SPheno_bin_command = self.config['SPHENO_COMMAND']
        #writeLHAinFile(xdict,inputLHA)
        pyslha.writeSLHAFile(inputLHA, LHA)
        a = commands.getoutput(
            '%s/%s %s' %
            (self.config['SPHENO_PATH'], SPheno_bin_command, inputLHA))
        if DEBUG:
            print(a)
        assert os.path.isfile('SPheno.spc.%s' % self.MODEL)
        #print a
        #exceptions
        if a.find('Problem in OneLoop') > -1:
            a = a.replace('Problem', 'No problem')

        if a.find('Problem') == -1:
            self.LHA_out = pyslha.readSLHAFile('SPheno.spc.%s' % self.MODEL)
            #with comments but without decays
            a = commands.getoutput(
                "cat  SPheno.spc.%s | grep -m 1 -i -B1000 '^decay' | grep -vi '^decay' >  SPheno.spc.%s_nodecays.spc"
                % (self.MODEL, self.MODEL))
            if os.path.isfile("SPheno.spc.%s_nodecays.spc" % self.MODEL):
                self.LHA_out_with_comments = _readSLHAFile_with_comments(
                    "SPheno.spc.%s_nodecays.spc" % self.MODEL)
                #PDG for new particles
                for pid in self.LHA_out_with_comments.blocks['MASS'].entries:
                    if np.abs(pid) > 25:
                        pvalues = self.LHA_out_with_comments.blocks[
                            'MASS'].entries[pid].split('#')
                        if len(pvalues) == 2:
                            self.pdg[pvalues[1].strip()] = pid

        else:
            self.LHA_out = False
            self.LHA_out_with_comments = False

        self.to_series()  #Fill to_Series pandas Series
        return self.LHA_out
Example #13
def oscilation(spcfile):
    """oscilation parameters"""
    slha=pyslha.readSLHAFile(spcfile)
    #neutrino parameters
    Delta2m32=slha[0]['SPHENORP'].entries[7]
    Delta2m21=slha[0]['SPHENORP'].entries[8]
    s223=np.sin(np.arctan(np.sqrt(slha[0]['SPHENORP'].entries[9])))**2
    s212=np.sin(np.arctan(np.sqrt(slha[0]['SPHENORP'].entries[10])))**2
    U13=slha[0]['SPHENORP'].entries[11]
    return Delta2m32,Delta2m21,s223,s212,U13
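The two trigonometric lines are just a tan^2(theta) -> sin^2(theta) conversion; numerically, sin(arctan(sqrt(t2)))**2 equals t2/(1 + t2):

import numpy as np

t2 = 0.45                                                   # an illustrative tan^2(theta)
print(np.sin(np.arctan(np.sqrt(t2)))**2, t2 / (1. + t2))    # both ~0.3103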
Example #14
 def read(self):
     """
     Get pyslha output object.
     
     """
     try: ret = pyslha.readSLHAFile(self.filename)
     except (pyslha.AccessError,pyslha.ParseError,IOError): 
         return None
     if not ret.blocks["MASS"]: return None
     return ret
Example #15
 def read(self):
     """
     Get pyslha output object.
     
     """
     try: ret = pyslha.readSLHAFile(self.filename)
     except (pyslha.AccessError,pyslha.ParseError,IOError): 
         return None
     if not ret.blocks["MASS"]: return None
     return ret
Example #16
def _getDictionariesFromSLHA(slhafile):
    """
    Create mass and BR dictionaries from an SLHA file.
    Ignore decay blocks with R-parity violating or unknown decays

    """

    res = pyslha.readSLHAFile(slhafile)

    rOdd = particleD.rOdd.keys()
    rEven = particleD.rEven.keys()

    # Get mass and branching ratios for all particles
    brDic = {}
    for pid in res.decays.keys():
        if not pid in rEven + rOdd:
            logger.warning(
                "Particle %i not defined in particles.py, its decays will be ignored"
                % (pid))
            continue
        if pid in rEven:
            logger.info("Ignoring %s decays", particleD.rEven[pid])
            continue
        brs = []
        for decay in res.decays[pid].decays:
            nEven = nOdd = 0.
            for pidd in decay.ids:
                if pidd in rOdd: nOdd += 1
                elif pidd in rEven: nEven += 1
                else:
                    logger.warning(
                        "Particle %i not defined in particles.py,decay %i -> [%s] will be ignored"
                        % (pidd, pid, decay.ids))
                    break
            if nOdd + nEven == len(decay.ids) and nOdd == 1:
                brs.append(decay)
            else:
                logger.info("Ignoring decay: %i -> [%s]", pid, decay.ids)

        brsConj = copy.deepcopy(brs)
        for br in brsConj:
            br.ids = [-x for x in br.ids]
        brDic[pid] = brs
        brDic[-pid] = brsConj
    # Get mass list for all particles
    massDic = dict(res.blocks['MASS'].items())
    for pid in list(massDic.keys())[:]:
        massDic[pid] *= GeV
        massDic[pid] = abs(massDic[pid])
        if not -pid in massDic: massDic[-pid] = massDic[pid]

    return brDic, massDic
Example #17
def _getPromptDecays(slhafile,brDic,l_inner=1.*mm,gb_inner=1.3,l_outer=10.*m,gb_outer=1.43):
    """
    Using the widths in the slhafile, reweights the BR dictionary by the fraction
    of prompt decays and adds the fraction of "long-lived decays".
    The fraction of prompt decays and "long-lived decays" are defined as:
    F_prompt = 1 - exp(-width*l_inner/gb_inner)
    F_long = exp(-width*l_outer/gb_outer)
    where l_inner is the inner radius of the detector, l_outer is the outer radius
    and gb_x is the estimate of the kinematical factor gamma*beta for each case.
    The default values are gb_inner = 1.3 and gb_outer = 1.43.
    :param slhafile: SLHA file with the decay widths of the particles
    :param brDic: Dictionary with the BRs of the particles
    :param l_inner: Radius of the inner tracker
    :param gb_inner: Effective gamma*beta factor to be used for prompt decays
    :param l_outer: Radius of the outer detector
    :param gb_outer: Effective gamma*beta factor to be used for long-lived decays
    
    :return: Dictionary = {pid : decay}
    """
    
    hc = 197.327*MeV*fm  #hbar * c
        
    #Get the widths:
    res = pyslha.readSLHAFile(slhafile)
    decays = res.decays

    #PID for displaced decays:
    dispPid = 0
        
    for pid in brDic:
        if pid == dispPid:
            continue
        width = abs(decays[abs(pid)].totalwidth)*GeV
        Fprompt = 1. - math.exp(-width*l_inner/(gb_inner*hc))
        Flong = math.exp(-width*l_outer/(gb_outer*hc))
        for decay in brDic[pid]:
            decay.br *= Fprompt  #Reweight by prompt fraction
            
        #Add long-lived fraction:
        if Flong > 1e-50:
            stableFraction = pyslha.Decay(br=Flong,ids=[],nda=0)
            brDic[pid].append(stableFraction) 
        if (Flong+Fprompt) > 1.:
            logger.error("Sum of decay fractions > 1 for "+str(pid))
            return False
        Fdisp = 1 - Flong - Fprompt
        #Add displaced decay:
        if Fdisp > 0.001:
            displacedFraction = pyslha.Decay(br=Fdisp, ids = [dispPid], nda=1)
            brDic[pid].append(displacedFraction)
        
    return brDic
Example #18
def writeEffsToFile(effsList, yodaFolder, slhaFolder, xpdg, ypdg, doError):
    """
    Write effs to outfile in the format:
       #<header>
       #<analysisID-label>
          #<sr1-label>  <sr2-label> ...
          <eff_sr1>    <eff_sr2> ...
       #<analysisID-label>
          #<sr1-label>  <sr2-label> ...
          <eff_sr1>    <eff_sr2> ...
          
    """

    analysesIDs = []
    for f in effsList.keys():
        for key in effsList[f].keys():
            if isinstance(effsList[f][key], dict):
                analysesIDs.append(key)

    for analysisID in analysesIDs:
        outF = open(analysisID + '_eff.dat', 'w')
        outF.write('# %s\n' % analysisID)
        f = list(effsList.keys())[0]
        effs = effsList[f][analysisID]
        header = ['# mass_%i' % xpdg, 'mass_%i' % ypdg]
        SRs = sorted(list(effs.keys()))
        header += SRs
        col_width = max(20, max(len(h) for h in header)) + 3  # padding
        outF.write("".join(h.ljust(col_width) for h in header) + '\n')
        for f in effsList:
            effs = effsList[f][analysisID]
            slhaFile = os.path.basename(f.replace('.yoda', '.slha'))
            slhaFile = os.path.join(slhaFolder, slhaFile)
            slhaData = pyslha.readSLHAFile(slhaFile)
            xval = slhaData.blocks['MASS'][xpdg]
            yval = slhaData.blocks['MASS'][ypdg]
            xvals = [xval, yval]
            vals = [effs[SR] for SR in SRs]
            outF.write("".join(("%.6e " % x).ljust(col_width) for x in xvals))
            if doError:
                errvals = [
                    sqrt(effs[SR]) / effsList[f]['Ntotal'] for SR in SRs
                ]
                outF.write("".join(
                    ("%.2e +- %.2e" % (v, errvals[i])).ljust(col_width)
                    for i, v in enumerate(vals)) + '\n')
            else:
                outF.write("".join(
                    ("%.2e" % (v)).ljust(col_width) for v in vals) + '\n')

        outF.close()
Example #19
def getXsecFromSLHAFile(slhafile, useXSecs=None, xsecUnit=pb):
    """
    Obtain cross sections for pair production of R-odd particles from input SLHA file.
    The default unit for cross section is pb.

    :parameter slhafile: SLHA input file with cross sections
    :parameter useXSecs: if defined enables the user to select cross sections to
                     use. Must be a XSecInfoList object
    :parameter xsecUnit: cross section unit in the input file (must be a Unum unit)
    :returns: a XSectionList object

    """
    # Store information about all cross sections in the SLHA file
    xSecsInFile = XSectionList()
    f = pyslha.readSLHAFile(slhafile)
    from smodels.particlesLoader import rOdd
    for production in f.xsections:
        rEvenParticles = list(set(production[2:]).difference(set(rOdd.keys())))
        if rEvenParticles:
            # ignore production of R-Even Particles
            logger.warning(
                "Particles %s not defined as R-odd, cross section for %s production will be ignored"
                % (rEvenParticles, str(production)))
            continue
        process = f.xsections.get(production)
        for pxsec in process.xsecs:
            csOrder = pxsec.qcd_order
            wlabel = str(int(pxsec.sqrts / 1000)) + ' TeV'
            if csOrder == 0:
                wlabel += ' (LO)'
            elif csOrder == 1:
                wlabel += ' (NLO)'
            elif csOrder == 2:
                wlabel += ' (NLL)'
            else:
                logger.error("Unknown QCD order %d" % csOrder)
                raise SModelSError()
            xsec = XSection()
            xsec.info.sqrts = pxsec.sqrts / 1000. * TeV
            xsec.info.order = csOrder
            xsec.info.label = wlabel
            xsec.value = pxsec.value * pb
            xsec.pid = production[2:]
            # Do not add xsecs which do not match the user required ones:
            if (useXSecs and not xsec.info in useXSecs):
                continue
            else:
                xSecsInFile.add(xsec)

    return xSecsInFile
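Stripped of the SModelS bookkeeping, the pyslha side of this reduces to walking the XSECTION blocks; 'point.slha' is a placeholder for a file that actually carries cross-section blocks:

import pyslha

doc = pyslha.readSLHAFile('point.slha')
for production in doc.xsections:            # key: (beam1, beam2, pid1, pid2)
    process = doc.xsections.get(production)
    for xs in process.xsecs:
        print(production, xs.sqrts, xs.qcd_order, xs.value)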
Example #20
    def getModelDataFrom(self,inputFile):
        """
        Reads the input file (LHE or SLHA) and extracts the relevant information
        (masses, widths, BRs and cross-sections). If an http address is given, it will
        attempt to download the file.

        :param inputFile: input file (SLHA or LHE)

        :return: dictionary with masses, dictionary with decays and XSectionList object
        """

        #Download input file, if requested
        if inputFile.startswith("http") or inputFile.startswith("ftp"):
            logger.info("Asked for remote slhafile %s. Fetching it." % inputFile)
            import requests
            import os.path
            r = requests.get(inputFile)
            if r.status_code != 200:
                logger.error("Could not retrieve remote file %d: %s" %(r.status_code, r.reason))
                raise SModelSError()
            basename = os.path.basename(inputFile)
            f = open(basename, "w")
            f.write(r.text)
            f.close()
            inputFile = basename

        #Trick to suppress pyslha error messages:
        import sys
        storeErr = sys.stderr
        #Try to read file assuming it is an SLHA file:
        try:
            sys.stderr = None
            res = pyslha.readSLHAFile(inputFile)
            massDict = res.blocks['MASS']
            #Make sure both PDG signs appear in massDict
            for pdg,mass in massDict.items():
                if not -pdg in massDict:
                    massDict[-pdg] = abs(mass)
            decaysDict = res.decays
            xsections = crossSection.getXsecFromSLHAFile(inputFile)
        #If fails assume it is an LHE file:
        except (IOError,AttributeError,KeyError):
            massDict,decaysDict = lheReader.getDictionariesFrom(inputFile)
            xsections = crossSection.getXsecFromLHEFile(inputFile)
            logger.info("Using LHE input. All unstable particles will be assumed to have prompt decays.")
            logger.info("Using LHE input. All particles not appearing in the events will be removed from the model.")
        finally:
            sys.stderr = storeErr

        return massDict,decaysDict,xsections
Example #21
def getXsecFromSLHAFile(slhafile, useXSecs=None, xsecUnit = pb):
    """
    Obtain cross sections for pair production of R-odd particles from input SLHA file.
    The default unit for cross section is pb.

    :parameter slhafile: SLHA input file with cross sections
    :parameter useXSecs: if defined enables the user to select cross sections to
                     use. Must be a XSecInfoList object
    :parameter xsecUnit: cross section unit in the input file (must be a Unum unit)
    :returns: a XSectionList object

    """
    # Store information about all cross sections in the SLHA file
    xSecsInFile = XSectionList()
    f=pyslha.readSLHAFile ( slhafile )
    from smodels.particlesLoader import rOdd
    for production in f.xsections:
        rEvenParticles = list(set(production[2:]).difference(set(rOdd.keys())))
        if rEvenParticles:
            # ignore production of R-Even Particles
            logger.warning("Particles %s not defined as R-odd, cross section for %s production will be ignored" 
                           %(rEvenParticles,str(production)))                 
            continue
        process = f.xsections.get ( production )
        for pxsec in process.xsecs:
            csOrder = pxsec.qcd_order
            wlabel = str( int ( pxsec.sqrts / 1000) ) + ' TeV'
            if csOrder == 0:
                wlabel += ' (LO)'
            elif csOrder == 1:
                wlabel += ' (NLO)'
            elif csOrder == 2:
                wlabel += ' (NLL)'
            else:
                logger.error ( "Unknown QCD order %d" % csOrder )
                raise SModelSError()
            xsec = XSection()
            xsec.info.sqrts = pxsec.sqrts/1000. * TeV
            xsec.info.order = csOrder
            xsec.info.label = wlabel
            xsec.value = pxsec.value * pb
            xsec.pid = production[2:]
            # Do not add xsecs which do not match the user required ones:
            if (useXSecs and not xsec.info in useXSecs):
                continue
            else: xSecsInFile.add(xsec)

    return xSecsInFile
Example #22
def get_slha_data(slhaFile):
    """
    Uses pyslha to read the SLHA file. Returns a pyslha.Doc object, if successful.
    """

    if not os.path.isfile(slhaFile):
        logger.warning("SLHA file %s not found. This point will be ignored" %
                       slhaFile)
        return False

    try:
        slhaData = pyslha.readSLHAFile(slhaFile)
    except:
        logger.warning("Error reading SLHA file %s." % slhaFile)
        return False

    return slhaData
Example #23
def _readSLHAFile_with_comments(spcfile, ignorenomass=False, ignorenobr=True):
    import pyslha
    import os
    import re
    import sys

    if os.path.exists(spcfile):
        with open(spcfile, 'r') as f:
            try:
                input_file = f.readlines()
            except:  # whatever reader errors you care about
                sys.exit('LesHouches file not found')

    IF = pyslha.readSLHAFile(spcfile,
                             ignorenomass=ignorenomass,
                             ignorenobr=ignorenobr)
    lspaces = 15
    lspacesmin = 3

    for l in input_file:
        l = l.split('\n')[0]
        if re.search('^block', l.lower()):
            if len(l.split()) >= 2:
                block = l.split()[1].upper()
        else:
            if not re.search('^\s*#', l):
                fline = l.split('#')
                entries = fline[0].split()
                if len(entries) == 2:
                    spaces = lspaces - len(entries[1])
                    if spaces < 0:
                        spaces = lspacesmin

                    IF.blocks[block].entries[int(
                        entries[0])] = '%s%s#%s' % (entries[1], ' ' * spaces,
                                                    fline[1])
                if len(entries) == 3:
                    spaces = lspaces - len(entries[2])
                    if spaces < 0:
                        spaces = lspacesmin

                    IF.blocks[block].entries[
                        int(entries[0]),
                        int(entries[1])] = '%s%s#%s' % (entries[2],
                                                        ' ' * spaces, fline[1])
    return IF
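Hypothetical usage of the helper above, keeping the inline '#' comments of a SPheno spectrum file so that particle names can be read back from the MASS block (the file name is a placeholder):

doc = _readSLHAFile_with_comments('SPheno.spc.MODEL_nodecays.spc')
for pid in doc.blocks['MASS'].entries:
    print(pid, doc.blocks['MASS'].entries[pid])   # value string followed by its comment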
Example #24
    def runSPheno(self,LHA=None,DEBUG=False):
        """
        Set self.config for PATHS.
        Uses the default SARAH Toolbox SPheno command.
        Returns a pyslha.Doc object for the LHA model.
        Designed to be changed easily for other MODELs.
        Set self.config['SPHENO_PATH'], and
        self.config['SPHENO_COMMAND'] if not using the
        default SARAH SPheno executable.
        """
        if not LHA:
            LHA=self.LHA
        if self.config['SPHENO_LHA_INPUT']=='':
            inputLHA='LesHouches.in.%s%s' %(self.MODEL,self.low)
        else:
            inputLHA=self.config['SPHENO_LHA_INPUT']
        if self.config['SPHENO_COMMAND']=='':
            SPheno_bin_command='SPheno'+self.MODEL #Default SARAH toolbox command
        else:
            SPheno_bin_command=self.config['SPHENO_COMMAND']
        #writeLHAinFile(xdict,inputLHA)
        pyslha.writeSLHAFile(inputLHA,LHA)
        a=commands.getoutput('%s/%s %s' %(self.config['SPHENO_PATH'],SPheno_bin_command,inputLHA))
        if DEBUG:
            print(a)
        assert os.path.isfile('SPheno.spc.%s' %self.MODEL)
        #print a
        if a.find('Problem')==-1:
            self.LHA_out=pyslha.readSLHAFile('SPheno.spc.%s' %self.MODEL)
            #with comments but without decays
            a=commands.getoutput("cat  SPheno.spc.%s | grep -m 1 -i -B1000 '^decay' | grep -vi '^decay' >  SPheno.spc.%s_nodecays.spc" %(self.MODEL,self.MODEL))
            if os.path.isfile("SPheno.spc.%s_nodecays.spc" %self.MODEL):
                self.LHA_out_with_comments=_readSLHAFile_with_comments("SPheno.spc.%s_nodecays.spc" %self.MODEL)
                #PDG for new particles
                for pid in self.LHA_out_with_comments.blocks['MASS'].entries:
                    if np.abs(pid)>25:
                        pvalues=self.LHA_out_with_comments.blocks['MASS'].entries[pid].split('#')
                        if len(pvalues)==2:
                            self.pdg[pvalues[1].strip()]=pid

            
        else:
            self.LHA_out=False
            self.LHA_out_with_comments=False
        return self.LHA_out
Example #25
def getXsecFromSLHAFile(slhafile, useXSecs=None, xsecUnit=pb):
    """
    Obtain cross sections for pair production of R-odd particles from input SLHA file.
    The default unit for cross section is pb.

    :parameter slhafile: SLHA input file with cross sections
    :parameter useXSecs: if defined enables the user to select cross sections to
                     use. Must be a XSecInfoList object
    :parameter xsecUnit: cross section unit in the input file (must be a Unum unit)
    :returns: a XSectionList object

    """
    # Store information about all cross sections in the SLHA file
    xSecsInFile = XSectionList()
    f = pyslha.readSLHAFile(slhafile)
    for production in f.xsections:
        process = f.xsections.get(production)
        for pxsec in process.xsecs:
            csOrder = pxsec.qcd_order
            wlabel = str(int(pxsec.sqrts / 1000)) + ' TeV'
            if csOrder == 0:
                wlabel += ' (LO)'
            elif csOrder == 1:
                wlabel += ' (NLO)'
            elif csOrder == 2:
                wlabel += ' (NLL)'
            else:
                logger.error("Unknown QCD order %d" % csOrder)
                raise SModelSError()
            xsec = XSection()
            xsec.info.sqrts = pxsec.sqrts / 1000. * TeV
            xsec.info.order = csOrder
            xsec.info.label = wlabel
            xsec.value = pxsec.value * pb
            xsec.pid = production[2:]
            # Do not add xsecs which do not match the user required ones:
            if (useXSecs and not xsec.info in useXSecs):
                continue
            else:
                xSecsInFile.add(xsec)

    #Make sure duplicates are removed.
    xSecsInFile.removeDuplicates()

    return xSecsInFile
Example #26
    def readSSP(self, LHA=None, SSP=False, DEBUG=False):
        """
        Set self.config for PATHS.
        Uses the default SARAH Toolbox SPheno command.
        Returns a pyslha.Doc object for the LHA model.
        Designed to be changed easily for other MODELs.
        Set self.config['SPHENO_PATH'], and
        self.config['SPHENO_COMMAND'] if not using the
        default SARAH SPheno executable.
        """
        assert os.path.isfile('SPheno.spc.%s' % self.MODEL)

        self.LHA_out = pyslha.readSLHAFile('SPheno.spc.%s' % self.MODEL)
        #with comments but without decays
        a = commands.getoutput(
            "cat  SPheno.spc.%s | grep -m 1 -i -B1000 '^decay' | grep -vi '^decay' >  SPheno.spc.%s_nodecays.spc"
            % (self.MODEL, self.MODEL))

        if os.path.isfile("SPheno.spc.%s_nodecays.spc" % self.MODEL):
            a = commands.getoutput(
                "grep -i -A100000 '^DECAY' SPheno.spc.%s | grep -i -A100000 '^BLOCK'"
                % self.MODEL)  #check for blocks after decays
            if a:
                af = open("SPheno.spc.%s_nodecays.spc" % self.MODEL, 'a')
                af.write(a)
                af.close()

            self.LHA_out_with_comments = _readSLHAFile_with_comments(
                "SPheno.spc.%s_nodecays.spc" % self.MODEL)
            #PDG for new particles
            for pid in self.LHA_out_with_comments.blocks['MASS'].entries:
                if np.abs(pid) > 25:
                    pvalues = self.LHA_out_with_comments.blocks[
                        'MASS'].entries[pid].split('#')
                    if len(pvalues) == 2:
                        self.pdg[pvalues[1].strip()] = pid

        self.to_series()  #Fill to_Series pandas Series
        return self.LHA_out
Example #27
def _readSLHAFile_with_comments(spcfile,ignorenomass=False,ignorenobr=True):
    import pyslha
    import os
    import re
    import sys
    
    if os.path.exists(spcfile):
       with open(spcfile, 'r') as f:
           try:
               input_file=f.readlines()
           except : # whatever reader errors you care about
               sys.exit('LesHouches file not found')
                    
    IF=pyslha.readSLHAFile(spcfile,ignorenomass=ignorenomass,ignorenobr=ignorenobr)
    lspaces=15;lspacesmin=3

    for l in input_file:
        l=l.split('\n')[0]
        if re.search('^block',l.lower()):
            if len(l.split())>=2:
                block=l.split()[1].upper()
        else:
            if not re.search('^\s*#',l):
                fline=l.split('#')
                entries=fline[0].split()
                if len(entries)==2:
                    spaces=lspaces-len(entries[1])
                    if spaces<0:
                        spaces=lspacesmin
                
                    IF.blocks[block].entries[int(entries[0])]='%s%s#%s' %(entries[1],' '*spaces,fline[1])
                if len(entries)==3:
                    spaces=lspaces-len(entries[2])
                    if spaces<0:
                        spaces=lspacesmin
                    
                    IF.blocks[block].entries[int(entries[0]),int(entries[1])]='%s%s#%s' %(entries[2],' '*spaces,fline[1])
    return IF
Example #28
def getKfactorsFor(pIDs, sqrts, slhafile, pdf='cteq'):
    """
    Reads the NLLfast grid and returns a pair of k-factors (NLO and NLL) for the
    given PID pair.

    :returns: k-factors = None, if NLLfast does not contain the process; uses
              the slhafile to obtain the SUSY spectrum.
    
    """
    if not os.path.isfile(slhafile):
        logger.error("SLHA file %s not found", slhafile)
        return False

    # Get process name (in NLLfast notation)
    process = getProcessName(pIDs)
    if not process:
        # Return k-factors = None, if NLLfast does not have the process
        return (None, None)

    # Obtain relevant masses
    readfile = pyslha.readSLHAFile(slhafile)
    masses = readfile.blocks['MASS']
    check_pids = squarks + gluinos + third
    for check in check_pids:
        if not check in masses.entries:
            logger.error ( "cannot compute k factor for pdgid %d: " \
              " no particle mass given. will set mass to inf." % check )
            masses.entries[check] = 1.e10

    gluinomass = abs(masses.entries[1000021])
    squarkmass = sum([abs(masses.entries[pid]) for pid in squarks]) / 8.
    pid1, pid2 = sorted(pIDs)
    if pid1 in antisquarks and pid2 in squarks:
        squarkmass = (abs(masses.entries[abs(pid1)]) +
                      abs(masses.entries[pid2])) / 2.
    elif pid1 in squarks and pid2 in squarks:
        squarkmass = (abs(masses.entries[pid1]) +
                      abs(masses.entries[pid2])) / 2.
    elif abs(pid1) == pid2 and pid2 in third:
        squarkmass = abs(masses.entries[abs(pid1)])

    # Set up NLLfast run, the old way
    sqrtS = float(sqrts / TeV)
    energy = str(int(sqrtS)) + 'TeV'
    toolname = "nllfast%d" % int(sqrtS)
    box = toolBox.ToolBox()
    tool = box.get(toolname)
    if tool == None:
        logger.warning("No NLLfast data for sqrts = " + str(sqrts))
        return (None, None)
    nllpath = tool.installDirectory()
    tool.pathOfExecutable()
    tool.checkInstallation()
    if process == "st":
        nll_run = "./nllfast_" + energy + " %s %s %s" % \
                  (process, pdf, squarkmass)
    else:
        nll_run = "./nllfast_" + energy + " %s %s %s %s" % \
                  (process, pdf, squarkmass, gluinomass)

    # Run NLLfast
    nll_output = runNLLfast(nll_run, nllpath)

    # If run was successful, return k-factors:
    if "K_NLO" in nll_output:
        # NLLfast ran ok, try to get the k-factors
        kFacs = getKfactorsFrom(nll_output)
        if not kFacs or min(kFacs) <= 0.:
            logger.warning("Error obtaining k-factors")
            return (None, None)
        else:
            return kFacs
    # If run was not successful, check for decoupling error messages:
    elif not "too low/high" in nll_output.lower():
        logger.warning("Error running NLLfast")
        return (None, None)

    # Check for decoupling cases with a decoupling grid (only for sb and gg)
    doDecoupling = False
    if "too low/high gluino" in nll_output.lower():
        if gluinomass > 500. and process == 'sb':
            doDecoupling = True
            dcpl_mass = gluinomass
    elif "too low/high squark" in nll_output.lower():
        if squarkmass > 500. and process == 'gg':
            doDecoupling = True
            dcpl_mass = squarkmass

    # If process do not have decoupled grids, return None:
    if not doDecoupling:
        logger.warning("Masses out of NLLfast grid for " + process)
        return (None, None)

    # Obtain k-factors from the NLLfast decoupled grid
    kfacs = getDecoupledKfactors(nllpath, process, energy, pdf,
                                 min(gluinomass, squarkmass))
    # Decoupling limit is satisfied, do not interpolate
    if not kfacs:
        logger.warning(
            "Error obtaining k-factors from the NLLfast decoupled grid for " +
            process)
        return (None, None)
    elif dcpl_mass / min(gluinomass, squarkmass) > 10.:
        return kfacs
    # Interpolate between the non-decoupled and decoupled grids
    else:
        kFacsVector = [[10. * min(gluinomass, squarkmass), kfacs]
                       ]  #First point for interpolation (decoupled grid)
        kfacs = None
        while not kfacs and dcpl_mass > 500.:
            dcpl_mass -= 100.  # Reduce decoupled mass, until NLLfast produces results
            if process == 'sb':
                nllinput = (process, pdf, squarkmass, dcpl_mass)
            else:
                nllinput = (process, pdf, dcpl_mass, gluinomass)
            nll_run = "./nllfast_" + energy + " %s %s %s %s" % nllinput
            nll_output = runNLLfast(nll_run, nllpath)
            kfacs = getKfactorsFrom(nll_output)
        kFacsVector.append(
            [dcpl_mass,
             kfacs])  #Second point for interpolation (non-decoupled grid)

    if len(kFacsVector) < 2:
        logger.warning("Not enough points for interpolation in the decoupling "
                       "limit")
        return (None, None)
    else:
        # Interpolate k-factors
        kFacs = interpolateKfactors(kFacsVector, max(squarkmass, gluinomass))
    return kFacs
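interpolateKfactors itself is not shown in this example; assuming it linearly interpolates in mass between the two [mass, (kNLO, kNLL)] points collected in kFacsVector, a sketch would be:

def interpolateKfactorsSketch(kFacsVector, mass):
    # Two-point linear interpolation between the decoupled and non-decoupled grids.
    (m1, k1), (m2, k2) = kFacsVector
    frac = (mass - m1) / (m2 - m1)
    return tuple(kA + frac * (kB - kA) for kA, kB in zip(k1, k2))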
    'font.serif': 'Times New Roman'
})
sns.set_context('paper', font_scale=1.8)
cm = plt.cm.get_cmap('RdYlBu')

# %% Load data
#Official curve
excATLAS = np.genfromtxt(
    './HEPData-ins1641262-v4-Exclusion_contour_EW_2_obs_conv.txt', names=True)

# %% Get data from CheckMate results
resultFolder = '../data/TDTM1M2F_smodels'
slhaFolder = '../data/TDTM1M2F_slha'
rData = []
for slhaFile in glob.glob(slhaFolder + '/*.slha'):
    slhaData = pyslha.readSLHAFile(slhaFile)
    mC1 = slhaData.blocks['MASS'][1000024]
    widthC1 = slhaData.decays[1000024].totalwidth
    ctau_ns = 6.582e-16 / widthC1
    xsecC1C1 = [
        x for x in slhaData.xsections[(2212, 2212, -1000024, 1000024)].xsecs
        if x.sqrts == 13000.
    ][0].value
    xsecC1pN1 = [
        x for x in slhaData.xsections[(2212, 2212, 1000022, 1000024)].xsecs
        if x.sqrts == 13000.
    ][0].value
    xsecC1mN1 = [
        x for x in slhaData.xsections[(2212, 2212, -1000024, 1000022)].xsecs
        if x.sqrts == 13000.
    ][0].value
Example #30
    def testDictionaries(self):

        filename = "./testFiles/lhe/simplyGluino.lhe"
        massDict, decayDict = lheReader.getDictionariesFrom(filename)
        self.assertEqual(massDict, {
            1000021: 675.0,
            1000022: 200.0,
            1: 0.33,
            2: 0.33
        })
        gluinoDecs = [
            pyslha.Decay(br=0.3, nda=3, ids=[-1, 1, 1000022],
                         parentid=1000021),
            pyslha.Decay(br=0.7, nda=3, ids=[-2, 2, 1000022], parentid=1000021)
        ]
        self.assertEqual(len(decayDict[1000021].decays), len(gluinoDecs))
        self.assertTrue(compareDecays(gluinoDecs, decayDict[1000021].decays))

        re = pyslha.readSLHAFile("./testFiles/slha/gluino_squarks.slha")

        filename = "./testFiles/lhe/gluino_squarks.lhe"
        massDict, decayDict = lheReader.getDictionariesFrom(filename)
        for pdg in massDict:
            if pdg < 100000:
                continue
            self.assertAlmostEqual(re.blocks['MASS'][pdg], massDict[pdg])

        #Expected answer:
        decayRes = {}
        decayRes[1000024] = [
            pyslha.Decay(br=1.0000, ids=[1000022, 24], parentid=1000024, nda=2)
        ]
        decayRes[1000023] = [
            pyslha.Decay(br=0.8571, ids=[1000022, 25], parentid=1000023,
                         nda=2),
            pyslha.Decay(br=0.1429, ids=[1000022, 23], parentid=1000023, nda=2)
        ]
        decayRes[1000001] = [
            pyslha.Decay(br=1.0000, ids=[1000021, 1], parentid=1000001, nda=2)
        ]
        decayRes[1000002] = [
            pyslha.Decay(br=0.5000, ids=[1000024, 1], parentid=1000002, nda=2),
            pyslha.Decay(br=0.2500, ids=[1000021, 2], parentid=1000002, nda=2),
            pyslha.Decay(br=0.2500, ids=[1000023, 2], parentid=1000002, nda=2)
        ]
        decayRes[2000002] = [
            pyslha.Decay(br=1.0000, ids=[1000021, 2], parentid=2000002, nda=2)
        ]
        decayRes[1000021] = [
            pyslha.Decay(br=0.2500,
                         ids=[-1000024, 4, -3],
                         parentid=1000021,
                         nda=3),
            pyslha.Decay(br=0.3750,
                         ids=[1000024, -2, 1],
                         parentid=1000021,
                         nda=3),
            pyslha.Decay(br=0.1250,
                         ids=[1000024, -4, 3],
                         parentid=1000021,
                         nda=3),
            pyslha.Decay(br=0.1250,
                         ids=[1000023, -2, 2],
                         parentid=1000021,
                         nda=3),
            pyslha.Decay(br=0.1250,
                         ids=[1000023, -6, 6],
                         parentid=1000021,
                         nda=3)
        ]

        for pdg in decayRes:
            self.assertTrue(compareDecays(decayDict[pdg].decays,
                                          decayRes[pdg]))
            self.assertEqual(decayDict[pdg].totalwidth, float('inf'))

        for pdg in decayDict:
            if not decayDict[pdg].decays:
                self.assertEqual(decayDict[pdg].totalwidth, 0.)
def decompose(slhafile,
              sigcut=.1 * fb,
              doCompress=False,
              doInvisible=False,
              minmassgap=-1. * GeV,
              useXSecs=None):
    """
    Perform SLHA-based decomposition.
    
    :param sigcut: minimum sigma*BR to be generated, by default sigcut = 0.1 fb
    :param doCompress: turn mass compression on/off
    :param doInvisible: turn invisible compression on/off
    :param minmassgap: maximum value (in GeV) for considering two R-odd particles
                       degenerate (only relevant for doCompress=True)
    :param useXSecs: optionally a dictionary with cross sections for pair
                 production, by default reading the cross sections
                 from the SLHA file.
    :returns: list of topologies (TopologyList object)

    """
    t1 = time.time()

    if doCompress and minmassgap / GeV < 0.:
        logger.error(
            "Asked for compression without specifying minmassgap. Please set minmassgap."
        )
        raise SModelSError()

    if type(sigcut) == type(1.):
        sigcut = sigcut * fb

    try:
        f = pyslha.readSLHAFile(slhafile)
    except pyslha.ParseError as e:
        logger.error("The file %s cannot be parsed as an SLHA file: %s" %
                     (slhafile, e))
        raise SModelSError()

    # Get cross section from file
    xSectionList = crossSection.getXsecFromSLHAFile(slhafile, useXSecs)
    # Get BRs and masses from file
    brDic, massDic = _getDictionariesFromSLHA(slhafile)
    # Only use the highest order cross sections for each process
    xSectionList.removeLowerOrder()
    # Order xsections by PDGs to improve performance
    xSectionList.order()

    # Get maximum cross sections (weights) for single particles (irrespective
    # of sqrtS)
    maxWeight = {}
    for pid in xSectionList.getPIDs():
        maxWeight[pid] = xSectionList.getXsecsFor(pid).getMaxXsec()

    # Generate dictionary, where keys are the PIDs and values
    # are the list of cross sections for the PID pair (for performance)
    xSectionListDict = {}
    for pids in xSectionList.getPIDpairs():
        xSectionListDict[pids] = xSectionList.getXsecsFor(pids)

    # Create 1-particle branches with all possible mothers
    branchList = []
    for pid in maxWeight:
        branchList.append(Branch())
        branchList[-1].PIDs = [[pid]]
        if not pid in massDic:
            logger.error(
                "pid %d does not appear in masses dictionary %s in slhafile %s"
                % (pid, massDic, slhafile))
        branchList[-1].masses = [massDic[pid]]
        branchList[-1].maxWeight = maxWeight[pid]

    # Generate final branches (after all R-odd particles have decayed)
    finalBranchList = decayBranches(branchList, brDic, massDic, sigcut)
    # Generate dictionary, where keys are the PIDs and values are the list of branches for the PID (for performance)
    branchListDict = {}
    for branch in finalBranchList:
        if len(branch.PIDs) != 1:
            logger.error("During decomposition the branches should \
                            not have multiple PID lists!")
            return False
        if branch.PIDs[0][0] in branchListDict:
            branchListDict[branch.PIDs[0][0]].append(branch)
        else:
            branchListDict[branch.PIDs[0][0]] = [branch]
    for pid in xSectionList.getPIDs():
        if not pid in branchListDict: branchListDict[pid] = []

    #Sort the branch lists by max weight to improve performance:
    for pid in branchListDict:
        branchListDict[pid] = sorted(branchListDict[pid],
                                     key=lambda br: br.maxWeight,
                                     reverse=True)

    smsTopList = topology.TopologyList()
    # Combine pairs of branches into elements according to production
    # cross section list
    for pids in xSectionList.getPIDpairs():
        weightList = xSectionListDict[pids]
        minBR = (sigcut / weightList.getMaxXsec()).asNumber()
        if minBR > 1.: continue
        for branch1 in branchListDict[pids[0]]:
            BR1 = branch1.maxWeight / maxWeight[
                pids[0]]  #Branching ratio for first branch
            if BR1 < minBR: break  #Stop loop if BR1 is already too low
            for branch2 in branchListDict[pids[1]]:
                BR2 = branch2.maxWeight / maxWeight[
                    pids[1]]  #Branching ratio for second branch
                if BR2 < minBR: break  #Stop loop if BR2 is already too low

                finalBR = BR1 * BR2
                if type(finalBR) == type(1. * fb):
                    finalBR = finalBR.asNumber()
                if finalBR < minBR:
                    continue  # Skip elements with xsec below sigcut

                if len(branch1.PIDs) != 1 or len(branch2.PIDs) != 1:
                    logger.error("During decomposition the branches should \
                            not have multiple PID lists!")
                    return False

                newElement = element.Element([branch1, branch2])
                newElement.weight = weightList * finalBR
                allElements = [newElement]
                # Perform compression
                if doCompress or doInvisible:
                    allElements += newElement.compressElement(
                        doCompress, doInvisible, minmassgap)

                for el in allElements:
                    el.sortBranches(
                    )  #Make sure elements are sorted BEFORE adding them
                    smsTopList.addElement(el)
    smsTopList._setElementIds()

    logger.debug("slhaDecomposer done in %.2f s." % (time.time() - t1))
    return smsTopList
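A minimal call sketch, assuming the SModelS units fb and GeV are imported as in the module above; the SLHA file name is a placeholder:

topList = decompose('gluino_squarks.slha', sigcut=0.1 * fb,
                    doCompress=True, doInvisible=True, minmassgap=5. * GeV)
print(topList)   # TopologyList with the decomposed simplified-model topologies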
def main(parfile, verbose):
    """
    Submit parallel jobs using the parameter file.

    :param parfile: name of the parameter file.
    :param verbose: level of debugging messages.
    """
    level = verbose.lower()
    levels = {
        "debug": logging.DEBUG,
        "info": logging.INFO,
        "warn": logging.WARNING,
        "warning": logging.WARNING,
        "error": logging.ERROR
    }
    if not level in levels:
        logger.error("Unknown log level ``%s'' supplied!" % level)
        sys.exit()
    logger.setLevel(level=levels[level])

    parser = ConfigParserExt()
    ret = parser.read(parfile)
    if ret == []:
        logger.error("No such file or directory: '%s'" % parfile)
        sys.exit()

    if not parser.has_option('options', 'input'):
        logger.error("An input file or folder must be defined.")
        sys.exit()
    else:
        inputF = parser.get('options', 'input')
        if os.path.isfile(inputF):
            inputFiles = [os.path.abspath(inputF)]
        elif "*" in inputF:
            inputFiles = [os.path.abspath(f) for f in glob.glob(inputF)]
        elif os.path.isdir(inputF):
            inputFiles = [
                os.path.abspath(os.path.join(inputF, f))
                for f in os.listdir(inputF)
                if os.path.isfile(os.path.join(inputF, f))
            ]
        else:
            logger.error("Input format %s not accepted" % inputF)
            sys.exit()

    parserList = []
    for f in inputFiles:
        newParser = ConfigParserExt()
        newParser.read_dict(parser.toDict(raw=True))
        newParser.set("CheckMateParameters", "SLHAFile", f)
        newParser.set("CheckMateParameters", "Name",
                      os.path.splitext(os.path.basename(f))[0])
        newParser.set(
            "CheckMateParameters", "OutputDirectory",
            os.path.abspath(
                parser.get("CheckMateParameters", "OutputDirectory")))
        #Get tags of processes:
        processTags = [
            tag for tag in newParser.sections() if
            (tag.lower() != 'options' and tag.lower() != 'checkmateparameters')
        ]

        #Get xsec dictionary:
        useSLHA = False
        unit = 'PB'
        xsecDict = {}
        if newParser.has_option("options", "xsecUnit"):
            unit = newParser.get("options", "xsecUnit")
        if newParser.has_option("options", "useSLHAxsecs"):
            useSLHA = newParser.get("options", "useSLHAxsecs")
            if not isinstance(useSLHA, dict):
                logger.error(
                    "useSLHAxsecs should be defined as dictionary with a key for each CheckMate process."
                )
                sys.exit()

            xsecsAll = pyslha.readSLHAFile(f).xsections
            for pTag, xsecTuple in useSLHA.items():
                if not xsecTuple in xsecsAll: continue
                xsecs = xsecsAll[xsecTuple].xsecs
                xsecs = sorted(xsecs,
                               key=lambda xsec: xsec.qcd_order,
                               reverse=True)
                xsecDict[pTag] = xsecs[0]

        for pTag in processTags:
            pName = newParser.get(pTag, "Name")
            newParser.set(pTag, "MGparam", f)
            if useSLHA:
                if pTag in xsecDict:
                    newParser.set(pTag, "XSect",
                                  "%1.5g %s" % (xsecDict[pTag].value, unit))
                if pName in xsecDict:
                    newParser.set(pTag, "XSect",
                                  "%1.5g %s" % (xsecDict[pName].value, unit))

        parserList.append(newParser)

    ncpus = int(parser.get("options", "ncpu"))
    if ncpus < 0:
        ncpus = multiprocessing.cpu_count()
    ncpus = min(ncpus, len(parserList))
    pool = multiprocessing.Pool(processes=ncpus)
    children = []
    #Loop over parsers and submit jobs
    logger.info("Submitting %i jobs over %i cores" % (len(parserList), ncpus))
    for newParser in parserList:
        logger.debug("Submitting job for file %s" %
                     (newParser.get("CheckMateParameters", "SLHAFile")))
        parserDict = newParser.toDict(
            raw=False)  #Must convert to dictionary for pickling
        p = pool.apply_async(RunCheckMate, args=(parserDict, ))
        children.append(p)
        time.sleep(10)

    #Wait for jobs to finish:
    output = [p.get() for p in children]
    for out in output:
        print(out)
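# A minimal usage sketch (the .ini name and the helper name runScan are ours). The
# parameter file is assumed to contain an [options] section with at least 'input' and
# 'ncpu', a [CheckMateParameters] section (whose OutputDirectory is resolved above),
# and one section per CheckMate process:
def runScan(parfile='checkmate_scan.ini'):
    main(parfile, 'info')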
def computeXSec(sqrts,
                maxOrder,
                nevts,
                slhafile,
                lhefile=None,
                unlink=True,
                loFromSlha=None,
                pythiacard=None):
    """
    Run pythia and compute SUSY cross sections for the input SLHA file.

    :param sqrts: sqrt{s} to run Pythia, given as a unum (e.g. 7.*TeV)
    :param maxOrder: maximum order to compute the cross section, given as an integer
                if maxOrder == 0, compute only LO pythia xsecs
                if maxOrder == 1, apply NLO K-factors from NLLfast (if available)
                if maxOrder == 2, apply NLO+NLL K-factors from NLLfast (if available)
    :param nevts: number of events for pythia run
    :param slhafile: SLHA file
    :param lhefile: LHE file. If None, do not write pythia output to file. If
                    file does not exist, write pythia output to this file name. If
                    file exists, read LO xsecs from this file (does not run pythia).
    :param unlink: Clean up temp directory after running pythia

    :param loFromSlha: If True, uses the LO xsecs from the SLHA file to compute the
                       higher order xsecs
    :param pythiacard: Optional path to pythia.card. If None, uses /etc/pythia.card

    :returns: XSectionList object

    """
    if not os.path.isfile(slhafile):
        logger.error("SLHA file %s not found.", slhafile)
        raise SModelSError()
    try:
        f = pyslha.readSLHAFile(slhafile)
    except pyslha.ParseError as e:
        logger.error("File cannot be parsed as SLHA file: %s" % e)
        raise SModelSError()

    if isinstance(sqrts, (int, float)):
        logger.warning("sqrt(s) given as scalar, will add TeV as unit.")
        sqrts = float(sqrts) * TeV

    smaxorder = {"LO": 0, "NLO": 1, "NLL": 2}
    if maxOrder in smaxorder.keys():
        logger.warning("maxorder given as string, please supply integer.")
        maxOrder = smaxorder[maxOrder]

    if lhefile:
        if os.path.isfile(lhefile):
            logger.warning("Using LO cross sections from " + lhefile)
        else:
            logger.info("Writing pythia LHE output to " + lhefile)
    if loFromSlha:
        logger.info("Using LO cross sections from " + slhafile)
        xsecsInfile = crossSection.getXsecFromSLHAFile(slhafile)
        loXsecs = crossSection.XSectionList()
        for xsec in xsecsInfile:
            if xsec.info.order == 0 and xsec.info.sqrts == sqrts:
                loXsecs.add(xsec)

    else:
        if not lhefile or not os.path.isfile(lhefile):
            lheFile = runPythia(slhafile,
                                nevts,
                                sqrts / TeV,
                                lhefile,
                                unlink=unlink,
                                pythiacard=pythiacard)
        else:
            lheFile = open(lhefile, 'r')
        loXsecs = crossSection.getXsecFromLHEFile(lheFile)
    xsecs = loXsecs
    wlabel = str(int(sqrts / TeV)) + ' TeV'
    if maxOrder == 0:
        wlabel += ' (LO)'
    elif maxOrder == 1:
        wlabel += ' (NLO)'
    elif maxOrder >= 2:
        wlabel += ' (NLO+NLL)'
    for ixsec, xsec in enumerate(xsecs):
        xsecs[ixsec].info.label = wlabel
        xsecs[ixsec].info.order = maxOrder

    if maxOrder > 0:
        pIDs = loXsecs.getPIDpairs()
        for pID in pIDs:
            k = 0.
            kNLO, kNLL = nllFast.getKfactorsFor(pID, sqrts, slhafile)
            if maxOrder == 1 and kNLO:
                k = kNLO
            elif maxOrder == 2 and kNLL and kNLO:
                k = kNLO * kNLL
            elif maxOrder > 2 and kNLL and kNLO:
                logger.warning("Unkown xsec order, using NLL+NLO k-factor, "
                               "if available")
                k = kNLO * kNLL
            k = float(k)
            for i, xsec in enumerate(xsecs):
                if set(xsec.pid) == set(pID):
                    # Apply k-factor
                    xsecs[i] = xsec * k

    # Remove zero cross sections
    while len(xsecs) > 0 and xsecs.getMinXsec() == 0. * pb:
        for xsec in xsecs:
            if xsec.value == 0. * pb:
                xsecs.delete(xsec)
                break
    if maxOrder > 0 and len(xsecs) == 0:
        logger.warning("No NLO or NLL cross sections available.")

    return xsecs
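# A minimal usage sketch (the SLHA file name and the helper name are ours; TeV is the
# unit object already used in the function body): compute LO cross sections from a
# 10k-event Pythia run at 8 TeV and apply NLO+NLL k-factors where NLLfast provides them.
def exampleComputeXSec(slhafile='gluino_squarks.slha'):
    xsecs = computeXSec(8. * TeV, maxOrder=2, nevts=10000, slhafile=slhafile)
    for xsec in xsecs:
        print(xsec.info.label, xsec.pid, xsec.value)
    return xsecs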
Example #34
#!/usr/bin/env python
'''Random search of neutrino solutions with SPheno'''
import numpy as np
import commands
import pyslha
## Run Spheno with lesHouches.in
commands.getoutput('cp LesHouches.in.neutrinofits-ON LesHouches.in')
commands.getoutput('./bin/SPheno')
##Determine mu
spc,decays=pyslha.readSLHAFile('SPheno.spc')
veps=np.asarray(spc['RVKAPPA'].entries.values())
vsnvev=np.asarray(spc['RVSNVEV'].entries.values())
vlam=np.asarray(spc['SPHENORP'].entries.values()[0:3])
vd=spc['SPHENORP'].entries[15]
mu=((vlam-vd*veps)/vsnvev)[0]
##To write rp parameters and neutrino 
datos= open('solutions.out','w')
#begin the iterations 
for i in range(1,11):
    if i%100==0:
        print "i=",i
#random generation of epsilon_i and lambda_i
#    sgn=(-1)**np.random.random_integers(1,2,3)
    sgn=np.array([1.0,1.0,-1.0])
#    epsi=np.random.uniform(-1,1,3)
#    epsi=np.array([np.random.uniform(3e-2,0.6),np.random.uniform(1e-5,1),np.random.uniform(1e-5,1)])*(-1)**np.random.random_integers(1,2,3)
    A1=np.log10([8e-2,8e-2,8e-2])    
    B1=np.log10([1e-1,1e-1,1e-1])    
    epsi=10**((B1-A1)*np.random.uniform(0,1,3)+A1)*sgn    
#    lambi=np.random.uniform(-1,1,3)
#    lambi=np.array([np.random.uniform(1e-5,6e-2),np.random.uniform(3e-2,7e-1),np.random.uniform(3e-2,7e-1)])*(-1)**np.random.random_integers(1,2,3)
Example #35
import pyslha
# sf = pyslha.readSLHAFile("slha_decay_table_CMSSM_realistic_point3.txt")
sf = pyslha.readSLHAFile("slha_decay_table_CMSSM_realistic_point4.txt")
# sf = pyslha.readSLHAFile("susyhit_softsusy_slha.out")


squarklist = [1000001,1000002,1000003,1000004]
sleptonlist = [2000011,2000013,1000011,1000013]

branching_ratio_gluino = [] # List of branching ratios for gluino to different squarks
branching_ratio_squarks = [] # List of branching ratio for each of the squarks to neutralino2
for squark in squarklist:
	for decaypair in sf.decays[squark].decays:
		if decaypair.ids[0] == 1000023:
			branching_ratio_squarks.append(decaypair.br)
	for gluinodecays in sf.decays[1000021].decays:
		if gluinodecays.ids[0] == squark:
			branching_ratio_gluino.append(gluinodecays.br)
print "branching_ratio_squarks =", branching_ratio_squarks
print "branching_ratio_gluino =", branching_ratio_gluino

branching_ratio_neutralino2 = []
for neutralinodecays in sf.decays[1000023].decays:
	if abs(neutralinodecays.ids[0]) in sleptonlist:
		branching_ratio_neutralino2.append(neutralinodecays)
print "branching_ratio_neutralino2 =", branching_ratio_neutralino2

branching_ratio_sleptons = [] # List of branching ratio for each of the sleptons to neutralino1
for slepton in sleptonlist:
	for decaypair in sf.decays[slepton].decays:
		if decaypair.ids[0] == 1000022:
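# The script above is cut off at this point. A small, self-contained sketch of the same
# idea using the pyslha accessors already shown (sf.decays[pid].decays, .ids, .br);
# the helper name is ours, and the chain it sums is gluino -> squark -> neutralino2:
def chained_br(sf, mother, intermediates, daughter):
    """Sum BR(mother -> q + X) * BR(q -> daughter + X) over the intermediate ids q."""
    total = 0.0
    for q in intermediates:
        br_mother_q = sum(d.br for d in sf.decays[mother].decays if q in d.ids)
        br_q_daughter = sum(d.br for d in sf.decays[q].decays if daughter in d.ids)
        total += br_mother_q * br_q_daughter
    return total

# e.g. chained_br(sf, 1000021, squarklist, 1000023)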
Example #36
def decompose(slhafile, sigcut=.1 * fb, doCompress=False, doInvisible=False,
              minmassgap=-1.*GeV, useXSecs=None):
    """
    Perform SLHA-based decomposition.

    :param slhafile: the SLHA input file. May be a URL (only http and ftp are supported).
    :param sigcut: minimum sigma*BR to be generated, by default sigcut = 0.1 fb
    :param doCompress: turn mass compression on/off
    :param doInvisible: turn invisible compression on/off
    :param minmassgap: maximum value (in GeV) for considering two R-odd particles
                       degenerate (only relevant for doCompress=True)
    :param useXSecs: optionally a dictionary with cross sections for pair
                 production, by default reading the cross sections
                 from the SLHA file.
    :returns: list of topologies (TopologyList object)

    """
    if slhafile.startswith("http") or slhafile.startswith("ftp"):
        logger.info ( "asked for remote slhafile %s. will fetch it." % slhafile )
        import requests
        import os.path
        r=requests.get(slhafile)
        if r.status_code != 200:
            logger.error ( "could not retrieve remote file %d: %s" % ( r.status_code, r.reason ) )
            raise SModelSError()
        basename = os.path.basename ( slhafile )
        f=open ( basename, "w" )
        f.write ( r.text )
        f.close()
        slhafile = basename
    t1 = time.time()

    if doCompress and minmassgap / GeV < 0.:
        logger.error("Asked for compression without specifying minmassgap. Please set minmassgap.")        
        raise SModelSError()

    if type(sigcut) == type(1.):
        sigcut = sigcut * fb

    try:
        f=pyslha.readSLHAFile ( slhafile )
    except pyslha.ParseError as e:
        logger.error ( "The file %s cannot be parsed as an SLHA file: %s" % (slhafile, e) )
        raise SModelSError()

    # Get cross section from file
    xSectionList = crossSection.getXsecFromSLHAFile(slhafile, useXSecs)
    # Get BRs and masses from file
    brDic, massDic = _getDictionariesFromSLHA(slhafile)
    # Only use the highest order cross sections for each process
    xSectionList.removeLowerOrder()
    # Order xsections by PDGs to improve performance
    xSectionList.order()
    #Reweight decays by fraction of prompt decays and add fraction of long-lived
    brDic = _getPromptDecays(slhafile,brDic)

    # Get maximum cross sections (weights) for single particles (irrespective
    # of sqrtS)
    maxWeight = {}
    for pid in xSectionList.getPIDs():
        maxWeight[pid] = xSectionList.getXsecsFor(pid).getMaxXsec()    

    # Generate dictionary, where keys are the PIDs and values 
    # are the list of cross sections for the PID pair (for performance)
    xSectionListDict = {}    
    for pids in xSectionList.getPIDpairs():
        xSectionListDict[pids] = xSectionList.getXsecsFor(pids)

    # Create 1-particle branches with all possible mothers
    branchList = []
    for pid in maxWeight:
        branchList.append(Branch())
        branchList[-1].PIDs = [[pid]]
        if not pid in massDic:
            logger.error ( "pid %d does not appear in masses dictionary %s in slhafile %s" % 
                    ( pid, massDic, slhafile ) )
        branchList[-1].masses = [massDic[pid]]
        branchList[-1].maxWeight = maxWeight[pid]

    # Generate final branches (after all R-odd particles have decayed)
    finalBranchList = decayBranches(branchList, brDic, massDic, sigcut)
    # Generate dictionary, where keys are the PIDs and values are the list of branches for the PID (for performance)
    branchListDict = {}
    for branch in finalBranchList:
        if len(branch.PIDs) != 1:
            logger.error("During decomposition the branches should \
                            not have multiple PID lists!")
            return False   
        if branch.PIDs[0][0] in branchListDict:
            branchListDict[branch.PIDs[0][0]].append(branch)
        else:
            branchListDict[branch.PIDs[0][0]] = [branch]
    for pid in xSectionList.getPIDs():
        if not pid in branchListDict: branchListDict[pid] = []

    #Sort the branch lists by max weight to improve performance:
    for pid in branchListDict:
        branchListDict[pid] = sorted(branchListDict[pid], 
                                     key=lambda br: br.maxWeight, reverse=True)
    
    smsTopList = topology.TopologyList()
    # Combine pairs of branches into elements according to production
    # cross section list
    for pids in xSectionList.getPIDpairs():
        weightList = xSectionListDict[pids]
        minBR = (sigcut/weightList.getMaxXsec()).asNumber()
        if minBR > 1.: continue
        for branch1 in branchListDict[pids[0]]:
            BR1 = branch1.maxWeight/maxWeight[pids[0]]  #Branching ratio for first branch            
            if BR1 < minBR: break  #Stop loop if BR1 is already too low            
            for branch2 in branchListDict[pids[1]]:
                BR2 = branch2.maxWeight/maxWeight[pids[1]]  #Branching ratio for second branch
                if BR2 < minBR: break  #Stop loop if BR2 is already too low
                
                finalBR = BR1*BR2                
                if type(finalBR) == type(1.*fb):
                    finalBR = finalBR.asNumber()
                if finalBR < minBR: continue # Skip elements with xsec below sigcut

                if len(branch1.PIDs) != 1 or len(branch2.PIDs) != 1:
                    logger.error("During decomposition the branches should \
                            not have multiple PID lists!")
                    return False    

                newElement = element.Element([branch1, branch2])
                newElement.weight = weightList*finalBR
                newElement.sortBranches()  #Make sure elements are sorted BEFORE adding them
                smsTopList.addElement(newElement)
    
    smsTopList.compressElements(doCompress, doInvisible, minmassgap)
    smsTopList._setElementIds()

    logger.debug("slhaDecomposer done in %.2f s." % (time.time() -t1 ) )
    return smsTopList
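# A minimal usage sketch (the SLHA file name is ours; fb and GeV are the unit objects
# already used above): decompose a spectrum, keeping sigma*BR >= 0.05 fb and allowing
# compression of mass gaps below 5 GeV.
if __name__ == '__main__':
    topos = decompose('gluino_squarks.slha', sigcut=0.05 * fb,
                      doCompress=True, doInvisible=True, minmassgap=5. * GeV)
    print(topos)  # TopologyList holding the surviving simplified-model topologies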
Example #37
#!/usr/bin/env python

import numpy as np
import commands
import pyslha
import sys

##To write rp parameters and neutrino 
datos= open('solutions.out','w')
for m0 in [200, 500, 700, 1000]:
    print "m0=", m0
    for tanb in [3, 10, 30, 50]:
        print "tanb=", tanb
        LesHouchesFit,decaysFit=pyslha.readSLHAFile('LesHouches_FIT_ON.in')
        LesHouchesFit['MINPAR'].entries[1]=m0 #M0
        LesHouchesFit['MINPAR'].entries[3]=1.0*tanb #tanbeta
        LesHouchesFit2={'AMODSEL':LesHouchesFit['MODSEL'],'BSMINPUTS':LesHouchesFit['SMINPUTS'],'CMINPAR':LesHouchesFit['MINPAR'],'GSPhenoInput':LesHouchesFit['SPHENOINPUT']}
        pyslha.writeSLHAFile('LesHouches.in',LesHouchesFit2,decaysFit)
#        sys.exit(0)
## Run Spheno with lesHouches.in
#    commands.getoutput('cp LesHouches.in.neutrinofits-ON LesHouches.in')
        commands.getoutput('./bin/SPheno')
##Determine mu
        spc,decays=pyslha.readSLHAFile('SPheno.spc')
        veps=np.asarray(spc['RVKAPPA'].entries.values())
        vsnvev=np.asarray(spc['RVSNVEV'].entries.values())
        vlam=np.asarray(spc['SPHENORP'].entries.values()[0:3])
        vd=spc['SPHENORP'].entries[15]
        mu=((vlam-vd*veps)/vsnvev)[0]
##begin the iterations 
        LesHouches,decays=pyslha.readSLHAFile('LesHouches_MASS.in')
Example #38
def debugFile(slhafile, nevts=50000, forceDegenerate=False):
    #Individual file debugging:

    if forceDegenerate:
        f = pyslha.readSLHAFile(slhafile)
        masses = f.blocks['MASS']
        squarksMasses = [
            abs(mass) for pid, mass in masses.items() if pid in squarks
        ]
        avgmass = sum(squarksMasses) / len(squarksMasses)
        if abs(max(squarksMasses) - avgmass) > 0.1 or abs(min(squarksMasses) - avgmass) > 0.1:
            for pid in squarks:
                f.blocks['MASS'][pid] = avgmass

            slhaF, slhafile_new = tempfile.mkstemp(suffix='.slha', dir='./')
            os.write(slhaF, f.write())
            os.close(slhaF)
            logger.warning(
                "Testing degenerate squarks for %s with average mass %s" %
                (slhafile, avgmass))
            # Recurse on the averaged spectrum only when one was actually written;
            # otherwise slhafile_new would be undefined here.
            comp = debugFile(slhafile_new, nevts=nevts, forceDegenerate=False)
            os.remove(slhafile_new)
            return comp

    computer6 = xsecComputer.XSecComputer(LO, nevts, 6)
    computer8 = xsecComputer.XSecComputer(LO, nevts, 8)
    w6 = computer6.compute(8 * TeV, slhafile,
                           pythiacard='./my_pythia6.card').getDictionary()
    w8 = computer8.compute(8 * TeV, slhafile,
                           pythiacard='./my_pythia8.cfg').getDictionary()

    #     print 'Pythia 6:'
    #     for key,val in sorted(w6.items()):
    #         print key,val.values()[0]
    #
    #     print 'Pythia 8:'
    #     for key,val in sorted(w8.items()):
    #         print key,val.values()[0]

    #Remove the antisbottom-gluino xsec (seems to be missing in Pythia 8):
    #    if (-1000005, 1000021) in w6:
    #        w6.pop((-1000005, 1000021))
    #Remove the antisdown-gluino xsec (seems to be missing in Pythia 8):
    #    if (-1000001, 1000021) in w6:
    #        w6.pop((-1000001, 1000021))
    #    if (-2000001, 1000021) in w6:
    #        w6.pop((-2000001, 1000021))
    #    if (-1000003, 1000021) in w6:
    #        w6.pop((-1000003, 1000021))
    #Remove the antisbottom-gluino xsec (seems to be missing in Pythia 8):
    #    if (-1000024, 1000021) in w6:
    #        totxsec = w6[(-1000024, 1000021)].values()[0]
    #        if (1000021, 1000024) in w6:
    #            totxsec += w6[(1000021, 1000024)].values()[0]
    #        w6.pop((-1000024, 1000021))
    #        w6[(1000021, 1000024)] = {'8 TeV (LO)' : totxsec}

    comp = compareXSections(w6, w8, nevts, relError=0.1)

    return comp
Example #39
    def getKfactorsFor(self, pIDs, slhafile, pdf='cteq'):
        """
        Read the NLLfast grid and return a pair of k-factors (NLO and NLL) for
        the given PID pair. The slhafile is used to obtain the SUSY spectrum.

        :returns: (kNLO, kNLL), or (None, None) if NLLfast does not contain the process.

        """
        if not os.path.isfile(slhafile):
            logger.error("SLHA file %s not found", slhafile)
            return False

        energy = str(int(self.sqrts)) + 'TeV'
        # Get process name (in NLLfast notation)
        process = self._getProcessName(pIDs)
        if not process:
            # Return k-factors = None, if NLLfast does not have the process
            return (None, None)

        # Obtain relevant masses
        readfile = pyslha.readSLHAFile(slhafile)
        masses = readfile.blocks['MASS']
        check_pids = squarks + gluinos + third
        for check in check_pids:
            if check not in masses.entries:
                logger.error("cannot compute k factor for pdgid %d: "
                             "no particle mass given. will set mass to inf." % check)
                masses.entries[check] = 1.e10

        gluinomass = abs(masses.entries[1000021])
        squarkmass = sum([abs(masses.entries[pid])
                          for pid in squarks]) / 8.
        pid1, pid2 = sorted(pIDs)
        if pid1 in antisquarks and pid2 in squarks:
            squarkmass = (abs(masses.entries[abs(pid1)]) +
                          abs(masses.entries[pid2])) / 2.
        elif pid1 in squarks and pid2 in squarks:
            squarkmass = (abs(masses.entries[pid1]) + abs(masses.entries[pid2])) / 2.
        elif abs(pid1) == pid2 and pid2 in third:
            squarkmass = abs(masses.entries[abs(pid1)])

        #if tool == None:
        #    logger.warning("No NLLfast data for sqrts = " + str(sqrts))
        #    return (None, None)
        nllpath = self.installDirectory()
        # self.pathOfExecutable()
        self.checkInstallation()
        nll_output = self._compute ( energy, pIDs, pdf, squarkmass, gluinomass )

        # If run was successful, return k-factors:
        if "K_NLO" in nll_output:
            # NLLfast ran ok, try to get the k-factors
            kFacs = self._getKfactorsFrom(nll_output)
            if not kFacs or min(kFacs) <= 0.:
                logger.warning("Error obtaining k-factors")
                return (None, None)
            else:
                return kFacs
        # If run was not successful, check for decoupling error messages:
        elif not "too low/high" in nll_output.lower():
            logger.warning("Error running NLLfast")
            return (None, None)

        # Check for decoupling cases with a decoupling grid (only for sb and gg)
        doDecoupling = False
        if "too low/high gluino" in nll_output.lower():        
            if gluinomass > 500. and process == 'sb': 
                doDecoupling = True
                dcpl_mass = gluinomass
        elif "too low/high squark" in nll_output.lower():
            if squarkmass > 500. and process == 'gg':
                doDecoupling = True
                dcpl_mass = squarkmass

        # If the process does not have a decoupled grid, return None:
        if not doDecoupling:
            logger.warning("Masses of (q,g)=(%s,%s) out of NLLfast grid for %s, %s" % ( squarkmass, gluinomass, process, energy ))
            return (None, None)

        # Obtain k-factors from the NLLfast decoupled grid
        kfacs = self._getDecoupledKfactors(process,energy,pdf,min(gluinomass,squarkmass))
        # Decoupling limit is satisfied, do not interpolate
        if not kfacs:
            logger.warning("Error obtaining k-factors from the NLLfast decoupled grid for " + process)
            return (None, None)
        elif dcpl_mass/min(gluinomass,squarkmass) > 10.:    
            return kfacs
        # Interpolate between the non-decoupled and decoupled grids
        else:
            kFacsVector = [[10.*min(gluinomass,squarkmass),kfacs]]  #First point for interpolation (decoupled grid)
            kfacs = None        
            while not kfacs and dcpl_mass > 500.:
                dcpl_mass -= 100.  # Reduce decoupled mass, until NLLfast produces results
                if process == 'sb': nllinput = (process, pdf, squarkmass, dcpl_mass)
                else:  nllinput = (process, pdf, dcpl_mass, gluinomass)
                nll_output = self._runForDecoupled ( energy, nllinput )
                kfacs = self._getKfactorsFrom(nll_output)        
            kFacsVector.append([dcpl_mass, kfacs]) #Second point for interpolation (non-decoupled grid)

        if len(kFacsVector) < 2:
            logger.warning("Not enough points for interpolation in the decoupling "
                           "limit")
            return (None, None)
        else:
            # Interpolate k-factors
            kFacs = self._interpolateKfactors(kFacsVector,
                            max(squarkmass, gluinomass))
        return kFacs
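# The decoupling branch above builds kFacsVector = [[mass1, (kNLO1, kNLL1)],
# [mass2, (kNLO2, kNLL2)]] and interpolates at the relevant squark/gluino mass. The
# real _interpolateKfactors is not shown in this snippet, so the following is only a
# sketch of a linear interpolation in the heavy mass (helper name and numbers are ours):
def interpolate_kfactors(kfacs_vector, mass):
    """Linearly interpolate (kNLO, kNLL) between the two mass points in kfacs_vector."""
    (m1, k1), (m2, k2) = kfacs_vector
    frac = (mass - m1) / (m2 - m1)
    return tuple(a + frac * (b - a) for a, b in zip(k1, k2))

# e.g. interpolate_kfactors([[5000., (1.8, 1.3)], [900., (1.6, 1.2)]], 1200.)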