Example #1
    def __init__(self, path=None):

        self.path = path
        if path:
            logger.debug('Creating object based on  %s' % self.path)

            #Open the info file and get the information:
            if not os.path.isfile(path):
                logger.error("Info file %s not found" % path)
                raise SModelSError()
            from smodels.tools.stringTools import concatenateLines
            infoFile = open(self.path)
            content = concatenateLines(infoFile.readlines())
            infoFile.close()

            #Get tags in info file:
            tags = [line.split(':', 1)[0].strip() for line in content]
            for i, tag in enumerate(tags):
                if not tag: continue
                line = content[i]
                value = line.split(':', 1)[1].strip()
                if tags.count(tag) == 1:
                    self.addInfo(tag, value)
                else:
                    logger.info("Ignoring unknown field %s found in file %s" %
                                (tag, self.path))
                    continue
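The constructor above parses a plain "tag: value" info file. A rough standalone sketch of the same parsing pattern (the helper below is illustrative, not part of SModelS):

import logging

logger = logging.getLogger(__name__)

def parseInfoFile(path):
    """Collect 'tag: value' lines into a dict; duplicated tags are reported and skipped."""
    with open(path) as infoFile:
        lines = [l for l in infoFile if ':' in l]
    tags = [l.split(':', 1)[0].strip() for l in lines]
    values = {}
    for line in lines:
        tag, value = (x.strip() for x in line.split(':', 1))
        if tags.count(tag) == 1:
            values[tag] = value
        else:
            logger.info("Ignoring duplicated field %s", tag)
    return values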
Example #2
 def printFastlimBanner(self):
     """ check if fastlim appears in data.
         If yes, print a statement to stdout. """
     if not self.hasFastLim: return
     logger.info(
         "FastLim v1.1 efficiencies loaded. Please cite: arXiv:1402.0492, EPJC74 (2014) 11"
     )
Example #3
def runSetOfFiles(inputFiles, outputDir, parser, databaseVersion, listOfExpRes,
                    timeout, development, parameterFile, jobnr ):
    """
    Loop over all input files in inputFiles with testPoint

    :parameter inputFiles: list of input files to be tested
    :parameter outputDir: path to directory where output is to be stored
    :parameter parser: ConfigParser storing information from parameter.ini file
    :parameter databaseVersion: Database version (printed to output file)
    :parameter listOfExpRes: list of ExpResult objects to be considered
    :parameter timeout: set a timeout for one model point (0 means no timeout)
    :parameter development: turn on development mode (e.g. no crash report)
    :parameter parameterFile: parameter file, for crash reports
    :parameter jobnr: number of the process, in parallel mode; mostly for debugging.
    :returns: printers output
    """
    a={}
    n=len(inputFiles)
    t_tot = 0. ## total time
    for i,inputFile in enumerate(inputFiles):
        txt=""
        sjob=""
        if jobnr>0:
            sjob="%d: " % jobnr
        if n>5: ## tell where we are in the list, if the list has more than 5 entries
            txt="[%s%d/%d] " % ( sjob, i+1, n )
            if i > 3: ## give the average time spent per point
                txt="[%s%d/%d, t~%.1fs] " % ( sjob, i+1, n, t_tot/float(i) )
        if t_tot/float(i+1)>.1 or (i+1) % 10 == 0:
            ## if it is super fast, show only every 10th
            logger.info ( "Start testing %s%s" % (txt, os.path.relpath ( inputFile ) ) )
        t0=time.time()
        a[inputFile] = runSingleFile(inputFile, outputDir, parser, databaseVersion,
                                  listOfExpRes, timeout, development, parameterFile)
        t_tot += ( time.time() - t0 )
    return a
Example #4
 def __init__(self, path=None):
             
     self.path = path
     if path:
         logger.debug('Creating object based on  %s' %self.path)        
  
         #Open the info file and get the information:
         if not os.path.isfile(path):
             logger.error("Info file %s not found" % path)
             raise SModelSError()      
         from smodels.tools.stringTools import concatenateLines
         infoFile = open(self.path)
         content = concatenateLines ( infoFile.readlines() )
         infoFile.close()
         
         #Get tags in info file:
         tags = [line.split(':', 1)[0].strip() for line in content]
         for i,tag in enumerate(tags):
             if not tag: continue
             line = content[i]
             value = line.split(':',1)[1].strip()            
             if tags.count(tag) == 1:
                 self.addInfo(tag,value)
             else:
                 logger.info("Ignoring unknown field %s found in file %s" 
                             % (tag, self.path))
                 continue
Example #5
 def loadTextDatabase ( self ):
     """ simply loads the textdabase """
     if self.txt_meta.databaseVersion and len(self.expResultList)>0:
         logger.debug ( "Asked to load database, but has already been loaded. Ignore." )
         return
     logger.info ( "Parsing text database at %s" % self.txt_meta.pathname )
     self.expResultList = self._loadExpResults()
Example #6
 def createBinaryFile(self, filename=None):
     """ create a pcl file from the text database,
         potentially overwriting an old pcl file. """
     ## make sure we have a model to pickle with the database!
     if self.txt_meta == None:
         logger.error(
             "Trying to create database pickle, but no txt_meta defined.")
         raise SModelSError()
     logger.debug( "database timestamp: %s, filecount: %s" % \
                  ( time.ctime( self.txt_meta.mtime ), self.txt_meta.filecount ) )
     binfile = filename
     if binfile == None:
         binfile = self.pcl_meta.pathname
     if not hasattr(self,'databaseParticles') or \
         type(self.databaseParticles) == type(None):
         self._setParticles(self._getParticles())
     logger.debug(" * create %s" % binfile)
     with open(binfile, "wb") as f:
         logger.debug(" * load text database")
         self.loadTextDatabase()
         logger.debug(  " * write %s db version %s, format version %s, %s" % \
                 ( binfile, self.txt_meta.databaseVersion,
                   self.txt_meta.format_version, self.txt_meta.cTime() ) )
         # ptcl = serializer.HIGHEST_PROTOCOL
         ptcl = min(
             4, serializer.HIGHEST_PROTOCOL
         )  ## 4 is default protocol in python3.8, and highest protocol in 3.7
         serializer.dump(self.txt_meta, f, protocol=ptcl)
         serializer.dump(self.expResultList, f, protocol=ptcl)
         serializer.dump(self.databaseParticles, f, protocol=ptcl)
         logger.info("%s created." % (binfile))
Example #7
def testPoints(fileList, inDir, outputDir, parser, databaseVersion,
                 listOfExpRes, timeout, development, parameterFile):
    """
    Loop over all input files in fileList with testPoint, using ncpus CPUs
    defined in parser

    :param fileList: list of input files to be tested
    :param inDir: path to directory where input files are stored
    :param outputDir: path to directory where output is stored
    :param parser: ConfigParser storing information from parameter.ini file
    :param databaseVersion: Database version (printed to output files)
    :param listOfExpRes: list of ExpResult objects to be considered
    :param timeout: set a timeout for one model point (0 means no timeout)
    :param development: turn on development mode (e.g. no crash report)
    :param parameterFile: parameter file, for crash reports
    :returns: printer(s) output, if not run in parallel mode
    """
    if len( fileList ) == 0:
        logger.error ( "no files given." )
        return None

    cleanedList = _cleanList ( fileList, inDir )
    if len(cleanedList) == 1:
        return runSingleFile ( cleanedList[0], outputDir, parser, databaseVersion,
                               listOfExpRes, timeout, development, parameterFile )
    ncpus = _determineNCPus ( parser.getint("parameters", "ncpus"), len(cleanedList) )
    if ncpus == 1:
        logger.info ("Running SModelS in a single process" )
    else:
        logger.info ("Running SModelS in %d processes" % ncpus )

    if ncpus == 1:
        return runSetOfFiles( cleanedList, outputDir, parser, databaseVersion,
                              listOfExpRes, timeout, development, parameterFile, 0 )

    ### now split up for every fork
    chunkedFiles = [cleanedList[x::ncpus] for x in range(ncpus)]
    children = []
    for (i,chunk) in enumerate(chunkedFiles):
        pid=os.fork()
        logger.debug("Forking: %s %s %s " % ( i,pid,os.getpid() ) )
        if pid == 0:
            logger.debug("chunk #%d: pid %d (parent %d)." %
                    ( i, os.getpid(), os.getppid() ) )
            logger.debug( " `-> %s" % " ".join ( chunk ) )
            runSetOfFiles(chunk, outputDir, parser, databaseVersion,
                            listOfExpRes, timeout, development, parameterFile, i )
            os._exit(0) ## not sys.exit(), return, nor continue
        if pid < 0:
            logger.error ( "fork did not succeed! Pid=%d" % pid )
            sys.exit()
        if pid > 0:
            children.append ( pid )
    for child in children:
        r = os.waitpid ( child, 0 )
        logger.debug ( "child %d terminated: %s" % (child,r) )
    logger.debug ( "all children terminated" )
    logger.debug ( "returning no output, because we are in parallel mode" )
    return None
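The parallel branch above relies on the classic fork/waitpid pattern; a reduced, self-contained sketch (the worker callable is hypothetical):

import os

def runChunks(items, ncpus, worker):
    """Run worker over items in ncpus forked children, each taking every ncpus-th item."""
    children = []
    for i in range(ncpus):
        pid = os.fork()           # raises OSError if the fork fails
        if pid == 0:              # child: process its slice, then exit hard
            for item in items[i::ncpus]:
                worker(item)
            os._exit(0)           # not sys.exit(): skip parent cleanup in the child
        children.append(pid)      # parent: remember the child pid
    for child in children:
        os.waitpid(child, 0)      # wait for every child to terminate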
Example #8
 def openOutFile(self, filename, mode):
     """ creates and opens a data sink, 
         creates path if needed """
     d = os.path.dirname(filename)
     if not os.path.exists(d):
         os.makedirs(d)
         logger.info("creating directory %s" % d)
     return open(filename, mode)
Example #9
 def openOutFile(self, filename, mode ):
     """ creates and opens a data sink, 
         creates path if needed """
     d = os.path.dirname ( filename )
     if not os.path.exists ( d ):
         os.makedirs ( d )
         logger.info ( "creating directory %s" % d )
     return open ( filename, mode )
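On Python 3 the exists/makedirs pair can be collapsed with exist_ok=True; a minimal equivalent of the method above (a sketch, not the SModelS code):

import os

def openOutFile(filename, mode):
    """Open a data sink, creating the parent directory if needed."""
    d = os.path.dirname(filename)
    if d:                              # dirname is empty for bare filenames
        os.makedirs(d, exist_ok=True)  # avoids the race between exists() and makedirs()
    return open(filename, mode)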
Example #10
 def loadTextDatabase(self):
     """ simply loads the textdabase """
     if self.txt_meta.databaseVersion and len(self.expResultList) > 0:
         logger.debug(
             "Asked to load database, but has already been loaded. Ignore.")
         return
     logger.info("Parsing text database at %s" % self.txt_meta.pathname)
     self.expResultList = self._loadExpResults()
Example #11
    def fetchFromScratch(self, path, store, discard_zeroes):
        """ fetch database from scratch, together with
            description.
            :param store: filename to store json file.
        """
        def sizeof_fmt(num, suffix='B'):
            for unit in ['', 'K', 'M', 'G', 'T', 'P']:
                if abs(num) < 1024.:
                    return "%3.1f%s%s" % (num, unit, suffix)
                num /= 1024.0
            return "%.1f%s%s" % (num, 'Yi', suffix)

        import requests
        try:
            r = requests.get(path)
        except Exception as e:
            logger.error("Exception when trying to fetch database: %s" % e)
            logger.error(
                "Consider supplying a different database path in the ini file (possibly a local one)"
            )
            sys.exit()
        if r.status_code != 200:
            logger.error ( "Error %d: could not fetch %s from server." % \
                           ( r.status_code, path ) )
            sys.exit()
        ## it's new, so store the description
        with open(store, "w") as f:
            f.write(r.text)
        if not "url" in r.json().keys():
            logger.error("cannot parse json file %s." % path)
            sys.exit()
        size = r.json()["size"]
        logger.info ( "need to fetch %s. size is %s." % \
                      ( r.json()["url"], sizeof_fmt ( size ) ) )
        t0 = time.time()
        r2 = requests.get(r.json()["url"], stream=True)
        filename = "./" + r2.url.split("/")[-1]
        with open(filename, "wb") as dump:
            if not self.inNotebook():  ## \r doesnt work in notebook
                print("         " + " " * 51 + "<", end="\r")
            print("loading >", end="")
            for x in r2.iter_content(chunk_size=int(size / 50)):
                dump.write(x)
                dump.flush()
                print(".", end="")
                sys.stdout.flush()
            if self.inNotebook():
                print("done.")
            else:
                print("")
            dump.close()
        logger.info("fetched %s in %d secs." % (r2.url, time.time() - t0))
        logger.debug("store as %s" % filename)
        #with open( filename, "wb" ) as f:
        #    f.write ( r2.content )
        #    f.close()
        self.force_load = "pcl"
        return ("./", "%s" % filename)
Example #12
def _getDictionariesFromSLHA(slhafile):
    """
    Create mass and BR dictionaries from an SLHA file.
    Ignore decay blocks with R-parity violating or unknown decays

    """

    from smodels.particlesLoader import rEven, rOdd

    res = pyslha.readSLHAFile(slhafile)

    # Get mass and branching ratios for all particles
    brDic = {}
    writeIgnoreMessage(res.decays.keys(), rEven, rOdd)

    for pid in res.decays.keys():
        if not pid in rOdd:
            continue
        brs = []
        for decay in res.decays[pid].decays:
            nEven = nOdd = 0.
            for pidd in decay.ids:
                if pidd in rOdd: nOdd += 1
                elif pidd in rEven: nEven += 1
                else:
                    logger.warning(
                        "Particle %i not defined in particles.py,decay %i -> [%s] will be ignored"
                        % (pidd, pid, decay.ids))
                    break
            if nOdd + nEven == len(decay.ids) and nOdd == 1:
                brs.append(decay)
            else:
                logger.info("Ignoring decay: %i -> [%s]", pid, decay.ids)

        brsConj = copy.deepcopy(brs)
        for br in brsConj:
            br.ids = [-x for x in br.ids]
        brDic[pid] = brs
        brDic[-pid] = brsConj
    # Get mass list for all particles
    massDic = dict(res.blocks['MASS'].items())
    for pid in list(massDic.keys())[:]:
        massDic[pid] = round(abs(massDic[pid]), 1) * GeV
        if not -pid in massDic: massDic[-pid] = massDic[pid]

    #Include proxy for displaced decays
    if 0 in massDic or 0 in brDic:
        logger.error(
            "PDG = 0 is reserved for displaced decays and it can not be used for other particles. Please redefine the input model PDG assignments."
        )
        raise SModelSError()
    else:
        dispPid = 0
        massDic[dispPid] = 0. * GeV
        dispDec = pyslha.Decay(br=1., ids=[], nda=0)
        brDic[dispPid] = [dispDec]

    return brDic, massDic
Example #13
    def loadBinaryFile(self, lastm_only=False):
        """
        Load a binary database, returning last modified, file count, database.
        
        :param lastm_only: if true, the database itself is not read.
        :returns: database object, or None, if lastm_only == True.
        """
        if lastm_only and self.pcl_mtime[0]:
            ## doesnt need to load database, and mtime is already
            ## loaded
            return None

        if self.pcl_db:
            return self.pcl_db

        if not os.path.exists(self.binfile):
            return None

        try:
            with open(self.binfile, "rb") as f:
                t0 = time.time()
                self.pcl_python = serializer.load(f)
                self.pcl_format_version = serializer.load(f)
                self.pcl_mtime = serializer.load(f)
                self._databaseVersion = serializer.load(f)
                if not lastm_only:
                    if self.pcl_python != sys.version:
                        logger.warning(
                            "binary file was written with a different "
                            "python version. Regenerating.")
                        self.createBinaryFile()
                        return self
                    if self.pcl_format_version != self.sw_format_version:
                        logger.warning(
                            "binary file format (%s) and format "
                            "supported by software (%s) disagree." %
                            (self.pcl_format_version, self.sw_format_version))
                        logger.warning("will recreate binary.")
                        self.createBinaryFile()
                        return self

                    logger.info("loading binary db file %s format version %s" %
                                (self.binfile, self.pcl_format_version))
                    self.hasFastLim = serializer.load(f)
                    self.expResultList = serializer.load(f)
                    t1 = time.time() - t0
                    logger.info ( "Loaded database from %s in %.1f secs." % \
                            ( self.binfile, t1 ) )
        except EOFError as e:
            os.unlink(self.binfile)
            if lastm_only:
                self.pcl_format_version = -1
                self.pcl_mtime = 0
                return self
            logger.error("%s is not a binary database file! recreate it!" %
                         self.binfile)
            self.createBinaryFile()
        return self
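Reading such a file back is just the reverse sequence of pickle.load calls; a minimal sketch with the same EOFError guard (names are illustrative):

import pickle

def readPickle(filename, count):
    """Load up to 'count' objects, in the order they were dumped, from one pickle file."""
    objects = []
    with open(filename, "rb") as f:
        try:
            for _ in range(count):
                objects.append(pickle.load(f))
        except EOFError:
            pass  # truncated or not a pickle file; return whatever was read
    return objects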
Example #14
    def loadBinaryFile(self, lastm_only=False):
        """
        Load a binary database, returning last modified, file count, database.

        :param lastm_only: if true, the database itself is not read.
        :returns: database object, or None, if lastm_only == True.
        """
        if lastm_only and self.pcl_meta.mtime:
            ## doesnt need to load database, and mtime is already
            ## loaded
            return None

        if not os.path.exists(self.pcl_meta.pathname):
            return None

        try:
            with open(self.pcl_meta.pathname, "rb") as f:
                t0 = time.time()
                pclfilename = self.pcl_meta.pathname
                self.pcl_meta = serializer.load(f)
                self.pcl_meta.pathname = pclfilename
                if self.force_load == "pcl":
                    self.txt_meta = self.pcl_meta
                if not lastm_only:
                    if not self.force_load == "pcl" and self.pcl_meta.needsUpdate(
                            self.txt_meta):
                        logger.warning("Something changed in the environment."
                                       "Regenerating.")
                        self.createBinaryFile()
                        return self
                    logger.info(
                        "loading binary db file %s format version %s" %
                        (self.pcl_meta.pathname, self.pcl_meta.format_version))
                    if sys.version[0] == "2":
                        self.expResultList = serializer.load(f)
                    else:
                        self.expResultList = serializer.load(f,
                                                             encoding="latin1")
                    t1 = time.time() - t0
                    logger.info ( "Loaded database from %s in %.1f secs." % \
                            ( self.pcl_meta.pathname, t1 ) )
        except (EOFError, ValueError) as e:
            os.unlink(self.pcl_meta.pathname)
            if lastm_only:
                self.pcl_meta.format_version = -1
                self.pcl_meta.mtime = 0
                return self
            logger.error ( "%s is not readable (%s)." % \
                            ( self.pcl_meta.pathname, str(e) ) )
            if self.source in ["http", "ftp", "pcl"]:
                logger.error(
                    "source cannot be rebuilt. supply a different path to the database in your ini file."
                )
                sys.exit()
            self.createBinaryFile()
        # self.txt_meta = self.pcl_meta
        return self
Example #15
 def makePlots(self,outFolder):
     """
     Uses the data in self.data_dict to produce the plots.
     
     :parameter outFolder: Path to the output folder.
     """
     
     if not os.path.isdir(outFolder):
         os.makedirs(outFolder)
     
     
     logger.info('Making plots...')
       
     data_frame_all = helpers.make_data_frame(self.data_dict)
  
     data_frame_all = helpers.fill_hover(data_frame_all,
                                         self.SModelS_hover_information,
                                         self.slha_hover_information,
                                         self.ctau_hover_information,
                                         self.BR_hover_information) 
  
     data_frame_excluded,data_frame_nonexcluded = helpers.data_frame_excluded_nonexcluded(data_frame_all) 
     x_axis,y_axis = helpers.get_xy_axis(self.variable_x,self.variable_y) 
     cont_plots,disc_plots = helpers.separate_cont_disc_plots(self.plot_list,self.data_dict) 
     
     plot_descriptions=helpers.plot_description()
  
     helpers.make_continuous_plots_all(cont_plots,x_axis,
                                                       y_axis,outFolder,data_frame_all,self.plot_data,
                                                       self.plot_title,self.variable_x,self.variable_y,plot_descriptions)
      
     helpers.make_continuous_plots_excluded(cont_plots,x_axis,
                                                            y_axis,outFolder,data_frame_excluded,self.plot_data,
                                                            self.plot_title,self.variable_x,self.variable_y,plot_descriptions)
      
     helpers.make_continuous_plots_nonexcluded(cont_plots,x_axis, y_axis,
                                               outFolder,data_frame_nonexcluded,
                                               self.plot_data, self.plot_title,self.variable_x,self.variable_y,plot_descriptions)
      
     helpers.make_discrete_plots_all(disc_plots,x_axis,y_axis,
                                     outFolder,data_frame_all,self.plot_data,
                                     self.plot_title,self.variable_x,self.variable_y,plot_descriptions)
      
     helpers.make_discrete_plots_excluded(disc_plots,x_axis,y_axis, outFolder,
                                          data_frame_excluded,self.plot_data,
                                          self.plot_title,self.variable_x,self.variable_y,plot_descriptions)
      
     helpers.make_discrete_plots_nonexcluded(disc_plots,x_axis,y_axis, outFolder,
                                             data_frame_nonexcluded,
                                             self.plot_data, self.plot_title,self.variable_x,self.variable_y,plot_descriptions)
     
     helpers.create_index_html(outFolder,self.plot_data,self.plot_title,self.plot_list,plot_descriptions)
     
     logger.info('Generation of interactive plots finished. Go to: \n %s/index.html \n to see the plots.' %outFolder)
     
     return True
Example #16
    def fetchFromScratch ( self, path, store, discard_zeroes ):
        """ fetch database from scratch, together with
            description.
            :param store: filename to store json file.
        """
        def sizeof_fmt(num, suffix='B'):
            for unit in [ '','K','M','G','T','P' ]:
                if abs(num) < 1024.:
                    return "%3.1f%s%s" % (num, unit, suffix)
                num /= 1024.0
            return "%.1f%s%s" % (num, 'Yi', suffix)

        import requests
        try:
            r = requests.get( path )
        except Exception as e:
            logger.error ( "Exception when trying to fetch database: %s" % e )
            logger.error ( "Consider supplying a different database path in the ini file (possibly a local one)" )
            sys.exit()
        if r.status_code != 200:
            logger.error ( "Error %d: could not fetch %s from server." % \
                           ( r.status_code, path ) )
            sys.exit()
        ## it's new, so store the description
        with open( store, "w" ) as f:
            f.write ( r.text )
        if not "url" in r.json().keys():
            logger.error ( "cannot parse json file %s." % path )
            sys.exit()
        size = r.json()["size"]
        logger.info ( "need to fetch %s. size is %s." % \
                      ( r.json()["url"], sizeof_fmt ( size ) ) )
        t0=time.time()
        r2=requests.get ( r.json()["url"], stream=True )
        filename= "./" + r2.url.split("/")[-1]
        with open ( filename, "wb" ) as dump:
            if not self.inNotebook(): ## \r doesnt work in notebook
                print ( "         " + " "*51 + "<", end="\r" )
            print ( "loading >", end="" )
            for x in r2.iter_content(chunk_size=int ( size / 50 ) ):
                dump.write ( x )
                dump.flush ()
                print ( ".", end="" )
                sys.stdout.flush()
            if self.inNotebook():
                print ( "done." )
            else:
                print( "" )
            dump.close()
        logger.info ( "fetched %s in %d secs." % ( r2.url, time.time()-t0 ) )
        logger.debug ( "store as %s" % filename )
        #with open( filename, "wb" ) as f:
        #    f.write ( r2.content )
        #    f.close()
        self.force_load = "pcl"
        return ( "./", "%s" % filename )
Example #17
def _getDictionariesFromSLHA(slhafile):
    """
    Create mass and BR dictionaries from an SLHA file.
    Ignore decay blocks with R-parity violating or unknown decays

    """

    from smodels.particlesLoader import rEven, rOdd

    res = pyslha.readSLHAFile(slhafile)

   
    # Get mass and branching ratios for all particles
    brDic = {}
    writeIgnoreMessage(res.decays.keys(), rEven, rOdd)

    for pid in res.decays.keys():
        if not pid in rOdd:
            continue
        brs = []
        for decay in res.decays[pid].decays:
            nEven = nOdd = 0.
            for pidd in decay.ids:
                if pidd in rOdd: nOdd += 1
                elif pidd in rEven: nEven += 1
                else:
                    logger.warning("Particle %i not defined in particles.py,decay %i -> [%s] will be ignored" %(pidd,pid,decay.ids))
                    break
            if nOdd + nEven == len(decay.ids) and nOdd == 1:
                brs.append(decay)
            else:
                logger.info("Ignoring decay: %i -> [%s]",pid,decay.ids)

        brsConj = copy.deepcopy(brs)
        for br in brsConj:
            br.ids = [-x for x in br.ids]
        brDic[pid] = brs
        brDic[-pid] = brsConj
    # Get mass list for all particles
    massDic = dict(res.blocks['MASS'].items())
    for pid in list ( massDic.keys() )[:]:
        massDic[pid] = round(abs(massDic[pid]),1)*GeV
        if not -pid in massDic: massDic[-pid] = massDic[pid] 

    #Include proxy for displaced decays
    if 0 in massDic or 0 in brDic:
        logger.error("PDG = 0 is reserved for displaced decays and it can not be used for other particles. Please redefine the input model PDG assignments.")
        raise SModelSError()
    else:
        dispPid = 0
        massDic[dispPid] = 0. * GeV
        dispDec = pyslha.Decay(br=1., ids=[], nda=0)
        brDic[dispPid] = [dispDec]
   
 
    return brDic, massDic
Example #18
def _cleanList ( fileList, inDir ):
    """ clean up list of files """
    cleanedList = []
    for f in fileList:
        tmp = os.path.join(inDir, f )
        if not os.path.isfile ( tmp ):
            logger.info ( "%s does not exist or is not a file. Skipping it." % tmp )
            continue
        cleanedList.append( tmp )
    return cleanedList
Example #19
 def compile(self):
     """
     Compile pythia_lhe.
     
     """
     logger.info("Trying to compile pythia in %s" % self.srcPath)
     cmd = "cd %s; make" % self.srcPath
     outputMessage = executor.getoutput(cmd)
     #outputMessage = subprocess.check_output ( cmd, shell=True,
     #                                          universal_newlines=True )
     logger.info(outputMessage)
Example #20
 def compile(self):
     """
     Try to compile nllfast.
     
     """
     logger.info("Trying to compile %s", self.name)
     cmd = "cd %s; make" % self.srcPath
     out = executor.getoutput(cmd)
     # out = subprocess.check_output ( cmd, shell=True, universal_newlines=True )
     logger.info(out)
     return True
Example #21
 def checkBinaryFile(self):
     nu = self.needsUpdate()
     logger.debug("Checking binary db file.")
     logger.debug ( "Binary file dates to %s(%d)" % \
                   ( time.ctime(self.pcl_mtime[0]),self.pcl_mtime[1] ) )
     logger.debug ( "Database dates to %s(%d)" % \
                   ( time.ctime(self.txt_mtime[0]),self.txt_mtime[1] ) )
     if nu:
         logger.info("Binary db file needs an update.")
     else:
         logger.info("Binary db file does not need an update.")
     return nu
Example #22
 def compile(self):
     """
     Tries to compile and install tools that are not yet marked
     as 'installed'.
     """
     for (name, instance) in self.tools.items():
         installOk = instance.checkInstallation()
         if installOk == True:
             continue
         logger.info("Installation of " + str(name) + " not correct. \
                     Trying to compile.")
         instance.compile()
Example #23
 def checkBinaryFile ( self ):
     nu=self.needsUpdate()
     logger.debug ( "Checking binary db file." )
     logger.debug ( "Binary file dates to %s(%d)" % \
                   ( time.ctime(self.pcl_meta.mtime),self.pcl_meta.filecount ) )
     logger.debug ( "Database dates to %s(%d)" % \
                   ( time.ctime(self.txt_meta.mtime),self.txt_meta.filecount ) )
     if nu:
         logger.info ( "Binary db file needs an update." )
     else:
         logger.info ( "Binary db file does not need an update." )
     return nu
Example #24
 def checkNCPUs(self, ncpus, inputFiles):
     if ncpus < -1 or ncpus == 0:
         logger.error("Weird number of CPUs given: %d" % ncpus)
         sys.exit()
     if ncpus == -1:
         ncpus = runtime.nCPUs()
     ncpus = min(len(inputFiles), ncpus)
     if ncpus == 1:
         logger.info("We run on a single cpu")
     else:
         logger.info("We run on %d cpus" % ncpus)
     return ncpus
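The same CPU-count logic, reduced to a standalone helper around os.cpu_count (keeping the -1 = "all cores" convention of the example):

import os

def resolveNCPUs(ncpus, njobs):
    """Clamp the requested number of CPUs against the machine and the number of jobs."""
    if ncpus < -1 or ncpus == 0:
        raise ValueError("weird number of CPUs given: %d" % ncpus)
    if ncpus == -1:
        ncpus = os.cpu_count() or 1
    return min(njobs, ncpus)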
Example #25
def _getDictionariesFromSLHA(slhafile):
    """
    Create mass and BR dictionaries from an SLHA file.
    Ignore decay blocks with R-parity violating or unknown decays

    """

    res = pyslha.readSLHAFile(slhafile)

    rOdd = particleD.rOdd.keys()
    rEven = particleD.rEven.keys()

    # Get mass and branching ratios for all particles
    brDic = {}
    for pid in res.decays.keys():
        if not pid in rEven + rOdd:
            logger.warning(
                "Particle %i not defined in particles.py, its decays will be ignored"
                % (pid))
            continue
        if pid in rEven:
            logger.info("Ignoring %s decays", particleD.rEven[pid])
            continue
        brs = []
        for decay in res.decays[pid].decays:
            nEven = nOdd = 0.
            for pidd in decay.ids:
                if pidd in rOdd: nOdd += 1
                elif pidd in rEven: nEven += 1
                else:
                    logger.warning(
                        "Particle %i not defined in particles.py,decay %i -> [%s] will be ignored"
                        % (pidd, pid, decay.ids))
                    break
            if nOdd + nEven == len(decay.ids) and nOdd == 1:
                brs.append(decay)
            else:
                logger.info("Ignoring decay: %i -> [%s]", pid, decay.ids)

        brsConj = copy.deepcopy(brs)
        for br in brsConj:
            br.ids = [-x for x in br.ids]
        brDic[pid] = brs
        brDic[-pid] = brsConj
    # Get mass list for all particles
    massDic = dict(res.blocks['MASS'].items())
    for pid in list(massDic.keys())[:]:
        massDic[pid] *= GeV
        massDic[pid] = abs(massDic[pid])
        if not -pid in massDic: massDic[-pid] = massDic[pid]

    return brDic, massDic
Example #26
    def loadBinaryFile ( self, lastm_only = False ):
        """
        Load a binary database, returning last modified, file count, database.

        :param lastm_only: if true, the database itself is not read.
        :returns: database object, or None, if lastm_only == True.
        """
        if lastm_only and self.pcl_meta.mtime:
            ## doesnt need to load database, and mtime is already
            ## loaded
            return None

        if not os.path.exists ( self.pcl_meta.pathname ):
            return None

        try:
            with open ( self.pcl_meta.pathname, "rb" ) as f:
                t0=time.time()
                pclfilename = self.pcl_meta.pathname
                self.pcl_meta = serializer.load ( f )
                self.pcl_meta.pathname = pclfilename
                if self.force_load == "pcl":
                    self.txt_meta = self.pcl_meta
                if not lastm_only:
                    if not self.force_load == "pcl" and self.pcl_meta.needsUpdate ( self.txt_meta ):
                        logger.warning ( "Something changed in the environment."
                                         "Regenerating." )
                        self.createBinaryFile()
                        return self
                    logger.info ( "loading binary db file %s format version %s" %
                            ( self.pcl_meta.pathname, self.pcl_meta.format_version ) )
                    if sys.version[0]=="2":
                        self.expResultList = serializer.load ( f )
                    else:
                        self.expResultList = serializer.load ( f, encoding="latin1" )
                    t1=time.time()-t0
                    logger.info ( "Loaded database from %s in %.1f secs." % \
                            ( self.pcl_meta.pathname, t1 ) )
        except (EOFError,ValueError) as e:
            os.unlink ( self.pcl_meta.pathname )
            if lastm_only:
                self.pcl_meta.format_version = -1
                self.pcl_meta.mtime = 0
                return self
            logger.error ( "%s is not readable (%s)." % \
                            ( self.pcl_meta.pathname, str(e) ) )
            if self.source in [ "http", "ftp", "pcl" ]:
                logger.error ( "source cannot be rebuilt. supply a different path to the database in your ini file." )
                sys.exit()
            self.createBinaryFile()
        # self.txt_meta = self.pcl_meta
        return self
Example #27
def main(args):
    setLogLevel(args.verbosity)
    if args.query:
        return queryCrossSections(args.filename)
    sqrtses = getSqrtses(args)
    order = getOrder(args)
    checkAllowedSqrtses(order, sqrtses)
    inputFiles = getInputFiles(args)
    ncpus = args.ncpus

    if hasattr(args, 'pythiacard'):
        pythiacard = args.pythiacard
    else:
        pythiacard = None
    if ncpus < -1 or ncpus == 0:
        logger.error("Weird number of CPUs given: %d" % ncpus)
        sys.exit()
    if ncpus == -1:
        ncpus = runtime.nCPUs()
    ncpus = min(len(inputFiles), ncpus)
    if ncpus == 1:
        logger.info("We run on a single cpu")
    else:
        logger.info("We run on %d cpus" % ncpus)
    children = []
    for i in range(ncpus):
        pid = os.fork()
        chunk = inputFiles[i::ncpus]
        if pid < 0:
            logger.error("fork did not succeed! Pid=%d" % pid)
            sys.exit()
        if pid == 0:
            logger.debug("chunk #%d: pid %d (parent %d)." %
                         (i, os.getpid(), os.getppid()))
            logger.debug(" `-> %s" % " ".join(chunk))
            computeForBunch(sqrtses,
                            order,
                            args.nevents,
                            chunk,
                            not args.keep,
                            args.LOfromSLHA,
                            args.tofile,
                            pythiacard=pythiacard)
            os._exit(0)
        if pid > 0:
            children.append(pid)
    for child in children:
        r = os.waitpid(child, 0)
        logger.debug("child %d terminated: %s" % (child, r))
    logger.debug("all children terminated.")
Example #28
 def compile(self):
     """
     Try to compile the tool.
     """
     logger.debug("Trying to compile %s", self.name)
     cmd = "cd %s; make" % self.srcPath
     out = executor.getoutput(cmd)
     # out = subprocess.check_output ( cmd, shell=True, universal_newlines=True )
     logger.debug(out)
     if not os.path.exists ( self.executablePath ):
         logger.error ( "Compilation of %s failed. Is the %s compiler installed?" % ( self.name, self.compiler ) )
         sys.exit()
     logger.info ( "Compilation of %s succeeded!" % ( self.name ) )
     return True
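executor.getoutput here appears to wrap a shell call (the commented-out line suggests subprocess); a self-contained sketch of the same compile-and-check step using only the standard library (paths are placeholders):

import os
import subprocess

def compileTool(srcPath, executablePath, name="tool"):
    """Run 'make' in srcPath and verify that the expected executable appeared."""
    out = subprocess.getoutput("cd %s; make" % srcPath)
    print(out)
    if not os.path.exists(executablePath):
        raise RuntimeError("compilation of %s failed" % name)
    return True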
Example #29
    def getSRUpperLimit(self,
                        alpha=0.05,
                        expected=False,
                        compute=False,
                        deltas_rel=0.2):
        """
        Computes the 95% upper limit on the signal*efficiency for a given dataset (signal region).
        Only to be used for efficiency map type results.

        :param alpha: Can be used to change the C.L. value. The default value is 0.05 (= 95% C.L.)
        :param expected: Compute expected limit ( i.e. Nobserved = NexpectedBG )
        :param deltas_rel: relative uncertainty in signal (float). Default value is 20%.        
        :param compute: If True, the upper limit will be computed
                        from expected and observed number of events. If False, the value listed
                        in the database will be used instead.
                        

        :return: upper limit value
        """

        if not self.getType() == 'efficiencyMap':
            logger.error(
                "getSRUpperLimit can only be used for efficiency map results!")
            raise SModelSError()

        if not compute:
            if expected:
                try:
                    return self.dataInfo.expectedUpperLimit
                except AttributeError:
                    logger.info(
                        "expectedUpperLimit field not found. Using observed UL instead."
                    )
                    return self.dataInfo.upperLimit
            else:
                return self.dataInfo.upperLimit

        Nobs = self.dataInfo.observedN  #Number of observed events
        if expected:
            Nobs = self.dataInfo.expectedBG
        Nexp = self.dataInfo.expectedBG  #Number of expected BG events
        bgError = self.dataInfo.bgError  # error on BG

        m = Data(Nobs, Nexp, bgError, deltas_rel=deltas_rel)
        computer = UpperLimitComputer(cl=1. - alpha)
        maxSignalXsec = computer.ulSigma(m)
        maxSignalXsec = maxSignalXsec / self.globalInfo.lumi

        return maxSignalXsec
Example #30
 def computeForOneFile(self,
                       sqrtses,
                       inputFile,
                       unlink,
                       lOfromSLHA,
                       tofile,
                       pythiacard=None):
     """
     compute the cross sections for one file.
      :param sqrtses: list of sqrt{s} to run pythia, as a unum (e.g. 7*TeV)
         
     """
     if tofile:
         logger.info("Computing SLHA cross section from %s, adding to "
                     "SLHA file." % inputFile)
         complain = True  ## dont complain about already existing xsecs,
         # if we were the ones writing them
         for s in sqrtses:
             ss = s * TeV
             self.compute(ss,
                          inputFile,
                          unlink=unlink,
                          loFromSlha=lOfromSLHA,
                          pythiacard=pythiacard)
             if tofile == "all":
                 comment = str(self.nevents)+" evts, pythia%d [pb]"%\
                                           self.pythiaVersion
                 self.addXSecToFile(self.loXsecs, inputFile, comment,
                                    complain)
                 complain = False
             comment = str(self.nevents)+" events, [pb], pythia%d for LO"%\
                                           self.pythiaVersion
             self.addXSecToFile(self.xsecs, inputFile, comment, complain)
             complain = False
     else:
         logger.info("Computing SLHA cross section from %s." % inputFile)
         print()
         print("     Cross sections:")
         print("=======================")
         for s in sqrtses:
             ss = s * TeV
             self.compute(ss,
                          inputFile,
                          unlink=unlink,
                          loFromSlha=lOfromSLHA)
             for xsec in self.xsecs:
                 print( "%s %20s:  %.3e pb" % \
                         ( xsec.info.label,xsec.pid,xsec.value/pb ) )
         print()
Example #31
 def testT1(self):
     from smodels.tools.smodelsLogging import logger
     logger.info("T1")
     """ test with the T1 slha input file """
     slhafile="./testFiles/slha/simplyGluino.slha"
     topos = slhaDecomposer.decompose ( slhafile, .1*fb, False, False, 5.*GeV )
     for topo in topos:
         for element in topo.elementList:
             masses=element.getMasses()
             # print "e=",element,"masses=",masses
             mgluino=masses[0][0]
             mLSP=masses[0][1]
             self.assertEqual( str(element), "[[[q,q]],[[q,q]]]" )
             self.assertEqual( int ( mgluino / GeV ), 675 )
             self.assertEqual( int ( mLSP / GeV ), 200 )
Example #32
    def addXSecToFile(self, xsecs, slhafile, comment=None, complain=True):
        """
        Write cross sections to an SLHA file.

        :param xsecs: a XSectionList object containing the cross sections
        :param slhafile: target file for writing the cross sections in SLHA format
        :param comment: optional comment to be added to each cross section block
        :param complain: complain if there are already cross sections in file

        """

        if not os.path.isfile(slhafile):
            line = f"SLHA file {slhafile} not found."
            logger.error(line)
            raise SModelSError(line)
        if len(xsecs) == 0:
            self.countNoXSecs += 1
            if self.countNoXSecs < 3:
                logger.warning("No cross sections available.")
            if self.countNoXSecs == 3:
                logger.warning(
                    "No cross sections available (will quench such warnings in future)."
                )
            return False
        # Check if the file already contains cross section blocks
        xSectionList = crossSection.getXsecFromSLHAFile(slhafile)
        if xSectionList and complain:
            logger.info("SLHA file already contains XSECTION blocks. Adding "
                        "only missing cross sections.")

        # Write cross sections to file, if they do not overlap any cross section in
        # the file
        outfile = open(slhafile, 'a')
        nxsecs = 0
        for xsec in xsecs:
            writeXsec = True
            for oldxsec in xSectionList:
                if oldxsec.info == xsec.info and set(oldxsec.pid) == set(
                        xsec.pid):
                    writeXsec = False
                    break
            if writeXsec:
                nxsecs += 1
                outfile.write(
                    self.xsecToBlock(xsec, (2212, 2212), comment) + "\n")
        outfile.close()

        return nxsecs
Example #33
    def getEfficiencyFor(self, element):
        """
        For upper limit results, checks if the input element falls inside the
        upper limit grid and has a non-zero reweighting factor.
        If it does, returns efficiency = 1, else returns
        efficiency = 0.  For efficiency map results, returns the
        signal efficiency including the lifetime reweighting.
        If a mass array is given as input, no lifetime reweighting will be applied.

        :param element: Element object or mass array with units.
        :return: efficiency (float)
        """

        if self.txnameData.dataType == 'efficiencyMap':
            if hasattr(self, "dbClient"):
                query = self.getQueryStringForElement(element)
                logger.info ( "sending em query %s to %s:%d" % \
                              ( query, self.dbClient.servername, self.dbClient.port ) )
                #print ( "query will be", query )
                #return 0.001
                eff = self.dbClient.query(query)
            else:
                eff = self.txnameData.getValueFor(element)

            if not eff or math.isnan(eff):
                eff = 0.  #Element is outside the grid or has zero efficiency
        elif self.txnameData.dataType == 'upperLimit':
            if hasattr(self, "dbClient"):
                query = self.getQueryStringForElement(element)
                logger.info ( "sending query %s to %s:%d" % \
                              ( query, self.dbClient.servername, self.dbClient.port ) )
                #print ( "query will be", query )
                #return 0.001
                ul = self.dbClient.query(query)
            else:
                ul = self.txnameData.getValueFor(element)
            if isinstance(element, Element):
                element._upperLimit = ul  #Store the upper limit for convenience
            if ul is None:
                eff = 0.  #Element is outside the grid or the decays do not correspond to the txname
            else:
                eff = 1.
        else:
            logger.error("Unknown txnameData type: %s" %
                         self.txnameData.dataType)
            raise SModelSError()

        return eff
Example #34
 def compile(self):
     """
     Try to compile the tool.
     """
     logger.debug("Trying to compile %s", self.name)
     cmd = "cd %s; make" % self.srcPath
     out = executor.getoutput(cmd)
     # out = subprocess.check_output ( cmd, shell=True, universal_newlines=True )
     logger.debug(out)
     if not os.path.exists(self.executablePath):
         logger.error(
             "Compilation of %s failed. Is the %s compiler installed?" %
             (self.name, self.compiler))
         sys.exit()
     logger.info("Compilation of %s succeeded!" % (self.name))
     return True
Example #35
 def testT1(self):
     from smodels.tools.smodelsLogging import logger
     logger.info("T1")
     """ test with the T1 slha input file """
     slhafile = "./testFiles/slha/simplyGluino.slha"
     topos = slhaDecomposer.decompose(slhafile, .1 * fb, False, False,
                                      5. * GeV)
     for topo in topos:
         for element in topo.elementList:
             masses = element.getMasses()
             # print "e=",element,"masses=",masses
             mgluino = masses[0][0]
             mLSP = masses[0][1]
             self.assertEqual(str(element), "[[[q,q]],[[q,q]]]")
             self.assertEqual(int(mgluino / GeV), 675)
             self.assertEqual(int(mLSP / GeV), 200)
Example #36
 def loadDatabase ( self ):
     """ if no binary file is available, then
         load the database and create the binary file.
         if binary file is available, then check if
         it needs update, create new binary file, in
         case it does need an update.
     """
     if not os.path.exists ( self.pcl_meta.pathname ):
         logger.info ( "Creating binary database " )
         logger.info ( "(this may take a few minutes, but it's done only once!)" )
         self.loadTextDatabase()
         self.createBinaryFile()
     else:
         if self.needsUpdate():
             self.createBinaryFile()
         else:
             self.loadBinaryFile( lastm_only = False )
Example #37
    def getSRUpperLimit(self, alpha=0.05, expected=False, compute=False):
        """
        Computes the 95% upper limit on the signal*efficiency for a given dataset (signal region).
        Only to be used for efficiency map type results.

        :param alpha: Can be used to change the C.L. value. The default value is 0.05 (= 95% C.L.)
        :param expected: Compute expected limit ( i.e. Nobserved = NexpectedBG )
        :param compute: If True, the upper limit will be computed
                        from expected and observed number of events. If False, the value listed
                        in the database will be used instead.

        :return: upper limit value
        """

        if not self.dataInfo.dataType == 'efficiencyMap':
            logger.error(
                "getSRUpperLimit can only be used for efficiency map results!")
            raise SModelSError()

        if not compute:
            if expected:
                try:
                    return self.dataInfo.expectedUpperLimit
                except AttributeError:
                    logger.info(
                        "expectedUpperLimit field not found. Using observed UL instead."
                    )
                    return self.dataInfo.upperLimit
            else:
                return self.dataInfo.upperLimit

        Nobs = self.dataInfo.observedN  #Number of observed events
        if expected:
            Nobs = self.dataInfo.expectedBG
        Nexp = self.dataInfo.expectedBG  #Number of expected BG events
        bgError = self.dataInfo.bgError  # error on BG
        lumi = self.globalInfo.lumi
        if (lumi * fb).normalize()._unit:
            ID = self.globalInfo.id
            logger.error("Luminosity defined with wrong units for %s" % (ID))
            return False

        maxSignalXsec = statistics.upperLimit(Nobs, Nexp, bgError, lumi, alpha)

        return maxSignalXsec
Example #38
 def loadDatabase(self):
     """ if no binary file is available, then
         load the database and create the binary file.
         if binary file is available, then check if
         it needs update, create new binary file, in
         case it does need an update.
     """
     if not os.path.exists(self.pcl_meta.pathname):
         logger.info("Creating binary database ")
         logger.info(
             "(this may take a few minutes, but it's done only once!)")
         self.loadTextDatabase()
         self.createBinaryFile()
     else:
         if self.needsUpdate():
             self.createBinaryFile()
         else:
             self.loadBinaryFile(lastm_only=False)
Example #39
def loadDatabaseResults(parser, database):
    """
    Load database entries specified in parser
    
    :parameter parser: ConfigParser, containing analysis and txnames selection
    :parameter database: Database object
    :returns: List of experimental results
        
    """
    """ In case that a list of analyses or txnames are given, retrieve list """
    tmp = parser.get("database", "analyses").split(",")
    analyses = [x.strip() for x in tmp]
    tmp_tx = parser.get("database", "txnames").split(",")
    txnames = [x.strip() for x in tmp_tx]
    if parser.get("database", "dataselector") == "efficiencyMap":
        dataTypes = ['efficiencyMap']
        datasetIDs = ['all']
    elif parser.get("database", "dataselector") == "upperLimit":
        dataTypes = ['upperLimit']
        datasetIDs = ['all']
    else:
        dataTypes = ['all']
        tmp_dIDs = parser.get("database", "dataselector").split(",")
        datasetIDs = [x.strip() for x in tmp_dIDs]

    useSuperseded = False
    useNonValidated = False
    if parser.has_option("database", "useSuperseded"):
        useSuperseded = parser.getboolean("database", "usesuperseded")
    if parser.has_option("database", "useNonValidated"):
        useNonValidated = parser.getboolean("database", "usenonvalidated")
    if useSuperseded:
        logger.info('Including superseded results')
    if useNonValidated:
        logger.info('Including non-validated results')
    """ Load analyses """

    ret = database.getExpResults(analysisIDs=analyses,
                                 txnames=txnames,
                                 datasetIDs=datasetIDs,
                                 dataTypes=dataTypes,
                                 useSuperseded=useSuperseded,
                                 useNonValidated=useNonValidated)
    return ret
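The option handling above repeatedly splits comma-separated values from the ini file; a compact sketch of that pattern (section and option names are only examples):

from configparser import ConfigParser

def getListOption(parser, section, option, default="all"):
    """Return a comma-separated ini option as a list of stripped strings."""
    if not parser.has_option(section, option):
        return [default]
    return [x.strip() for x in parser.get(section, option).split(",")]

# e.g. analyses = getListOption(parser, "database", "analyses")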
Example #40
    def checkConsistency(self):
        """
        Check if all the elements in elementList are
        consistent with the topology (same number of vertices and final states)
        
        :returns: True if all the elements are consistent. Print error message
                  and exits otherwise.
        """

        for element in self.elementList:
            info = element.getEinfo()
            if self.vertnumb != info["vertnumb"]:
                logger.error("Inconsistent topology.")
                raise SModelSError()
            if self.vertparts != info["vertparts"]:
                logger.error("Inconsistent topology.")
                raise SModelSError()
        logger.info("Consistent topology.")
        return True
Example #41
    def getSRUpperLimit(self, alpha=0.05, expected=False, compute=False, deltas_rel=0.2):
        """
        Computes the 95% upper limit on the signal*efficiency for a given dataset (signal region).
        Only to be used for efficiency map type results.

        :param alpha: Can be used to change the C.L. value. The default value is 0.05 (= 95% C.L.)
        :param expected: Compute expected limit ( i.e. Nobserved = NexpectedBG )
        :param deltas_rel: relative uncertainty in signal (float). Default value is 20%.        
        :param compute: If True, the upper limit will be computed
                        from expected and observed number of events. If False, the value listed
                        in the database will be used instead.
                        

        :return: upper limit value
        """

        if not self.getType() == 'efficiencyMap':
            logger.error("getSRUpperLimit can only be used for efficiency map results!")
            raise SModelSError()

        if not compute:
            if expected:
                try:
                    return self.dataInfo.expectedUpperLimit
                except AttributeError:
                    logger.info("expectedUpperLimit field not found. Using observed UL instead.")
                    return self.dataInfo.upperLimit
            else:
                return self.dataInfo.upperLimit

        Nobs = self.dataInfo.observedN  #Number of observed events
        if expected:
            Nobs = self.dataInfo.expectedBG
        Nexp = self.dataInfo.expectedBG  #Number of expected BG events
        bgError = self.dataInfo.bgError # error on BG        

        m = Data(Nobs,Nexp,bgError,deltas_rel=deltas_rel)
        computer = UpperLimitComputer(cl=1.-alpha )
        maxSignalXsec = computer.ulSigma(m)
        maxSignalXsec = maxSignalXsec/self.globalInfo.lumi

        return maxSignalXsec
Example #42
 def fetch(self):
     """
     Fetch and unpack tarball.
     
     """
     import urllib, tarfile
     tempfile = "/tmp/nllfast7.tar.gz"
     f = open(tempfile, "w")
     url = "http://smodels.hephy.at/externaltools/nllfast%d.tar.gz" \
             % self.sqrts
     logger.info("fetching tarball from " + url)
     R = urllib.urlopen(url)
     l = R.readlines()
     for line in l:
         f.write(line)
     R.close()
     f.close()
     tar = tarfile.open(tempfile)
     for item in tar:
         tar.extract(item, self.srcPath + "/")
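This fetch routine uses the Python 2 urllib API and writes the binary stream through a text-mode file handle. A Python 3 sketch of the same fetch-and-unpack idea (the temporary file name mirrors the example; the rest is illustrative):

import tarfile
import urllib.request

def fetchTarball(url, destDir, tmpFile="/tmp/nllfast7.tar.gz"):
    """Download a tarball and extract its contents into destDir."""
    urllib.request.urlretrieve(url, tmpFile)   # binary download handled by urlretrieve
    with tarfile.open(tmpFile) as tar:
        tar.extractall(destDir)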
Example #43
    def checkPathName( self, path, discard_zeroes ):
        """
        checks the path name,
        returns the base directory and the pickle file name.
        If path starts with http or ftp, fetch the description file
        and the database.
        returns the base directory and the pickle file name
        """
        logger.debug('Try to set the path for the database to: %s', path)
        if path.startswith( ( "http://", "https://", "ftp://" ) ):
            return self.fetchFromServer ( path, discard_zeroes )
        if path.startswith( ( "file://" ) ):
            path=path[7:]

        tmp = os.path.realpath(path)
        if os.path.isfile ( tmp ):
            base = os.path.dirname ( tmp )
            return ( base, tmp )

        if tmp[-4:]==".pcl":
            self.source="pcl"
            if not os.path.exists ( tmp ):
                if self.force_load == "pcl":
                    logger.error ( "File not found: %s" % tmp )
                    sys.exit()
                logger.info ( "File not found: %s. Will generate." % tmp )
                base = os.path.dirname ( tmp )
                return ( base, tmp )
            logger.error ( "Supplied a pcl filename, but %s is not a file." % tmp )
            sys.exit()

        path = tmp + '/'
        if not os.path.exists(path):
            logger.error('%s is no valid path!' % path)
            raise DatabaseNotFoundException("Database not found")
        m=Meta ( path, discard_zeroes = discard_zeroes )
        self.source="txt"
        return ( path, path + m.getPickleFileName() )
Example #44
    def createBinaryFile(self, filename=None):
        """ create a pcl file from the text database,
            potentially overwriting an old pcl file. """
        if self.txt_meta == None:
            logger.error ( "Trying to create database pickle, but no txt_meta defined." )
            sys.exit()
        logger.debug ( "database timestamp: %s, filecount: %d" % \
                     ( time.ctime ( self.txt_meta.mtime ), self.txt_meta.filecount ) )
        binfile = filename
        if binfile == None:
            binfile = self.pcl_meta.pathname
        logger.debug (  " * create %s" % binfile )
        with open ( binfile, "wb" ) as f:
            logger.debug (  " * load text database" )
            self.loadTextDatabase()
            logger.debug (  " * write %s db version %s, format version %s, %s" % \
                    ( binfile, self.txt_meta.databaseVersion,
                      self.txt_meta.format_version, self.txt_meta.cTime() ) )
            ptcl = serializer.HIGHEST_PROTOCOL
#             ptcl = 2
            serializer.dump(self.txt_meta, f, protocol=ptcl)
            serializer.dump(self.expResultList, f, protocol=ptcl)
            logger.info (  "%s created." % ( binfile ) )
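
The reverse operation, reading such a pickle back, has to follow the same dump order used above: the metadata object first, then the list of experimental results. A minimal sketch, assuming serializer is the same pickle-compatible module aliased in the snippet:

    with open(binfile, "rb") as f:
        txt_meta = serializer.load(f)          # Meta object, written first
        expResultList = serializer.load(f)     # list of ExpResult objects, written second
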
Example #45
0
    def loadData(self,npoints=-1):
        """
        Reads the data from the smodels and SLHA folders.
        If npoints > 0, it will limit the number of points in the plot to npoints.
        
        :parameter npoints: Number of points to be plotted (int). If < 0, all points will be used.
        """
        logger.info("Reading data folders %s and %s ..." %(self.smodelsFolder,self.slhaFolder))
        
        n = 0
        for f in glob.glob(self.smodelsFolder+'/*'):
            
            if npoints > 0 and n >= npoints:
                break
            
            smodelsOutput = helpers.import_python_output(f)
            if not smodelsOutput:
                continue
            
            #Get SLHA file name:
            slhaFile = helpers.get_slha_file(smodelsOutput)
            slhaFile = os.path.join(self.slhaFolder,os.path.basename(slhaFile))
            #Get SLHA data:
            slhaData = helpers.get_slha_data(slhaFile)
            if not slhaData:
                continue
            
            #Data read successfully
            self.data_dict['file'].append(f.split('/')[-1])
            outputStatus = helpers.output_status(smodelsOutput)
            if outputStatus == -1:
                self.fillWith(None,slhaData)
            else:
                self.fillWith(smodelsOutput,slhaData)
            n += 1

        return True
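
A hedged usage line for the loader above; plotter stands for an instance of the (unnamed here) plotting class, which must already define smodelsFolder, slhaFolder and data_dict.

    plotter.loadData(npoints=500)   # read at most 500 points; npoints=-1 reads everything
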
Example #46
0
def decompose(slhafile, sigcut=.1 * fb, doCompress=False, doInvisible=False,
              minmassgap=-1.*GeV, useXSecs=None):
    """
    Perform SLHA-based decomposition.

    :param slhafile: the slha input file. May be an URL (though http, ftp only).
    :param sigcut: minimum sigma*BR to be generated, by default sigcut = 0.1 fb
    :param doCompress: turn mass compression on/off
    :param doInvisible: turn invisible compression on/off
    :param minmassgap: maximum value (in GeV) for considering two R-odd particles
                       degenerate (only relevant for doCompress=True)
    :param useXSecs: optionally a dictionary with cross sections for pair
                 production, by default reading the cross sections
                 from the SLHA file.
    :returns: list of topologies (TopologyList object)

    """
    if slhafile.startswith("http") or slhafile.startswith("ftp"):
        logger.info ( "asked for remote slhafile %s. will fetch it." % slhafile )
        import requests
        import os.path
        r=requests.get(slhafile)
        if r.status_code != 200:
            logger.error ( "could not retrieve remote file %d: %s" % ( r.status_code, r.reason ) )
            raise SModelSError()
        basename = os.path.basename ( slhafile )
        f=open ( basename, "w" )
        f.write ( r.text )
        f.close()
        slhafile = basename
    t1 = time.time()

    if doCompress and minmassgap / GeV < 0.:
        logger.error("Asked for compression without specifying minmassgap. Please set minmassgap.")        
        raise SModelSError()

    if type(sigcut) == type(1.):
        sigcut = sigcut * fb

    try:
        f=pyslha.readSLHAFile ( slhafile )
    except pyslha.ParseError as e:
        logger.error ( "The file %s cannot be parsed as an SLHA file: %s" % (slhafile, e) )
        raise SModelSError()

    # Get cross section from file
    xSectionList = crossSection.getXsecFromSLHAFile(slhafile, useXSecs)
    # Get BRs and masses from file
    brDic, massDic = _getDictionariesFromSLHA(slhafile)
    # Only use the highest order cross sections for each process
    xSectionList.removeLowerOrder()
    # Order xsections by PDGs to improve performance
    xSectionList.order()
    #Reweight decays by fraction of prompt decays and add fraction of long-lived
    brDic = _getPromptDecays(slhafile,brDic)

    # Get maximum cross sections (weights) for single particles (irrespective
    # of sqrtS)
    maxWeight = {}
    for pid in xSectionList.getPIDs():
        maxWeight[pid] = xSectionList.getXsecsFor(pid).getMaxXsec()    

    # Generate dictionary, where keys are the PIDs and values 
    # are the list of cross sections for the PID pair (for performance)
    xSectionListDict = {}    
    for pids in xSectionList.getPIDpairs():
        xSectionListDict[pids] = xSectionList.getXsecsFor(pids)

    # Create 1-particle branches with all possible mothers
    branchList = []
    for pid in maxWeight:
        branchList.append(Branch())
        branchList[-1].PIDs = [[pid]]
        if not pid in massDic:
            logger.error ( "pid %d does not appear in masses dictionary %s in slhafile %s" % 
                    ( pid, massDic, slhafile ) )
        branchList[-1].masses = [massDic[pid]]
        branchList[-1].maxWeight = maxWeight[pid]

    # Generate final branches (after all R-odd particles have decayed)
    finalBranchList = decayBranches(branchList, brDic, massDic, sigcut)
    # Generate dictionary, where keys are the PIDs and values are the list of branches for the PID (for performance)
    branchListDict = {}
    for branch in finalBranchList:
        if len(branch.PIDs) != 1:
            logger.error("During decomposition the branches should \
                            not have multiple PID lists!")
            return False   
        if branch.PIDs[0][0] in branchListDict:
            branchListDict[branch.PIDs[0][0]].append(branch)
        else:
            branchListDict[branch.PIDs[0][0]] = [branch]
    for pid in xSectionList.getPIDs():
        if not pid in branchListDict: branchListDict[pid] = []

    #Sort the branch lists by max weight to improve performance:
    for pid in branchListDict:
        branchListDict[pid] = sorted(branchListDict[pid], 
                                     key=lambda br: br.maxWeight, reverse=True)
    
    smsTopList = topology.TopologyList()
    # Combine pairs of branches into elements according to production
    # cross section list
    for pids in xSectionList.getPIDpairs():
        weightList = xSectionListDict[pids]
        minBR = (sigcut/weightList.getMaxXsec()).asNumber()
        if minBR > 1.: continue
        for branch1 in branchListDict[pids[0]]:
            BR1 = branch1.maxWeight/maxWeight[pids[0]]  #Branching ratio for first branch            
            if BR1 < minBR: break  #Stop loop if BR1 is already too low            
            for branch2 in branchListDict[pids[1]]:
                BR2 = branch2.maxWeight/maxWeight[pids[1]]  #Branching ratio for second branch
                if BR2 < minBR: break  #Stop loop if BR2 is already too low
                
                finalBR = BR1*BR2                
                if type(finalBR) == type(1.*fb):
                    finalBR = finalBR.asNumber()
                if finalBR < minBR: continue # Skip elements with xsec below sigcut

                if len(branch1.PIDs) != 1 or len(branch2.PIDs) != 1:
                    logger.error("During decomposition the branches should \
                            not have multiple PID lists!")
                    return False    

                newElement = element.Element([branch1, branch2])
                newElement.weight = weightList*finalBR
                newElement.sortBranches()  #Make sure elements are sorted BEFORE adding them
                smsTopList.addElement(newElement)
    
    smsTopList.compressElements(doCompress, doInvisible, minmassgap)
    smsTopList._setElementIds()

    logger.debug("slhaDecomposer done in %.2f s." % (time.time() -t1 ) )
    return smsTopList
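
A hedged usage sketch of the SLHA-based decomposition above. The input file is the example SLHA file referenced elsewhere in this collection, and the unit import path is assumed from the standard SModelS layout.

    from smodels.tools.physicsUnits import fb, GeV   # import path assumed

    topologies = decompose("inputFiles/slha/simplyGluino.slha",
                           sigcut=0.03*fb,            # keep only sigma*BR above 0.03 fb
                           doCompress=True, doInvisible=True,
                           minmassgap=5.*GeV)         # required when doCompress=True
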
Example #47
0
    def __init__(self, path, globalObj, infoObj):
        self.path = path
        self.globalInfo = globalObj
        self._infoObj = infoObj
        self.txnameData = None
        self.txnameDataExp = None ## expected Data
        self._topologyList = TopologyList()

        logger.debug('Creating object based on txname file: %s' %self.path)
        #Open the info file and get the information:
        if not os.path.isfile(path):
            logger.error("Txname file %s not found" % path)
            raise SModelSError()
        txtFile = open(path,'r')
        txdata = txtFile.read()
        txtFile.close()
        if not "txName" in txdata: raise TypeError
        if not 'upperLimits' in txdata and not 'efficiencyMap' in txdata:
            raise TypeError
        content = concatenateLines(txdata.split("\n"))

        #Get tags in info file:
        tags = [line.split(':', 1)[0].strip() for line in content]
        data = None
        expectedData = None
        dataType = None
        for i,tag in enumerate(tags):
            if not tag: continue
            line = content[i]
            value = line.split(':',1)[1].strip()
            if tags.count(tag) != 1:
                logger.info("Duplicated field %s found in file %s" \
                             % (tag, self.path))
            if ';' in value: value = value.split(';')
            if tag == 'upperLimits' or tag == 'efficiencyMap':
                data = value
                dataType = tag
            elif tag == 'expectedUpperLimits':
                expectedData = value
                dataType = 'upperLimits'
            else:
                self.addInfo(tag,value)

        ident = self.globalInfo.id+":"+dataType[0]+":"+ str(self._infoObj.dataId)
        ident += ":" + self.txName
        self.txnameData = TxNameData(data, dataType, ident )
        if expectedData:
            self.txnameDataExp = TxNameData( expectedData, dataType, ident )

        #Builds up a list of elements appearing in constraints:
        if hasattr(self,'finalState'):
            finalState = self.finalState
        else:
            finalState = ["MET","MET"]        
        elements = []
        if hasattr(self,'constraint'):
            elements += [Element(el,finalState) for el in elementsInStr(str(self.constraint))]
        if hasattr(self,'condition') and self.condition:
            conds = self.condition
            if not isinstance(conds,list): conds = [conds]
            for cond in conds:
                for el in elementsInStr(str(cond)):
                    newEl = Element(el,finalState)
                    if not newEl in elements: elements.append(newEl)

        # Builds up TopologyList with all the elements appearing in constraints
        # and conditions:
        for el in elements:
            self._topologyList.addElement(el)
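
For reference, a hedged sketch of the minimal file layout the parser above accepts: plain "tag: value" lines, a txName field, and at least one of upperLimits or efficiencyMap. The concrete values below are placeholders, not a verified entry from the database.

    txName: T1
    constraint: [[['jet','jet']],[['jet','jet']]]
    condition: None
    upperLimits: [[[[600.*GeV,100.*GeV],[600.*GeV,100.*GeV]], 0.5*pb],
                  [[[700.*GeV,100.*GeV],[700.*GeV,100.*GeV]], 0.3*pb]]
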
Example #48
0
    def run( self, slhafile, lhefile=None, unlink=True ):
        """
        Execute pythia_lhe with n events, at sqrt(s)=sqrts.

        :param slhafile: input SLHA file
        :param lhefile: option to write LHE output to file; if None, do not write
                        output to disk. If lhe file exists, use its events for
                        xsecs calculation.
        :param unlink: Clean up temp directory after running pythia

        :returns: List of cross sections

        """
        if lhefile and os.path.isfile ( lhefile ):
            lheFile = open(lhefile, 'r')
            xsecsInfile = crossSection.getXsecFromSLHAFile(slhafile)
            loXsecs = crossSection.XSectionList()
            for xsec in xsecsInfile:
                if xsec.info.order == LO and \
                        float (xsec.info.sqrts.asNumber(TeV)) == self.sqrts:
                    loXsecs.add(xsec)
            return loXsecs

        #Change pythia card, if defined:
        if self.pythiacard:
            pythiacard_default = self.cfgfile
            self.cfgfile = self.pythiacard
        # Check if template config file exists
        if unlink:
            self.unlink()
        else:
            self.tempdir = None
        self.replaceInCfgFile({"NEVENTS": self.nevents, "SQRTS":1000 * self.sqrts})
        self.setParameter("MSTP(163)", "6")

        if unlink==False:
            logger.info ( "keeping temporary directory at %s" % self.tempDirectory() )
        r = self.checkInstallation()
        if r == False:
            logger.info ( "Installation check failed." )
            sys.exit()
        self.replaceInCfgFile({"NEVENTS": self.nevents, "SQRTS":1000 * self.sqrts})
        self.setParameter("MSTP(163)", "6")
        lhedata = self._run(slhafile, unlink=unlink )
        if not "<LesHouchesEvents" in lhedata:
            pythiadir = "%s/log" % self.tempDirectory()
            logger.error("No LHE events found in pythia output %s" % pythiadir )
            if not os.path.exists ( pythiadir ):
                logger.error ("Will dump pythia output to %s" % pythiadir )
                f=open ( pythiadir, "w" )
                for line in lhedata:
                    f.write ( line )
                f.close()
            raise SModelSError( "No LHE events found in %s" % pythiadir )

        #Reset pythia card to its default value
        if self.pythiacard:
            self.cfgfile = pythiacard_default

        #if not unlink:
        #    lhefile = self.tempdir + "/events.lhe"
        # Generate file object with lhe events
        if lhefile:
            lheFile = open(lhefile, 'w')
            lheFile.write(lhedata)
            lheFile.close()
            lheFile = open(lhefile, 'r')
        else:
            # Create memory only file object
            if sys.version[0]=="2":
                lhedata = unicode ( lhedata )
            lheFile = io.StringIO(lhedata)
        return crossSection.getXsecFromLHEFile ( lheFile )
Example #49
0
    def loadParameters(self):
        """
        Reads the parameters from the plotting parameter file.
        """
        
        logger.info("Reading parameters from %s ..." %(self.parameterFile))        
        
        parFile = self.parameterFile
        import imp
        
        try:
            with open(self.parameterFile, 'rb') as fParameters: ## imports parameter file
                parameters = imp.load_module("parameters",fParameters,self.parameterFile,('.py', 'rb', imp.PY_SOURCE))
        except:
            logger.error("Error loading parameters file %s" %self.parameterFile)
            return False
         
        if not hasattr(parameters, 'slha_hover_information'):
            logger.debug("slha_hover_information dictionary was not found in %s. SLHA data will not be included in info box." %parFile)
            self.slha_hover_information = {}
        else:
            self.slha_hover_information = parameters.slha_hover_information
    
        if not hasattr(parameters, 'ctau_hover_information'):
            logger.debug("ctau_hover_information dictionary was not found in %s. Lifetime data will not be included in info box." %parFile)
            self.ctau_hover_information = {}
        else:
            self.ctau_hover_information = parameters.ctau_hover_information
    
        if not hasattr(parameters, 'BR_hover_information'):
            logger.debug("BR_hover_information dictionary was not found in %s. Branching ratio data will not be included in info box." %parFile)
            self.BR_hover_information = {}
        else:
            self.BR_hover_information = parameters.BR_hover_information
    
        if not hasattr(parameters, 'SModelS_hover_information'):
            logger.debug("SModelS_hover_information dictionary was not found in %s. SModelS data will not be included in info box." %parFile)
            self.SModelS_hover_information = {}
        else:
            self.SModelS_hover_information = list(set(parameters.SModelS_hover_information))
    
        if not hasattr(parameters, 'plot_data'):
            logger.debug("plot_data list was not found in %s. All points will be plotted" %parFile)
            self.plot_data = ['all']
        else:
            self.plot_data = list(set(parameters.plot_data))
    
        if not hasattr(parameters, 'variable_x'):
            raise SModelSError("variable_x was not found in %s. Please define the variable to be plotted in the x-axis." %parFile)
        else:
            self.variable_x = parameters.variable_x
        if not hasattr(parameters, 'variable_y'):
            raise SModelSError("variable_y was not found in %s. Please define the variable to be plotted in the y-axis." %parFile)
        else:
            self.variable_y = parameters.variable_y
        if not hasattr(parameters, 'plot_list'):
            raise SModelSError("plot_list was not found in %s. Please define the list of plots to be plotted." %parFile)
        else:
            self.plot_list = list(set(parameters.plot_list))
            
        if not hasattr(parameters,'BR_get_top'):
            logger.debug("BR_get_top not found in %s. Will include all decay channels")
            self.BR_get_top = 'all'
        else:
            self.BR_get_top = parameters.BR_get_top

        if not hasattr(parameters,'plot_title'):
            logger.warning("plot_title not defined in %s. Using default title" %parFile)
            self.plot_title = 'interactive-plots'
        else:
            self.plot_title = parameters.plot_title
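
Putting the checks above together, a hedged sketch of a minimal parameter file: only variable_x, variable_y and plot_list are mandatory (missing them raises SModelSError), while everything else falls back to the defaults noted in the debug messages. The concrete values are placeholders.

    # minimal parameters file for the interactive plots (placeholder values)
    variable_x = 'm_mother'        # quantity on the x-axis
    variable_y = 'm_LSP'           # quantity on the y-axis
    plot_list = ['all']            # which plots to produce

    # optional fields and their fallbacks if omitted:
    plot_data = ['all']            # points to include
    plot_title = 'interactive-plots'
    BR_get_top = 'all'             # decay channels to include
    slha_hover_information = {}    # extra SLHA info for the hover box
    ctau_hover_information = {}
    BR_hover_information = {}
    SModelS_hover_information = []
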
Example #50
0
        return ret


    def chmod(self):
        """ 
        chmod 755 on the pythia executable, if it exists.
        Do nothing if it doesn't exist.
        """
        if not os.path.exists ( self.executablePath ):
            logger.error("%s doesnt exist" % self.executablePath )
            return False
        import stat
        mode = stat.S_IRWXU | stat.S_IRWXG | stat.S_IXOTH | stat.S_IROTH
        os.chmod ( self.executablePath, mode )
        return True

if __name__ == "__main__":
    setLogLevel ( "debug" )
    tool = Pythia8Wrapper()
    tool.nevents=10
    logger.info("installed: " + str(tool.installDirectory()))
    logger.info("check: " + wrapperBase.ok(tool.checkInstallation()))
    logger.info("seconds per event: %d" % tool.secondsPerEvent)
    slhafile = "inputFiles/slha/simplyGluino.slha"
    slhapath = os.path.join ( installation.installDirectory(), slhafile )
    logger.info ( "slhafile: " + slhapath )
    output = tool.run(slhapath, unlink = True )
    #for i in output:
    #    print ( "%s" % i )
    logger.info ( "done: %s" % output )
Example #51
0
    def run( self, slhaFile, lhefile=None, unlink=True ):
        """
        Run pythia8.
        
        :param slhaFile: SLHA file
        :param lhefile: option to write LHE output to file; if None, do not write
                        output to disk. If lhe file exists, use its events for
                        xsecs calculation.
        :param unlink: clean up temporary files after run?
        :returns: List of cross sections
        
        """
        
        #Change pythia configuration file, if defined:
        if self.pythiacard:
            pythiacard_default = self.cfgfile
            self.cfgfile = self.pythiacard

        
        self.xsecs = {}
        logger.debug ( "wrapper.run()" )
        slha = self.checkFileExists(slhaFile)
        logger.debug ( "file check: " + slha )
        cfg = self.absPath(self.cfgfile)
        logger.debug("running with cfgfile " + str(cfg))
        lhefile = self.tempDirectory() + "/events.lhe"
        cmd = "%s -n %d -f %s -s %d -c %s -l %s" % \
             ( self.executablePath, self.nevents, slha, self.sqrts, cfg, lhefile )
        xmldoc = self.executablePath.replace ( "pythia8.exe", "xml.doc" )        
        logger.debug ( "exe path=%s" % self.executablePath )
        self.checkInstallation ( compile=True )
        if os.path.exists (xmldoc ):
            logger.debug ( "xml.doc found at %s." % xmldoc )
            with open ( xmldoc ) as f:
                xmlDir = f.read()
                toadd = os.path.join ( os.path.dirname ( xmldoc ) , xmlDir.strip() )
                logger.debug ( "adding -x %s" % toadd )
                cmd += " -x %s" % toadd
        logger.debug("Now running ''%s''" % str(cmd) )
        out = executor.getoutput(cmd)
        logger.debug ( "out=%s" % out )
        if not os.path.isfile(lhefile):
            raise SModelSError( "LHE file %s not found" % lhefile )
        lheF = open(lhefile,'r')
        lhedata = lheF.read()
        lheF.close()        
        os.remove(lhefile)        
        if not "<LesHouchesEvents" in lhedata:
            raise SModelSError("No LHE events found in pythia output")                
        if not unlink:
            tempfile = self.tempDirectory() + "/log"
            f = open( tempfile, "w")
            f.write (cmd + "\n\n\n")
            f.write (out + "\n")
            f.write (lhedata + "\n")
            f.close()
            logger.info ( "stored everything in %s" % tempfile )
            
        # Create memory only file object
        if sys.version[0]=="2":
            lhedata = unicode( lhedata )
        lheFile = io.StringIO(lhedata)
        ret = getXsecFromLHEFile(lheFile)            
        
        #Reset pythia card to its default value
        if self.pythiacard:
            self.cfgfile = pythiacard_default
        
        return ret
Example #52
0
    def printFastlimBanner ( self ):
        """ check if fastlim appears in data.
            If yes, print a statement to stdout. """
        if not self.hasFastLim: return
        # print ( "FastLim v1.1 efficiencies loaded. Please cite: arXiv:1402.0492, EPJC74 (2014) 11" )
        logger.info ( "FastLim v1.1 efficiencies loaded. Please cite: arXiv:1402.0492, EPJC74 (2014) 11" )
Example #53
0
def decompose(lhefile, inputXsecs=None, nevts=None, doCompress=False,
              doInvisible=False, minmassgap=-1. * GeV ):
    """
    Perform LHE-based decomposition. 

    :param lhefile: LHE file with e.g. pythia events, may be given as URL
                    (though http and ftp only)
    :param inputXsecs: xSectionList object with cross sections for the mothers
           appearing in the LHE file. If None, use information from file.
    :param nevts: (maximum) number of events used in the decomposition. If
                  None, all events from file are processed.
    :param doCompress: mass compression option (True/False)
    :param doInvisible: invisible compression option (True/False)
    :param minmassgap: minimum mass gap for mass compression (only used if
                       doCompress=True)
    :returns: list of topologies (TopologyList object) 
    
    """
    if lhefile.startswith("http") or lhefile.startswith("ftp"):
        logger.info ( "asked for remote lhefile %s. will fetch it." % lhefile )
        import requests
        import os.path
        r=requests.get(lhefile)
        if r.status_code != 200:
            logger.error ( "could not retrieve remote file %d: %s" % ( r.status_code, r.reason ) )
            sys.exit()
        basename = os.path.basename ( lhefile )
        f=open ( basename, "w" )
        f.write ( r.text )
        f.close()
        lhefile = basename

    if doCompress and minmassgap < 0. * GeV:
        logger.error("Asked for compression without specifying minmassgap. Please set minmassgap.")
        raise SModelSError()

    reader = lheReader.LheReader(lhefile, nevts)
    smsTopList = topology.TopologyList()
    # Get cross section from file (= event weight, assuming a common weight for
    # all events)
    if not inputXsecs:
        xSectionList = crossSection.getXsecFromLHEFile(lhefile,
                                                       addEvents=False)
    else:
        xSectionList = inputXsecs

    # Loop over events and decompose
    for event in reader:
        momPDG = tuple(sorted(event.getMom()))  # Get mother PDGs
        eventweight = xSectionList.getXsecsFor(momPDG)
        # Get event element
        newElement = elementFromEvent(event, eventweight)
        if not newElement:
            continue
        allElements = [newElement]
        # Perform compression
        if doCompress or doInvisible:
            allElements += newElement.compressElement(doCompress, doInvisible,
                                                      minmassgap)

        for el in allElements:
            el.sortBranches()            
            smsTopList.addElement(el)

    smsTopList._setElementIds()
    return smsTopList
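
A hedged usage sketch of the LHE-based decomposition above. The event file name is a placeholder, and GeV is assumed to come from the standard SModelS units module.

    from smodels.tools.physicsUnits import GeV   # import path assumed

    topologies = decompose("events.lhe", nevts=1000,
                           doCompress=True, doInvisible=True,
                           minmassgap=5.*GeV)     # required when doCompress=True
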