Example #1
0
def initial_sdl(logParams={}):
    global sdlogger
    name = "SDL"

    if not logParams.has_key('sparkhome'):
        from spark.util.misc import getSparkHomeDirectory
        logParams['sparkhome'] = getSparkHomeDirectory()

    if not logParams.has_key('logdir'):
        # Create log directory if not present
        logParams['logdir'] = os.path.join(logParams['sparkhome'], "log")

    #keep logging.config happy since it expects string literals
    logParams['logdir'] = logParams['logdir'].replace("\\", "\\\\")

    #    print "logdir=", logParams['logdir']
    #    print "sparkhome=", logParams['sparkhome']
    from spark.internal.persist_aux import ensure_dir
    ensure_dir(logParams['logdir'])
    #
    try:
        import logging.config
        config_log_file = os.path.join(logParams['sparkhome'], "config",
                                       "logger.cfg")

        if os.path.isfile(config_log_file):
            logging.config.fileConfig(config_log_file, logParams)
        else:
            # Use default setting if it can not find the config file
            print "Could not find %s. Using default logger configuration..." % config_log_file
            logger = logging.getLogger(name)
            sparklog = os.path.join(logParams['logdir'], 'spark.log')
            #print "sparklog=", sparklog
            hdlr = logging.FileHandler(sparklog)
            formatter = logging.Formatter(
                '%(created)f [%(thread)d] %(levelname)s %(name)s - %(message)s'
            )
            hdlr.setFormatter(formatter)
            logger.addHandler(hdlr)
        print "%s initiated." % name
    except AnyException, e:
        errid = NEWPM.displayError()
        print "Error initiating %s." % e
Example #2
0
 def __init__(self, location, extra = (), fn = None):
     """Create (or resume) an XML execution-trace log.

     location -- log directory; relative paths are resolved against
                 get_persist_root_dir().
     extra    -- iterable of (key, value) pairs written as extra
                 top-level XML elements.
     fn       -- existing log file name to reopen; when None (or the
                 file does not exist) a new timestamped file is created.
     """
     ConstructibleValue.__init__(self)
     self._location = location
     ensure_dir(os.path.abspath(location))
     self.name  = "ExecutionTracer"
     self._desc = "ExecutionTracer"
     # NOTE(fix): the original comment here said "if it is not an
     # absolute path", contradicting the isabs() test below.
     if os.path.isabs(location):
         # Absolute path: use as given.
         self.abs_location = location
     else:
         # Relative path: resolve against the persistence root.
         self.abs_location = os.path.join(get_persist_root_dir(), location)
     ensure_dir(self.abs_location)
     try:
         if fn is not None and os.path.exists(os.path.join(self.abs_location, fn)):
             # Reopen the old file.
             resuming = True
             self._filename = fn
             ffn = os.path.join(self.abs_location, fn)
         else:
             # Create a new timestamped file.
             resuming = False
             self._filename = "sparklog_%04d%02d%02dT%02d%02d%02d.xml"%time.localtime()[0:6]
             ffn = os.path.join(self.abs_location, self._filename)
     except Exception:
         # Narrowed from a bare except so SystemExit/KeyboardInterrupt
         # are not swallowed.
         ConstructibleValue.__init__(self)
         NEWPM.displayError()
         debug("Error constructing the log file name.")
         return
     try:
         # Open in append mode: resuming an old file keeps its content.
         self.descriptor = FileWrapper(ffn, "a")
         self.writeLog("<?xml version=\"1.0\" ?>\n")
         self.writeLog("<ExecutionTracer>\n")
         if not resuming:
             global machineName
             self.writeLog("\t<Initiator>TaskManager</Initiator>\n")
             self.writeLog("\t<TimeZone>%s</TimeZone>\n"%time.tzname[0])
             self.writeLog("\t<MachineName>%s</MachineName>\n"%machineName)
         for x in extra:
             key, value = x
             self.writeLog("\t<%s>%s</%s>\n"%(key, value, key))
     except Exception:
         NEWPM.displayError()
         debug("Error initiating the log file.")
Example #3
0
def initial_sdl(logParams={}):
    global sdlogger
    name = "SDL"
    
    if not logParams.has_key('sparkhome'):
        from spark.util.misc import getSparkHomeDirectory
        logParams['sparkhome'] = getSparkHomeDirectory()
    
    if not logParams.has_key('logdir'):
        # Create log directory if not present
        logParams['logdir'] = os.path.join(logParams['sparkhome'], "log")
    
    #keep logging.config happy since it expects string literals
    logParams['logdir'] = logParams['logdir'].replace("\\", "\\\\")
    
#    print "logdir=", logParams['logdir']
#    print "sparkhome=", logParams['sparkhome']
    from spark.internal.persist_aux import ensure_dir
    ensure_dir(logParams['logdir'])
    #
    try:
        import logging.config
        config_log_file = os.path.join(logParams['sparkhome'], "config", "logger.cfg")
        
        if os.path.isfile(config_log_file):
            logging.config.fileConfig(config_log_file, logParams)
        else:
            # Use default setting if it can not find the config file
            print "Could not find %s. Using default logger configuration..."%config_log_file
            logger = logging.getLogger(name)
            sparklog = os.path.join(logParams['logdir'], 'spark.log')
            #print "sparklog=", sparklog
            hdlr = logging.FileHandler(sparklog)
            formatter = logging.Formatter('%(created)f [%(thread)d] %(levelname)s %(name)s - %(message)s')
            hdlr.setFormatter(formatter)
            logger.addHandler(hdlr)
        print "%s initiated."%name
    except AnyException, e:
        errid = NEWPM.displayError()
        print "Error initiating %s."%e