Example #1
import logging
import logging.config
import os


def initial_sdl(logParams=None):
    global sdlogger
    name = "SDL"
    # Avoid the mutable-default-argument pitfall: the original default of
    # {} would be shared and mutated across calls.
    if logParams is None:
        logParams = {}

    if 'sparkhome' not in logParams:
        from spark.util.misc import getSparkHomeDirectory
        logParams['sparkhome'] = getSparkHomeDirectory()

    if 'logdir' not in logParams:
        # Default the log directory to <sparkhome>/log
        logParams['logdir'] = os.path.join(logParams['sparkhome'], "log")

    # Keep logging.config happy: escape backslashes, since these values
    # are interpolated into the config file as string literals
    logParams['logdir'] = logParams['logdir'].replace("\\", "\\\\")

    from spark.internal.persist_aux import ensure_dir
    ensure_dir(logParams['logdir'])

    try:
        config_log_file = os.path.join(logParams['sparkhome'], "config",
                                       "logger.cfg")

        if os.path.isfile(config_log_file):
            # logParams serves as the 'defaults' dict for %(...)s
            # substitution inside logger.cfg
            logging.config.fileConfig(config_log_file, logParams)
        else:
            # Fall back to default settings if the config file cannot be found
            print("Could not find %s. Using default logger configuration..."
                  % config_log_file)
            logger = logging.getLogger(name)
            sparklog = os.path.join(logParams['logdir'], 'spark.log')
            hdlr = logging.FileHandler(sparklog)
            formatter = logging.Formatter(
                '%(created)f [%(thread)d] %(levelname)s %(name)s - %(message)s'
            )
            hdlr.setFormatter(formatter)
            logger.addHandler(hdlr)
        print("%s initiated." % name)
    except AnyException as e:
        # AnyException and NEWPM are SPARK-internal error-handling helpers,
        # imported elsewhere in the original module
        errid = NEWPM.displayError()
        print("Error initiating %s: %s" % (name, e))
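Because fileConfig receives logParams as its defaults dictionary, the keys set above (sparkhome, logdir) can be referenced as %(...)s placeholders inside logger.cfg; this is also why the backslashes are escaped before the call. SPARK's actual logger.cfg is not shown on this page, but a minimal hypothetical configuration in the INI format that logging.config.fileConfig expects would look like:

# A hypothetical minimal logger.cfg; %(logdir)s is filled in from the
# logParams dict passed to fileConfig as defaults.
[loggers]
keys=root,SDL

[handlers]
keys=fileHandler

[formatters]
keys=simpleFormatter

[logger_root]
level=WARNING
handlers=fileHandler

[logger_SDL]
level=DEBUG
handlers=fileHandler
qualname=SDL
propagate=0

[handler_fileHandler]
class=FileHandler
level=DEBUG
formatter=simpleFormatter
args=('%(logdir)s/spark.log', 'a')

[formatter_simpleFormatter]
format=%(created)f [%(thread)d] %(levelname)s %(name)s - %(message)s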
Example #2
import os
import sys

from spark.util.misc import getSparkHomeDirectory

# To fix a problem with running Python under Emacs on Windows
sys.stderr = sys.stdout

runAgent = None
defaultModule = None
exitcode = 0

_opts = None
def getConfig():
    "Return the ConfigParser object containing the SPARK configuration"
    return _opts

DEFAULT_DEFAULT_MODULE = "spark.lang.builtin"

# 'defaults' used in configuration file reading; ConfigParser makes these
# available as %(...)s substitutions in the configuration file.
# Note: os.getlogin() can raise OSError when there is no controlling
# terminal, so the 'user' lookup may need a try/except in practice.
_CONFIG_DEFAULTS = {'sparkhome' : getSparkHomeDirectory(),
                    'logdir'    : '',
                    'user'      : (os.environ.get('LOGNAME') or
                                   os.getlogin() or
                                   ''),
                    'home'      : (os.environ.get('HOME') or
                                   os.environ.get('USERPROFILE') or
                                   ''),
                    'cwd'       : os.getcwd()}


# Define section and option constants
SPARK = "spark"
DEFAULTS = "defaults"
PERSIST_STATE = "persistState"
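These defaults are handed to ConfigParser, so a SPARK configuration file can refer to them as %(sparkhome)s, %(user)s, and so on. A minimal sketch of that interpolation mechanism, using the stdlib configparser and invented file contents (the [spark] section name matches the SPARK constant above, but the option names and values are hypothetical):

import configparser
import io

# Hypothetical configuration text; %(sparkhome)s and %(user)s are
# resolved from the defaults dict passed to ConfigParser below.
SAMPLE = """
[spark]
persistState = true
persistdir = %(sparkhome)s/persist/%(user)s
"""

cp = configparser.ConfigParser({'sparkhome': '/opt/spark', 'user': 'alice'})
cp.read_file(io.StringIO(SAMPLE))
print(cp.get('spark', 'persistdir'))  # -> /opt/spark/persist/alice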
Example #3
import os

from spark.util.misc import getSparkHomeDirectory

exitcode = 0

_opts = None


def getConfig():
    "Return the ConfigParser object containing the SPARK configuration"
    return _opts


DEFAULT_DEFAULT_MODULE = "spark.lang.builtin"

#'defaults' used in configuration file reading
_CONFIG_DEFAULTS = {
    'sparkhome': getSparkHomeDirectory(),
    'logdir': '',
    'user': (os.environ.get('LOGNAME') or os.getlogin() or ''),
    'home': (os.environ.get('HOME') or os.environ.get('USERPROFILE') or ''),
    'cwd': os.getcwd(),
}

# Define section and option constants
SPARK = "spark"
DEFAULTS = "defaults"
PERSIST_STATE = "persistState"
PERSIST_DIR = "persistdir"
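These constants would be used together with getConfig() when reading settings back out. A hypothetical lookup, assuming SPARK's startup code has populated _opts with a parsed ConfigParser, might be:

# Hypothetical usage of the constants defined above; assumes _opts has
# already been populated by the configuration-loading code.
cfg = getConfig()
if cfg is not None and cfg.has_option(SPARK, PERSIST_STATE):
    persist_state = cfg.getboolean(SPARK, PERSIST_STATE)
    persist_dir = cfg.get(SPARK, PERSIST_DIR)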