Example #1
def load_module(agent, modname):
    chron = time.time()
    succeeds = 1
    try:
        # PERSIST:INSTRUMENT
        report_instrumentation_point(agent, PRE_USER_MODULE_LOAD)
        mod = ensure_modpath_installed(Symbol(modname))
        if not mod:
            print "=== COULD NOT INSTALL MODULE %s ===" % modname
            succeeds = 0
        currMod = get_default_module()
        # check identity (is) here because we are reporting instrumentation points
        if mod is not currMod:
            set_default_module(mod)
            if not ensure_default_module_loaded(agent):
                print "=== COULD NOT LOAD MODULE %s ===" % modname
                succeeds = 0
            # PERSIST:INSTRUMENT
            report_instrumentation_point(agent, POST_USER_MODULE_LOAD)
    except SPARKException:
        errid = NEWPM.displayError()
    chron = time.time() - chron
    print "Total Loading Time: %3.4f seconds." % chron
    # This is a hack to keep the logs out of the console. A better solution is needed later.
    from spark.util.logger import get_sdl
    get_sdl().log("Default Module: %s", modname)
    return succeeds
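For context, here is a minimal sketch of how a caller might drive load_module; the loop, the helper name load_user_modules, and the module list are hypothetical, not part of the SPARK source above.

def load_user_modules(agent, modnames):
    # Hypothetical driver: load each module in order and stop at the
    # first failure, mirroring the 0/1 status that load_module returns.
    for modname in modnames:
        if not load_module(agent, modname):
            print "Aborting after failed load of %s" % modname
            return 0
    return 1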
Example #3
 def _createExceptionInfo(self):
     count = self.next_count
     self.next_count += 1
     result = ExceptionInfo(count)
     from spark.util.logger import get_sdl
     if get_sdl() is not None:
         get_sdl().logger.error(result.format_exception())
     self.log[count % self.maxLogLength] = result
     return result
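The self.log[count % self.maxLogLength] write gives the exception log ring-buffer behavior: once the counter passes maxLogLength, new entries overwrite the oldest slots. A self-contained sketch of the same indexing idea (RingLog and its members are illustrative names, not SPARK types):

class RingLog(object):
    def __init__(self, capacity):
        self.capacity = capacity
        self.slots = {}          # slot index -> most recent entry for that slot
        self.next_count = 0      # monotonically increasing sequence number

    def record(self, entry):
        count = self.next_count
        self.next_count += 1
        # Entry N lands in slot N % capacity, overwriting entry N - capacity.
        self.slots[count % self.capacity] = entry
        return count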
Example #4
 def __init__(self, *args):
     GoalMetaEvent.__init__(self, *args)
     if isinstance(args[0], DoEvent):
         # TODO: clean this up - this is a hack - DNM
         try:
             reason = str(args[1].getFailureValue())
         except AttributeError:
             reason = str(args[1])
         get_sdl().logger.debug("action failed: [%s]\n\treason: %s", args[0]._symbol, reason)
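The try/except AttributeError around getFailureValue() is plain duck typing: args[1] may be a structured failure object or any other value, and the fallback simply stringifies it. The same pattern in isolation (describe_failure is an illustrative name, not a SPARK helper):

def describe_failure(value):
    # Prefer the structured failure value when the object provides one;
    # otherwise fall back to the plain string form of whatever was passed.
    try:
        return str(value.getFailureValue())
    except AttributeError:
        return str(value)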
Example #7
 def __init__(self, *args):
     GoalMetaEvent.__init__(self, *args)
     if isinstance(args[0], DoEvent):
         get_sdl().logger.debug("action succeeded: [%s]", args[0]._symbol)
Example #8
 def __init__(self, *args):
     ProcedureEvent.__init__(self, *args)
     from spark.lang.meta_aux import getProcedureName
     reason = str(args[1].getFailureValue())
     get_sdl().logger.debug("procedure failed [%s]...\n\treason: %s",
                            getProcedureName(args[0]), reason)
Example #9
 def __init__(self, *args):
     ProcedureEvent.__init__(self, *args)
     from spark.lang.meta_aux import getProcedureName
     get_sdl().logger.debug("Procedure Succeeded: %s",
                            getProcedureName(args[0]))
Example #11
def init_spark(**parameters):
    """initialization function for all of SPARK. properties that must be set
    before rest of SPARK starts, as well as actions such as persistence/resume
    operations that must execute first, should be placed/initialized here. The
    current recognized parameters are 'persist' and 'resume'"""
    global _resumeState, _persistState, _isInteractive, _persistIntentions
    persistDir = None
    if 'resume' in parameters:
        _resumeState = parameters['resume']
    if 'persist' in parameters:
        if not PERSIST_SPARK:
            print "SPARK persistence is currently disabled"
            _persistState = False
            _persistIntentions = False
        else:
            _persistState = parameters['persist']
            _persistIntentions = _persistState  #by default, same as persist
    if 'persistIntentions' in parameters:
        _persistIntentions = parameters['persistIntentions']
    if 'persistDir' in parameters:
        persistDir = parameters['persistDir']
        if persistDir is not None and persistDir.startswith('"'):
            persistDir = persistDir[1:-1]
    if 'interactive' in parameters:
        _isInteractive = bool(parameters['interactive'])
        if _isInteractive != _ISINTERACTIVE_DEFAULT:
            print "SPARK: overriding interactive-mode setting,", _isInteractive

    #we don't need to tell persist.py about the persistIntentions parameter --
    #it gets read by the CurrentlyIntended class instead
    set_persist_params(_persistState, persistDir)
    set_resume_params(_resumeState, _isInteractive)

    # Initializing the SDL
    if 'logParams' in parameters:
        initial_sdl(parameters['logParams'])
    else:
        initial_sdl()
    get_sdl().logger.info("Parameters: %s", parameters)
    #parameters have been initialized, so their values can be checked
    global _initialized
    _initialized = True

    from spark.internal.parse.newparse import init_builtin
    init_builtin()
    #from spark.lang.builtin import install_builtin   # TODO: make this usable
    #install_builtin()                                #

    #PERSIST
    if _persistState or _resumeState:
        from spark.internal.version import VERSION
        persistVersion = get_persisted_version_number()
        if _resumeState and persistVersion is None:
            # Resuming, but no persist directory exists
            print "WARNING: No persist state to resume from"
        elif _resumeState and persistVersion == VERSION:
            # Resuming, and valid persist directory exists
            resume_sources()
        else:
            # Persisting but not resuming OR invalid persistVersion
            #REMOVAL OF PREVIOUSLY PERSISTED STATE - we currently use
            # the versioning mechanism to wipe older files when we
            # update process models, as we do not have a merging
            # mechanism yet
            if persistVersion and persistVersion != VERSION and _resumeState:
                print "WARNING: SPARK cannot resume persisted files with version ID [%s]. \nSPARK's current version ID is [%s]. " % (
                    persistVersion, VERSION)
                if _isInteractive:
                    print "The persisted files will be removed if SPARK continues loading."
                    print " * Type 'q' to quit loading SPARK (persisted state will not be altered)"
                    print " * Hit <enter> to continue (persisted state will be deleted)"
                    option = raw_input(">> ")
                    if option.startswith('q'):
                        import sys
                        sys.exit(-1)
                    print "Continuing SPARK agent resume"
                else:
                    print "SPARK is removing the previously persisted state in order to continue loading."
            # Give the agent a blank slate with which to record new
            # persisted data
            remove_persisted_files()
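Based on the parameters init_spark inspects, a hypothetical startup call might look like the following; the values are placeholders, and a persistDir wrapped in double quotes has them stripped by the function above.

# Hypothetical startup: resume previously persisted state if present
# and keep persisting new state, without interactive prompts.
init_spark(persist=True,
           resume=True,
           interactive=False,
           persistDir='"/var/spark/persist"')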