def decodeXMLValue(xmlValue):
    "Converts a python value returned by xmlrpc to a SPARK value"
    if isinstance(xmlValue, UNCHANGED_TYPES):
        return xmlValue
    elif xmlValue is None:
        return sparkNULL()
    elif isinstance(xmlValue, types.DictType):
        functor = xmlValue.get(FUNCTOR)
        if functor is not None:
            args = xmlValue.get(ARGS)
            if args is None:
                raise LowError("Missing %r element for structure" % ARGS)
            return Structure(Symbol(functor), decodeXMLValues(args))
        sym = xmlValue.get(SYM)
        if sym is not None:
            return Symbol(sym)
        var = xmlValue.get(VAR)
        if var is not None:
            return Variable(var)
        # default
        return dictMap(xmlValue, decodeXMLValue)
    elif isinstance(xmlValue, (types.ListType, types.TupleType)):
        return decodeXMLValues(xmlValue)
    else:
        raise LowError("Cannot convert value of type %r from XML" % xmlValue.__class__)
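# A hedged sketch of the decoding conventions above (FUNCTOR, ARGS, SYM,
# and VAR are the marker keys this module defines; their literal key
# strings are not repeated here). Dicts carrying a functor decode to
# structures, plain dicts decode to maps via dictMap, and lists/tuples
# decode element-wise:
#
#   decodeXMLValue({FUNCTOR: "f", ARGS: [1, 2]})  => Structure(Symbol("f"), (1, 2))
#   decodeXMLValue({SYM: "a.b.c"})                => Symbol("a.b.c")
#   decodeXMLValue({VAR: "$x"})                   => Variable("$x")
#   decodeXMLValue([1, "two"])                    => decodeXMLValues([1, "two"])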
def get_persist_modpath_load_order(agent):
    filename = os.path.join(get_persist_root_dir(), agent.name + '.loadOrder')
    if not os.path.exists(filename):
        return []                       # no persisted data
    f = open(filename, 'r')
    order = []
    try:
        l = f.readline().rstrip()
        while l:
            data = l.split(':')
            l = f.readline().rstrip()
            modpathname = data[1].rstrip()
            if data[0] == 'load':
                order.append(('load', Symbol(modpathname), None))
            elif data[0] == 'string':
                order.append(('string', Symbol(modpathname), string.atoi(data[2])))
    finally:
        f.close()
    #console_debug("MODPATH LOAD ORDER FOR %s: %s", agent.name, str(order))
    #clear the filename: cannot guarantee same load order on resume due to reloadProcessModels
    #os.remove(filename)
    return order
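# A hedged sketch of the .loadOrder file format the reader above accepts:
# one colon-separated record per line, where the third field (an integer,
# parsed with string.atoi) is present only on 'string' records. The module
# names below are illustrative:
#
#   load:spark.lang.builtin
#   string:some.module.name:3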
def icl_to_value(icl):
    "Map an ICL object to a value"
    if icl.isVar():
        return Variable(ICL_CONSTRUCTOR.asString(icl))
#         name = iclVarName(icl)
#         if name.startswith('_'):
#             return OAAVar('$' + name[1:])
#         else:
#             return OAAVar('$' + name)
    elif icl.isInt():
        i = icl.toLong()
        try:
            return int(i)
        except:
            return i
    elif icl.isList():
        list = []
        for elt in icl.listIterator():
            list.append(icl_to_value(elt))
        return tuple(list)
    elif icl.isStruct():
        functor = getFunctor(icl)
        args = [icl_to_value(x) for x in icl.iterator()]
        if functor in SPECIAL_FUNCTORS:
            if len(args) == 1:
                arg = args[0]
                if functor is REF_FUNCTOR:
                    if isinstance(arg, types.IntegerType):
                        obj = getObject(arg)
                        if obj is not None:
                            return obj
                        else:
                            err = "Referenced object no longer exists: %r"
                    else:
                        err = "Reference functor must take an integer argument: %r"
                elif isinstance(arg, basestring):
                    if functor == ATOM_FUNCTOR:
                        return Symbol(arg)
                    else:
                        return Symbol(arg).structure()
                else:
                    err = "Special functor must take a string argument: %r"
            else:
                err = "Special functor must take exactly one argument: %r"
        else:
            return Structure(Symbol(functor), args)
    elif icl.isStr():
        return icl.toUnquotedString()
    elif icl.isFloat():
        return icl.toFloat()
#     elif icl.isIclDataQ():
#         return icl
    elif icl.isIclDataQ():
        # converting IclDataQ to string
        return str(String(icl.getData()))
    else:
        err = "Unknown ICL type: %r"
    raise InvalidICLError(err % icl)
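# A hedged summary of the ICL-to-SPARK mapping implemented above:
#
#   ICL variable  -> Variable          ICL string   -> unquoted Python string
#   ICL integer   -> int (or long)     ICL float    -> float
#   ICL list      -> tuple             ICL IclDataQ -> str of its data
#   ICL structure -> Structure, except for SPECIAL_FUNCTORS: the reference
#                    functor resolves to a live object via getObject, and
#                    ATOM_FUNCTOR yields a Symbol; anything else raises
#                    InvalidICLError.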
def dictMap(d, fun=_identity):
    "Convert a dict to a map (applying fun to each component except the type)."
    fname = d.get(_TYPE)
    items = d.items()
    items.sort()
    args = [Symbol(k + ":").structure(fun(v))
            for (k, v) in items if k != _TYPE]
    if fname is None:
        return Structure(_MAP_FUNCTOR, args)
    else:
        return Structure(Symbol(fname), args)
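# A hedged example of the conversion above, assuming _TYPE is the reserved
# type key: keys are sorted, each key k becomes a "k:" keyword structure,
# and mapDict (defined alongside in this module) inverts the conversion
# for well-formed maps:
#
#   dictMap({"b": 2, "a": 1})
#   => Structure(_MAP_FUNCTOR, (Symbol("a:").structure(1),
#                               Symbol("b:").structure(2)))
#   dictMap({"a": 1, _TYPE: "point"})  => Structure(Symbol("point"), ...)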
def defprocedure_exprs_name(agent, exprs):
    if not isList(exprs):
        raise AssertionError
    if len(exprs) != 1:
        raise AssertionError
    pfe = exprs[0]
    if pfe.functor != Symbol('defprocedure{}'):
        raise LocatedError(pfe, "must be {defprocedure ...}")
    return pfe[0].asValue()
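# A hedged sketch of the input this expects: exprs holds exactly one
# {defprocedure ...} expression whose first element is the procedure name.
# The cue:/body: keywords below are illustrative of SPARK-L procedure
# syntax, not checked by this function:
#
#   {defprocedure "My Procedure"
#    cue: [do: (myAction $x)]
#    body: [...]}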
def load_module(agent, modname):
    chron = time.time()
    succeeds = 1
    try:
        #PERSIST:INSTRUMENT
        report_instrumentation_point(agent, PRE_USER_MODULE_LOAD)
        mod = ensure_modpath_installed(Symbol(modname))
        if not mod:
            print "=== COULD NOT INSTALL MODULE %s ===" % modname
            succeeds = 0
        currMod = get_default_module()
        # check for equality here because we are reporting instrumentation points
        if mod is not currMod:
            set_default_module(mod)
            if not ensure_default_module_loaded(agent):
                print "=== COULD NOT LOAD MODULE %s ===" % modname
                succeeds = 0
        #PERSIST:INSTRUMENT
        report_instrumentation_point(agent, POST_USER_MODULE_LOAD)
    except SPARKException:
        errid = NEWPM.displayError()
    chron = time.time() - chron
    print "Total Loading Time: %3.4f seconds." % chron
    #-*-*-*-*-*-* This is a hack to remove the logs from the console.
    # A better solution is needed later.
    from spark.util.logger import get_sdl
    get_sdl().log("Default Module: %s", modname)
    return succeeds
def builtin_evaluate(agent, value):
    if isSymbol(value):
        raise AssertionError, "A naked symbol is not evaluable"
    if isString(value):
        return value
    elif isList(value):
        elements = [builtin_evaluate(agent, v) for v in value]
        return List(elements)
    elif isInteger(value) or isFloat(value):
        return value
    elif isStructure(value):
        sym = value.functor
        if sym == BACKQUOTE_SYMBOL:
            return builtin_quoted(agent, value[0])
        else:
            argvalues = [builtin_evaluate(agent, v) for v in value]
            fullsym = Symbol(BUILTIN_PACKAGE_NAME + "." + sym.id)
            imp = agent.getImp(fullsym)
            #if not isinstance(imp, FunImpInt):
            #    raise AssertionError, "Not a function: %s" % sym
            b, z = valuesBZ(argvalues)
            result = imp.call(agent, b, z)
            return result
    else:
        return value
def solve(agent, predString, argList):
    imp = agent.getImp(Symbol(predString))
    b, z = optBZ(argList)
    solutions = []
    for solved in imp.solutions(agent, b, z):
        if solved:
            solutions.append([termEvalEnd(agent, b, zi) for zi in z])
    return tuple(solutions)
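# A hedged usage sketch: each element of the returned tuple is one
# solution, itself a list with one evaluated value per argument position.
# The predicate name below is illustrative:
#
#   for solution in solve(agent, "some.module.pred", argList):
#       ...  # solution[i] is the value bound at argument position i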
def generateBinFile(spu):
    compfile = getPickledFilename(spu.filename)
    binFile = open(compfile, "wb")
    sourcefile = must_find_file(Symbol(spu.filename))
    modiftime = os.path.getmtime(sourcefile)
    cPickle.dump(modiftime, binFile, -1)
    pickledString = cPickle.dumps(spu, -1)
    compressedString = zlib.compress(pickledString, 1)
    cPickle.dump(compressedString, binFile, -1)
    binFile.close()
    return compfile
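# A minimal reader for the layout written above (a sketch mirroring
# generateBinFile, not a public API; the helper name is hypothetical).
# The file holds two consecutive pickles: the source's mtime, then a
# zlib-compressed pickle of the spu itself.
def _readBinFileSketch(compfile):
    binFile = open(compfile, "rb")
    try:
        modiftime = cPickle.load(binFile)         # float mtime of the source
        compressedString = cPickle.load(binFile)  # zlib-compressed pickle
    finally:
        binFile.close()
    return modiftime, cPickle.loads(zlib.decompress(compressedString))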
def isBinFileUpdate(source):
    try:
        global BINARY_EXTENSION
        sourcefile = must_find_file(Symbol(source))
        compfile = sourcefile[:-6] + "." + BINARY_EXTENSION
        modiftime1 = os.path.getmtime(sourcefile)
        if not os.path.exists(compfile):
            return False
        binFile = open(compfile, "rb")
        modiftime2 = cPickle.load(binFile)
        binFile.close()
        return modiftime1 == modiftime2
    except IOError, e:
        print "Error reading %s" % source
        return False
def ppl_term(agent, expr, map, output):
    value = map.meval(agent, expr)
    # TODO: This assumes that variables are never rebound by failure
    if value is not None:
        return value_km_str(value)
    if isinstance(expr, NonAnonVarPatExpr):
        return map.mapfind(Symbol(expr.varid), "v")
    elif isinstance(expr, ConstantPatExpr):
        return value_km_str(expr.get_value())
    elif isinstance(expr, ListPatExpr):
        elements = [ppl_term(agent, elt, map, output) for elt in expr]
        return "(list " + " ".join(elements) + ")"
    else:
        return "(sparkeval " + value_str(str(expr)) + ")"
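# Hedged examples of the KM term strings produced above:
#
#   evaluable expression -> value_km_str of its value
#   unbound variable $x  -> the id map.mapfind assigns under prefix "v"
#   list pattern         -> "(list <elt1> <elt2> ...)"
#   anything else        -> '(sparkeval "<printed SPARK expression>")'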
def _load_default_module():
    dmFilename = os.path.join(get_persist_root_dir(), "defaultModule")
    if not os.path.exists(dmFilename):
        return
    f = open(dmFilename, 'r')
    try:
        defaultModulename = f.read().rstrip()
    finally:
        f.close()
    if len(defaultModulename) == 0:
        return
    #debug("(persist) default module is %s", defaultModulename)
    defaultModule = ensure_modpath_installed(Symbol(defaultModulename))
    if defaultModule is None:
        raise Exception("Persistence could not properly load default module '%s'"
                        % defaultModulename)
    from spark.internal.parse.processing import set_default_module
    set_default_module(defaultModule)
def __init__(self, task_icl):
    from spark.io.oaa import icl_to_value, iterator_to_zexpr
    ObjectiveEvent.__init__(self, None)
    if not isinstance(task_icl, IclStruct):
        raise AssertionError
    if task_icl.iclFunctor() != "task":
        raise AssertionError
    if task_icl.size() != 4:
        raise AssertionError
    [ikind_string, iact_name, itask_args, imods] = [x for x in task_icl.iterator()]
    if icl_to_value(ikind_string) != "Do":
        raise AssertionError
    if not isinstance(iact_name, IclStr):
        raise AssertionError
    act_name = icl_to_value(iact_name)
    self._symbol = Symbol(act_name)
    if not isinstance(itask_args, IclList):
        raise AssertionError
    self._expr = iterator_to_zexpr(itask_args.listIterator())
#     from spark.symbol import Symbol, isSymbol
#     self._symbol = Symbol("spark.common.print")
#     expr = ListPatExpr(None,
#                        ConstantPatExpr(None, "MESSAGE %s"),
#                        ListPatExpr(None, ConstantPatExpr(None, 2)))
    self._expr.process(())
    self._result = None
    self._bindings = DictBindings(())
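# A hedged sketch of the ICL task term the assertions above accept
# (inferred from the checks, not from a published grammar):
#
#   task("Do", "<fully.qualified.action>", [Arg1, ..., ArgN], Mods)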
def ppl_translate_task(agent, task, map, output, taskid):
    debug("Start ppl_translate_task %s", task)
    if isinstance(task, DoTaskExpr):
        actexpr = task[0]               # was .actexpr
        # actsym = actexpr.actsym # doesn't work for cue as cue is not processed
        actsym = actexpr.term.key_term().sym_check_arity(ActSymInfo, actexpr)
        #print "ACTSYM", actsym
        if map.mapfirst(actsym.id, "taskname"):
            # first occurrence of this task name
            output.append(("superclasses", actsym.id, "Task"))
        if taskid is None:
            # We haven't been told to use a different id (e.g. procedurename)
            taskid = map.mapfind(task, "t")
        output.append(("instance-of", taskid, actsym.id))
        argnames = requiredargnames(actsym)  # doesn't handle rest args
        argtypefacts = agent.factslist1(P_ArgTypes, actsym)
        #print "argtypefacts", argtypefacts
        if argtypefacts:
            for argexpr, type in zip(actexpr, argtypefacts[0][1]):
                argid = ppl_term(agent, argexpr, map, output)
                output.append(("instance-of", argid, type.id))
        rolefacts = agent.factslist1(P_Roles, actsym)
        #print "rolefacts", rolefacts
        role_dict = {}
        if rolefacts:
            for role in rolefacts[0][1]:
                varsym = role[0]
                rolesym = role.functor
                role_dict[varsym] = rolesym
        for (argexpr, namesym, argnum) in zip(actexpr, argnames, range(len(actexpr))):
            role = role_dict.get(namesym, None)
            if role is None:
                role = Symbol("arg%d" % argnum)
            argid = ppl_term(agent, argexpr, map, output)
            output.append((role.id, taskid, argid))
        return taskid
    else:
        pass  # TODO: work out what this should really be
def jd_test(to_test, **varvalues):
    from java.lang import String
    from java.util import ArrayList
    try:
        test = TestTestExpr(to_test, Symbol(get_modname()), varvalues)
        testagent.add_test(test)
        result = test.wait_result(1)    # should not wait too long
        if result is None:
            raise AssertionError
        sols = ArrayList()
        for sol in test.result:
            solList = ArrayList()
            for val in sol:
                solList.add(String(value_str(val)))
            sols.add(solList)
        varids = ArrayList()
        for varid in test.outvarids:
            varids.add(String(varid))
        res = ArrayList()
        res.add(varids)
        res.add(sols)
        return res
    except LocatedError, err:
        return err
    if icl is None:
        if string is None:
            raise LowError("At least one argument needs to be bound")
        else:
            return (IclTerm.fromString(True, string), string)
    else:
        if string is None:
            return (icl, str(icl))
        elif str(icl) == string:
            return (icl, string)
        else:
            return None

SYM_generateIclTerm = Symbol("spark.io.oaa.generateIclTerm")
SYM_applyfun = Symbol("applyfun")
QUOTED_GENERATEICLTERM = BACKQUOTE_SYMBOL.structure(SYM_generateIclTerm)
SYM_oaavar = Symbol("spark.io.oaa.oaavar")
QUOTED_OAAVAR = BACKQUOTE_SYMBOL.structure(SYM_oaavar)

def icl_inverse_eval(icl):
    string = icl_string(icl, None)[1]
    return SYM_applyfun.structure(QUOTED_GENERATEICLTERM, string)

def icl_append_value_str(icl, buf):
    buf.append(",")
    return append_value_str(icl_inverse_eval(icl), buf)
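# A hedged example of the inverse-eval encoding above: the ICL term is
# rendered to its string form and wrapped so that evaluating the result
# in SPARK regenerates the original term:
#
#   icl_inverse_eval(someIclTerm)
#   => (applyfun `spark.io.oaa.generateIclTerm "<someIclTerm as a string>")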
def getPickledFilename(source):
    global BINARY_EXTENSION
    sourcefile = must_find_file(Symbol(source))
    compfile = sourcefile[:-6] + "." + BINARY_EXTENSION
    return compfile
def _s(x):
    return Symbol(BUILTIN_PACKAGE_NAME + "." + x)
def anon(self, name="$_"):
    return Symbol(name)
def __init__(self, decl):
    Imp.__init__(self, decl)
    self._detindices_lists = ()
    self._det_check_all = False
    self._idsym = Symbol(self.symbol.id)
def ground(self, name, value):
    # bound variable
    return Symbol(name)
from spark.internal.parse.basicvalues import Symbol, Structure, Variable, Boolean, List
from spark.internal.parse.basicvalues import isStructure, isList, isNumber, isBoolean, isString, isVariable, isSymbol
from spark.internal.exception import RuntimeException, LowError
import thread
import socket
import types

debug = DEBUG(__name__).on()

from xmlrpclib import ServerProxy, Fault, METHOD_NOT_FOUND, INTERNAL_ERROR, ProtocolError, TRANSPORT_ERROR
from SimpleXMLRPCServer import SimpleXMLRPCServer
from SocketServer import ThreadingMixIn

################################################################

PACKAGE = "spark.io.xmlrpc"
S_XMLRPC = Symbol(PACKAGE + ".XMLRPC")
# S_requestCallback = Symbol(PACKAGE + ".requestCallback")

################################################################
# IMPLEMENTS NON-PERSISTABLE SERVERS

class MyServer(ThreadingMixIn, SimpleXMLRPCServer):
    __slots__ = ["agent"]

    def __init__(self, agent, port):
        self.agent = agent
        SimpleXMLRPCServer.__init__(self, ("127.0.0.1", port))

    def _dispatch(self, method, params):
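        # SimpleXMLRPCServer calls _dispatch with the request's method
        # name and a params tuple. A hedged client-side sketch using the
        # standard xmlrpclib API (the method name is illustrative):
        #
        #   proxy = ServerProxy("http://127.0.0.1:%d" % port)
        #   proxy.someMethod(arg1, arg2)  # arrives here as
        #                                 # _dispatch("someMethod", (arg1, arg2))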
from spark.pylang.defaultimp import Structure, isStructure
from spark.pylang.defaultimp import ObjectiveEvent, SparkEvent
from spark.pylang.implementation import ActImpInt, PredImpInt
from spark.lang.meta_aux import find_first_taskexpr, find_next_taskexpr

def pi_task_structure(agent, pi):
    if not isinstance(pi, ProcedureTFrame):
        raise AssertionError
    proc = pi.procedure()
    prefix = pi.name() + "|"
    return procedure_task_structure(agent, proc, prefix)

AVG_EXEC_TIME = Symbol("spark.lang.temporal.avgExecTime")
AVG_RECOVERY_TIME = Symbol("spark.lang.temporal.avgRecoveryTime")
DEADLINE = Symbol("spark.lang.temporal.Deadline")
PROPERTY = Symbol("spark.module.property")

def procedure_task_structure(agent, proc, prefix=""):
    """Get the task structure of a procedure.
    If prefix is not "" then assume we are looking at task instances
    rather than task expressions.
    Also check for deadlines and only output structure if there is at
    least one deadline."""
    print "procedure_task_structure%s" % ((agent, proc, prefix),)
    body = proc.closed_zexpr.keyget0("body:")
    todo = find_first_taskexpr(body)
    done = []
    label_task = {}
def nonground(self, name):
    # variable not bound to ground value
    return Symbol(name)
class DefaultImp(Imp, PersistablePredImpInt, ActImpInt, FunImpInt):
    __slots__ = (
        "_detindices_lists",
        "_det_check_all",   # must all factslists be tested for facts to retract?
        "_idsym",
        )

    def __init__(self, decl):
        Imp.__init__(self, decl)
        self._detindices_lists = ()
        self._det_check_all = False
        self._idsym = Symbol(self.symbol.id)

    ################
    # Function implementation

    def call(self, agent, bindings, zexpr):
        args = [termEvalErr(agent, bindings, zitem) for zitem in zexpr]
        return self._idsym.structure(*args)

    def match_inverse(self, agent, bindings, zexpr, obj):
        if not isStructure(obj):
            return False
        if obj.functor != self._idsym:
            return False
        length = len(obj)
        if length != len(zexpr):
            return False
        i = 0
        while i < length:
            if not termMatch(agent, bindings, zexpr[i], obj[i]):
                return False
            i += 1
        return True

    ################
    # Action implementation

    def tframes(self, agent, event, bindings, zexpr):
        return get_all_tframes(agent, event, bindings, P_Do, self.symbol, zexpr)

    ################
    # Predicate implementation

    #PERSIST
    def persist_arity_or_facts(self, agent):
        return agent.factslist0(self.symbol)

    def generateInfo(self):
        return {}

    def solution(self, agent, bindings, zexpr):
        for x in self.solutions(agent, bindings, zexpr):
            if x:
                return SOLVED
        return NOT_SOLVED

    def solutions(self, agent, bindings, zexpr):
        d = agent.getInfo(self.symbol)
        nargs = len(zexpr)
        if nargs == 0:
            if d.get(None):             # a list containing a single empty tuple
                yield SOLVED
        else:
            val0 = termEvalOpt(agent, bindings, zexpr[0])
            if val0 is not None:
                for x in self._solutions_aux(agent, bindings, val0, zexpr, nargs, d):
                    yield x
            else:
                for val0 in d:
                    if val0 is not None and termMatch(agent, bindings, zexpr[0], val0):
                        for x in self._solutions_aux(agent, bindings, val0, zexpr, nargs, d):
                            yield x

    def _solutions_aux(self, agent, bindings, val0, zexpr, nargs, d):
        for fact in d.get(val0, ()):
            if nargs == len(fact):
                i = 1
                while i < nargs:
                    if not termMatch(agent, bindings, zexpr[i], fact[i]):
                        break
                    i += 1
                else:                   # no arg match failed
                    yield SOLVED

    def retractall(self, agent, bindings, zexpr):
        d = agent.getInfo(self.symbol)
        if len(zexpr) > 0:
            val0 = termEvalOpt(agent, bindings, zexpr[0])
            if val0 is not None:
                if val0 in d:
                    self._retract_matching_facts(agent, bindings, val0, zexpr, d)
            else:
                for key in d.keys():
                    if termMatch(agent, bindings, zexpr[0], key):
                        self._retract_matching_facts(agent, bindings, key, zexpr, d)
        else:
            factslist = d.get(None, ())
            if factslist:
                event = RemoveFactEvent(self.symbol, factslist[0])
                agent.post_event(event)
                del factslist[:]

    #PERSIST
    def resume_conclude(self, agent, bindings, zexpr):
        self.conclude(agent, bindings, zexpr, True)

    def conclude(self, agent, bindings, zexpr, kbResume=False):
        d = agent.getInfo(self.symbol)
        if d is None:
            raise AssertionError
        #PERSIST
        # have to 'fake' conclude during resuming so that objectIds are set
        # properly and can be mapped during the persist_kb().
        #TODO: redo the loadFacts flag check to make sure that we only
        #call conclude on PersistablePredImpInts
        from spark.internal.persist_aux import is_resume_block_concludes
        if is_resume_block_concludes() and not kbResume:
            for z in zexpr:
                # generate objects (will be stored using objectId,
                # so we don't need to cache)
                termEvalErr(agent, bindings, z)
            return
        if len(zexpr) > 0:
            newfact = tuple([termEvalErr(agent, bindings, z) for z in zexpr])
            #print " CONCLUDING", self.symbol, str(newfact)
            key = newfact[0]
            if self._detindices_lists:  # may need to delete things
                if self._det_check_all:
                    # need to check all factslists
                    for key1 in d.keys():
                        self._retract_similar_facts(agent, key1, newfact, d, kbResume)
                elif key in d:
                    self._retract_similar_facts(agent, key, newfact, d, kbResume)
        else:
            newfact = ()
            #print " CONCLUDING", self.symbol, newfact
            key = None
        try:
            factslist = d[key]
        except KeyError:
            factslist = []
            d[key] = factslist
        if newfact not in factslist:
            factslist.append(newfact)
            #PERSIST
            if not kbResume:
                # don't generate events on a resume
                agent.post_event(AddFactEvent(self.symbol, newfact))

    def _retract_matching_facts(self, agent, bindings, val0, zexpr, d, kbResume=False):
        facts = d[val0]
        length = len(facts)
        arity = len(zexpr)
        i = 0
        some_deleted = False
        while i < length:
            oldfact = facts[i]
            if len(oldfact) == arity:   # allow different arities to coexist
                j = 1
                while j < arity:
                    if not termMatch(agent, bindings, zexpr[j], oldfact[j]):
                        break           # continue with next fact
                    j = j + 1
                else:
                    #self._retracts.append(oldfact)
                    #PERSIST
                    if not kbResume:
                        from spark.pylang.defaultimp import RemoveFactEvent
                        agent.post_event(RemoveFactEvent(self.symbol, oldfact))
                    facts[i] = None
                    some_deleted = True
            i = i + 1
        if some_deleted:
            delete_all(facts, None)
            if not facts:
                del d[val0]

    def _retract_similar_facts(self, agent, val0, newfact, d, kbResume=False):
        facts = d[val0]
        length = len(facts)
        arity = len(newfact)
        i = 0
        some_deleted = False
        while i < length:
            oldfact = facts[i]
            if len(oldfact) == arity:   # allow different arities to coexist
                for detindices in self._detindices_lists:
                    for index in detindices:
                        if newfact[index] != oldfact[index]:
                            break       # continue with next detmode
                    else:
                        #PERSIST
                        if not kbResume:
                            agent.post_event(RemoveFactEvent(self.symbol, oldfact))
                        #self._retracts.append(oldfact)
                        facts[i] = None
                        some_deleted = True
                        break           # continue with next old fact
            i = i + 1
        if some_deleted:
            delete_all(facts, None)
            if not facts:
                del d[val0]
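# A hedged illustration of the determinism machinery above: if detindices
# (0, 1) are recorded for a 3-ary predicate, concluding (a b 2) first
# retracts any stored fact that agrees on arguments 0 and 1 (e.g. (a b 1))
# via _retract_similar_facts, then appends the new fact and posts an
# AddFactEvent (unless resuming from a persisted KB).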
#     mapFromSpark    # fromSpark
#     )
# toXPS = _converter.toXps
# fromXPS = _converter.toSpark

from spark.internal.repr.common_symbols import P_Properties
from spark.internal.repr.taskexpr import DoEvent
from spark.internal.common import NEWPM, DEBUG
from spark.lang.builtin import partial
from spark.io.common import sparkNULL, log_incoming_request, log_outgoing_result, log_outgoing_request, log_incoming_result

debug = DEBUG(__name__).on()

PACKAGE = "spark.io.exec"
FAIL_REQUEST = "fail"
S_ExecuteTask = Symbol(PACKAGE + ".ExecuteTask")
S_taskFailed = Symbol(PACKAGE + ".taskFailed")
S_taskSucceeded = Symbol(PACKAGE + ".taskSucceeded")

def keyget(seq, key):
    for elt in seq:
        functor = elt.functor
        if functor is not None and functor.name == key:
            return elt
    return None

def taskNameToSymOrError(agent, taskname):
    propfacts = agent.factslist0(P_Properties)
    facts = [
    return None

_converter = XPS(
    sparkNULL(),    # nullValue
    True,           # useDouble
    False,          # useLong
    True,           # useBoolean
    mapToSpark,     # toSpark
    mapFromSpark    # fromSpark
    )
toXPS = _converter.toXps
fromXPS = _converter.toSpark

S_XPS = Symbol("spark.io.xps.XPS")

def startXPSServer(agent):
    XPSSolver.setSparkSolver(SPARKSolver(agent))

def stopXPSServer(agent):
    XPSSolver.setSparkSolver(None)

def requestXPS(name, parameters):
    remote = XPSSolver.getNonsparkSolver()
    requestId = log_outgoing_request(None, name, parameters)
    if remote is None:
        errnum = ERR_SERVER_NOT_AVAILABLE
def process_command(agent, next):
    """Process a command in the interpreter loop.
    Return True if interpreter should exit, False otherwise"""
    #NOTE: updates to the commands in this list need to be mirrored in print_help()
    try:
        # Things that can be done whether we are stepping or not
        if next.startswith("module "):
            # change module
            text = next[6:].strip()
            if len(text):
                load_module(agent, text)
            else:
                print 'Please enter in a module name, e.g. "module spark.lang.builtin"'
        elif next == "trace":
            print "Turning tracing on."
            enable_tracing()
        elif next == "notrace":
            print "Turning tracing off."
            disable_tracing()
        elif next == "persist":
            print "Persisting SPARK agent knowledge base"
            user_directed_persist(agent)
        elif next == "step" or next == "pause":
            print "Turning stepping on."
            enable_stepping()
        elif next == "nostep" or next == "resume":
            print "Turning stepping off."
            disable_stepping()
        elif next.startswith("monitor "):
            text = next[8:].strip()
            if len(text):
                add_trace_monitor(text)
            else:
                print 'Please type in a string to monitor for during tracing, e.g. "monitor FAILED"'
        elif next.startswith("ignore "):
            text = next[7:].strip()
            if len(text):
                add_trace_ignore(text)
            else:
                print 'Please type in a string to ignore during tracing, e.g. "ignore EXECUTING"'
        elif next == "exit":
            print "exiting SPARK interpreter"
            return True
        elif next == "help":
            print_help()
        elif next == "clear all":
            # essentially undoes everything from this session
            print "removing all new facts and intentions..."
            clear_all(agent)
            print "session refreshed"
        elif next == "clear filters":
            clear_filters()
            print "step/trace filters cleared"
        elif next.startswith("get "):
            if next == "get intentions":
                # just prints intention ID #s
                #XXX: get rid of java calls (kwc)
                print "Root Objectives:"
                print "--------------------"
                intentions = agent.get_intentions()
                for intent in intentions:
                    print " ", intent
                try:
                    from com.sri.ai.spark.model.task import IntentionStructure
                    from com.sri.ai.spark.model.util import TextModel
                    print ""
                    print "Task structure:"
                    print "--------------------"
                    structure = IntentionStructure(agent, agent._intention_structure)
                    print TextModel.getIntentionStructureModel(structure)
                except ImportError:
                    pass
            #XXX:TODO: there is an excessive number of e_d_m_l calls here.
            #talk to dm to see if this is necessary
            elif next == "get predicates":
                ensure_default_module_loaded(agent)
                names = get_local_predicates(agent)
                _print_sym_list(agent, "displaying local predicates", names)
            elif next == "get functions":
                ensure_default_module_loaded(agent)
                names = get_local_functions(agent)
                _print_sym_list(agent, "displaying local functions", names)
            elif next == "get tasks":
                ensure_default_module_loaded(agent)
                names = get_local_tasks(agent)
                _print_sym_list(agent, "displaying actions", names)
            elif next == "get all predicates":
                ensure_default_module_loaded(agent)
                names = get_all_predicates(agent)
                _print_sym_list(agent, "displaying all predicates", names)
            elif next == "get all functions":
                ensure_default_module_loaded(agent)
                names = get_all_functions(agent)
                _print_sym_list(agent, "displaying all functions", names)
            elif next == "get all tasks":
                ensure_default_module_loaded(agent)
                names = get_all_tasks(agent)
                _print_sym_list(agent, "displaying actions", names)
            else:
                print "Invalid command: don't know how to get '%s'" % next[4:]
        elif next.startswith("debugon "):
            if next == "debugon oaa":
                M.spark__io__oaa.debug.on()
            elif next == "debugon pubsub":
                M.iris__events.debug.on()
            elif next == "debugon tir":
                M.task_manager__tir.debug.on()
            else:
                print "Invalid command: don't know how to debugon '%s'" % next[8:]
        elif next.startswith("debugoff "):
            if next == "debugoff oaa":
                M.spark__io__oaa.debug.off()
            elif next == "debugoff pubsub":
                M.iris__events.debug.off()
            elif next == "debugoff tir":
                M.task_manager__tir.debug.off()
            else:
                print "Invalid command: don't know how to debugoff '%s'" % next[9:]
        elif next.startswith("debug"):
            debug_arg = next[5:].strip()
            id_num = None
            if debug_arg != "":
                try:
                    id_num = string.atoi(debug_arg)
                except AnyException:
                    errid = NEWPM.displayError()
            NEWPM.pm(id_num)
        elif next == "python":
            runPyConsole()
        elif next == "" and get_step_controller().step():
            pass
        elif next == "":
            print ""  # ignoring blank input line
        # Remaining commands require the kb lock (don't block!)
        else:
            # We have the KB lock
            if next.startswith("test "):
                agent.test(next[4:], get_modname())
            elif next.startswith("eval ") or next.startswith("evaluate "):
                term = next[5:]
                if next.startswith("evaluate "):
                    term = term[4:]
                result = agent.eval(term, get_modname())
                print "->", value_str(result)
            elif next.startswith("run "):
                term = next[4:]
                print "synchronously running command"
                ext = agent.run(term, get_modname())
                ext.thread_event.wait()
                if ext.result is SUCCESS:
                    print "SUCCEEDED"
                else:
                    print "FAILED", ext.result
            elif next.startswith("addfact "):
                print "adding fact to KB"
                agent.run("[conclude: " + next[7:] + "]", get_modname())
            elif next.startswith("removefact "):
                print "removing fact from KB"
                agent.run("[retract: " + next[10:] + "]", get_modname())
            elif next.startswith("unload "):
                # drop module
                modname = next[6:].strip()
                agent.unload_modpath(Symbol(modname))
            elif next.startswith("["):
                print "running command"
                agent.run(next, get_modname())
            else:
                print "ignoring unrecognized request:", next
    except AnyException:
        errid = NEWPM.displayError()
    return False
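# Hedged examples of inputs the dispatcher above accepts, as typed at the
# interpreter prompt (taken from the command prefixes handled in
# process_command; the <...> placeholders are illustrative):
#
#   module spark.lang.builtin
#   monitor FAILED
#   ignore EXECUTING
#   get intentions
#   eval <term>
#   run <task expression>
#   addfact <predicate expression>
#   unload <module name>
#   exit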
def doc_module(module_name):
    """generates the documentation page for the specified module"""
    print "doc_module: ", module_name
    try:
        mod = ensure_modpath_installed(module_name)
        testagent.load_modpath(Symbol(module_name))
    except:
        print "Cannot load %s, may be part of another module" % (module_name)
        return

    #old: modname = mod.get_modpath().name
    modname = mod.filename
    has_actions = False
    has_predicates = False
    has_functions = False

    #old: syminfos = mod.get_sparkl_unit().get_exports().values()
    #new
    decls = getPackageDecls(testagent, mod.filename)
    #old: for syminfo in syminfos:
    for decl in decls:
        if decl is None:
            continue
        sym = decl.asSymbol()
        if sym.modname == modname:
            if decl.optMode(ACTION_DO):
                has_actions = True
            elif decl.optMode(PRED_SOLVE):
                has_predicates = True
            elif decl.optMode(TERM_EVAL):
                has_functions = True

    f = get_outputfile(module_name)
    f.write(get_header(module_name))
    f.write('<h2>Listing for module ' + module_name + '</h2>' +
            '<div class="entry">' +
            '<p>This is auto-generated documentation.</p>' +
            '<h4>Contents:</h4>' +
            '<ul>')
    if has_actions:
        f.write('<li><a href="#actions">Actions</a></li>')
    if has_predicates:
        f.write('<li><a href="#predicates">Predicates</a></li>')
    if has_functions:
        f.write('<li><a href="#functions">Functions</a></li>')
    f.write('</ul></div>')
    if has_actions:
        f.write('<h3><a name="actions"></a><a href="#actions">Actions</a></h3>')
        doc_symbol(mod, ACTION_DO, f)
    if has_predicates:
        f.write('<h3><a name="predicates"></a><a href="#predicates">Predicates</a></h3>')
        doc_symbol(mod, PRED_SOLVE, f)
    if has_functions:
        f.write('<h3><a name="functions"></a><a href="#functions">Functions</a></h3>')
        doc_symbol(mod, TERM_EVAL, f)
    f.write(get_footer(module_name))
    f.close()
def find_install_module(modname):
    """this is mostly a legacy hook and has been superseded by
    ensure_modpath_installed"""
    return ensure_modpath_installed(Symbol(modname))
def __init__(self, agent, actionName):
    self.impKey = Symbol(actionName)
    self.action = actionName
    Inst.theAgent = agent
    Interface.__init__(self, None, None, 0)
            if len(k) == lenKey1 and k.startswith(key) and k.endswith(":"):
                return kv[0]
            else:
                raise LowError("Not a valid map")
        return None
    elif isList(map) and len(map) == 2 and len(map[0]) == len(map[1]):
        # allows for old list of lists format
        for index, keyi in enumerate(map[0]):
            if keyi == key:
                return map[1][index]
        return None
    else:
        raise LowError("Not a valid map")

_MAP_FUNCTOR = Symbol("")

def _identity(x):
    return x

def mapDict(map, fun=_identity):
    """Convert a map to a dict (applying fun to each component except the
    type); return None if it isn't a proper map."""
    fname = map.functor.name
    if fname == _MAP_FUNCTOR.name:
        d = {}
    else:
        d = {_TYPE: fname}
    for keyval in map:
        if not isStructure(keyval) or len(keyval) != 1: