from spark.internal.version import *
from spark.internal.exception import ProcessingError
from spark.internal.parse.processing import Imp
from spark.internal.common import tuple_repr, DEBUG, SOLVED, NOT_SOLVED, ONE_SOLUTION, NO_SOLUTIONS, POSITIVE, NEWPM
from spark.internal.parse.basicvalues import *
from spark.internal.parse.expr import ExprString, ExprSymbol, ExprInteger, ExprList, ExprVariable, ExprStructure, ExprCompound
from spark.internal.parse.usagefuns import *
from spark.internal.parse.usages import *
from spark.internal.repr.varbindings import valuesBZ
from spark.internal.exception import NoProcedureFailure, UnlocatedError
from spark.internal.repr.common_symbols import *
from spark.pylang.simpleimp import *
from spark.internal.engine.find_module import ensure_modpath_installed
from spark.internal.debug.trace import TRACING

debug = DEBUG(__name__)

################################################################

# TermIf implements a conditional term: solve the condition predicate, then
# evaluate (or inverse-match) only the branch that the condition selects.
class TermIf(Imp):
    __slots__ = ()

    def call(self, agent, bindings, zexpr):
        # Evaluate the "then" term if the condition solves, else the "else" term.
        if predSolve1(agent, bindings, zexpr[0]):
            return termEvalErr(agent, bindings, zexpr[1])
        else:
            return termEvalErr(agent, bindings, zexpr[2])

    def match_inverse(self, agent, bindings, zexpr, obj):
        # Match obj against the branch selected by the condition.
        if predSolve1(agent, bindings, zexpr[0]):
            return termMatch(agent, bindings, zexpr[1], obj)
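
# A minimal illustration (not part of the SPARK sources) of the control flow
# that TermIf.call implements: decide the condition once, then evaluate only
# the chosen branch.  Plain Python callables stand in for SPARK's
# (agent, bindings, zexpr) machinery; predSolve1 and termEvalErr are only
# mimicked here, not called.
def _if_term_sketch(condition_holds, then_thunk, else_thunk):
    # Mirrors TermIf.call: branch selection happens first, so the untaken
    # branch is never evaluated.
    if condition_holds:
        return then_thunk()
    return else_thunk()

# Hypothetical usage:
#   _if_term_sketch(x > 0, lambda: "positive", lambda: "non-positive")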
from spark.internal.version import *
from spark.internal.common import NEWPM, DEBUG
from spark.internal.repr.common_symbols import P_Do, P_ArgTypes, P_Roles
#from spark.internal.repr.patexpr import * #NonAnonVarPatExpr
from spark.internal.repr.taskexpr import *
#from spark.internal.repr.predexpr import SimplePredExpr
#from spark.internal.repr.newbuild import ActSymInfo
from spark.lang.builtin import requiredargnames
from spark.internal.parse.basicvalues import Symbol, isSymbol
from spark.internal.repr.procedure import ProcedureValue
from spark.internal.parse.basicvalues import value_str, objectId, String, isString, Integer, isInteger, Float, isFloat
from spark.internal.exception import CapturedError

debug = DEBUG(__name__)  #.on()

PROC = None

def ppl_translate_procname(agent, procname, map=None, output=None):
    """Translate a procedure into PPL.

    map is a mapping from variables to Skolem constants.
    output is a list of tuples; the list is appended to.
    """
    if map is None:
        map = {}
    if output is None:
        output = []
    procname = str(procname)
    # Find the procedure
    do_facts = agent.factslist0(P_Do)
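
# A hedged usage sketch (not from the SPARK sources) showing how the map and
# output parameters above can be threaded through several calls, so that one
# shared variable-to-Skolem mapping and one shared output list accumulate the
# PPL tuples for a whole set of procedures.  The agent object and the
# procedure names below are hypothetical.
def _translate_procedures_sketch(agent, procnames):
    skolem_map = {}   # shared mapping from variables to Skolem constants
    ppl_tuples = []   # accumulates the translated tuples across all calls
    for procname in procnames:
        ppl_translate_procname(agent, procname, map=skolem_map, output=ppl_tuples)
    return ppl_tuples

# e.g. _translate_procedures_sketch(agent, ["test.MyProcedure", "test.Other"])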
#* "$HeadURL:: https://svn.ai.sri.com/projects/spark/trunk/spark/src/spar#$" *# #*****************************************************************************# from spark.internal.version import * from spark.internal.parse.basicvalues import Symbol, Structure, EMPTY_SPARK_LIST, isList, isString, List, isStructure, value_str from spark.internal.common import DEBUG import thread import time # The following enables this file to work with older versions of SPARK try: from spark.lang.builtin import sparkNULL except: def sparkNULL(): return Symbol("utilities.rdf.NULL") debug = DEBUG(__name__)#.on() commsDebug = DEBUG("****").on() PACKAGE = "spark.io.common" S_ProvideService = Symbol(PACKAGE + ".ProvideService") S_Request = Symbol(PACKAGE + ".Request") S_standardCallback = Symbol(PACKAGE + ".standardCallback") #S_NULL = Symbol(PACKAGE + ".NULL") def agentServices(agent, mechanism, name=None): debug("Calling agentServices(%r, %r, %r)", agent, mechanism, name) def _appropriate(fact): m = fact[3] return m == EMPTY_SPARK_LIST \ or m == mechanism \