def isMap(map):
    """Test whether map is a SPARK map value.

    A map is a structure each of whose elements is a one-argument
    structure whose functor name ends with ':' (the key).
    """
    if not isStructure(map):
        return False

    def _isKeyValuePair(elt):
        # key:value pairs are unary structures named "somekey:".
        return (isStructure(elt)
                and len(elt) == 1
                and elt.functor.name.endswith(":"))

    return all(_isKeyValuePair(elt) for elt in map)
def builtin_evaluate(agent, value):
    """Recursively evaluate a SPARK term and return its value.

    Naked symbols are not evaluable (AssertionError).  Strings, integers,
    floats, and unrecognized values evaluate to themselves; lists evaluate
    element-wise; structures are either backquote forms (delegated to
    builtin_quoted) or function applications resolved through the builtin
    package via agent.getImp.
    """
    if isSymbol(value):
        # Fixed: the original used the Python-2-only "raise E, msg" syntax;
        # the call form below is valid in both Python 2 and Python 3.
        raise AssertionError("A naked symbol is not evaluable")
    if isString(value):
        return value
    elif isList(value):
        elements = [builtin_evaluate(agent, v) for v in value]
        return List(elements)
    elif isInteger(value):
        return value
    elif isFloat(value):
        return value
    elif isStructure(value):
        sym = value.functor
        if sym == BACKQUOTE_SYMBOL:
            # Backquoted term: switch to quotation mode.
            return builtin_quoted(agent, value[0])
        else:
            # Function application: evaluate arguments, then look up the
            # implementation under the builtin package name.
            argvalues = [builtin_evaluate(agent, v) for v in value]
            fullsym = Symbol(BUILTIN_PACKAGE_NAME + "." + sym.id)
            imp = agent.getImp(fullsym)
            #if not (isinstance(imp, FunImpInt)): raise AssertionError, \
            #   "Not a function: %s"%sym
            b, z = valuesBZ(argvalues)
            result = imp.call(agent, b, z)
            return result
    else:
        return value
def builtin_evaluate(agent, value):
    """Evaluate a SPARK term recursively, returning the resulting value.

    Raises AssertionError for a naked symbol.  Self-evaluating values
    (strings, integers, floats, anything unrecognized) are returned
    unchanged; lists evaluate element-wise; backquoted structures are
    handed to builtin_quoted; any other structure is treated as a call to
    a builtin-package function.
    """
    if isSymbol(value):
        # Fixed: replaced Python-2-only "raise E, msg" with the
        # parenthesized form, which is valid in Python 2 and 3.
        raise AssertionError("A naked symbol is not evaluable")
    if isString(value):
        return value
    elif isList(value):
        elements = [builtin_evaluate(agent, v) for v in value]
        return List(elements)
    elif isInteger(value):
        return value
    elif isFloat(value):
        return value
    elif isStructure(value):
        sym = value.functor
        if sym == BACKQUOTE_SYMBOL:
            # `term — switch into quotation mode.
            return builtin_quoted(agent, value[0])
        else:
            # Evaluate each argument, then dispatch to the builtin
            # implementation registered for this functor.
            argvalues = [builtin_evaluate(agent, v) for v in value]
            fullsym = Symbol(BUILTIN_PACKAGE_NAME + "." + sym.id)
            imp = agent.getImp(fullsym)
            # if not (isinstance(imp, FunImpInt)): raise AssertionError, \
            #    "Not a function: %s"%sym
            b, z = valuesBZ(argvalues)
            result = imp.call(agent, b, z)
            return result
    else:
        return value
def icl_or_spark_functor_string(x):
    """Return the functor (or atom) name of a SPARK structure or an
    ICL struct/string value."""
    if isStructure(x):
        return x.functor.id
    if isinstance(x, IclStruct):
        return x.iclFunctor()
    if isinstance(x, IclStr):
        return x.toUnquotedString()
    raise Exception("Expecting ICL/SPARK structure or atom: %r" % x)
def icl_or_spark_functor_string(x):
    """Extract the functor/atom name from either a SPARK structure or an
    ICL value (IclStruct or IclStr); raise for anything else."""
    if isStructure(x):
        name = x.functor.id
    elif isinstance(x, IclStruct):
        name = x.iclFunctor()
    elif isinstance(x, IclStr):
        name = x.toUnquotedString()
    else:
        raise Exception("Expecting ICL/SPARK structure or atom: %r"%x)
    return name
def loadFactsFromFile(agent, filename, failedConcludes, diffIo, missingPredsList):
    """Resume persisted knowledge-base facts from filename into agent.

    Side effects:
      - appends (imp, bindings, altzexpr) to failedConcludes for each fact
        whose resume_conclude returned a non-None result;
      - writes "-"/"+" difference lines to diffIo for those facts;
      - appends to missingPredsList the functor of each predicate that is
        no longer known to the agent (warning once per predicate).
    """
    from spark.internal.parse.basicvalues import VALUE_CONSTRUCTOR, Structure, isStructure
    from spark.internal.parse.generic_tokenizer import FileSource
    #from spark.internal.parse.sparkl_parser import EOF_DELIMITER, SPARKLTokenizer, SPARKLParser, BogusValue
    from spark.internal.parse.sparkl_parser import parseSPARKL
    from spark.lang.builtin_eval import builtin_evaluate
    from spark.internal.repr.varbindings import valuesBZ
    from spark.pylang.implementation import PersistablePredImpInt
    from spark.internal.parse.usagefuns import termEvalErr
    print "RESUMING KB FROM", filename
    #parser = SPARKLParser(VALUE_CONSTRUCTOR, SPARKLTokenizer(FileSource(filename)))
    #parseVal = parser.terms_and_taggeds(True, EOF_DELIMITER, "end of input")
    # Read the whole persisted SPARKL file and parse it into values.
    f = open(filename, 'rb')
    string = f.read()
    f.close()
    parseVal = parseSPARKL(string, "File "+filename, VALUE_CONSTRUCTOR)
    facts = [] # keep track of all facts in file
    for val in parseVal:
        # if isinstance(val, BogusValue):
        #     bogusValues.append(val)
        #     continue
        if not (isStructure(val)): raise AssertionError
        functor = val.functor
        try:
            imp = agent.getImp(functor)
        except LowError, e:
            if functor not in missingPredsList:
                #TODO: make warning only once per prediate functor
                console_warning("Predicate %s is no longer part of the SPARK process models and facts for it will be purged", functor.name)
                missingPredsList.append(functor)
            # Skip (purge) facts for predicates the agent no longer knows.
            continue
        if not (isinstance(imp, PersistablePredImpInt)): raise AssertionError
        #evaluate each of the args before putting into the zexpr
        try:
            fact = [builtin_evaluate(agent, arg) for arg in val]
            facts.append(Structure(functor, fact))
            b, z = valuesBZ(fact)
        except:
            # NOTE(review): deliberately broad best-effort handling - any
            # failure evaluating a persisted fact is reported and skipped.
            errid = NEWPM.displayError()
            console_error("(persist) unable to resume knowledge base fact \n\t%s\n", val)
            continue
        bindings_altzexpr = imp.resume_conclude(agent, b, z)
        if bindings_altzexpr is not None:
            (bindings, altzexpr) = bindings_altzexpr
            failedConcludes.append((imp, bindings, altzexpr,))
            # Record the persisted fact (-) and its re-evaluated
            # replacement (+) in the diff stream.
            # NOTE(review): the inner "for z in altzexpr" shadows the z
            # bound by valuesBZ above - presumably intentional; confirm.
            diffIo.write("-(%s %s)\n"%(val.functor.name, \
                " ".join([persist_strrep(v) for v in val])))
            diffIo.write("+(%s %s)\n"%(val.functor.name, \
                " ".join([persist_strrep(termEvalErr(agent, bindings, z)) for z in altzexpr])))
def mapGet(map, key):
    "Return the value found for the given key in the map or None if not found"
    if isStructure(map):
        if key == _TYPE:
            # The structure's functor name serves as the map's type entry.
            return map.functor.name
        # A matching entry is a unary structure whose functor is "key:".
        wanted = key + ":"
        for kv in map:
            if not isStructure(kv):
                raise LowError("Not a valid map")
            if kv.functor.name == wanted:
                return kv[0]
        return None
    elif isList(map) and len(map) == 2 and len(map[0]) == len(map[1]):
        # Old list-of-lists format: [keys, values].
        for index, keyi in enumerate(map[0]):
            if keyi == key:
                return map[1][index]
        return None
    else:
        raise LowError("Not a valid map")
def builtin_quoted(agent, value):
    """Expand a backquoted term: copy lists and structures recursively,
    evaluating any comma-prefixed subterm; other values pass through."""
    if isList(value):
        return List([builtin_quoted(agent, item) for item in value])
    if not isStructure(value):
        return value
    functor = value.functor
    if functor == PREFIX_COMMA_SYMBOL:
        # ,expr inside a backquote: switch back to evaluation mode.
        return builtin_evaluate(agent, value[0])
    return Structure(functor, [builtin_quoted(agent, item) for item in value])
def isLearnedProcedure(agent, tframe):
    "This is a learned procedure that is triggered directly by a learned task, not via doActionSpec"
    event = tframe.event()
    if not isinstance(event, DoEvent):
        return False
    # TODO: should factor out the following "look for property"
    propfacts = agent.factslist1(P_Properties, event.goalsym())
    if not propfacts:
        return False
    # Learned: the first Properties fact carries a "uri" property.
    return any(isStructure(prop) and prop.functor.id == "uri"
               for prop in propfacts[0][1])
def mergePartial(p1, p2):
    """Merge two partial values of the same arity, preferring p1's slots.

    Each result element comes from p1 where it is not None, otherwise
    from p2.  Returns a new List or Structure matching p1's kind, or None
    when merging is impossible: the arities differ, or p1 is neither a
    list nor a structure (the original fell off the end implicitly).

    NOTE(review): for structures, p2's functor is ignored; presumably the
    callers only merge same-functor values - confirm.
    """
    if len(p1) != len(p2):
        return None

    def merge(x, y):
        # Identity test rather than "== None": avoids invoking a custom
        # __eq__ on the slot values.
        if x is None:
            return y
        return x

    newargs = [merge(e1, e2) for (e1, e2) in zip(p1, p2)]
    if isList(p1):
        return List(newargs)
    elif isStructure(p1):
        return Structure(p1.functor, newargs)
    else:
        # Unmergeable kind: explicit None (original returned it implicitly).
        return None
def mapGet(map, key):
    "Return the value found for the given key in the map or None if not found"
    if isStructure(map):
        if key == _TYPE:
            # The functor name doubles as the map's type entry.
            return map.functor.name
        lenKey1 = len(key) + 1
        for entry in map:
            if not isStructure(entry):
                raise LowError("Not a valid map")
            name = entry.functor.name
            # An entry matches when its functor is exactly key + ":".
            if (len(name) == lenKey1 and name.startswith(key)
                    and name.endswith(":")):
                return entry[0]
        return None
    if isList(map) and len(map) == 2 and len(map[0]) == len(map[1]):
        # Legacy [keys, values] list-of-lists representation.
        keys = map[0]
        values = map[1]
        for position in range(len(keys)):
            if keys[position] == key:
                return values[position]
        return None
    raise LowError("Not a valid map")
def icl_or_spark_elements(x):
    """Return the elements of a SPARK/ICL compound value as a tuple
    (an ICL atom has no elements)."""
    if isinstance(x, types.TupleType):
        return x
    if isStructure(x):
        return x.args
    if isinstance(x, IclList):
        return tuple(item for item in x.listIterator())
    if isinstance(x, IclStruct):
        return tuple(item for item in x.iterator())
    if isinstance(x, IclStr):
        return ()
    raise Exception("Expecting ICL/SPARK list or structure or atom: %r"%x)
def match_inverse(self, agent, bindings, zexpr, obj):
    """Match obj element-wise against zexpr.

    True iff obj is a structure with this implementation's functor and
    the same arity as zexpr, and every argument matches via termMatch.
    """
    if not (isStructure(obj) and obj.functor == self._idsym):
        return False
    size = len(obj)
    if size != len(zexpr):
        return False
    for position in range(size):
        if not termMatch(agent, bindings, zexpr[position], obj[position]):
            return False
    return True
def icl_or_spark_elements(x):
    """Yield the component elements of a tuple, SPARK structure, or ICL
    list/struct/string, always as a tuple."""
    if isinstance(x, types.TupleType):
        result = x
    elif isStructure(x):
        result = x.args
    elif isinstance(x, IclList):
        result = tuple([elt for elt in x.listIterator()])
    elif isinstance(x, IclStruct):
        result = tuple([elt for elt in x.iterator()])
    elif isinstance(x, IclStr):
        # Atoms carry no elements.
        result = ()
    else:
        raise Exception("Expecting ICL/SPARK list or structure or atom: %r" % x)
    return result
def mapDict(map, fun=_identity):
    """Convert a SPARK map structure to a dict, applying fun to each value
    (the type entry, if any, is stored unconverted under _TYPE).
    Returns None if map is not a proper map."""
    typeName = map.functor.name
    if typeName == _MAP_FUNCTOR.name:
        result = {}
    else:
        # A non-default functor is recorded as the map's type entry.
        result = {_TYPE: typeName}
    for keyval in map:
        # Every component must be a unary "key:" structure.
        if not isStructure(keyval) or len(keyval) != 1:
            return None
        entryName = keyval.functor.name
        if not entryName.endswith(":"):
            return None
        result[entryName[:-1]] = fun(keyval[0])
    return result
def mapDict(map, fun=_identity):
    """Turn a SPARK map into a python dict, mapping fun over every value
    component; a non-default functor becomes the _TYPE entry.
    Yields None when map is not well-formed."""
    functorName = map.functor.name
    if functorName == _MAP_FUNCTOR.name:
        converted = {}
    else:
        converted = {_TYPE: functorName}
    ok = True
    for pair in map:
        # Valid pairs are unary structures whose functor ends in ':'.
        if isStructure(pair) and len(pair) == 1 \
                and pair.functor.name.endswith(":"):
            converted[pair.functor.name[:-1]] = fun(pair[0])
        else:
            ok = False
            break
    if ok:
        return converted
    return None
def encodeXMLValue(sparkValue):
    "Converts a SPARK value to a python value that can be passed by XML"
    # Values of types XML already understands pass through untouched.
    if isinstance(sparkValue, UNCHANGED_TYPES):
        return sparkValue
    if sparkValue == sparkNULL():
        return None
    if isStructure(sparkValue):
        asMap = mapDict(sparkValue, encodeXMLValue)
        if asMap != None:
            return asMap
        # Not a map: fall back to FUNCTOR/ARGS notation.
        return {FUNCTOR: sparkValue.functor.name,
                ARGS: encodeXMLValues(sparkValue)}
    if isList(sparkValue):
        return encodeXMLValues(sparkValue)
    if isSymbol(sparkValue):
        return {SYM: sparkValue.name}
    if isVariable(sparkValue):
        return {VAR: sparkValue.name}
    raise LowError("Cannot convert python type %r to XML"%sparkValue.__class__)
def access(obj, *accessors):
    """Traverse obj through a chain of accessors and return the final value.

    Each accessor is interpreted by kind:
      - list [i]: index the current value (must have exactly one element);
      - structure m(a, ...): call method m of the current value with args;
      - symbol s: read attribute s (a missing attribute yields None);
      - integer i: index the current value.
    Traversal short-circuits to None as soon as any step produces None.
    Raises LowError for an invalid accessor or a missing method.
    """
    value = obj
    for accessor in accessors:
        if isList(accessor):
            if len(accessor) != 1:
                raise LowError("Invalid accessor: %r", accessor)
            value = value[accessor[0]]
        elif isStructure(accessor):
            methodname = accessor.functor.name
            method = getattr(value, methodname, None)
            # Fixed: use "is None" instead of "== None" - comparing an
            # arbitrary attribute with == could invoke a custom __eq__.
            if method is None:
                raise LowError("Object has no method called %s", methodname)
            value = method(*accessor)
        elif isSymbol(accessor):
            value = getattr(value, accessor.name, None)
        elif isInteger(accessor):
            value = value[accessor]
        else:
            raise LowError("Invalid accessor: %r", accessor)
        # Fixed: identity test for the None short-circuit as well.
        if value is None:
            return None
    return value
def loadFactsFromFile(agent, filename, failedConcludes, diffIo, missingPredsList):
    """Resume persisted knowledge-base facts from filename into agent.

    Side effects:
      - appends (imp, bindings, altzexpr) to failedConcludes for each fact
        whose resume_conclude returned a non-None result;
      - writes "-"/"+" difference lines to diffIo for those facts;
      - appends to missingPredsList the functor of each predicate that is
        no longer known to the agent (warning once per predicate).
    """
    from spark.internal.parse.basicvalues import VALUE_CONSTRUCTOR, Structure, isStructure
    from spark.internal.parse.generic_tokenizer import FileSource
    #from spark.internal.parse.sparkl_parser import EOF_DELIMITER, SPARKLTokenizer, SPARKLParser, BogusValue
    from spark.internal.parse.sparkl_parser import parseSPARKL
    from spark.lang.builtin_eval import builtin_evaluate
    from spark.internal.repr.varbindings import valuesBZ
    from spark.pylang.implementation import PersistablePredImpInt
    from spark.internal.parse.usagefuns import termEvalErr
    print "RESUMING KB FROM", filename
    #parser = SPARKLParser(VALUE_CONSTRUCTOR, SPARKLTokenizer(FileSource(filename)))
    #parseVal = parser.terms_and_taggeds(True, EOF_DELIMITER, "end of input")
    # Slurp the persisted SPARKL file and parse it into a list of values.
    f = open(filename, 'rb')
    string = f.read()
    f.close()
    parseVal = parseSPARKL(string, "File " + filename, VALUE_CONSTRUCTOR)
    facts = []  # keep track of all facts in file
    for val in parseVal:
        # if isinstance(val, BogusValue):
        #     bogusValues.append(val)
        #     continue
        if not (isStructure(val)): raise AssertionError
        functor = val.functor
        try:
            imp = agent.getImp(functor)
        except LowError, e:
            if functor not in missingPredsList:
                #TODO: make warning only once per prediate functor
                console_warning(
                    "Predicate %s is no longer part of the SPARK process models and facts for it will be purged",
                    functor.name)
                missingPredsList.append(functor)
            # Skip (purge) facts for predicates the agent no longer knows.
            continue
        if not (isinstance(imp, PersistablePredImpInt)): raise AssertionError
        #evaluate each of the args before putting into the zexpr
        try:
            fact = [builtin_evaluate(agent, arg) for arg in val]
            facts.append(Structure(functor, fact))
            b, z = valuesBZ(fact)
        except:
            # NOTE(review): deliberately broad best-effort handling - any
            # failure evaluating a persisted fact is reported and skipped.
            errid = NEWPM.displayError()
            console_error(
                "(persist) unable to resume knowledge base fact \n\t%s\n",
                val)
            continue
        bindings_altzexpr = imp.resume_conclude(agent, b, z)
        if bindings_altzexpr is not None:
            (bindings, altzexpr) = bindings_altzexpr
            failedConcludes.append((
                imp,
                bindings,
                altzexpr,
            ))
            # Record the persisted fact (-) and its re-evaluated
            # replacement (+) in the diff stream.
            # NOTE(review): the inner "for z in altzexpr" shadows the z
            # bound by valuesBZ above - presumably intentional; confirm.
            diffIo.write("-(%s %s)\n"%(val.functor.name, \
                " ".join([persist_strrep(v) for v in val])))
            diffIo.write("+(%s %s)\n"%(val.functor.name, \
                " ".join([persist_strrep(termEvalErr(agent, bindings, z)) for z in altzexpr])))