def _recurse_paths(self, pred, paths, path):
    '''
    Walk the objects of ``pred`` depth-first, appending to ``paths`` one
    tuple path for every slot the matching network must index: the verb
    itself, the negation flag, and each labelled object.
    '''
    paths.append(path + ('_verb',))
    # A concrete predicate (i.e. not a verb variable) also carries a
    # negation slot.
    if not isa(pred, self.lexicon.verb):
        paths.append(path + ('_neg',))
    for label in sorted(pred.objects):
        obj = pred.objects[label].value
        extended = path + (label,)
        if isa(obj, self.lexicon.exist):
            # Nested predicate: recurse into its own objects.
            self._recurse_paths(obj, paths, extended)
        elif isa(obj, self.lexicon.number):
            paths.append(extended + ('_num',))
        else:
            paths.append(extended + ('_term',))
def dispatch(self, match, network):
    '''
    Fire this rule for ``match``: check conditions, build the consequence
    predicates, and assert each one into the network's present fact set.

    Side effects per asserted consequence: exclusive-endure predicates
    finish the previous fact for the same subject; 'finish' predicates
    finish their target; 'endure' predicates are stamped with 'since_';
    'happen' predicates are broadcast over the network pipe; and a new
    Match is queued on network.activations for further rule propagation.
    '''
    # Bail out unless both the structural conditions and the optional
    # compiled condition code accept this match.
    if not self.test_conditions(match, network):
        return
    if self.condcode:
        if not self.condcode.test(match, network):
            return
    # Concrete consequences: substitute the matched variables in.
    cons = []
    for con in self.consecuences:
        cons.append(con.substitute(match))
    # Variable consequences: the whole predicate was bound by the match.
    for con in self.vconsecuences:
        new_pred = match[con.name].copy()
        cons.append(new_pred)
    for con in cons:
        factset = network.present
        if isa(con, network.lexicon.exclusive_endure):
            # An exclusive enduring fact displaces the previous one with
            # the same subject (and the same 'u-' unique objects).
            old_pred = Predicate(con.true, con.term_type)
            old_pred.add_object('subj', con.get_object('subj'))
            for label in con.objects:
                if label.startswith('u-'):
                    old_pred.add_object(label, con.get_object(label))
            network.finish(old_pred)
        elif isa(con, network.lexicon.finish):
            tofinish = con.get_object('what')
            network.finish(tofinish)
        # XXX make contradiction configurable
        #neg = con.copy()
        #neg.true = not neg.true
        #contradiction = factset.query(neg)
        #if contradiction:
        #    raise exceptions.Contradiction('we already have ' + str(neg))
        # Only assert the consequence if it is not already a known fact.
        if factset.query_facts(con, {}).count() == 0:
            if isa(con, network.lexicon.endure):
                con.add_object('since_', network.lexicon.now_term)
            fact = factset.add_fact(con)
            if isa(con, network.lexicon.happen):
                # Publish events to an external listener, if connected.
                if network.pipe is not None:
                    network.pipe.send_bytes(str(con).encode('utf8'))
            if network.root.child_path:
                logger.debug('con to add: ' + str(con))
                m = Match(con)
                m.paths = network.get_paths(con)
                logger.debug('con in fact: ' + str(fact.pred))
                m.fact = fact
                # Queue for propagation through the network by the caller.
                network.activations.append(m)
def get_verb(name, kb):
    '''
    Describe the verb named ``name`` in knowledge base ``kb`` as JSON:
    a list of [label, object-type name, is-a-verb] triples, one per
    object type of the verb.
    '''
    verb = kb.lexicon.get_term(name)
    triples = [[ot.label, ot.obj_type.name, isa(ot.obj_type, kb.lexicon.verb)]
               for ot in verb.object_types]
    return json.dumps(triples, cls=TermsJSONEncoder)
def make_var(self, name):
    '''
    Make a term that represents a variable in a rule or query, and add it
    to the session (it is not otherwise saved).
    Its name keeps the original trailing digits.
    '''
    # If a variable with this name already exists, reuse it.
    try:
        return self.get_term(name)
    except exceptions.TermNotFound:
        pass
    # varpat splits the name into a base word (group 1) and, optionally,
    # a second group — presumably marking a subterm-style variable.
    # TODO(review): confirm varpat group semantics against patterns module.
    m = patterns.varpat.match(name)
    if m.group(2):
        basename = m.group(1).lower()
        bases = self.get_term(basename)
        var = self.make_subterm(name, bases)
    else:
        tname = m.group(1).lower()
        if len(tname) == 1:
            # Single-letter base names denote number variables.
            tvar = self.number
        else:
            tvar = self.get_term(tname)
        if isa(tvar, self.verb) or tvar == self.number:
            var = Term(name, ttype=tvar)
        else:
            var = self.make_term(name, tvar)
    var.var = True
    self.session.add(var)
    return var
def dispatch(cls, parent, match, network):
    '''
    Propagate ``match`` from ``parent`` down the matching network.

    Resolves the value at the parent's child path from the matched
    predicate, fetches the child nodes that accept that value (both
    literal and variable children), extends the match with any variable
    bindings, and recurses. Finally, if the parent has a terminal
    (rule) node, dispatches the match to it.
    '''
    if parent.child_path:
        path = parent.child_path
        # The last path element names the node class handling this level.
        ntype_name = path[-1]
        chcls = network._get_nclass(ntype_name)
        value = chcls.resolve(match.pred, path)
        # get_children returns a tuple of query result sets; see
        # get_children for the literal/variable split.
        children = chcls.get_children(parent, value, network)
        logger.debug('value "{!r}" got children from parent {!s}'.format(
            value, parent))
        for ch in children:
            for child in ch:
                logger.debug('One match {!s} from parent {!s}'.format(child,
                                                                      parent))
                new_match = match.copy()
                if child.var:
                    val = None
                    if chcls is VerbNode and isa(child.value,
                                                 network.lexicon.exist):
                        # Verb variable over a nested predicate: bind the
                        # term rather than the verb.
                        val = TermNode.resolve(match.pred, path)
                    else:
                        val = value
                    # A variable already bound to a different value rules
                    # this child out.
                    if child.var in match and match[child.var] != val:
                        continue
                    new_match[child.var] = val
                    if chcls is VerbNode and child.redundant_var:
                        new_match[child.redundant_var] = TermNode.resolve(
                            match.pred, path)
                chcls.dispatch(child, new_match, network)
    else:
        logger.debug('parent {!r} has no child path'.format(parent))
    if parent.terminal:
        # Terminal node: hand the completed match to the rule.
        parent.terminal.dispatch(match, network)
def _make_noun(self, name, bases=None, ntype=None):
    '''
    Build (without persisting) a noun Term named ``name``.

    ``bases`` may be a single term or a sequence of terms; it defaults to
    (self.thing,). ``ntype`` is the term type, defaulting to self.noun.
    '''
    if bases is None:
        base_terms = (self.thing,)
    elif isa(bases, self.word):
        # A single term was passed; normalize to a tuple.
        base_terms = (bases,)
    else:
        base_terms = bases
    term_type = self.noun if ntype is None else ntype
    return Term(name, ttype=term_type, bases=tuple(base_terms))
def _recurse_paths(self, verb_, pred, paths, path, first=False):
    '''
    Walk the object types of ``verb_`` (as instantiated by ``pred``),
    appending to ``paths`` one tuple path for every slot the matching
    network must index.

    ``first`` is unused but kept for interface compatibility.

    Bug fix: the original rebound ``pred`` and ``verb_`` inside the loop
    when descending into a verb-typed object, so every *later* iteration
    tested its label against the nested predicate instead of the one
    passed in. Local names are now used for the nested recursion, leaving
    the outer ``pred``/``verb_`` intact for the remaining object types.
    '''
    paths.append(path + ('_verb',))
    if not isa(pred, self.lexicon.verb):  # not a verb var
        paths.append(path + ('_neg',))
    for obt in sorted(verb_.object_types, key=lambda x: x.label):
        # Temporal bookkeeping labels are not indexed.
        if obt.label in ('till_', 'at_'):
            continue
        t = obt.obj_type
        if isa(t, self.lexicon.verb):
            if obt.label in pred.objects:
                # Nested predicate: recurse with its own verb type.
                inner_pred = pred.get_object(obt.label)
                self._recurse_paths(inner_pred.term_type, inner_pred,
                                    paths, path + (obt.label,))
            else:
                paths.append(path + (obt.label, '_verb'))
        else:
            paths.append(path + (obt.label, '_term'))
def filter_segment_first_var(cls, qfacts, value, path, factset, taken_vars,
                             sec_vars):
    '''
    Extend the facts query ``qfacts`` with a join constraining the first
    occurrence of variable ``value`` at ``path``.

    On the first occurrence, the variable is registered in ``taken_vars``
    (mapping name -> (path, segment alias)) and the query is joined so the
    segment's verb is among the subterms of the variable's type. Repeat
    occurrences are deferred: they are recorded in ``sec_vars`` for later
    equality filtering against the first occurrence, and the query is
    returned unchanged.
    '''
    salias = aliased(cls)
    talias = aliased(Term)
    if value.name in taken_vars:
        # Already-seen variable: record for a second-pass equality join.
        sec_vars.append({'cls': cls, 'path': path,
                         'first': taken_vars[value.name][1]})
        return qfacts
    else:
        taken_vars[value.name] = (path, salias)
    # NOTE(review): if value is neither a verb nor an exist term, sbases
    # is never bound and the list comprehension below raises NameError —
    # presumably callers only pass verb/exist variables; confirm.
    if isa(value, factset.lexicon.verb):
        sbases = factset.lexicon.get_subterms(get_bases(value)[0])
    elif isa(value, factset.lexicon.exist):
        sbases = factset.lexicon.get_subterms(value.term_type)
    sbases = [b.id for b in sbases]
    # Paths are stored as dotted strings on the segment rows.
    path_str = '.'.join(path)
    qfacts = qfacts.join(salias, Fact.id==salias.fact_id).filter(
        salias.path==path_str).join(talias,
        salias.verb_id==talias.id).filter(talias.id.in_(sbases))
    return qfacts
def finish(self, predicate):
    '''
    Terminate the present facts matching ``predicate``: every enduring
    fact is deleted from the present fact set and re-asserted into the
    past with an 'at_' object stamped to the current time.
    '''
    matching = self.present.query_facts(predicate, {})
    for fact in matching:
        # Only enduring facts can be finished.
        if not isa(fact.pred, self.lexicon.endure):
            continue
        logger.info('Finish: ' + str(fact.pred))
        past_pred = fact.pred.copy()
        self.session.delete(fact)
        past_pred.add_object('at_', self.lexicon.now_term)
        self.past.add_fact(past_pred)
    self.session.flush()
def add_fact(self, pred):
    '''
    Assert predicate ``pred`` into the present fact set and run the
    matching network to exhaustion over the resulting activations.

    Exclusive-endure predicates first finish the previous fact with the
    same subject (and 'u-' unique objects); 'finish' predicates finish
    their target; 'endure' predicates get a 'since_' timestamp; 'happen'
    predicates are broadcast over the pipe. Returns the new Fact, or the
    existing one if ``pred`` was already known.
    '''
    factset = self.present
    if isa(pred, self.lexicon.exclusive_endure):
        # Displace the previous exclusive fact for the same subject.
        old_pred = Predicate(pred.true, pred.term_type)
        old_pred.add_object('subj', pred.get_object('subj'))
        for label in pred.objects:
            if label.startswith('u-'):
                old_pred.add_object(label, pred.get_object(label))
        self.finish(old_pred)
    elif isa(pred, self.lexicon.finish):
        tofinish = pred.get_object('what')
        self.finish(tofinish)
    #neg = pred.copy()
    #neg.true = not neg.true
    #contradiction = factset.query(neg)
    #if contradiction:
    #    raise exceptions.Contradiction('we already have ' + str(neg))
    facts = factset.query_facts(pred, {})
    if facts.count() == 0:
        if isa(pred, self.lexicon.endure):
            pred.add_object('since_', self.lexicon.now_term)
        fact = factset.add_fact(pred)
        if isa(pred, self.lexicon.happen):
            # Publish events to an external listener, if connected.
            if self.pipe is not None:
                self.pipe.send_bytes(str(pred).encode('utf8'))
        if self.root.child_path:
            m = Match(pred)
            m.paths = self.get_paths(pred)
            m.fact = fact
            Node.dispatch(self.root, m, self)
        # Drain the activation queue; dispatching may enqueue more.
        n = 0
        while self.activations:
            n += 1
            # Periodically commit to bound transaction size when a rule
            # cascade produces many consequences.
            cmc = int(self.config['commit_many_consecuences'])
            if cmc and n % cmc == 0:
                self.session.commit()
            match = self.activations.pop(0)
            Node.dispatch(self.root, match, self)
        return fact
    else:
        return facts.first()
def _from_lexicon(self, totell):
    '''
    Answer a colon-separated lexicon query ``'<id>:<command>:<term>'``
    and return the response serialized as JSON.

    Supported commands:
      get-words    -> the terms of the given type
      get-subwords -> the subterms of the given type
      get-verb     -> [label, type name, is-verb] triples for the verb

    Raises ValueError for an unknown command (the original left ``resp``
    unbound in that case, producing an obscure NameError).
    '''
    q = totell.split(':')
    ttype = self.compiler.lexicon.get_term(q[2])
    if q[1] == 'get-words':
        resp = self.compiler.lexicon.get_terms(ttype)
    elif q[1] == 'get-subwords':
        resp = self.compiler.lexicon.get_subterms(ttype)
    elif q[1] == 'get-verb':
        resp = [[ot.label, ot.obj_type.name,
                 isa(ot.obj_type, self.compiler.lexicon.verb)]
                for ot in ttype.object_types]
    else:
        raise ValueError('unknown lexicon command: %s' % q[1])
    return json.dumps(resp, cls=TermsJSONEncoder)
def compile_mods(self, verb, ast):
    '''
    Compile the modifiers in ``ast`` into a dict mapping each label to
    its compiled object.

    Raises WrongLabel if a label is not declared by ``verb``, and
    WrongObjectType if a compiled object does not match the declared
    object type for its label.
    '''
    compiled = {}
    for mod in ast:
        label = mod.label
        candidates = [ot for ot in verb.object_types if ot.label == label]
        try:
            otype = candidates[0]
        except IndexError:
            raise WrongLabel('Error: label %s is unknown for verb %s'
                             ' (in a rule)' % (label, verb.name))
        obj = self.compile_obj(mod.obj)
        if not isa(obj, otype.obj_type):
            raise WrongObjectType('Error: word %s for label %s is not the'
                                  ' correct type: it is a %s and should be'
                                  ' a %s' % (obj.name, label,
                                             obj.term_type.name,
                                             otype.obj_type.name))
        compiled[label] = obj
    return compiled
def get_children(cls, parent, value, network):
    '''
    Return a tuple of SQLAlchemy queries over the children of ``parent``
    that can accept ``value``.

    For an 'exist' value (a nested predicate) a 1-tuple is returned
    (note the trailing comma on that return) containing only variable
    children whose term type matches. Otherwise a 2-tuple is returned:
    literal children whose stored value equals ``value`` (or is NULL,
    i.e. wildcard), and variable children typed compatibly with
    ``value``'s term type.
    '''
    if isa(value, network.lexicon.exist):
        # Match variable children against the nested predicate's verb
        # type and its bases.
        types = (value.term_type.term_type,) + get_bases(
            value.term_type.term_type)
        type_ids = [t.id for t in types]
        # Trailing comma: deliberately a 1-tuple of queries.
        return network.session.query(cls).filter(
            cls.parent_id==parent.id, Node.var>0).join(
            Term, cls.term_id==Term.id).filter(
            Term.type_id.in_(type_ids)),
    # Literal children: exact value match, or NULL acting as a wildcard.
    children = network.session.query(cls).filter(
        cls.parent_id==parent.id, (cls.value==value) | (cls.value==None))
    vchildren = ()
    if value is not None:
        types = (value.term_type,) + get_bases(value.term_type)
        type_ids = [t.id for t in types]
        if type_ids:
            vchildren = network.session.query(cls).filter(
                cls.parent_id==parent.id).join(
                Term, cls.term_id==Term.id).filter(
                Term.var==True).filter(Term.type_id.in_(type_ids))
    # if not isa(value, network.lexicon.thing) and not isa(value, network.lexicon.number):
    #     bases = (value,) + get_bases(value)
    #     tbases = aliased(Term)
    #     base_ids = (b.id for b in bases)
    #     if base_ids and vchildren:
    #         vchildren = vchildren.join(term_to_base, Term.id==term_to_base.c.term_id).join(tbases, term_to_base.c.base_id==tbases.id).filter(tbases.id.in_(base_ids))  # XXX can get duplicates
    return children, vchildren
def make_subterm(self, name, super_terms, **objs):
    '''
    Make a Term from a name (string) and bases (Term's).
    The bases are the supertypes of a type, and can be a tuple of terms
    or a single term. If a term named ``name`` already exists it is
    returned as-is. The term is not saved or added to the session.
    '''
    try:
        return self.get_term(name)
    except exceptions.TermNotFound:
        pass
    if isa(super_terms, self.word):
        super_terms = (super_terms,)
    first_base = super_terms[0]
    # NOTE: the check order is significant and preserved from the
    # original — a base may satisfy more than one of these tests.
    if are(first_base, self.noun):
        return self._make_subnoun(name, bases=super_terms)
    if are(first_base, self.thing):
        return self._make_noun(name, bases=super_terms)
    if are(first_base, self.verb):
        return self._make_subverb(name, bases=super_terms)
    if are(first_base, self.exist):
        return self._make_verb(name, bases=super_terms, objs=objs)
def test(self, match, network):
    '''
    Resolve both arguments against ``match`` and report whether the
    first is an instance (isa) of the second. ``network`` is unused but
    kept for interface uniformity with other condition tests.
    '''
    term = self.args[0].solve(match)
    term_type = self.args[1].solve(match)
    return isa(term, term_type)