def _retract_similar_facts(self, agent, val0, newfact, d, kbResume=False):
    """Retract every stored fact that is "similar" to *newfact*.

    A fact in the bucket ``d[val0]`` is similar when, for at least one
    index list in ``self._detindices_lists``, it agrees with *newfact*
    at every index in that list.  Similar facts are removed from the
    bucket; when the bucket becomes empty its key is dropped from *d*.

    Parameters:
      agent    -- agent used to post RemoveFactEvent notifications
      val0     -- key into *d* selecting the bucket of candidate facts
      newfact  -- sequence whose determined elements drive the match
      d        -- dict mapping keys to lists of fact sequences
      kbResume -- when True, suppress event posting (presumably because
                  the KB is being restored from persistence -- confirm)
    """
    facts = d[val0]
    length = len(facts)
    arity = len(newfact)
    i = 0
    some_deleted = False
    while i < length:
        oldfact = facts[i]
        # Facts of a different arity can never match; skip them.
        # BUG FIX: the original used `continue` here, which jumped back
        # to the `while` test without advancing i -- an infinite loop as
        # soon as one fact of a different arity was present.
        if len(oldfact) == arity:
            for detindices in self._detindices_lists:
                for index in detindices:
                    if newfact[index] != oldfact[index]:
                        break  # mismatch: try the next determined mode
                else:
                    # oldfact agrees with newfact on every index of this
                    # determined mode -> retract it.
                    #PERSIST
                    if not kbResume:
                        agent.post_event(RemoveFactEvent(self.symbol, oldfact))
                    facts[i] = None  # mark for removal; compacted below
                    some_deleted = True
                    break  # continue with next old fact
        i = i + 1
    if some_deleted:
        # Compact out the None placeholders in a single pass.
        delete_all(facts, None)
        if not facts:
            del d[val0]
def _retract_matching_facts(self, agent, bindings, val0, zexpr, d, kbResume=False):
    """Retract every stored fact whose arguments unify with *zexpr*.

    Each fact in the bucket ``d[val0]`` with the same arity as *zexpr*
    is compared element-by-element against *zexpr* (starting at index 1;
    index 0 presumably corresponds to *val0* -- confirm) using
    ``termMatch`` under *bindings*.  Matching facts are removed from the
    bucket; when the bucket becomes empty its key is dropped from *d*.

    Parameters:
      agent    -- agent passed to termMatch and used to post events
      bindings -- variable bindings for term matching
      val0     -- key into *d* selecting the bucket of candidate facts
      zexpr    -- pattern whose elements are matched against each fact
      d        -- dict mapping keys to lists of fact sequences
      kbResume -- when True, suppress event posting (presumably because
                  the KB is being restored from persistence -- confirm)
    """
    facts = d[val0]
    length = len(facts)
    arity = len(zexpr)
    i = 0
    some_deleted = False
    while i < length:
        oldfact = facts[i]
        # Facts of a different arity can never match; skip them.
        # BUG FIX: the original used `continue` here, which jumped back
        # to the `while` test without advancing i -- an infinite loop as
        # soon as one fact of a different arity was present.
        if len(oldfact) == arity:
            j = 1
            while j < arity:
                if not termMatch(agent, bindings, zexpr[j], oldfact[j]):
                    break  # mismatch: continue with next fact
                j = j + 1
            else:
                # Every element unified -> retract this fact.
                #PERSIST
                if not kbResume:
                    # Local import presumably avoids a circular module
                    # dependency -- confirm before hoisting to file top.
                    from spark.pylang.defaultimp import RemoveFactEvent
                    agent.post_event(RemoveFactEvent(self.symbol, oldfact))
                facts[i] = None  # mark for removal; compacted below
                some_deleted = True
        i = i + 1
    if some_deleted:
        # Compact out the None placeholders in a single pass.
        delete_all(facts, None)
        if not facts:
            del d[val0]