def __init__(self, config):
    """Set up the REPL: readline history, database engine, compiler.

    *config* is a mapping with at least 'dbms', 'dbname',
    'terms_history_file' and 'terms_history_length' keys.
    """
    self.config = config
    self._buffer = ''
    # Sentinel distinguishing "nothing to print" from a falsy response.
    self.no_response = object()
    self.prompt = '>> '
    wants_history = (HAS_READLINE
                     and config['terms_history_file']
                     and int(config['terms_history_length']))
    if wants_history:
        readline.set_history_length(int(config['terms_history_length']))
        hist_path = os.path.expanduser(config['terms_history_file'])
        # Best effort only: a broken history file must not abort startup.
        try:
            if not os.path.exists(hist_path):
                with open(hist_path, 'w') as f:
                    f.write('# terms history\n')
            readline.read_history_file(hist_path)
        except Exception:
            pass
    db_url = '%s/%s' % (config['dbms'], config['dbname'])
    engine = create_engine(db_url)
    Session = sessionmaker(bind=engine)
    if config['dbname'] == ':memory:':
        # Fresh in-memory database: build the schema and seed the network.
        from terms.core.terms import Base
        Base.metadata.create_all(engine)
        from terms.core.network import Network
        Network.initialize(Session())
    self.compiler = Compiler(Session(), config)
    register_exec_global(Runtime(self.compiler), name='runtime')
class KnowledgeBase(object):
    """Facade over the compiler, lexicon and network for one SA session."""

    def __init__(self, session, config):
        self._config = config
        self._sa_session = session
        self._compiler = Compiler(session, config)
        self._network = self._compiler.network
        self._lexicon = self._compiler.lexicon
        self._present = self._network.present
        self._memory = self._network.past

    def tell(self, s):
        """Feed the sentence *s* to the compiler; return its response."""
        return self._compiler.parse(s)

    def tell_word(self, w):
        """Add the word *w* to the knowledge base via its definition."""
        return self.tell(w.define())

    def tell_fact(self, f):
        """Assert fact *f* (anything with a meaningful str())."""
        return self.tell(str(f))

    def ask_fact(self, f):
        """Query fact *f*; yield one {label: str(match)} dict per row."""
        rows = self._network.query(str(f))
        return ({label: str(obj) for label, obj in row} for row in rows)

    def get_words(self, type):
        """Return a JSON-encoded list of all words of the given *type*."""
        term_type = self._lexicon.get_term(type)
        return json.dumps(self._lexicon.get_terms(term_type))

    def get_subwords(self, base):
        """Return a JSON-encoded list of the words subsumed by *base*."""
        base_term = self._lexicon.get_term(base)
        return json.dumps(self._lexicon.get_subterms(base_term))
def __init__(self, session, config):
    """Store session/config and expose compiler, network and lexicon handles."""
    self._config = config
    self._sa_session = session
    self._compiler = Compiler(session, config)
    # Cache the compiler's collaborators for direct access.
    net = self._compiler.network
    self._network = net
    self._lexicon = self._compiler.lexicon
    self._present = net.present
    self._memory = net.past
def run(self):
    """Serve clients from teller_queue until a None sentinel arrives.

    For each connection: read utf-8 chunks up to the b'FINISH-TERMS'
    marker, dispatch the accumulated text (lexicon query, exec-global,
    or terms source to compile), then send the JSON response followed
    by b'END'.
    """
    for client in iter(self.teller_queue.get, None):
        totell = []
        try:
            # Guard the receive loop: a client that hangs up mid-send
            # must not kill the worker (matches the sibling Teller.run).
            for msg in iter(client.recv_bytes, b'FINISH-TERMS'):
                totell.append(msg.decode('utf8'))
        except OSError:
            client.close()
            self.teller_queue.task_done()
            continue
        totell = '\n'.join(totell)
        session = self.session_factory()
        self.compiler = Compiler(session, self.config)
        register_exec_global(Runtime(self.compiler), name='runtime')
        load_exec_globals(session)
        if totell.startswith('lexicon:'):
            resp = self._from_lexicon(totell)
        elif totell.startswith('compiler:exec_globals:'):
            resp = self._add_execglobal(totell)
        else:
            self.compiler.network.pipe = client
            try:
                resp = self.compiler.parse(totell)
            except TermNotFound as e:
                session.rollback()
                resp = 'Unknown word: ' + e.args[0]
            except TermsSyntaxError as e:
                session.rollback()
                resp = 'Terms syntax error: ' + e.args[0]
            except WrongLabel as e:
                session.rollback()
                resp = e.args[0]
            except IllegalLabel as e:
                session.rollback()
                resp = 'Error: labels cannot contain underscores: %s' % e.args[0]
            except (WrongObjectType, ImportProblems, DuplicateWord) as e:
                session.rollback()
                resp = e.args[0]
            self.compiler.network.pipe = None
        resp = json.dumps(resp, cls=TermsJSONEncoder)
        try:
            client.send_bytes(str(resp).encode('utf8'))
        except BrokenPipeError:
            pass
        else:
            client.send_bytes(b'END')
        client.close()
        session.close()  # XXX needed?
        self.compiler = None  # BUG FIX: was misspelled 'self.compliler'
        self.teller_queue.task_done()
    # abyss
    self.teller_queue.task_done()
    self.teller_queue.close()
class TermsRepl(object):
    """Interactive read-eval-print loop for the terms language."""

    def __init__(self, config):
        """Prepare readline history, the database engine and the compiler."""
        self.config = config
        self._buffer = ''
        # Sentinel: process_line returns this when there is nothing to print.
        self.no_response = object()
        self.prompt = '>> '
        if HAS_READLINE and config['terms_history_file'] and int(config['terms_history_length']):
            readline.set_history_length(int(config['terms_history_length']))
            hist = os.path.expanduser(config['terms_history_file'])
            # Best effort only: history problems must never abort startup.
            try:
                if not os.path.exists(hist):
                    with open(hist, 'w') as f:
                        f.write('# terms history\n')
                readline.read_history_file(hist)
            except Exception:
                pass
        url = '%s/%s' % (config['dbms'], config['dbname'])
        engine = create_engine(url)
        Session = sessionmaker(bind=engine)
        if config['dbname'] == ':memory:':
            # Fresh in-memory db: create the schema and bootstrap the network.
            from terms.core.terms import Base
            Base.metadata.create_all(engine)
            from terms.core.network import Network
            Network.initialize(Session())
        self.compiler = Compiler(Session(), config)
        register_exec_global(Runtime(self.compiler), name='runtime')

    def _parse_buff(self):
        """Compile the accumulated buffer and commit the session."""
        result = self.compiler.parse(self._buffer)
        self.compiler.session.commit()
        return result

    def reset_state(self):
        """Forget the pending input and restore the primary prompt."""
        self._buffer = ''
        self.prompt = '>> '

    def format_results(self, res):
        """Render a response as text; iterables of dicts become 'k: v' rows."""
        if isinstance(res, str):
            return res
        rows = []
        for row in res:
            rows.append(', '.join(k + ': ' + str(v) for k, v in row.items()))
        return '; '.join(rows)

    def process_line(self, line):
        """Accumulate *line*; once the buffer ends a sentence, parse it.

        Returns self.no_response until a full sentence ('.'/'?') has
        been collected, then the compiler's response.
        """
        self.prompt = '.. '
        resp = self.no_response
        if line:
            self._buffer = '\n'.join((self._buffer, line))
            if self._buffer.endswith(('.', '?')):
                resp = self._parse_buff()
                self.reset_state()
        return resp

    def run(self):
        """Read lines forever, printing responses; 'quit'/'exit' leave."""
        console = InteractiveConsole()
        while True:
            line = console.raw_input(prompt=self.prompt)
            if line in ('quit', 'exit'):
                self.quit()
            answer = self.process_line(line)
            if answer is not self.no_response:
                print(answer)

    def quit(self):
        """Persist readline history (when enabled) and exit the process."""
        if HAS_READLINE and self.config['terms_history_file'] and int(self.config['terms_history_length']):
            readline.write_history_file(os.path.expanduser(self.config['terms_history_file']))
        sys.exit('bye')
class Teller(Process):
    """Worker process answering clients queued on teller_queue.

    Each client sends utf-8 chunks terminated by b'FINISH-TERMS'.  The
    accumulated text is either a lexicon query ('lexicon:...'), an
    exec-global to persist ('compiler:exec_globals:...'), or terms
    source to compile.  The JSON-encoded response is streamed back,
    terminated with b'END'.
    """

    def __init__(self, config, session_factory, teller_queue, *args, **kwargs):
        super(Teller, self).__init__(*args, **kwargs)
        self.config = config
        self.session_factory = session_factory
        self.teller_queue = teller_queue
        self.compiler = None  # built per request in run()

    def run(self):
        """Main loop: serve clients until a None sentinel is queued."""
        for client in iter(self.teller_queue.get, None):
            totell = []
            try:
                # A client that hangs up mid-send is dropped, not fatal.
                for msg in iter(client.recv_bytes, b'FINISH-TERMS'):
                    totell.append(msg.decode('utf8'))
            except OSError:
                client.close()
                self.teller_queue.task_done()
                continue
            totell = '\n'.join(totell)
            session = self.session_factory()
            self.compiler = Compiler(session, self.config)
            register_exec_global(Runtime(self.compiler), name='runtime')
            load_exec_globals(session)
            if totell.startswith('lexicon:'):
                try:
                    resp = self._from_lexicon(totell)
                except TermNotFound as e:
                    resp = 'Unknown word: ' + e.args[0]
            elif totell.startswith('compiler:exec_globals:'):
                resp = self._add_execglobal(totell)
            else:
                self.compiler.network.pipe = client
                try:
                    resp = self.compiler.parse(totell)
                except TermNotFound as e:
                    session.rollback()
                    resp = 'Unknown word: ' + e.args[0]
                except TermsSyntaxError as e:
                    session.rollback()
                    resp = 'Terms syntax error: ' + e.args[0]
                except WrongLabel as e:
                    session.rollback()
                    resp = e.args[0]
                except IllegalLabel as e:
                    session.rollback()
                    resp = 'Error: labels cannot contain underscores: %s' % e.args[0]
                except (WrongObjectType, ImportProblems, DuplicateWord) as e:
                    session.rollback()
                    resp = e.args[0]
                self.compiler.network.pipe = None
            resp = json.dumps(resp, cls=TermsJSONEncoder)
            try:
                client.send_bytes(str(resp).encode('utf8'))
            except BrokenPipeError:
                pass
            else:
                client.send_bytes(b'END')
            client.close()
            session.close()  # XXX needed?
            self.compiler = None  # BUG FIX: was misspelled 'self.compliler'
            self.teller_queue.task_done()
        # abyss
        self.teller_queue.task_done()
        self.teller_queue.close()

    def _from_lexicon(self, totell):
        """Answer a 'lexicon:<op>:<word>' query with a JSON-encoded list.

        Raises TermNotFound (handled by run()) for an unknown word.
        """
        q = totell.split(':')
        ttype = self.compiler.lexicon.get_term(q[2])
        # BUG FIX: resp was unbound for an unrecognized op (NameError);
        # unknown ops now answer with an empty list.
        resp = []
        if q[1] == 'get-words':
            resp = self.compiler.lexicon.get_terms(ttype)
        elif q[1] == 'get-subwords':
            resp = self.compiler.lexicon.get_subterms(ttype)
        elif q[1] == 'get-verb':
            for ot in ttype.object_types:
                isverb = isa(ot.obj_type, self.compiler.lexicon.verb)
                resp.append([ot.label, ot.obj_type.name, isverb])
        return json.dumps(resp, cls=TermsJSONEncoder)

    def _add_execglobal(self, totell):
        """Persist the exec-global source that follows the 22-char prefix."""
        # XXX put it in terms.core.exec_globals, in all processes
        egs = totell[22:]
        session = self.session_factory()
        # Close the session even if commit fails (was leaked on error).
        try:
            session.add(ExecGlobal(egs))
            session.commit()
        finally:
            session.close()