Example #1
    def __init__(self, config):
        self.config = config
        self._buffer = ''
        self.no_response = object()
        self.prompt = '>> '
        # Set up readline history, if readline is available and configured.
        if HAS_READLINE and config['terms_history_file'] and int(config['terms_history_length']):
            readline.set_history_length(int(config['terms_history_length']))
            fn = os.path.expanduser(config['terms_history_file'])
            try:
                if not os.path.exists(fn):
                    with open(fn, 'w') as f:
                        f.write('# terms history\n')
                readline.read_history_file(fn)
            except Exception:
                pass
        # Connect to the database; an in-memory database gets its schema
        # created and its network initialized on the fly.
        address = '%s/%s' % (config['dbms'], config['dbname'])
        engine = create_engine(address)
        Session = sessionmaker(bind=engine)
        if config['dbname'] == ':memory:':
            from terms.core.terms import Base
            Base.metadata.create_all(engine)
            from terms.core.network import Network
            Network.initialize(Session())
        self.compiler = Compiler(Session(), config)
        register_exec_global(Runtime(self.compiler), name='runtime')
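
A minimal sketch of a config mapping that would satisfy this constructor; only the keys come from the code above, the values are illustrative assumptions:

config = {
    'dbms': 'sqlite://',                        # assumed SQLAlchemy URL prefix; yields 'sqlite:///:memory:'
    'dbname': ':memory:',                       # triggers the in-memory schema creation branch
    'terms_history_file': '~/.terms_history',   # readline history location (assumed)
    'terms_history_length': '1000',             # parsed with int() by the code above
}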
Example #2
def get_sasession(config):
    address = '%s/%s' % (config['dbms'], config['dbname'])
    engine = create_engine(address)
    Session = sessionmaker(bind=engine)
    if config['dbname'] == ':memory:':
        session = Session()
        Base.metadata.create_all(engine)
        Network.initialize(session)
        session.close()
    return Session
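
Intended use is a sketch, assuming a config section with 'dbms' and 'dbname' keys as in the examples above:

Session = get_sasession(config)   # configured sessionmaker factory
session = Session()               # session bound to the engine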
Example #3
def init_terms():
    config = get_config()
    address = '%s/%s' % (config['dbms'], config['dbname'])
    engine = create_engine(address)
    Base.metadata.create_all(engine)
    Session = sessionmaker(bind=engine)
    session = Session()
    Network.initialize(session)
    session.commit()
    session.close()
    sys.exit(0)
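
Since init_terms() ends with sys.exit(0), it only makes sense as a standalone initializer; a hypothetical invocation (the __main__ guard is not part of the original code):

if __name__ == '__main__':
    init_terms()   # creates the schema, seeds the network, commits, and exits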
Example #4
    def __init__(
            self, session, config,
            lex_optimize=False,
            yacc_optimize=True,
            yacc_debug=False):

        self.session = session
        self.config = config
        self.network = Network(session, config)
        self.lexicon = self.network.lexicon

        self.parser = Parser(
            lex_optimize=lex_optimize,
            yacc_optimize=yacc_optimize,
            yacc_debug=yacc_debug)
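
The signature matches the KnowledgeBase(...) call in Example #5; instantiation is a sketch, assuming session and config are built as in Example #2:

kb = KnowledgeBase(session, config,
                   lex_optimize=False,
                   yacc_optimize=False,
                   yacc_debug=True)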
Example #5
def test_terms():  # test generator
    # read contents of tests/
    # feed each test to run_npl
    d = os.path.dirname(sys.modules['terms.core'].__file__)
    d = os.path.join(d, 'tests')
    files = os.listdir(d)
    config = ConfigParser()
    config.read_string(CONFIG)
    config = config['test']
    for f in files:
        if f.endswith('.test'):
            address = '%s/%s' % (config['dbms'], config['dbname'])
            engine = create_engine(address)
            Session = sessionmaker(bind=engine)
            session = Session()
            Base.metadata.create_all(engine)
            Network.initialize(session)
            kb = KnowledgeBase(session, config,
                               lex_optimize=False,
                               yacc_optimize=False,
                               yacc_debug=True)
            yield run_terms, kb, os.path.join(d, f)
            kb.session.close()
            Base.metadata.drop_all(engine)
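
This is a nose-style test generator: each yielded tuple is called as run_terms(kb, path). Driving it by hand is a sketch:

for test_func, kb, path in test_terms():
    test_func(kb, path)   # runs one .test file against a fresh knowledge base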
Example #6
class Compiler(object):
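    # Compiles parsed terms source: definitions, rules, facts, questions,
    # removals and imports are each dispatched to a compile_* method below.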

    def __init__(
            self, session, config,
            lex_optimize=False,
            yacc_optimize=True,
            yacc_debug=False):

        self.session = session
        self.config = config
        self.network = Network(session, config)
        self.lexicon = self.network.lexicon

        self.parser = Parser(
            lex_optimize=lex_optimize,
            yacc_optimize=yacc_optimize,
            yacc_debug=yacc_debug)

    def parse(self, s):
        # Drop blank lines and comment lines before parsing.
        s = '\n'.join([l for l in s.splitlines() if l and not l.startswith('#')])
        module = self.parser.parse(s)
        url = module.url
        headers = module.headers
        known = False
        if url is not None:
            known = True
            try:
                self.session.query(Import).filter_by(url=url).one()
            except NoResultFound:
                known = False
        if not known:
            asts = module.code
            if len(asts) == 1:
                return self.compile(asts[0])
            asts.reverse()
            for ast in asts:
                self.compile(ast)
            if url is not None:  # XXX Save the import even if compile throws an exception, recording the line it was thrown at?
                headers = '\n'.join(module.headers) if headers is not None else headers
                new = Import(s, url, headers)
                self.session.add(new)
                self.session.commit()
        return 'OK'

    def compile(self, ast):
        if ast.type == 'definition':
            return self.compile_definition(ast.definition)
        elif ast.type == 'rule':
            return self.compile_rule(ast)
        elif ast.type == 'instant-rule':
            return self.compile_instant_rule(ast)
        elif ast.type == 'fact-set':
            return self.compile_factset(ast.facts)
        elif ast.type == 'question':
            return self.compile_question(ast.facts)
        elif ast.type == 'removal':
            return self.compile_removal(ast.facts)
        elif ast.type == 'import':
            return self.compile_import(ast.url)

    def compile_definition(self, definition):
        if definition.type == 'verb-def':
            term = self.compile_verbdef(definition)
        elif definition.type == 'noun-def':
            term = self.compile_noundef(definition)
        elif definition.type == 'name-def':
            term = self.compile_namedef(definition)
        self.session.commit()
        return term

    def _test_previous(self, name):
        try:
            prev = self.lexicon.get_term(name)
        except TermNotFound:
            pass
        else:
            raise DuplicateWord('Word {} already exists, with type {}'.format(
                name, prev.term_type.name))

    def compile_verbdef(self, defn):
        self._test_previous(defn.name.val)
        bases = [self.lexicon.get_term(t.val) for t in defn.bases]
        objs = {o.label: self.lexicon.get_term(o.obj_type.val)
                for o in defn.objs}
        return self.lexicon.add_subterm(defn.name.val, bases, **objs)

    def compile_noundef(self, defn):
        self._test_previous(defn.name.val)
        bases = [self.lexicon.get_term(t.val) for t in defn.bases]
        return self.lexicon.add_subterm(defn.name.val, bases)

    def compile_namedef(self, defn):
        self._test_previous(defn.name.val)
        term_type = self.lexicon.get_term(defn.term_type.val)
        return self.lexicon.add_term(defn.name.val, term_type)

    def _prepare_rule(self, rule):
        condcode = None
        if rule.pycode:
            condcode = CondCode(rule.pycode)
        conds, prems = [], []
        for sen in rule.prems:
            if sen.type == 'fact':
                prem = self.compile_fact(sen)
                prems.append(prem)
            else:
                cond = self.compile_conddef(sen)
                conds.append(cond)
        consecs = []
        for sen in rule.cons:
            con = self.compile_fact(sen)
            consecs.append(con)
        return prems, conds, condcode, consecs

    def compile_rule(self, rule):
        args = self._prepare_rule(rule)
        self.network.add_rule(*args)
        self.session.commit()
        return 'OK'

    def compile_instant_rule(self, rule_ast):
        args = self._prepare_rule(rule_ast)
        rule = self.network.add_rule(*args)
        # remove premnodes & nodes that have no other rules
        for prem in rule.prems:
            if len(prem.node.prems) == 1:
                node = prem.node
                while len(node.parent.children) == 1:
                    node = node.parent
                self.session.delete(node)
        self.session.delete(rule)
        return 'OK'

    def compile_fact(self, fact):
        true = fact.true
        verb = self.compile_vterm(fact.predicate.verb)
        if fact.predicate.subj is None:
            return verb
        subj = self.compile_obj(fact.predicate.subj)
        mods = self.compile_mods(verb, fact.predicate.mods)
        mods['subj'] = subj
        redundant_var = None
        if fact.predvar:
            redundant_var = self.lexicon.make_var(fact.predvar.val)
        return Predicate(true, verb, redundant_var_=redundant_var, **mods)

    def compile_vterm(self, vterm):
        if vterm.type == 'var':
            return self.lexicon.make_var(vterm.val)
        return self.lexicon.get_term(vterm.val)

    def compile_obj(self, obj):
        if obj.type == 'var':
            return self.lexicon.make_var(obj.val)
        elif obj.type == 'term':
            return self.lexicon.get_term(obj.val)
        elif obj.type == 'fact':
            return self.compile_fact(obj)
        elif obj.type == 'number':
            return self.lexicon.make_term(obj.val, self.lexicon.number)
        elif obj.type == 'set':
            return self.compile_set(obj)

    def compile_set(self, ast):
        var = self.lexicon.make_var(ast.var)
        var.set_condition = ast.stmnt
        return var

    def compile_mods(self, verb, ast):
        mods = {}
        for mod in ast:
            label = mod.label
            try:
                otype = tuple(filter(lambda x: x.label == label, verb.object_types))[0]
            except IndexError:
                raise WrongLabel('Error: label %s is unknown for verb %s (in a rule)' % (label, verb.name))
            obj = self.compile_obj(mod.obj)
            if not isa(obj, otype.obj_type):
                raise WrongObjectType(
                    'Error: word %s for label %s is not the correct type: '
                    'it is a %s and should be a %s' %
                    (obj.name, label, obj.term_type.name, otype.obj_type.name))
            mods[label] = obj
        return mods

    def compile_conddef(self, sen):
        if sen.type == 'name-def':
            name = self.compile_vterm(sen.name)
            term_type = self.compile_vterm(sen.term_type)
            return CondIsa(name, term_type)
        else:
            name = self.compile_vterm(sen.name)
            base = self.compile_vterm(sen.bases[0])
            return CondIs(name, base)

    def compile_factset(self, facts):
        for f in facts:
            pred = self.compile_fact(f)
            self.network.add_fact(pred)
            self.session.commit()
        return 'OK'

    def compile_question(self, sentences):
        matches = []
        if sentences:
            facts, defs = [], []
            for s in sentences:
                if s.type == 'fact':
                    facts.append(s)
                else:
                    defs.append(s)
            q = [self.compile_fact(f) for f in facts]
            matches = self.network.query(*q)
            for defn in defs:
                if defn.type == 'noun-def':
                    if defn.name.type == 'var':
                        terms = self.lexicon.get_terms
                        #  XXX unfinished
                elif defn.type == 'name-def':
                    term = self.compile_namedef(defn)

        if not matches:
            matches = 'false'
        elif not matches[0]:
            matches = 'true'
        return matches

    def compile_removal(self, facts):
        for f in facts:
            pred = self.compile_fact(f)
            self.network.del_fact(pred)
        self.session.commit()
        return 'OK'

    def compile_import(self, url):
        try:
            self.session.query(Import).filter_by(url=url).one()
        except NoResultFound:
            if url.startswith('file://'):
                path = url[7:]
                try:
                    f = open(path, 'r')
                except Exception as e:
                    raise ImportProblems('Problems opening the file: ' + str(e))
                code = f.read()
                f.close()
            elif url.startswith('http'):
                try:
                    resp = urlopen(url)
                except Exception as e:
                    raise ImportProblems('Problems loading the file: ' + str(e))
                code = resp.read().decode('utf8')
                resp.close()
            else:
                raise ImportProblems('Unknown protocol for <%s>' % url)
            self.parse(code)
        return 'OK'
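
Typical use is a sketch, assuming session and config are obtained as in Example #2 and source is a string of terms code (Example #1 constructs the compiler the same way):

compiler = Compiler(Session(), config)
result = compiler.parse(source)   # returns 'OK', or the matches for a question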