def __init__(self, rules_db):
    """Register the built-in atom-test rule.

    Builds a rule with no premises whose head matches ``(atom X)``,
    named ``atom_rule`` and sourced as ``(built-in)``.
    """
    # Head pattern: the constant "atom" applied to a single variable X.
    head = expression_from_list(
        [rules_db.add_constant_by_name("atom"), Variable("X")])
    rule_name = rules_db.add_constant_by_name("atom_rule")
    Rule.__init__(self, [], head, rule_name, "(built-in)", rules_db)
def eval_step(self, expr):
    """Evaluate *expr* by one proof step.

    Poses the goal ``(eval_predicate_name expr ?T)`` with a fresh
    target variable ?T and takes the prover's first answer.

    Returns:
        (steps, result) where ``result`` is the value bound to the
        target variable on success, or the failed substitution
        (``None``) when the proof did not go through.
    """
    result_var = self.compiler.rules_db.introduce_variable()
    goal = [
        expression_from_list([self.eval_predicate_name, expr, result_var])
    ]
    proof = prover.prove_dfs(self.compiler.rules_db, goal,
                             self.proof_steps_budget, self.prover_cache)
    steps, subs = next(proof)
    if subs is None:
        return steps, subs
    return steps, subs.replacements[result_var]
def add_builtins(rules_db):
    """Install the built-in rules into *rules_db*.

    Adds the reflexive ``is`` rule (``X is X`` with no premises) plus
    the special rule classes RuleIsNot, RuleAtom and RuleNewAtom.
    """
    # Head pattern for the identity rule: (X is X).
    identity_head = expression_from_list([
        Variable("X"),
        rules_db.add_constant_by_name("is"),
        Variable("X"),
    ])
    rules_db.add_rule(
        Rule([], identity_head, rules_db.add_constant_by_name("is_rule"),
             "(built-in)", rules_db))
    # The remaining built-ins are implemented as dedicated rule classes.
    for builtin_rule in (RuleIsNot, RuleAtom, RuleNewAtom):
        rules_db.add_rule(builtin_rule(rules_db))
def compile_expression(self, parsed_expression, tokens):
    """Recursively translate a parsed token tree into an expression.

    CONSTANT tokens become interned constants from the rules db,
    VARIABLE tokens become Variable objects, and (nested) lists are
    compiled element-wise into a compound expression.

    Raises:
        CompilerError: on a token of any other type, with the source
            text up to the offending location.
    """
    if isinstance(parsed_expression, lexer.Token):
        token = parsed_expression
        if token.type == lexer.TokenType.CONSTANT:
            return self.rules_db.add_constant_by_name(token.string)
        if token.type == lexer.TokenType.VARIABLE:
            return Variable(token.string)
        raise CompilerError(
            "Unknown token:\n" + tokens.part_until_here_str(token.loc))
    # A TokenList wraps its children in .elems; a bare list is used as-is.
    children = (parsed_expression.elems
                if isinstance(parsed_expression, TokenList)
                else parsed_expression)
    return expression_from_list(
        [self.compile_expression(child, tokens) for child in children])
def property_check(self, expr, property_name):
    """Test whether ``(property_name expr)`` is provable.

    Returns:
        (steps, provable) — the prover's step count and a bool that is
        True iff the first answer carried a substitution.
    """
    goal = [expression_from_list([property_name, expr])]
    first_answer = prover.prove_dfs(self.compiler.rules_db, goal,
                                    self.proof_steps_budget,
                                    self.prover_cache)
    steps, subs = next(first_answer)
    return steps, subs is not None