Example #1
0
    def test_relativize_quantifiers(self) -> None:
        """relativize_quantifiers should guard each quantified variable with the
        'active' relation of its sort: universals get an implication premise,
        existentials get a conjunct."""
        source = '''
            sort node
            sort quorum
            immutable relation member(node, quorum)
            mutable relation active_node(node)
            mutable relation active_quorum(quorum)
        '''
        prog = mypyvy.parse_program(source)
        typechecker.typecheck_program(prog)

        # Resolve the two sorts and their corresponding guard relations.
        node_sort = prog.scope.get_sort('node')
        assert node_sort is not None
        quorum_sort = prog.scope.get_sort('quorum')
        assert quorum_sort is not None
        node_guard = prog.scope.get('active_node')
        assert isinstance(node_guard, syntax.RelationDecl)
        quorum_guard = prog.scope.get('active_quorum')
        assert isinstance(quorum_guard, syntax.RelationDecl)
        guards = {node_sort: node_guard, quorum_sort: quorum_guard}

        e = parser.parse_expr('forall Q1, Q2. exists N. member(N, Q1) & member(N, Q2)')
        typechecker.typecheck_expr(prog.scope, e, None)

        expected = parser.parse_expr('forall Q1, Q2. active_quorum(Q1) & active_quorum(Q2) -> '
                                     'exists N. active_node(N) & (member(N, Q1) & member(N, Q2))')
        with prog.scope.n_states(1):
            typechecker.typecheck_expr(prog.scope, expected, None)

        self.assertEqual(syntax.relativize_quantifiers(guards, e), expected)
Example #2
0
 def test_as_clauses_lockserv(self) -> None:
     """as_clauses should succeed on every invariant of the lockserv example."""
     with open(lockserv_path) as f:
         prog = mypyvy.parse_program(f.read())
     typechecker.typecheck_program(prog)
     # Each invariant gets its own subtest so one failure doesn't mask the rest.
     for inv in prog.invs():
         with self.subTest(expr=inv.expr):
             syntax.as_clauses(inv.expr)
Example #3
0
    def test_consistent_hashing(self) -> None:
        """Parsing the same source text twice must produce structurally equal
        expressions with equal hashes (i.e. __eq__/__hash__ are consistent
        across independent parses)."""
        # Read the file once and parse the identical text twice, instead of
        # re-reading it from disk for each parse.
        with open(lockserv_path) as f:
            source = f.read()
        prog1 = mypyvy.parse_program(source)
        prog2 = mypyvy.parse_program(source)

        typechecker.typecheck_program(prog1)
        typechecker.typecheck_program(prog2)
        for d1, d2 in zip(prog1.decls_containing_exprs(), prog2.decls_containing_exprs()):
            e1 = d1.expr
            e2 = d2.expr
            with self.subTest(msg='expr hash/eq', e1=e1, e2=e2):
                self.assertEqual(e1, e2)
                # equal objects must hash equal
                self.assertEqual(hash(e1), hash(e2))
def main() -> None:
    """Tool entry point: parse command-line arguments, configure logging and z3,
    parse and typecheck the input file, dispatch to the selected subcommand,
    and exit nonzero if any errors were reported."""
    # Cap this process's address space via RLIMIT_AS.
    # NOTE(review): an earlier comment here said 45 GB, but the value set below
    # is 90 * 10**9 bytes (90 GB) — confirm which limit is intended.
    # TODO: make this a command line argument
    # TODO: not sure if this is actually the right way to do this (also, what about child processes?)
    resource.setrlimit(resource.RLIMIT_AS, (90 * 10**9, 90 * 10**9))

    utils.args = parse_args(sys.argv[1:])

    # Pick the log-line format based on the requested logging flavor.
    if utils.args.log_xml:
        fmt = '%(message)s'
    elif utils.args.log_time:
        fmt = '%(asctime)s %(filename)s:%(lineno)d: %(message)s'
    else:
        fmt = '%(filename)s:%(lineno)d: %(message)s'

    # JSON output mode suppresses ordinary logging so stdout stays machine-readable.
    if 'json' in utils.args and utils.args.json:
        utils.args.log = 'critical'

    # NOTE(review): an unrecognized --log value would make getattr return None
    # and setLevel would then raise — presumably argparse restricts the choices.
    utils.logger.setLevel(getattr(logging, utils.args.log.upper(), None))
    handler = logging.StreamHandler(stream=sys.stdout)
    handler.terminator = ''  # log records carry their own newlines
    handler.setFormatter(MyFormatter(fmt))
    logging.root.addHandler(handler)

    if utils.args.print_cmdline:
        utils.logger.always_print(' '.join([sys.executable] + sys.argv))
        utils.logger.info('Running mypyvy with the following options:')
        for k, v in sorted(vars(utils.args).items()):
            utils.logger.info(f'    {k} = {v!r}')

    # Seed both z3 engines for reproducible runs.
    utils.logger.info('setting seed to %d' % utils.args.seed)
    z3.set_param('smt.random_seed', utils.args.seed)
    z3.set_param('sat.random_seed', utils.args.seed)

    # utils.logger.info('enable z3 macro finder')
    # z3.set_param('smt.macro_finder', True)

    if utils.args.timeout is not None:
        utils.logger.info('setting z3 timeout to %s' % utils.args.timeout)
        z3.set_param('timeout', utils.args.timeout)

    # Errors are detected by comparing the global error counter before/after.
    pre_parse_error_count = utils.error_count

    with open(utils.args.filename) as f:
        prog = parse_program(f.read(),
                             forbid_rebuild=utils.args.forbid_parser_rebuild,
                             filename=utils.args.filename)

    if utils.error_count > pre_parse_error_count:
        utils.logger.always_print('program has syntax errors.')
        utils.exit(1)

    # Optionally echo the parsed program back in one of several formats.
    if utils.args.print_program is not None:
        if utils.args.print_program == 'str':
            to_str: Callable[[Program], str] = str
            end = '\n'
        elif utils.args.print_program == 'repr':
            to_str = repr
            end = '\n'
        elif utils.args.print_program == 'faithful':
            to_str = syntax.faithful_print_prog
            end = ''
        elif utils.args.print_program == 'without-invariants':

            def p(prog: Program) -> str:
                return syntax.faithful_print_prog(prog, skip_invariants=True)

            to_str = p
            end = ''
        else:
            assert False

        utils.logger.always_print(to_str(prog), end=end)

    # Same before/after-counter technique for typechecking errors.
    pre_typecheck_error_count = utils.error_count

    typechecker.typecheck_program(prog)
    if utils.error_count > pre_typecheck_error_count:
        utils.logger.always_print('program has resolution errors.')
        utils.exit(1)

    # Publish the typechecked program globally for the rest of the tool.
    syntax.the_program = prog

    s = Solver(use_cvc4=utils.args.cvc4)

    # Dispatch to the subcommand handler selected by argparse (args.main).
    utils.args.main(s)

    if utils.args.ipython:
        ipython(s)

    utils.exit(1 if utils.error_count > 0 else 0)
def sandbox(s: Solver) -> None:
    """Experimental scratchpad for exploring relaxed traces and candidate
    derived relations.  Not part of the normal flow: it loads pickled traces
    from hard-coded file paths, mutates global program state, and deliberately
    ends with `assert False`."""
    ####################################################################################
    # SANDBOX for playing with relaxed traces
    import pickle
    # NOTE(review): requires a pre-recorded trace pickled at ./paxos_trace.p.
    trns: logic.Trace = pickle.load(open("paxos_trace.p", "rb"))

    # Mine candidate derived relations from the trace.
    # NOTE(review): the meaning of the literals 2 and 3 isn't visible here —
    # confirm against derived_rels_candidates_from_trace's signature.
    diff_conjunctions = relaxed_traces.derived_rels_candidates_from_trace(
        trns, [], 2, 3)

    print("num candidate relations:", len(diff_conjunctions))
    for diffing_conjunction in diff_conjunctions:
        # print("relation:")
        # for conj in diffing_conjunction:
        #     print("\t %s" % str(conj))
        print(diffing_conjunction[1])

    # Turn the first candidate (free_vars, def_expr) into a derived-relation
    # definition: forall free_vars. nder(free_vars) <-> def_expr
    derrel_name = syntax.the_program.scope.fresh("nder")
    (free_vars, def_expr) = diff_conjunctions[0]
    def_axiom = syntax.Forall(
        tuple(free_vars),
        syntax.Iff(
            syntax.Apply(derrel_name,
                         tuple(syntax.Id(v.name) for v in free_vars)),
            # TODO: extract pattern
            def_expr))

    derrel = syntax.RelationDecl(
        name=derrel_name,
        arity=tuple(syntax.safe_cast_sort(var.sort) for var in free_vars),
        mutable=True,
        derived=def_axiom)

    # TODO: this irreversibly adds the relation to the context, wrap
    typechecker.typecheck_statedecl(syntax.the_program.scope, derrel)
    syntax.the_program.decls.append(
        derrel
    )  # TODO: hack! because typecheck_statedecl only adds to prog.scope
    s.mutable_axioms.extend([
        def_axiom
    ])  # TODO: hack! currently we register these axioms only on solver init

    print("Trying derived relation:", derrel)

    # the new decrease_domain action incorporates restrictions that derived relations remain the same on active tuples
    new_decrease_domain = relaxed_traces.relaxation_action_def(
        syntax.the_program, fresh=False)
    new_prog = relaxed_traces.replace_relaxation_action(
        syntax.the_program, new_decrease_domain)
    typechecker.typecheck_program(new_prog)
    print(new_prog)

    # Globally swap in the rewritten program.
    syntax.the_program = new_prog

    # TODO: recover this, making sure the candidate blocks the trace
    # trace_decl = next(syntax.the_program.traces())
    # trns2_o = bmc_trace(new_prog, trace_decl, s, lambda s, ks: logic.check_solver(s, ks, minimize=True))
    # assert trns2_o is None

    # migrated_trace = load_relaxed_trace_from_updr_cex(syntax.the_program, s)
    # NOTE(review): requires a pre-recorded trace pickled at ./migrated_trace.p.
    import pickle
    trns2_o = pickle.load(open("migrated_trace.p", "rb"))

    trns2 = cast(logic.Trace, trns2_o)
    print(trns2)
    print()
    # Sanity check: the first candidate does NOT block the migrated trace.
    assert not relaxed_traces.is_rel_blocking_relax(
        trns2, ([(v, str(syntax.safe_cast_sort(v.sort)))
                 for v in free_vars], def_expr))

    # for candidate in diff_conjunctions:
    #     print("start checking")
    #     print()
    #     if str(candidate[1]) == ('exists v0:node. member(v0, v1) & left_round(v0, v2) '
    #                              '& !vote(v0, v2, v3) & active_node(v0)'):
    #         print(candidate)
    #         assert False
    #         resush = relaxed_traces.is_rel_blocking_relax_step(
    #             trns2, 11,
    #             ([(v, str(syntax.safe_cast_sort(v.sort))) for v in candidate[0]],
    #              candidate[1]))
    #         # res2 = trns2.as_state(0).eval(syntax.And(*[i.expr for i in syntax.the_program.inits()]))
    #
    #         # resush = trns2.as_state(7).eval(syntax.And(*[i.expr for i in syntax.the_program.inits()]))
    #         print(resush)
    #         assert False
    # assert False

    # Keep only the candidates that block the relaxed counterexample trace.
    diff_conjunctions = list(
        filter(
            lambda candidate: relaxed_traces.is_rel_blocking_relax(
                trns2, ([(v, str(syntax.safe_cast_sort(v.sort)))
                         for v in candidate[0]], candidate[1])),
            diff_conjunctions))
    print("num candidate relations:", len(diff_conjunctions))
    for diffing_conjunction in diff_conjunctions:
        # print("relation:")
        # for conj in diffing_conjunction:
        #     print("\t %s" % str(conj))
        print(diffing_conjunction[1])

    print()

    # Deliberate failure: this function exists for interactive exploration only.
    assert False
def relaxed_program(prog: syntax.Program) -> syntax.Program:
    """Return the relaxed version of `prog`: every sort gets a fresh mutable
    'active_<sort>' relation (initially true everywhere), all quantifiers in
    derived-relation axioms and invariants are relativized to the active
    elements, definitions are relativized, and a relaxation action that can
    shrink the active sets is appended."""
    new_decls: List[syntax.Decl] = list(prog.sorts())

    # One fresh guard relation per sort, tracking which elements are active.
    actives: Dict[syntax.SortDecl, syntax.RelationDecl] = {}
    for sort in prog.sorts():
        guard = syntax.RelationDecl(prog.scope.fresh('active_' + sort.name),
                                    arity=(syntax.UninterpretedSort(sort.name),),
                                    mutable=True)
        actives[sort] = guard
        new_decls.append(guard)

    # Initially every element is active: forall X. active_sort(X).
    for sort in prog.sorts():
        vname = prog.scope.fresh(sort.name[0].upper())
        init_expr = syntax.Forall((syntax.SortedVar(vname, None),),
                                  syntax.Apply(actives[sort].name,
                                               (syntax.Id(vname),)))
        new_decls.append(syntax.InitDecl(name=None, expr=init_expr))

    for d in prog.decls:
        if isinstance(d, syntax.SortDecl):
            continue  # already included above
        if isinstance(d, syntax.RelationDecl) and d.derived_axiom is not None:
            # Derived relations: relativize the defining axiom's quantifiers.
            relativized = syntax.relativize_quantifiers(actives, d.derived_axiom)
            new_decls.append(syntax.RelationDecl(d.name,
                                                 d.arity,
                                                 d.mutable,
                                                 relativized,
                                                 annotations=d.annotations))
        elif isinstance(d, syntax.DefinitionDecl):
            new_decls.append(relativize_decl(d,
                                             actives,
                                             prog.scope,
                                             inline_relax_actives=False))
        elif isinstance(d, syntax.InvariantDecl):
            new_decls.append(
                syntax.InvariantDecl(d.name,
                                     expr=syntax.relativize_quantifiers(actives, d.expr),
                                     is_safety=d.is_safety,
                                     is_sketch=d.is_sketch))
        elif isinstance(d, (syntax.RelationDecl, syntax.ConstantDecl,
                            syntax.FunctionDecl, syntax.AxiomDecl,
                            syntax.InitDecl)):
            # Carried over unchanged (plain relations reach here only when
            # they have no derived axiom, thanks to the check above).
            new_decls.append(d)
        else:
            assert False, d

    # Action that nondeterministically shrinks the active sets.
    new_decls.append(relaxation_action_def(prog, actives=actives, fresh=True))

    res = syntax.Program(new_decls)
    typechecker.typecheck_program(res)  # #sorrynotsorry
    return res