Example #1
    def synthesize(self,
                   limit=None,
                   library_max_redundancy=None,
                   strict_out_lib_map=False,
                   strict_in_spec_map=True,
                   use_types=True,
                   use_hints=True,
                   minimize_components=False,
                   minimize_ports=False,
                   minimize_cost=False,
                   filename=None,
                   visualize=True):
        '''
        Entry point for synthesis: runs the solver, renders the result graph,
        and writes a report to <filename>.pyco
        '''
        LOG.debug(filename)
        if filename is None:
            filename = 'out'

        time1 = time.time()

        (model, composition, spec,
         contract_list) = self.solver_interface.synthesize(
             self.spec_contract_list,
             limit=limit,
             library_max_redundancy=library_max_redundancy,
             strict_out_lib_map=strict_out_lib_map,
             strict_in_spec_map=strict_in_spec_map,
             use_types=use_types,
             use_hints=use_hints,
             minimize_components=minimize_components,
             minimize_ports=minimize_ports,
             minimize_cost=minimize_cost,
             same_block_constraints=self.same_block_pairs,
             distinct_mapping_constraints=self.distinct_map,
             fixed_components=self.fixed_components,
             fixed_connections=self.fixed_connections,
             fixed_connections_spec=self.fixed_connections_spec)
        time2 = time.time()
        graphviz_conv = GraphizConverter(spec,
                                         composition,
                                         contract_list,
                                         synthesis_time=time2 - time1,
                                         filename=filename)
        graphviz_conv.generate_graphviz()

        if visualize:
            graphviz_conv.view()
        else:
            graphviz_conv.save()

        with open(filename + '.pyco', 'w') as f:
            f.write('Synthesis time: %.2f seconds\n\n\n' % (time2 - time1))
            f.write(str(model))
            f.write('\n\n')
            f.write(str(spec))
            f.write('\n\n')
            for c in contract_list:
                f.write(str(c))
                f.write('\n')
Example #2
def convert_formula_to_z3(formula, contract_vars, level):
    '''
    return a Z3 formula from a pycolite-lite-dev formula structure
    '''

    if formula.is_literal:
        return contract_vars['%d-%s' % (level, formula.unique_name)]
    elif isinstance(formula, TrueFormula):
        return True
    elif isinstance(formula, FalseFormula):
        return False
    elif isinstance(formula, Negation):
        return Not(
            convert_formula_to_z3(formula.right_formula, contract_vars, level))
    elif isinstance(formula, Conjunction):
        return And(
            convert_formula_to_z3(formula.left_formula, contract_vars, level),
            convert_formula_to_z3(formula.right_formula, contract_vars, level))
    elif isinstance(formula, Disjunction):
        return Or(
            convert_formula_to_z3(formula.left_formula, contract_vars, level),
            convert_formula_to_z3(formula.right_formula, contract_vars, level))
    elif isinstance(formula, Implication):
        return Implies(
            convert_formula_to_z3(formula.left_formula, contract_vars, level),
            convert_formula_to_z3(formula.right_formula, contract_vars, level))
    elif isinstance(formula, Equivalence):
        return (convert_formula_to_z3(formula.left_formula, contract_vars,
                                      level) == convert_formula_to_z3(
                                          formula.right_formula, contract_vars,
                                          level))
    else:
        LOG.critical('incorrect unrolled formula')
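A self-contained sketch of the lookup convention this converter relies on (the literal names are made up; only the '%d-%s' key scheme comes from the code above):

from z3 import And, Bool, Not

# Hypothetical literals 'a_u' and 'b_u' at unrolling level 0; the converter
# expects one Z3 Boolean per (level, unique_name) pair, keyed as below.
contract_vars = {'0-a_u': Bool('0-a_u'), '0-b_u': Bool('0-b_u')}

# A Conjunction(literal, Negation(literal)) would then come out as:
z3_formula = And(contract_vars['0-a_u'], Not(contract_vars['0-b_u']))
print(z3_formula)  # And(0-a_u, Not(0-b_u))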
Example #3
    def verify_library(self):
        '''
        Verifies that all the relations in the library are consistent
        '''
        for component in self.components:
            try:
                component.verify_refinement_assertions()
            except NotARefinementError as error:
                LOG.debug('in verify_library')
                LOG.debug(error)
                raise error
    def quit(self, wait=False):
        '''
        close up nicely
        '''
        #pid = self.result_queue.get()

        print('')
        # import time
        # while True:
        #     time.sleep(1)
        # with self.pool_lock:
        if not wait:
            LOG.debug('terminating')
            self.terminate_event.set()

        # LOG.debug(self.thread_pool)
        for thread in self.thread_pool:
            thread.join()

        if self.found_refinement.is_set():
            LOG.debug("get solution")
            pids = []
            var_assign_pid = {}
            params_pid = {}
            while not self.result_queue.empty():
                pid, model_map_items, params_items = self.result_queue.get()
                pids.append(pid)
                var_assign_pid[pid] = {k: v for (k, v) in model_map_items}
                params_pid[pid] = {k: v for (k, v) in params_items}

            pid = min(pids)
        else:
            raise pyco.z3_interface.NotSynthesizableError()

        self.model = self.model_dict[pid]
        self.var_assign = var_assign_pid[pid]
        self.params_assign = params_pid[pid]
        self.relevant_contracts = self.relevant_contracts_pid[pid]

        #rebuild composition
        # with self.z3_lock:
        self.composition, self.connected_spec, self.contract_inst = \
                self.solver_interface.build_composition_from_model(self.model, self.output_port_names,
                                                                   self.relevant_contracts, self.var_assign)
        #wait for all the threads to stop

        #for thread in self.thread_pool:
        #    thread.join()

        return (self.model, self.composition, self.connected_spec,
                self.contract_inst, self.params_assign)
Example #5
    def verify_refinement_assertions(self):
        '''
        Runs verification of all the registered refinement assertions
        '''

        for assertion in self.refinement_assertions:
            try:
                self.verify_refinement(assertion)
            except NotARefinementError as err:
                LOG.debug('here')
                LOG.debug(assertion)
                raise err

        return
Example #6
    def terminate(self):
        '''
        close up nicely
        '''
        print('')
        LOG.debug('terminating')
        #pid = self.result_queue.get()

        with self.pool_lock:
            self.terminate_event.set()

        for thread in self.thread_pool:
            thread.join()

        if self.found_refinement.is_set():
            pids = []
            while not self.result_queue.empty():
                pids.append(self.result_queue.get())
            pid = min(pids)
        else:
            raise pyco.z3_interface.NotSynthesizableError()

        self.model = self.model_dict[pid]

        #rebuild composition
        spec = self.z3_interface.specification_list[0]
        with self.z3_lock:
            self.composition, self.connected_spec, self.contract_inst = \
                    self.z3_interface.build_composition_from_model(self.model, spec, complete_model=True)
        #wait for all the threads to stop

        #for thread in self.thread_pool:
        #    thread.join()

        return (self.model, self.composition, self.connected_spec,
                self.contract_inst)
Example #7
'''
This module builds the structure converting the contract library to datatypes
for the Z3 SMT solver

Author: Antonio Iannopollo
'''

from pycolite.formula import (Conjunction, Disjunction, Negation, Implication,
                              Equivalence, TrueFormula, FalseFormula)
from z3 import *

from pyco import LOG

LOG.debug('in z3_library_conversion')

DEFAULT_MAX_REDUNDANCY = 1

# def convert_formula_to_z3(formula, contract_vars, level):
#     '''
#     return a Z3 formula from a pycolite-dev formula structure
#     '''
#
#     if formula.is_literal:
#         return contract_vars['%d-%s' % (level, formula.unique_name)]
#     elif isinstance(formula, TrueFormula):
#         return True
#     elif isinstance(formula, FalseFormula):
#         return False
#     elif isinstance(formula, Negation):
#         return Not(convert_formula_to_z3(formula.right_formula,
#             contract_vars, level))
'''
This module contains the main interface to the SMT solver

Author: Antonio Iannopollo
'''

from pyco import LOG

LOG.debug('In solver_interface')

class SMTManager(object):
    '''
    Manage the interface between Ports and SMT
    '''



    def __init__(self, library):
        '''
        Defines init behavior, e.g. where to save list
        of parameters
        '''
        self.port_base_names = {}
        self.port_unique_names = {}
        self.contract_base_names = {}
        self.contract_unique_names = {}
        self.component_base_names = {}
        self.component_unique_names = {}
        self.library = library

        #if solver is None:
Example #9
'''
.. module:: contract_ex
   :synopsis: This module contains an extension of the basic contracts defined
              in the pyco library

.. moduleauthor:: Antonio Iannopollo <*****@*****.**>

'''

from pycolite.contract import (Contract as BaseContract, PortMapping,
                               CompositionMapping, RefinementMapping,
                               NotARefinementError)
from pycolite.parser.lexer import BaseSymbolSet

from pyco import LOG

LOG.debug('In contract.py')

# class Port(BasePort):
#    '''
#    This class extends the Port class from pycolite-lite-dev.
#    In addition to the base class, here every Port has a related SMT object.
#    '''
#
#    def __init__(self, base_name, contract=None, literal=None, context=None):
#        '''
#        Override initializer. Add SMT port model
#        '''
#        self.smt_model = None
#
#        super(Port, self).__init__(base_name, contract, literal, context)
#
Example #10
    def synthesize(self,
                   specs,
                   distinct_spec_port_set=None,
                   limit=None,
                   max_depth=None,
                   minimize_components=False,
                   minimize_cost=False,
                   fixed_components=None,
                   fixed_connections=None,
                   fixed_connections_spec=None,
                   balance_types=None,
                   decompose=True):
        '''
        Performs the synthesis process for the given specifications
        '''
        if sum([minimize_components, minimize_cost]) > 1:
            raise OptimizationError('Only one objective can be minimized')
        if minimize_cost:
            raise NotImplementedError('Custom cost not yet implemented')

        #self.time = {}
        #self.time['start'] = time()

        self.distinct_spec_port_set = {}
        if distinct_spec_port_set is not None:
            self.distinct_spec_port_set = distinct_spec_port_set

        self.fixed_components = fixed_components
        self.fixed_connections = fixed_connections
        self.fixed_connections_spec = fixed_connections_spec

        self.specification_list = specs

        optimize = minimize_components | minimize_cost

        # let's pick a root
        # we assume all the specs have same interface
        self.spec = self.specification_list[0]
        spec_outs = len(self.spec.output_ports_dict)

        if limit is None:
            self.max_components = spec_outs
        else:
            self.max_components = limit

        # if depth is None:
        #     depth = min(int(3* limit/spec_outs), limit)
        self.max_depth = max_depth

        self.balance_max_types = set()
        if balance_types is not None:
            self.balance_max_types = balance_types

        constraints = [True]

        self.initiliaze_solver(self.spec)

        # print lib structure
        for contract in self.library.all_contracts:
            LOG.debug('++++')
            LOG.debug('%s' % (contract.base_name))

        constraints.append(self.init_models())
        #constraints.append(self.use_max_n_components(self.max_components))
        #constraints.append(self.max_depth(depth))
        constraints.append(self.type_connection_rules())
        # constraints.append(self.type_connection_rules_and_no_loops())

        constraints.append(self.process_fixed_components())

        goal = Goal()
        goal.add(constraints)
        goal = goal.simplify()

        # #split here
        if optimize:
            solv = Optimize()
        else:
            solv = Solver()

        solv.add(goal.as_expr())

        self.base_solver = solv

        if decompose:
            print('Decomposing Specification...')
            clusters = decompose_spec(self.specification_list, self.library)
        else:
            clusters = [self.spec.output_ports_dict.keys()]

        print(clusters)

        if len(clusters) == 0:
            clusters.append([])

        print('Instantiate Solvers...')
        #create parallel solvers
        solvers = []

        result_queue = multiprocessing.Queue()

        semaphore = multiprocessing.Semaphore(MAX_THREADS)

        results = []

        for cluster in clusters:
            # for cluster in [['o1', 'o2', 'o3']]:
            # for cluster in [['c2','c3','c5','c6']]:

            #solve for port
            self.base_solver.push()

            # self.base_solver.add(self.solve_for_outputs(cluster))

            context = Context()
            assertions = self.base_solver.assertions()
            new_assertions = assertions.translate(context)

            #restore solver state
            self.base_solver.pop()

            solver_p = SinglePortSolver(
                self,
                new_assertions,
                context,
                cluster,
                semaphore,
                self.spec,
                minimize_components=minimize_components,
                distinct_spec_port_set=None,
                fixed_components=self.fixed_components,
                fixed_connections=self.fixed_connections,
                fixed_connections_spec=self.fixed_connections_spec,
                result_queue=result_queue,
            )

            solvers.append(solver_p)

            solver_p.start()
            # solver_p.join()

        while len(results) < len(clusters):
            results.append(result_queue.get())
            if results[-1] is None:
                break

        if any([x is None for x in results]):
            raise NotSynthesizableError

        #print
        LOG.debug("merging solutions...")

        new_graph = GraphCreator.merge_graphs(
            results, '_'.join(self.spec.output_ports_dict.keys()))
        gv = GraphizConverter.generate_graphviz_from_generic_graph(new_graph)
        gv.view()
        gv.save()

        #wait for clean exit
        for solv in solvers:
            solv.join()
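As a side note, the Goal -> simplify -> Solver flow used above can be exercised on its own; a minimal, self-contained sketch with toy constraints (not the library's actual rules):

from z3 import Bool, Goal, Implies, Solver, sat

# Toy constraints standing in for the connection/type rules built above.
a, b = Bool('a'), Bool('b')

goal = Goal()
goal.add([a, Implies(a, b)])   # Goal.add accepts a list of constraints
goal = goal.simplify()         # returns a new, simplified Goal

solv = Solver()
solv.add(goal.as_expr())       # hand the simplified goal to the solver

assert solv.check() == sat
print(solv.model())            # e.g. [a = True, b = True]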
Example #11
    def __init__(self, library, spec, library_max_redundancy=None, limit=None):
        '''
        Associates the library and creates the models.
        The spec is needed as well, since it determines the number of
        replicated components required.
        TODO:
        There is a problem with the size of the library, though...
        '''
        self.library = library
        self.models = []
        self.ports = []
        self.index = {}
        self.model_index = {}
        self.model_in_index = {}
        self.model_out_index = {}
        self.contract_index = {}
        self.out_models = []
        self.out_ports = []
        self.out_index = {}
        self.out_contract_index = {}
        self.in_models = []
        self.in_ports = []
        self.in_index = {}
        self.in_contract_index = {}
        self.model_levels = {}
        self.model_contracts = {}
        self.contracts = set()
        self.contract_used_by_models = {}
        self.contract_use_flags = []
        self.reverse_flag = {}
        self.flag_map = {}

        self.unrolled_info = {}

        self.spec = spec

        if library_max_redundancy is None:
            library_max_redundancy = DEFAULT_MAX_REDUNDANCY

        if limit is None:
            limit = len(spec.output_ports_dict)
        LOG.debug(limit)
        self.max_components = min([library_max_redundancy, limit])

        for level in range(0, self.max_components):
            self.contract_index[level] = {}
            self.in_contract_index[level] = {}
            self.out_contract_index[level] = {}
            self.index[level] = {}
            self.in_index[level] = {}
            self.out_index[level] = {}

            for component in self.library.components:
                contract = component.contract
                self.contracts.add(contract)
                self.contract_index[level][contract] = []
                self.in_contract_index[level][contract] = []
                self.out_contract_index[level][contract] = []

                c_flag = Int('%s-%d' % (contract.base_name, level))
                self.contract_use_flags.append(c_flag)
                self.reverse_flag[c_flag.get_id()] = []
                self.flag_map['%s-%d' % (contract.base_name, level)] = c_flag

                #START UNROLL COMMENT
                #(bool_vars, unr_a, unr_g) = self._contract_unrolled_formula(contract, level)

                #if contract not in self.unrolled_info:
                #    self.unrolled_info[contract] = {}
                #if level not in self.unrolled_info[contract]:
                #    self.unrolled_info[contract][level] = {}

                #self.unrolled_info[contract][level]['cflag'] = c_flag
                #self.unrolled_info[contract][level]['vars'] = bool_vars
                #self.unrolled_info[contract][level]['unroll_assume'] = unr_a
                #self.unrolled_info[contract][level]['unroll_guarantee'] = unr_g
                #END UNROLL COMMENT

                for port in contract.input_ports_dict.values():
                    model = z3.Int('%d-%s' % (level, port.unique_name))
                    self.models.append(model)
                    self.in_models.append(model)
                    self.ports.append(port)
                    self.in_ports.append(port)
                    self.model_levels[model.get_id()] = level
                    self.model_contracts[model.get_id()] = contract

                    #contract_indexing
                    self.contract_used_by_models[len(self.models) - 1] = c_flag
                    #self.reverse_flag[c_flag.get_id()].append(len(self.models) -1)

                    #reverse lookup
                    self.model_index[model.get_id()] = len(self.models) - 1
                    self.model_in_index[model.get_id()] = len(self.models) - 1
                    self.index[level][port] = len(self.models) - 1
                    self.in_index[level][port] = len(self.in_models) - 1

                    self.contract_index[level][contract].append(
                        len(self.models) - 1)
                    self.in_contract_index[level][contract].append(
                        len(self.in_models) - 1)

                for port in contract.output_ports_dict.values():
                    model = z3.Int('%d-%s' % (level, port.unique_name))
                    self.models.append(model)
                    self.out_models.append(model)
                    self.ports.append(port)
                    self.out_ports.append(port)
                    self.model_levels[model.get_id()] = level
                    self.model_contracts[model.get_id()] = contract

                    #contract_indexing
                    self.contract_used_by_models[len(self.models) - 1] = c_flag
                    self.reverse_flag[c_flag.get_id()].append(
                        len(self.models) - 1)

                    #reverse lookup
                    self.model_index[model.get_id()] = len(self.models) - 1
                    self.model_out_index[model.get_id()] = len(self.models) - 1
                    self.index[level][port] = len(self.models) - 1
                    self.out_index[level][port] = len(self.out_models) - 1

                    self.contract_index[level][contract].append(
                        len(self.models) - 1)
                    self.out_contract_index[level][contract].append(
                        len(self.out_models) - 1)

        LOG.debug({i: self.models[i] for i in range(0, self.max_index)})
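A small self-contained illustration of the naming and reverse-lookup scheme used in the loops above (the port name is hypothetical):

import z3

# Each library port at redundancy level 'level' is modeled as an integer
# named '<level>-<unique_name>'; its AST id is what the reverse-lookup
# dictionaries above (model_levels, model_index, ...) are keyed on.
level = 0
model = z3.Int('%d-%s' % (level, 'out_a_unique'))
print(model)           # 0-out_a_unique
print(model.get_id())  # integer id used as the reverse-lookup key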
Example #12
    def save(self):
        # self.graph.save()
        self.graph.render()
        LOG.debug(self.graph.source)
Example #13
    def view(self):
        self.graph.view()
        LOG.debug(self.graph.source)
def decompose_spec(spec_list, library=None):
    '''
    Decompose the specification into clusters of output ports
    :param spec_list: list of specification contracts (assumed to share the same interface)
    :param library: optional contract library, used to check port connectivity
    :return: list of clusters (sets of output port names)
    '''

    spec_root = spec_list[0]

    spec_outs_dict = spec_root.output_ports_dict

    clusters = []

    unclustered = set(spec_outs_dict.keys())
    done = set()

    #preprocess:
    # find ports which behave exactly the same
    init_clusters = []
    done = set()
    for pivot_name in unclustered:

        if pivot_name not in done:
            cluster = {pivot_name}
            done.add(pivot_name)

            # for other_name in (unclustered - set([pivot_name])):
            #
            #     if library is None or library.check_connectivity(spec_root.ports_dict[pivot_name],
            #                                   spec_root.ports_dict[other_name]):
            #
            #         for spec in spec_list:
            #             w_spec = spec.copy()
            #
            #             formula_r = Globally(Equivalence(w_spec.ports_dict[pivot_name].literal,
            #                                                               w_spec.ports_dict[other_name].literal,
            #                                                               merge_literals=False))
            #
            #             guarantees = w_spec.guarantee_formula
            #
            #             formula = Implication(guarantees, formula_r, merge_literals=False)
            #
            #             l_passed = verify_tautology(formula, return_trace=False)
            #
            #             if not l_passed:
            #                 break
            #
            #
            #         if l_passed:
            #             cluster.add(other_name)
            #             done.add(other_name)

            init_clusters.append(cluster)

    #done

    # first FAST pass, try to take out as many single ports as possible
    res_queue = Queue()
    pool = []
    semaphore = Semaphore(MAX_PROCESSES)
    #
    # for pivot_name in spec_outs_dict:
    #     semaphore.acquire()
    #     proc = OutputProcessor(pivot_name, spec_list, res_queue, semaphore)
    #     proc.start()
    #     pool.append(proc)
    #
    # for p in pool:
    #     p.join()
    #
    # #everyone is done now
    # while not res_queue.empty():
    #     res = res_queue.get_nowait()
    #     if res[1] is True:
    #         clusters.append(set([res[0]]))
    #     else:
    #         unclustered.add(res[0])

    # for pivot_name in spec_outs_dict:
    #
    #     print('\tprocessing port %s' % pivot_name)
    #
    #     passed = True
    #     for spec in spec_list:
    #
    #         # build composition
    #         w_spec = spec.copy()
    #         w_spec1 = spec.copy()
    #         w_spec2 = spec.copy()
    #
    #         mapping = CompositionMapping([w_spec1, w_spec2])
    #
    #         # connect pivot output port
    #         w_spec.connect_to_port(w_spec.output_ports_dict[pivot_name],
    #                                w_spec1.output_ports_dict[pivot_name])
    #
    #         # connect inputs
    #         for name, in_port in w_spec.input_ports_dict.items():
    #             w_spec.connect_to_port(in_port, w_spec1.input_ports_dict[name])
    #             w_spec.connect_to_port(in_port, w_spec2.input_ports_dict[name])
    #
    #             # # add explicit naming
    #             # mapping.add(w_spec1.input_ports_dict[name],
    #             #             '1_' + name)
    #             # mapping.add(w_spec2.input_ports_dict[name],
    #             #             '2_' + name)
    #
    #         # connect remaining outputs
    #         for name, out_port in w_spec.output_ports_dict.items():
    #             # add explicit naming
    #             mapping.add(w_spec1.output_ports_dict[name],
    #                         '1_' + name)
    #             mapping.add(w_spec2.output_ports_dict[name],
    #                         '2_' + name)
    #
    #             if name != pivot_name:
    #                 w_spec.connect_to_port(out_port, w_spec2.output_ports_dict[name])
    #
    #         # compose
    #         composition = w_spec1.compose([w_spec2], composition_mapping=mapping)
    #
    #         passed &= composition.is_refinement(w_spec)
    #
    #         if not passed:
    #             break
    #
    #     if passed:
    #         clusters.append(set([pivot_name]))
    #     else:
    #         unclustered.add(pivot_name)

    # now process remaining unclustered elements, a bit slower.
    # we need to give a special input to the model checker,
    # to let it suggest what are related outputs
    for init in init_clusters:

        semaphore.acquire()
        proc = MultipleOutputProcessor(init, spec_list, res_queue, semaphore)
        proc.start()
        pool.append(proc)

    for p in pool:
        p.join()

        # everyone is done now
    while not res_queue.empty():
        (name, cluster) = res_queue.get_nowait()
        # LOG.debug(cluster)
        clusters.append(set(cluster))

        # # LOG.debug(pivot_name)
        # cluster = set([pivot_name])
        # done = set()
        # done.add(pivot_name)
        #
        # while True:
        #     passed = True
        #
        #     for spec in spec_list:
        #
        #         unknowns = set(spec_outs_dict.keys()) - done
        #         # LOG.debug(unknowns)
        #         # LOG.debug(cluster)
        #
        #         if len(unknowns) == 0:
        #             # we are done
        #             break
        #         # elif len(unknowns) == 1:
        #         #     # last one must go with the previous one
        #         #     elem = unknowns.pop()
        #         #     cluster.add(elem)
        #         #     done.add(elem)
        #         else:
        #
        #             # build composition
        #             w_spec = spec.copy()
        #             w_spec1 = spec.copy()
        #             w_spec2 = spec.copy()
        #
        #             mapping = CompositionMapping([w_spec1, w_spec2])
        #
        #             # connect pivot output port
        #             for name in cluster:
        #                 w_spec.connect_to_port(w_spec.output_ports_dict[name],
        #                                        w_spec1.output_ports_dict[name])
        #
        #             # connect inputs
        #             for name, in_port in w_spec.input_ports_dict.items():
        #                 w_spec.connect_to_port(in_port, w_spec1.input_ports_dict[name])
        #                 w_spec.connect_to_port(in_port, w_spec2.input_ports_dict[name])
        #
        #             # connect remaining outputs
        #             for name, out_port in w_spec.output_ports_dict.items():
        #                 # add explicit naming
        #                 mapping.add(w_spec1.output_ports_dict[name],
        #                             '1_' + name)
        #                 mapping.add(w_spec2.output_ports_dict[name],
        #                             '2_' + name)
        #
        #                 if name not in cluster:
        #                     w_spec.connect_to_port(out_port, w_spec2.output_ports_dict[name])
        #
        #             # compose
        #             composition = w_spec1.compose([w_spec2], composition_mapping=mapping)
        #
        #             # LOG.debug(composition)
        #             # LOG.debug(w_spec1)
        #             # LOG.debug(w_spec2)
        #
        #             # add conditionals
        #             # (G(a1=a2 & b1!=b2 &...) | G(b1=b2 & a1!=a2 & ...)...) -> Spec ref. formula
        #
        #
        #             left_formula = []
        #             for pivot in unknowns:
        #                 formula_l = []
        #
        #                 formula_l.append(Negation(Globally(Equivalence(w_spec1.ports_dict[pivot].literal,
        #                                                             w_spec2.ports_dict[pivot].literal,
        #                                                             merge_literals=False)
        #                                                    )
        #                                           )
        #                                  )
        #
        #                 for name in unknowns - {pivot}:
        #                     formula_l.append(Globally(Equivalence(w_spec1.ports_dict[name].literal,
        #                                                           w_spec2.ports_dict[name].literal,
        #                                                           merge_literals=False)
        #                                                   )
        #                                      )
        #
        #                 formula = reduce(lambda x, y: Conjunction(x, y, merge_literals=False), formula_l)
        #
        #                 left_formula.append(formula)
        #
        #             formula = reduce(lambda x, y: Disjunction(x, y, merge_literals=False), left_formula)
        #
        #             # get refinement formula
        #             verifier = NuxmvRefinementStrategy(composition)
        #
        #             ref_formula = verifier.get_refinement_formula(w_spec)
        #
        #             formula = Implication(formula, ref_formula, merge_literals=False)
        #
        #             l_passed, trace = verify_tautology(formula, return_trace=True)
        #
        #             # LOG.debug(l_passed)
        #             # LOG.debug(formula.generate())
        #
        #             if not l_passed:
        #
        #                 #build monitored vars dict
        #                 monitored = {}
        #
        #                 for name in unknowns:
        #                     monitored[composition.ports_dict['1_' + name].unique_name] = name
        #                     monitored[w_spec.ports_dict[name].unique_name] = name
        #
        #                 # LOG.debug(composition)
        #                 # LOG.debug(cluster)
        #                 # LOG.debug(unknowns)
        #                 # LOG.debug(done)
        #                 # LOG.debug(monitored)
        #                 # LOG.debug(trace)
        #                 diff = parse_counterexample(trace, monitored)
        #
        #                 assert len(diff) > 0
        #                 LOG.debug(diff)
        #                 for elem in diff:
        #                     cluster.add(elem)
        #                     done.add(elem)
        #
        #             passed &= l_passed
        #
        #     #go out of the while loop
        #     if passed:
        #         break
        #
        # assert len(cluster) > 1
        #
        # # LOG.debug(cluster)
        # clusters.append(cluster)

    assert set([x for cluster in clusters
                for x in cluster]).issuperset(unclustered)

    #postprocessing, merge clusters with elements in common

    mods = True
    while mods:
        mods = False
        for i in range(len(clusters)):
            cl = clusters[i]
            for j in range(len(clusters)):
                if i != j:
                    ocl = clusters[j]
                    if not cl.isdisjoint(ocl):
                        mods = True
                        clusters[i] = cl | ocl
                        cl = clusters[i]
                        clusters[j] = set([])
    #filter
    clusters = [x for x in clusters if len(x) > 0]

    LOG.debug(clusters)

    # #test useless inputs:
    # for c in clusters:
    #     useless = find_useless_inputs(spec_list[0], c)
    #
    #     print("relevant inputs for ")
    #     print(c)
    #     print(set(spec_list[0].input_ports_dict.keys()) - useless)

    # assert False
    # LOG.debug(unclustered)
    return clusters
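The post-processing merge above can be seen in isolation; a minimal, self-contained sketch with made-up port names:

# Clusters sharing any output port are unioned until a fixed point is
# reached, then empty leftovers are filtered out.
clusters = [{'o1', 'o2'}, {'o2', 'o3'}, {'o4'}]
mods = True
while mods:
    mods = False
    for i in range(len(clusters)):
        for j in range(len(clusters)):
            if i != j and not clusters[i].isdisjoint(clusters[j]):
                clusters[i] = clusters[i] | clusters[j]
                clusters[j] = set()
                mods = True
clusters = [c for c in clusters if len(c) > 0]
print(clusters)  # [{'o1', 'o2', 'o3'}, {'o4'}] (set element order may vary)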
Example #15
'''
This module contains the implementation of classes and functions related
to the concept of library of contracts

Author: Antonio Iannopollo
'''
import itertools

from pycolite.attribute import Attribute

from pyco.contract import (RefinementMapping, PortMappingError, PortMapping,
                           CompositionMapping, NotARefinementError, BaseType,
                           NotATypeError)
from pyco import LOG
from pyco.solver_interface import SMTManager

LOG.debug('in library')


class ContractLibrary(object):
    '''
    Implementation of the library of contracts
    '''
    def __init__(self, base_name, context=None):
        '''
        initializer
        '''
        self.components = []

        #type structures
        self.typeset = set()
        self.typeset.add(BaseType)
    def synthesize(self):
        '''
        Picks candidate models and spawns verification threads
        '''
        #testing without size constraints
        #size = 1
        # size = initial_size
        # tim = time.time()
        while True:
            try:
                # with self.z3_lock:
                model = self.solver_interface.propose_candidate()
                LOG.debug(model)
                # LOG.debug(time.time()-tim)
                # tim = time.time()
            except pyco.z3_interface.NotSynthesizableError as err:
                return self.quit(wait=True)
            else:
                #acquire semaphore
                self.semaphore.acquire()

                #check if event is successful
                if self.found_refinement.is_set():
                    #we are done. kill all running threads and exit
                    self.semaphore.release()
                    return self.quit()

                #else remove not successful models
                while not self.fail_queue.empty():
                    pid = self.fail_queue.get_nowait()
                    self.model_dict.pop(pid)

                    self.thread_pool = self.thread_pool - set(
                        [t for t in self.thread_pool if t.ident == pid])

                # (relevant, _) = self.solver_interface._infer_relevant_contracts(model, self.output_port_names)

                # NUXMV MOD
                #return all contracts here
                relevant = {
                    x
                    for x, m in
                    self.solver_interface.lib_model.use_flags.items()
                }

                print(len(relevant))
                print(
                    len(self.solver_interface.z3_interface.library.
                        all_contracts))

                reject_f = self.solver_interface.generate_reject_formula(
                    relevant)
                #new refinement checker
                thread = RefinementChecker(model, self.output_port_names,
                                           relevant, self,
                                           self.found_refinement,
                                           self.found_refinement)
                #go
                thread.start()
                # with self.pool_lock:
                self.model_dict[thread.ident] = model
                self.relevant_contracts_pid[thread.ident] = relevant
                self.thread_pool.add(thread)

                #now reject the model, to get a new candidate
                LOG.debug(reject_f)
                self.solver_interface.add_assertions(reject_f)

                #NUXMV MOD
                #quit
                return self.quit(wait=True)
Example #17
import types
from z3 import *

import multiprocessing
from pyco.contract import CompositionMapping
from pyco import LOG
from pyco.z3_thread_manager import ModelVerificationManager, MAX_THREADS
from pyco.smt_factory import SMTModelFactory
from pyco.z3_library_conversion import Z3Library
from pyco.z3_single_port_solver import SinglePortSolver, NotSynthesizableError, OptimizationError
from pyco.spec_decomposition import decompose_spec
from pyco.graphviz_converter import GraphCreator, GraphizConverter

# LOG = logging.getLogger()
LOG.debug('in z3_interface')


class Z3Interface(object):
    '''
    Interface class for the Z3 SMT solver.
    Extends the class SMTModelFactory
    '''
    def __init__(self, library):
        '''
        init
        '''

        #set_param(proof=False)
        self.library = library
        # selfeset = library.typeset