Example No. 1
 def __init__(self, methodclasses):
     """Create a new solver, using the given subclasses of ClusterMethod."""
     # init superclasses
     Notifier.__init__(self)
     # store arguments
     self._methodclasses = methodclasses
     self._pattern_methods = filter(lambda m: hasattr(m,"patterngraph"),self._methodclasses)
     self._handcoded_methods = filter(lambda m: hasattr(m,"handcoded_match"),self._methodclasses)
     self._incremental_methods = filter(lambda m: hasattr(m,"incremental_matcher"),self._methodclasses)
     # init instance vars
     self._graph = Graph()
     #self._graph.add_vertex("_root")
     # self._graph.add_vertex("_toplevel")
     self._graph.add_vertex("_variables")
     self._graph.add_vertex("_clusters")
     self._graph.add_vertex("_methods")
     self._new = []
     self._mg = MethodGraph()
     # add prototype_selection boolean var to method graph
     self._prototype_selection_var = "_prototype_selection_enabled"
     self._mg.add_variable(self._prototype_selection_var)
     self._mg.set(self._prototype_selection_var, True)
     # store map of selection_constraints to SelectionMethod (or None)
     self._selection_method = {} 
     # store root cluster (will be assigned when first cluster added)
     self._rootcluster = None
     # an incrementally updated toplevel set
     self._toplevel = MutableSet()
     # incrementally updated set of applicable methods
     self._incremental_matchers = map(lambda method: method.incremental_matcher(self), self._incremental_methods)
     #print "incremental matchers:",self._incremental_matchers
     self._applicable_methods = Union(*self._incremental_matchers)
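
The constructor partitions the given ClusterMethod subclasses by capability: classes exposing patterngraph are used for generic pattern matching, handcoded_match for handcoded matching, and incremental_matcher for incremental matching. Below is a minimal standalone sketch of that hasattr-based partitioning, using stand-in classes that are not part of the library; note that under Python 3 the filter calls above would have to be wrapped in list(), since their results are iterated more than once later on.

# Stand-in classes, not part of geosolver; they only carry the attributes
# that the solver probes for with hasattr.
class PatternMethod(object):
    patterngraph = None          # the real attribute would hold a pattern graph

class HandcodedMethod(object):
    @staticmethod
    def handcoded_match(solver, newcluster, connected):
        return []

methodclasses = [PatternMethod, HandcodedMethod]
pattern_methods = [m for m in methodclasses if hasattr(m, "patterngraph")]
handcoded_methods = [m for m in methodclasses if hasattr(m, "handcoded_match")]
assert pattern_methods == [PatternMethod]
assert handcoded_methods == [HandcodedMethod]
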
Example No. 2
 def __init__(self, dimension):
     """Create a new empty solver"""
     Notifier.__init__(self)
     self.dimension = dimension
     self._graph = Graph()
     self._graph.add_vertex("_root")
     self._graph.add_vertex("_toplevel")
     self._graph.add_vertex("_variables")
     self._graph.add_vertex("_distances")
     self._graph.add_vertex("_angles")
     self._graph.add_vertex("_rigids")
     self._graph.add_vertex("_hedgehogs")
     self._graph.add_vertex("_balloons")
     self._graph.add_vertex("_methods")
     # queue of new objects to process
     self._new = []
     # methodgraph
     self._mg = MethodGraph()
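
The sentinel vertices added here ("_rigids", "_hedgehogs", and so on) act as group anchors: _add_to_group later draws a "contains" edge from the group vertex to each member, so rigids() is simply outgoing_vertices("_rigids"). A rough dictionary-based sketch of that grouping idea follows; it mimics, but is not, the library's Graph class.

# A toy stand-in for the library's Graph, kept just detailed enough to show
# how sentinel vertices turn edges into group membership.
class TinyGraph(object):
    def __init__(self):
        self._edges = {}                      # vertex -> {successor: label}

    def add_vertex(self, v):
        self._edges.setdefault(v, {})

    def add_edge(self, v1, v2, label=None):
        self.add_vertex(v1)
        self.add_vertex(v2)
        self._edges[v1][v2] = label

    def outgoing_vertices(self, v):
        return list(self._edges.get(v, {}))

g = TinyGraph()
g.add_vertex("_rigids")
g.add_edge("_rigids", "rigid1", "contains")   # what _add_to_group does
g.add_edge("_rigids", "rigid2", "contains")
assert sorted(g.outgoing_vertices("_rigids")) == ["rigid1", "rigid2"]
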
Example No. 3
 def __init__(self, methodclasses):
     """Create a new solver, using the given subclasses of ClusterMethod."""
     # init superclasses
     Notifier.__init__(self)
     # store arguments
     self._methodclasses = methodclasses
     self._pattern_methods = filter(lambda m: hasattr(m, "patterngraph"),
                                    self._methodclasses)
     self._handcoded_methods = filter(
         lambda m: hasattr(m, "handcoded_match"), self._methodclasses)
     self._incremental_methods = filter(
         lambda m: hasattr(m, "incremental_matcher"), self._methodclasses)
     # init instance vars
     self._graph = Graph()
     #self._graph.add_vertex("_root")
     # self._graph.add_vertex("_toplevel")
     self._graph.add_vertex("_variables")
     self._graph.add_vertex("_clusters")
     self._graph.add_vertex("_methods")
     self._new = []
     self._mg = MethodGraph()
     # add prototype_selection boolean var to method graph
     self._prototype_selection_var = "_prototype_selection_enabled"
     self._mg.add_variable(self._prototype_selection_var)
     self._mg.set(self._prototype_selection_var, True)
     # store map of selection_constraints to SelectionMethod (or None)
     self._selection_method = {}
     # store root cluster (will be assigned when first cluster added)
     self._rootcluster = None
     # an incrementally updated toplevel set
     self._toplevel = MutableSet()
     # incrementally updated set of applicable methods
     self._incremental_matchers = map(
         lambda method: method.incremental_matcher(self),
         self._incremental_methods)
     #print "incremental matchers:",self._incremental_matchers
     self._applicable_methods = Union(*self._incremental_matchers)
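
Example No. 3 is the same constructor as Example No. 1, only re-wrapped. The last two lines deserve a note: every incremental method contributes an incremental matcher, and their Union becomes _applicable_methods, the incrementally maintained set of candidate methods that _process_new drains in Example No. 5. The real Union and MutableSet classes update on notifications; the eager sketch below only illustrates the set semantics that is assumed here, with hypothetical method names.

# Eager stand-in for Union(*incremental_matchers); the real class updates
# incrementally instead of recomputing the whole union.
def applicable_methods(incremental_matchers):
    methods = set()
    for matcher in incremental_matchers:
        methods.update(matcher)               # each matcher yields candidates
    return methods

matcher_a = set(["merge_ab", "merge_bc"])     # hypothetical method names
matcher_b = set(["merge_bc"])
assert applicable_methods([matcher_a, matcher_b]) == set(["merge_ab", "merge_bc"])
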
Example No. 4
def test():
    graph = MethodGraph()
    graph.add_variable('a', 1)
    graph.add_variable('b', 2)
    mv_x = MultiVariable('x')
    graph.add_variable(mv_x)
    graph.add_method(SumProdMethod('a', 'b', mv_x))

    graph.add_variable('p', 3)
    graph.add_variable('q', 4)
    mv_y = MultiVariable('y')
    graph.add_variable(mv_y)
    graph.add_method(SumProdMethod('p', 'q', mv_y))

    mv_z = MultiVariable('z')
    graph.add_variable(mv_z)
    graph.add_method(SumProdMethod(mv_x, mv_y, mv_z))

    print graph.get(mv_z)

    graph.set('a', 100)
    print graph.get(mv_z)
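
test() wires up a small dataflow: 'a' and 'b' feed the MultiVariable x, 'p' and 'q' feed y, and x and y feed z, so re-setting 'a' re-propagates all the way to z. The self-contained mock below reproduces that structure without the library; it assumes SumProdMethod maps its two inputs to the set {sum, product}, which is a guess at its semantics rather than something shown in this listing.

# Self-contained mock of the dataflow exercised by test().  MockMethodGraph is
# far simpler than the real MethodGraph: it re-evaluates every method on each
# change instead of propagating only what changed.
class MockMethodGraph(object):
    def __init__(self):
        self._values = {}
        self._methods = []                    # list of (input1, input2, output)

    def add_variable(self, name, value=None):
        self._values[name] = value

    def add_method(self, triple):
        self._methods.append(triple)
        self._propagate()

    def set(self, name, value):
        self._values[name] = value
        self._propagate()

    def get(self, name):
        return self._values[name]

    def _propagate(self):
        for in1, in2, out in self._methods:
            a, b = self._values.get(in1), self._values.get(in2)
            if a is None or b is None:
                continue
            aset = a if isinstance(a, set) else set([a])
            bset = b if isinstance(b, set) else set([b])
            sums = set(x + y for x in aset for y in bset)
            prods = set(x * y for x in aset for y in bset)
            self._values[out] = sums | prods  # assumed SumProdMethod semantics

graph = MockMethodGraph()
graph.add_variable('a', 1)
graph.add_variable('b', 2)
graph.add_variable('x')                       # stands in for MultiVariable('x')
graph.add_method(('a', 'b', 'x'))
graph.add_variable('p', 3)
graph.add_variable('q', 4)
graph.add_variable('y')
graph.add_method(('p', 'q', 'y'))
graph.add_variable('z')
graph.add_method(('x', 'y', 'z'))
print(graph.get('z'))
graph.set('a', 100)
print(graph.get('z'))
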
Example No. 5
class ClusterSolver(Notifier):
    """ 
    Finds a generic solution for problems formulated by Clusters.

    Clusters are added and removed using the add and remove methods. 
    After adding each Cluster, the solver tries to merge it with
    others, resulting in new Clusters.

    The generic solution is a directed acyclic graph of Clusters and Methods. 
    Particular problems and solutions are represented by a Configuration 
    for each cluster. 

    For each Cluster a set of Configurations can be set using the
    set method. Configurations are propagated via Methods and can
    be retrieved with the get method. 
    """

    # ------- PUBLIC METHODS --------

    def __init__(self, methodclasses):
        """Create a new solver, using the given subclasses of ClusterMethod."""
        # init superclasses
        Notifier.__init__(self)
        # store arguments
        self._methodclasses = methodclasses
        self._pattern_methods = filter(lambda m: hasattr(m,"patterngraph"),self._methodclasses)
        self._handcoded_methods = filter(lambda m: hasattr(m,"handcoded_match"),self._methodclasses)
        self._incremental_methods = filter(lambda m: hasattr(m,"incremental_matcher"),self._methodclasses)
        # init instance vars
        self._graph = Graph()
        #self._graph.add_vertex("_root")
        # self._graph.add_vertex("_toplevel")
        self._graph.add_vertex("_variables")
        self._graph.add_vertex("_clusters")
        self._graph.add_vertex("_methods")
        self._new = []
        self._mg = MethodGraph()
        # add prototype_selection boolean var to method graph
        self._prototype_selection_var = "_prototype_selection_enabled"
        self._mg.add_variable(self._prototype_selection_var)
        self._mg.set(self._prototype_selection_var, True)
        # store map of selection_constraints to SelectionMethod (or None)
        self._selection_method = {} 
        # store root cluster (will be assigned when first cluster added)
        self._rootcluster = None
        # an incrementally updated toplevel set
        self._toplevel = MutableSet()
        # incrementally updated set of applicable methods
        self._incremental_matchers = map(lambda method: method.incremental_matcher(self), self._incremental_methods)
        #print "incremental matchers:",self._incremental_matchers
        self._applicable_methods = Union(*self._incremental_matchers)

    # ------- methods for setting up constraint problems ------------
    
    def add(self, cluster):
        """Add a cluster"""
        diag_print("add_cluster "+str(cluster), "clsolver")
        self._add_cluster(cluster)
        self._process_new()

    def remove(self, cluster):
        """Remove a cluster. 
           All dependent objects are also removed.
        """
        self._remove(cluster)
        self._process_new()

    def set(self, cluster, configurations):
        """Associate a list of configurations with a cluster"""
        self._mg.set(cluster, configurations)
        
    def get(self, cluster):
        """Return a set of configurations associated with a cluster"""
        return self._mg.get(cluster)
 
    def set_root(self, cluster):
        """Set root cluster, used for positionig and orienting the solutions"""
        diag_print("set root "+str(self._rootcluster), "clsolver")
        if self._rootcluster != None:
            oldrootvar = rootname(self._rootcluster)
            self._mg.set(oldrootvar, False)
        newrootvar = rootname(cluster)
        self._mg.set(newrootvar, True)
        self._rootcluster = cluster

    def get_root(self):
        """returns current root cluster or None"""
        return self._rootcluster
   
    def set_prototype_selection(self, enabled):
        """Enable or disable prototype-based solution selection"""
        self._mg.set(self._prototype_selection_var, enabled)

    def add_selection_constraint(self, con):
        """Add a SelectionConstraint to filter solutions"""
        if con not in self._selection_method:
            selector = self._find_selection_method(con)
            if selector != None:
                selector.add_constraint(con)
                self._selection_method[con] = selector
                self._mg.execute(selector)                
            #self._selection_method[con] = None     # this line wrong?
            self._selection_method[con] = selector     # this line better?

    def rem_selection_constraint(self, con):
        """Remove a SelectionConstraint"""
        if con in self._selection_method:
            selector = self._selection_method[con]
            if selector != None:
                selector.rem_constraint(con)
                self._mg.execute(selector)                
            del self._selection_method[con] 

    # ------- methods for inspecting the state of the solver ------------
    
    def variables(self):
        """get list of variables"""
        return self._graph.outgoing_vertices("_variables")

    def clusters(self):
        """get list of clusters"""
        return self._graph.outgoing_vertices("_clusters")

    def methods(self):
        """get list of methods"""
        return self._graph.outgoing_vertices("_methods")

    def top_level(self):
        """return IncrementalSet of top-level clusters"""
        return self._toplevel
        # return self._graph.outgoing_vertices("_toplevel")

    def is_top_level(self, object):
        """Returns True iff given cluster is a top-level cluster""" 
        #return self._graph.has_edge("_toplevel",object)
        return object in self._toplevel

    def find_dependend(self, object):
        """Return a list of objects that depend on given object directly."""
        l = self._graph.outgoing_vertices(object)
        return filter(lambda x: self._graph.get(object,x) == "dependency", l)
        
    def find_depends(self, object):
        """Return a list of objects that the given object depends on directly"""
        l = self._graph.ingoing_vertices(object)
        return filter(lambda x: self._graph.get(x,object) == "dependency", l)

    def contains(self, obj):
        return self._graph.has_vertex(obj)
    
    # ------------ INTERNALLY USED METHODS --------

    # --- dependencies and groups

    def _add_dependency(self, on, dependend):
        """Add a dependence for second object on first object"""
        self._graph.add_edge(on, dependend, "dependency")

    def _add_to_group(self, group, object):
        """Add object to group"""
        self._graph.add_edge(group, object, "contains")

    def _add_needed_by(self, needed, by):
        """Add relation 'needed' object is needed 'by'"""
        self._graph.add_edge(needed, by, "needed_by")

    def _objects_that_need(self, needed):
        """Return objects needed by given object"""
        l = self._graph.outgoing_vertices(needed)
        return filter(lambda x: self._graph.get(needed,x) == "needed_by", l)

    def _objects_needed_by(self, needer):
        """Return objects needed by given object"""
        l = self._graph.ingoing_vertices(needer)
        return filter(lambda x: self._graph.get(x,needer) == "needed_by", l)
   
    def _add_top_level(self, cluster):
        # self._graph.add_edge("_toplevel",cluster)
        self._new.append(cluster)
        self._toplevel.add(cluster)

    def _rem_top_level(self, object):
        # self._graph.rem_edge("_toplevel",object)
        if object in self._new:
            self._new.remove(object)
        self._toplevel.remove(object)

    def _find_descendend(self,v):
        """find all descendend objects of v (i.e.. directly or indirectly dependend)"""
        front = [v]
        result = {}
        while len(front) > 0:
            x = front.pop()
            if x not in result:
                result[x] = 1
                front += self.find_dependend(x)
        del result[v]
        return list(result)


    # -- add object types
   
    def _add_variable(self, var):
        if not self._graph.has_vertex(var):
            diag_print("_add_variable "+str(var), "clsolver")
            self._add_to_group("_variables", var)

    def _add_cluster(self, newcluster):
        diag_print("_add_cluster "+str(newcluster),"clsolver")
        # check if not already exists
        if self._graph.has_vertex(newcluster): 
            raise StandardError, "cluster %s already in clsolver"%(str(newcluster))
        # update graph
        self._add_to_group("_clusters", newcluster)
        for var in newcluster.vars:
            self._add_variable(var)
            self._add_dependency(var, newcluster)
        # add to top level
        self._add_top_level(newcluster)
        # add to methodgraph
        self._mg.add_variable(newcluster)
        # add root-variable if needed with default value False
        root = rootname(newcluster)
        if not self._mg.contains(root):
            self._mg.add_variable(root, False)
            self._mg.set(root, False)
            # add root-variable to dependency graph
            self._add_dependency(newcluster, root)
        # if there is no root cluster, this one will be it
        if self.get_root() == None:
            self.set_root(newcluster)
        # notify listeners
        self.send_notify(("add", newcluster))

    def _add_method(self, method):
        diag_print("new "+str(method),"clsolver")
        self._add_to_group("_methods", method)
        for obj in method.inputs():
            self._add_dependency(obj, method)
        for obj in method.outputs():
            self._add_dependency(method, obj)
            self._add_dependency(obj, method)
        self._mg.add_method(method)
        self.send_notify(("add", method))
    
    # ----- solution selection

    def _add_prototype_selector(self, merge):
        incluster = merge.outputs()[0]
        constraints = merge.prototype_constraints()
        vars = set()
        for con in constraints:
            vars.update(con.variables())
        selclusters = []
        for var in vars:
            clusters = self._graph.outgoing_vertices(var)
            clusters = filter(lambda c: isinstance(c, Rigid), clusters)
            clusters = filter(lambda c: len(c.vars) == 1, clusters)
            if len(clusters) < 1:
                raise StandardError, "no prototype cluster for variable "+str(var)
            elif len(clusters) > 1:
                raise StandardError, "more than one candidate prototype cluster for variable "+str(var)
            selclusters.append(clusters[0])
        outcluster = incluster.copy()
        selector = PrototypeMethod(incluster, selclusters, outcluster, constraints, self._prototype_selection_var)
        self._add_cluster(outcluster)
        self._add_method(selector)
        self._rem_top_level(incluster)
        return outcluster

    def _add_solution_selector(self, incluster):
        outcluster = incluster.copy()
        selector = SelectionMethod(incluster, outcluster)
        constraints = self._find_selection_constraints(incluster)
        for con in constraints: 
            selector.add_constraint(con)
            self._selection_method[con] = selector
        self._add_cluster(outcluster)
        self._add_method(selector)
        self._rem_top_level(incluster)
        return selector

    def _find_selection_method(self, con):
        # find clusters containing all constraints vars
        candidates = None
        for var in con.variables():
            # find clusters
            clusters = set(self.find_dependend(var))
            if candidates == None:
                candidates = clusters
            else:
                candidates = candidates.intersection(clusters)
        # get selection methods of clusters
        methods = []
        for cluster in candidates:
            methods += filter(lambda c: isinstance(c,SelectionMethod), self.find_depends(cluster))
        # get selection method with smallest cluster
        if len(methods)>0:
            method = min(methods, key=lambda m: len(m.inputs()[0].vars))
            return method
        else:
            return None

        ##slow implementation, better would be to find method via clustering information in graph
        #convars = set(con.variables())
        #selmethods = filter(lambda x: isinstance(x,SelectionMethod), self.methods())
        #for method in selmethods:
        #    incluster = method.inputs()[0]
        #    clvars = set(incluster.vars)
        #    if clvars.intersection(convars) == convars:
        #        return method
        #return None

    def _find_selection_constraints(self, incluster):
        applicable = []
        for con in self._selection_method:
            selector = self._selection_method[con]
            if selector == None:
                convars = set(con.variables())
                clvars = set(incluster.vars)
                if convars.intersection(clvars) == convars:
                    applicable.append(con)
        return applicable


        
    # --------------
    # isearch methods
    # --------------
 
    def _process_new(self):
        # try incremental matchers and old style matching alternatingly
        non_redundant_methods = filter(lambda m: not self._is_redundant_method(m), self._applicable_methods)
        while len(non_redundant_methods) > 0 or len(self._new) > 0:
            # check incremental matches
            if len(non_redundant_methods) > 0:  
                method = iter(non_redundant_methods).next()
                #print "applicable methods:", map(str, self._applicable_methods)
                diag_print("incremental search found:"+str(method),"clsolver._process_new")
                self._add_method_complete(method)
            else:
                newobject = self._new.pop()
                diag_print("search from "+str(newobject), "clsolver")
                succes = self._search(newobject)
                if succes and self.is_top_level(newobject): 
                    # maybe more rules applicable.... push back on stack
                    self._new.append(newobject)
                #endif
            # endif
            non_redundant_methods = filter(lambda m: not self._is_redundant_method(m), self._applicable_methods)
        # endwhile
    #end def
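
    # Overview of the search pipeline: _process_new drains the incrementally
    # matched methods from _applicable_methods first and only then falls back
    # to _search, which tries the handcoded matchers and finally generic
    # pattern matching via _try_methods/_try_matches.  Every candidate method
    # is filtered through _is_redundant_method before _add_method_complete
    # commits it to the graph.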
    
    def _search(self, newcluster):
        diag_print("search from:"+str(newcluster),"clsolver3D")
        # find all toplevel clusters connected to newcluster 
        # via one or more variables
        connected = set()
        for var in newcluster.vars:
            dependend = self.find_dependend(var)
            dependend = filter(lambda x: self.is_top_level(x), dependend)
            connected.update(dependend)
        diag_print("search: connected clusters="+str(connected),"clsolver3D")
        
        # first try handcoded matching
        for methodclass in self._handcoded_methods:
            diag_print("trying handcoded match for "+str(methodclass), "clsolver3D")
            matches = methodclass.handcoded_match(self, newcluster, connected)
            if self._try_matches(methodclass, matches):
                return True

        # if incremental matching failed, try full pattern matching
        if self._try_methods(connected):
            return True 
        return False


    def _try_methods(self, nlet):
        """finds a possible rewrite rule applications on given set of clusters, applies it 
           and returns True iff successfull
        """
        refgraph = reference2graph(nlet)
        for methodclass in self._pattern_methods:
            diag_print("trying generic pattern matching for "+str(methodclass), "clsolver3D")
            matches = gmatch(methodclass.patterngraph, refgraph)
            if self._try_matches(methodclass,matches):
                return True
            # end for match
        # end for method
        return False
    
    def _try_matches(self, methodclass, matches):
        # print "method="+str(methodclass),"number of matches = "+str(len(matches))
        for s in matches:
            diag_print("try match: "+str(s),"clsolver3D")
            method = apply(methodclass, [s])
            succes = self._add_method_complete(method)
            if succes:
                #raw_input()
                #print "press key"
                return True
            else:    # WARNING: fast bailout, may be incomplete!
                return False 
        # end for match
        return False

    def _is_information_increasing(self, merge):
        # check that the method is information increasing (infinc)
        output = merge.outputs()[0]
        infinc = True
        connected = set()
        for var in output.vars:
            dependend = self.find_dependend(var)
            dependend = filter(lambda x: self.is_top_level(x), dependend)
            connected.update(dependend)
        # NOTE 07-11-2007 (while writing the paper): this implementation of
        # information increasing may not be correct. We may need to check that
        # the total sum of the information in the overlapping clusters is
        # equal to the information in the output.
        for cluster in connected:
            if num_constraints(cluster.intersection(output)) >= num_constraints(output):
                infinc = False
                break
        diag_print("information increasing:"+str(infinc),"clsolver")
        return infinc

       
    def _is_cluster_reducing(self, merge):
        # check if method reduces number of clusters (reduc)
        output = merge.outputs()[0]
        nremove = 0
        for cluster in merge.input_clusters():
            if num_constraints(cluster.intersection(output)) >= num_constraints(cluster): 
               # will be removed from toplevel
               nremove += 1
        # exception if method sets noremove flag
        if hasattr(merge,"noremove") and merge.noremove == True:
            nremove = 0
        reduc = (nremove > 1)
        diag_print("reduce # clusters:"+str(reduc),"clsolver")
        return reduc

    def _is_redundant_method(self, merge):
        # check if the method is redundant (not information increasing and not reducing number of clusters)
        infinc = self._is_information_increasing(merge)
        reduc = self._is_cluster_reducing(merge)
        if not infinc and not reduc:
            diag_print("method is redundant","clsolver")
            return True
        else:
            diag_print("method is not redundant","clsolver")
            return False

    def _add_method_complete(self, merge):
        diag_print("add_method_complete "+str(merge), "clsolver")
        
        # do not add if method is redundant
        if self._is_redundant_method(merge):
            return False

        output = merge.outputs()[0]
        
        # check consistency and local/global overconstrained
        consistent = True
        local_oc = False
        for i1 in range(0, len(merge.input_clusters())):
            for i2 in range(i1+1, len(merge.input_clusters())):
                c1 = merge.input_clusters()[i1] 
                c2 = merge.input_clusters()[i2] 
                if num_constraints(c1.intersection(c2)) != 0:
                    local_oc = True
                consistent = consistent and self._is_consistent_pair(c1, c2)
        merge.consistent = consistent
        merge.overconstrained = local_oc
        
        # global overconstrained? (store in output cluster)
        overconstrained = not consistent
        for cluster in merge.input_clusters():
            overconstrained = overconstrained or cluster.overconstrained
        output.overconstrained = overconstrained
        
        # determine infinc before adding (used later)
        infinc = self._is_information_increasing(merge)
        
        # add to graph
        self._add_cluster(output)
        self._add_method(merge)
        
        # remove input clusters from top_level
        merge.restore_toplevel = []    # make restore list in method
        for cluster in merge.input_clusters():
            # do not remove rigids from toplevel if method does not consider root
            if isinstance(cluster, Rigid):
                if hasattr(merge,"noremove") and merge.noremove == True:
                    diag_print("block top-level", "clsolver")
                    break
            # remove input clusters when all its constraints are in output cluster 
            if num_constraints(cluster.intersection(output)) >= num_constraints(cluster): 
                diag_print("remove from top-level: "+str(cluster),"clsolver")
                self._rem_top_level(cluster) 
                merge.restore_toplevel.append(cluster)
            else:
                diag_print("keep top-level: "+str(cluster),"clsolver")
        
        # add method to determine root-variable
        if hasattr(merge,"noremove") and merge.noremove == True:
            self._add_root_false(merge.outputs()[0]) 
        else:
            self._add_root_method(merge.input_clusters(),merge.outputs()[0])
        # add solution selection methods, only if information increasing
        if infinc:
            output2 = self._add_prototype_selector(merge)
            output3 = self._add_solution_selector(output2)
        
        # success
        return True

    def _add_root_method(self,inclusters,outcluster):
        inroots = []
        for cluster in inclusters:
            inroots.append(rootname(cluster))
        outroot = rootname(outcluster)
        method = OrMethod(inroots, outroot)
        # add method
        self._add_method(method)
        # make sure it's deleted when the cluster is deleted
        self._add_dependency(outcluster, method) 

    def _add_root_false(self,outcluster):
        outroot = rootname(outcluster)
        method = SetMethod(outroot, False)
        # add method
        self._add_method(method)
        # make sure it's deleted when the cluster is deleted
        self._add_dependency(outcluster, method) 


    # -- removing objects

    def _remove(self, object):
        # find all indirectly dependend objects
        todelete = [object]+self._find_descendend(object)
        torestore = set()
        # remove all objects
        for item in todelete:
            # if merge removed items from toplevel then add them back to top level 
            if hasattr(item, "restore_toplevel"):
                for cluster in item.restore_toplevel:
                    torestore.add(cluster)
            # delete it from graph
            diag_print("deleting "+str(item),"clsolver.remove")
            self._graph.rem_vertex(item)
            # remove from _new list
            if item in self._new:
                self._new.remove(item)
            # remove from incremental top_level
            self._toplevel.remove(item)
            # remove from methodgraph
            if isinstance(item, Method):
                # note: method may have been removed because variable removed
                try:
                    self._mg.rem_method(item)
                except:
                    pass
                # restore SelectionConstraints
                if isinstance(item, SelectionMethod):
                    for con in item.iter_constraints():
                        self._selection_method[con] = None
            if isinstance(item, MultiVariable):
                self._mg.rem_variable(item)
            # remove variables with no dependent clusters
            if isinstance(item, Cluster):
                for var in item.vars:
                    if len(self.find_dependend(var)) == 0:
                        self._graph.rem_vertex(var)
            # notify listeners
            self.send_notify(("remove", item))
        # restore toplevel (also added to _new)
        for cluster in torestore:
            if self._graph.has_vertex(cluster): 
                self._add_top_level(cluster)


    ##def _contains_root(self, input_cluster):
    ##   """returns True iff input_cluster is root cluster or was determined by
    ##    merging with the root cluster."""
    ##
    ##    # start from root cluster. Follow merges upwards until:
    ##    #  - input cluster found -> True
    ##    #  - no more merges -> False
    ##
    ##    if len(self._graph.outgoing_vertices("_root")) > 1:
    ##        raise StandardError, "more than one root cluster" 
    ##    if len(self._graph.outgoing_vertices("_root")) == 1:
    ##        cluster = self._graph.outgoing_vertices("_root")[0]
    ##    else:
    ##        cluster = None
    ##    while (cluster != None):
    ##        if cluster is input_cluster:
    ##            return True
    ##        fr = self._graph.outgoing_vertices(cluster)
    ##        me = filter(lambda x: isinstance(x, Merge), fr)
    ##        me = filter(lambda x: cluster in x.outputs(), me)
    ##        if len(me) > 1:
    ##            raise StandardError, "root cluster merged more than once"
    ##        elif len(me) == 0:
    ##            cluster = None
    ##        elif len(me[0].outputs()) != 1:
    ##            raise StandardError, "a merge with number of outputs != 1"
    ##        else:
    ##            cluster = me[0].outputs()[0]
    ##    #while
    ##    return False
    #def

    # ---- consistency

    def _is_consistent_pair(self, object1, object2):
        diag_print("in is_consistent_pair "+str(object1)+" "+str(object2),"clsolver")
        oc = over_constraints(object1, object2) 
        diag_print("over_constraints: "+str(map(str,oc)),"clsolver")
        consistent = True
        for con in oc:
            consistent = consistent and self._consistent_overconstraint_in_pair(con, object1, object2)
        diag_print("global consistent? "+str(consistent),"clsolver")
        return consistent
    
    def _consistent_overconstraint_in_pair(self, overconstraint, object1, object2):
        diag_print("consistent "+str(overconstraint)+" in "+str(object1)+" and "+str(object2)+" ?", "clsolver")
    
        # get sources for constraint in given clusters
        s1 = self._source_constraint_in_cluster(overconstraint, object1)
        s2 = self._source_constraint_in_cluster(overconstraint, object2)

        if s1 == None:
            consistent = False
        elif s2 == None:
            consistent = False
        elif s1 == s2:
            consistent = True
        else:
            if self._is_atomic(s1) and not self._is_atomic(s2):
                consistent = False
            elif self._is_atomic(s2) and not self._is_atomic(s1):
                consistent = False
            else:
                consistent = True
            #c1to2 = constraits_from_s1_in_s2(s1, s2)
            #if solve(c1to2) contains overconstraint then consistent
            #c2to1 = constraits_from_s1_in_s2(s2, s1)
            #if solve(c2to1) contains overconstraint then consistent
            #raise StandardError, "not yet implemented"

        diag_print("consistent? "+str(consistent), "clsolver")
        return consistent

    def _source_constraint_in_cluster(self, constraint, cluster):
        if not self._contains_constraint(cluster, constraint):
            raise StandardError, "constraint not in cluster"
        elif self._is_atomic(cluster):
            return cluster
        else:
            method = self._determining_method(cluster)
            inputs = method.inputs()
            down = filter(lambda x: self._contains_constraint(x, constraint), inputs)
            if len(down) == 0:
                return cluster
            elif len(down) > 1:
                if method.consistent == True:
                    return self._source_constraint_in_cluster(constraint, down[0])
                else: 
                    diag_print("Warning: source is inconsistent","clsolver")
                    return None
            else:
                return self._source_constraint_in_cluster(constraint, down[0])

           
    def _is_atomic(self, object):
        method = self._determining_method(object)
        if method == None:
            return True
        #elif isinstance(method, Distance2Rigid) or isinstance(method, Angle2Hog):
        #    return True
        else:
            return False

    def _determining_method(self, object):
        depends = self.find_depends(object)
        methods = filter(lambda x: isinstance(x, Method), depends)
        if len(methods) == 0:
            return None
        elif len(methods) > 1:
            raise "object determined by more than one method"
        else:
            return methods[0] 

    
    def _contains_constraint(self, object, constraint):
        if isinstance(constraint, Distance):
            return self._contains_distance(object, constraint)
        elif isinstance(constraint, Angle):
            return self._contains_angle(object, constraint)
        else:
            raise StandardError, "unexpected case"

    def _contains_distance(self,object, distance):
        if isinstance(object, Rigid):
            return (distance.vars[0] in object.vars and distance.vars[1] in object.vars)
        elif isinstance(object, Distance):
            return (distance.vars[0] in object.vars and distance.vars[1] in object.vars)
        else:
            return False

    def _contains_angle(self, object, angle):
        if isinstance(object, Rigid) or isinstance(object, Balloon):
            return (angle.vars[0] in object.vars 
            and angle.vars[1] in object.vars 
            and angle.vars[2] in object.vars)
        elif isinstance(object, Hedgehog):
            return (angle.vars[1] == object.cvar and
            angle.vars[0] in object.xvars and 
            angle.vars[2] in object.xvars)
        elif isinstance(object, Angle):
            return (angle.vars[1] == object.vars[1] and
            angle.vars[0] in object.vars and 
            angle.vars[2] in object.vars)
        else:
            return False


    # --------- special methods ------

    def __str__(self):
        s = ""
        s += "Clusters:\n"
        for x in self.clusters():
            s += str(x) + "\n"
        s += "Methods:\n"
        for x in self.methods():
            s += str(x) + "\n"
        return s
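
The workflow described in the class docstring, as a hedged usage sketch: add clusters, pick a root, attach Configurations, then read back the propagated Configurations of the merged top-level clusters. The import paths, the no-argument ClusterSolver3D constructor and the Configuration value types are assumptions about the surrounding geosolver package; only ClusterSolver, Rigid and Configuration are referred to in this listing.

# NOTE: import paths and the ClusterSolver3D constructor below are assumptions.
from geosolver.clsolver3D import ClusterSolver3D   # assumed: ClusterSolver subclass
from geosolver.cluster import Rigid                # assumed module path
from geosolver.configuration import Configuration  # assumed module path

solver = ClusterSolver3D()        # assumed to supply its own ClusterMethod set
r1 = Rigid(['p1', 'p2'])
r2 = Rigid(['p2', 'p3'])
solver.add(r1)                    # add() triggers _process_new()
solver.add(r2)
solver.set_root(r1)               # anchors position and orientation

# attach concrete Configurations (coordinates shown as plain tuples; the real
# Configuration may expect vector objects), then read the propagated results
solver.set(r1, [Configuration({'p1': (0, 0, 0), 'p2': (1, 0, 0)})])
solver.set(r2, [Configuration({'p2': (1, 0, 0), 'p3': (1, 1, 0)})])
for cluster in solver.top_level():
    print(solver.get(cluster))
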
Example No. 6
class ClusterSolver(Notifier):
    """Constraints are Clusers: Rigids, Hedgehogs and Balloons. 
       After adding each cluster, the solver tries to merge
       clusters, adding new clusters and methods between clusters. 
    """

    # ------- PUBLIC METHODS --------

    def __init__(self, dimension):
        """Create a new empty solver"""
        Notifier.__init__(self)
        self.dimension = dimension
        self._graph = Graph()
        self._graph.add_vertex("_root")
        self._graph.add_vertex("_toplevel")
        self._graph.add_vertex("_variables")
        self._graph.add_vertex("_distances")
        self._graph.add_vertex("_angles")
        self._graph.add_vertex("_rigids")
        self._graph.add_vertex("_hedgehogs")
        self._graph.add_vertex("_balloons")
        self._graph.add_vertex("_methods")
        # queue of new objects to process
        self._new = []
        # methodgraph
        self._mg = MethodGraph()

    def variables(self):
        """get list of variables"""
        return self._graph.outgoing_vertices("_variables")

    def distances(self):
        """get list of distances"""
        return self._graph.outgoing_vertices("_distances")

    def angles(self):
        """get list of angles"""
        return self._graph.outgoing_vertices("_angles")

    def rigids(self):
        """get list of rigids"""
        return self._graph.outgoing_vertices("_rigids")

    def hedgehogs(self):
        """get list of hedgehogs"""
        return self._graph.outgoing_vertices("_hedgehogs")

    def balloons(self):
        """get list of balloons"""
        return self._graph.outgoing_vertices("_balloons")

    def methods(self):
        """get list of methods"""
        return self._graph.outgoing_vertices("_methods")

    def top_level(self):
        """get top-level objects"""
        return self._graph.outgoing_vertices("_toplevel")

    def is_top_level(self, object):
        return self._graph.has_edge("_toplevel", object)

    def add(self, cluster):
        """Add a cluster. 
        
           arguments:
              cluster: A Rigid
           """
        diag_print("add_cluster " + str(cluster), "clsolver")
        self._add_cluster(cluster)
        self._process_new()

    def remove(self, cluster):
        """Remove a cluster. 
           All dependent objects are also removed.
        """
        self._remove(cluster)
        self._process_new()

    def set(self, cluster, configurations):
        """Associate a list of configurations with a cluster"""
        self._mg.set(cluster, configurations)

    def get(self, cluster):
        """Return a set of configurations associated with a cluster"""
        return self._mg.get(cluster)

    def set_root(self, rigid):
        """Make given rigid cluster the root cluster
        
           arguments:
              rigid: A Rigid
           """
        self._graph.rem_vertex("_root")
        self._graph.add_edge("_root", rigid)

    def find_dependend(self, object):
        """Return a list of objects that depend on given object directly."""
        l = self._graph.outgoing_vertices(object)
        return filter(lambda x: self._graph.get(object, x) == "dependency", l)

    def find_depends(self, object):
        """Return a list of objects that the given object depends on directly"""
        l = self._graph.ingoing_vertices(object)
        return filter(lambda x: self._graph.get(x, object) == "dependency", l)

    def contains(self, obj):
        return self._graph.has_vertex(obj)

    # ------------ INTERNALLY USED METHODS --------

    # -- general house hold

    def _add_dependency(self, on, dependend):
        """Add a dependence for second object on first object"""
        self._graph.add_edge(on, dependend, "dependency")

    def _add_to_group(self, group, object):
        """Add object to group"""
        self._graph.add_edge(group, object, "contains")

    def _add_needed_by(self, needed, by):
        """Add relation 'needed' object is needed 'by'"""
        self._graph.add_edge(needed, by, "needed_by")

    def _objects_that_need(self, needed):
        """Return objects needed by given object"""
        l = self._graph.outgoing_vertices(needed)
        return filter(lambda x: self._graph.get(needed, x) == "needed_by", l)

    def _objects_needed_by(self, needer):
        """Return objects needed by given object"""
        l = self._graph.ingoing_vertices(needer)
        return filter(lambda x: self._graph.get(x, needer) == "needed_by", l)

    def _add_top_level(self, object):
        self._graph.add_edge("_toplevel", object)
        self._new.append(object)

    def _rem_top_level(self, object):
        self._graph.rem_edge("_toplevel", object)
        if object in self._new:
            self._new.remove(object)

    def _remove(self, object):
        # find all indirectly dependend objects
        todelete = [object] + self._find_descendend(object)
        torestore = Set()
        # remove all objects
        for item in todelete:
            # if merge removed items from toplevel then add them back to top level
            if hasattr(item, "restore_toplevel"):
                for cluster in item.restore_toplevel:
                    torestore.add(cluster)
            # delete it from graph
            diag_print("deleting " + str(item), "clsolver.remove")
            self._graph.rem_vertex(item)
            # remove from _new list
            if item in self._new:
                self._new.remove(item)
            # remove from methodgraph
            if isinstance(item, Method):
                # note: method may have been removed because variable removed
                try:
                    self._mg.rem_method(item)
                except:
                    pass
            elif isinstance(item, MultiVariable):
                self._mg.rem_variable(item)
            # notify listeners
            self.send_notify(("remove", item))
        # restore toplevel (also added to _new)
        for cluster in torestore:
            if self._graph.has_vertex(cluster):
                self._add_top_level(cluster)
        # debug
        # print "after remove, drplan:"
        # print self
        # print "after remove, toplevel:"
        # print self.top_level()
        # re-solve
        self._process_new()

    def _find_descendend(self, v):
        """find all descendend objects of v (dirdctly or indirectly dependend"""
        front = [v]
        result = {}
        while len(front) > 0:
            x = front.pop()
            if x not in result:
                result[x] = 1
                front += self.find_dependend(x)
        del result[v]
        return list(result)

    # -- add object types

    def _add_variable(self, var):
        """Add a variable if not already in system
        
           arguments:
              var: any hashable object
        """
        if not self._graph.has_vertex(var):
            diag_print("_add_variable " + str(var), "clsolver")
            self._add_to_group("_variables", var)

    def _add_cluster(self, cluster):
        if isinstance(cluster, Rigid):
            self._add_rigid(cluster)
        elif isinstance(cluster, Hedgehog):
            self._add_hog(cluster)
        elif isinstance(cluster, Balloon):
            self._add_balloon(cluster)
        else:
            raise StandardError, "unsupported type", type(cluster)

    def _add_rigid(self, newcluster):
        """add a rigid cluster if not already in system"""
        diag_print("_add_rigid " + str(newcluster), "clsolver")
        # check if not already exists
        if self._graph.has_vertex(newcluster):
            raise StandardError, "rigid already in clsolver"
        # update graph
        self._add_to_group("_rigids", newcluster)
        for var in newcluster.vars:
            self._add_variable(var)
            self._add_dependency(var, newcluster)
        # if there is no root cluster, this one will be it
        if len(self._graph.outgoing_vertices("_root")) == 0:
            self._graph.add_edge("_root", newcluster)
        # add to top level
        self._add_top_level(newcluster)
        # add to methodgraph
        self._mg.add_variable(newcluster)
        # notify
        self.send_notify(("add", newcluster))

    #end def _add_rigid

    def _add_hog(self, hog):
        diag_print("_add_hog:" + str(hog), "clsolver")
        # check if not already exists
        if self._graph.has_vertex(hog):
            raise StandardError, "hedgehog already in clsolver"
        # update graph
        self._add_to_group("_hedgehogs", hog)
        for var in list(hog.xvars) + [hog.cvar]:
            self._add_variable(var)
            self._add_dependency(var, hog)
        # add to top level
        self._add_top_level(hog)
        # add to methodgraph
        self._mg.add_variable(hog)
        # notify
        self.send_notify(("add", hog))

    def _add_balloon(self, newballoon):
        """add a cluster if not already in system"""
        diag_print("_add_balloon " + str(newballoon), "clsolver")
        # check if not already exists
        if self._graph.has_vertex(newballoon):
            raise StandardError, "balloon already in clsolver"
        # update graph
        self._add_to_group("_balloons", newballoon)
        for var in newballoon.vars:
            self._add_variable(var)
            self._add_dependency(var, newballoon)
        # add to top level
        self._add_top_level(newballoon)
        # add to methodgraph
        self._mg.add_variable(newballoon)
        # notify
        self.send_notify(("add", newballoon))

    #end def _add_balloon

    def _add_merge(self, merge):
        # structural check that method has one output
        if len(merge.outputs()) != 1:
            raise StandardError, "merge number of outputs != 1"
        output = merge.outputs()[0]
        # remove any derives from clusters to be merged
        #for cluster in merge.inputs():
        #    outgoing = self.find_dependend(cluster)
        #    derives = filter(lambda x: isinstance(x, Derive), outgoing)
        #    for d in derives:
        #        self._remove(d)
        # consistent merge?
        consistent = True
        for i1 in range(0, len(merge.inputs())):
            for i2 in range(i1 + 1, len(merge.inputs())):
                c1 = merge.inputs()[i1]
                c2 = merge.inputs()[i2]
                consistent = consistent and self._is_consistent_pair(c1, c2)
        merge.consistent = consistent
        # overconstrained cluster?
        overconstrained = not consistent
        for cluster in merge.inputs():
            overconstrained = overconstrained and cluster.overconstrained
        output.overconstrained = overconstrained
        # add to graph
        self._add_cluster(output)
        self._add_method(merge)
        # remove inputs from toplevel
        for cluster in merge.inputs():
            self._rem_top_level(cluster)
        # add prototype selection method
        self._add_prototype_selector(merge)
        # add solution selection method
        self._add_solution_selector(merge)

    def _add_prototype_selector(self, merge):
        incluster = merge.outputs()[0]
        constraints = merge.prototype_constraints()
        if len(constraints) == 0:
            return
        vars = Set()
        for con in constraints:
            vars.union_update(con.variables())
        selclusters = []
        for var in vars:
            clusters = self._graph.outgoing_vertices(var)
            clusters = filter(lambda c: isinstance(c, Rigid), clusters)
            clusters = filter(lambda c: len(c.vars) == 1, clusters)
            if len(clusters) != 1:
                raise StandardError, "no prototype cluster for variable " + str(
                    v)
            selclusters.append(clusters[0])
        outcluster = incluster.copy()
        # Rick 20090519 - copy does not copy structural overconstrained flag?
        outcluster.overconstrained = incluster.overconstrained
        selector = PrototypeMethod(incluster, selclusters, outcluster,
                                   constraints)
        self._add_cluster(outcluster)
        self._add_method(selector)
        self._rem_top_level(incluster)
        return

    def _add_solution_selector(self, merge):
        return

    def _add_method(self, method):
        diag_print("new " + str(method), "clsolver")
        self._add_to_group("_methods", method)
        for obj in method.inputs():
            self._add_dependency(obj, method)
        for obj in method.outputs():
            self._add_dependency(method, obj)
            self._add_dependency(obj, method)
        self._mg.add_method(method)
        self.send_notify(("add", method))

    # --------------
    # search methods
    # --------------

    def _process_new(self):
        while len(self._new) > 0:
            newobject = self._new.pop()
            diag_print("search from " + str(newobject), "clsolver")
            succes = self._search(newobject)
            if succes and self.is_top_level(newobject):
                # maybe more rules applicable.... push back on stack
                self._new.append(newobject)
        # while

    #end def

    def _search(self, newcluster):
        raise StandardError, "Not implemented. ClusterSolver is an abstract class, please use ClusterSolver2D or ClusterSolver3D"

    def _contains_root(self, input_cluster):
        """returns True iff input_cluster is root cluster or was determined by
        merging with the root cluster."""

        # start from root cluster. Follow merges upwards until:
        #  - input cluster found -> True
        #  - no more merges -> False

        if len(self._graph.outgoing_vertices("_root")) > 1:
            raise StandardError, "more than one root cluster"
        if len(self._graph.outgoing_vertices("_root")) == 1:
            cluster = self._graph.outgoing_vertices("_root")[0]
        else:
            cluster = None
        while (cluster != None):
            if cluster is input_cluster:
                return True
            fr = self._graph.outgoing_vertices(cluster)
            me = filter(lambda x: isinstance(x, Merge), fr)
            me = filter(lambda x: cluster in x.outputs(), me)
            if len(me) > 1:
                raise StandardError, "root cluster merged more than once"
            elif len(me) == 0:
                cluster = None
            elif len(me[0].outputs()) != 1:
                raise StandardError, "a merge with number of outputs != 1"
            else:
                cluster = me[0].outputs()[0]
        #while
        return False

    #def

    def _is_consistent_pair(self, object1, object2):
        diag_print(
            "in is_consistent_pair " + str(object1) + " " + str(object2),
            "clsolver")
        oc = over_constraints(object1, object2)
        diag_print("over_constraints: " + str(map(str, oc)), "clsolver")
        consistent = True
        for con in oc:
            consistent = consistent and self._consistent_overconstraint_in_pair(
                con, object1, object2)
        diag_print("global consistent? " + str(consistent), "clsolver")
        return consistent

    def _consistent_overconstraint_in_pair(self, overconstraint, object1,
                                           object2):
        diag_print(
            "consistent " + str(overconstraint) + " in " + str(object1) +
            " and " + str(object2) + " ?", "clsolver")

        # get sources for constraint in given clusters
        s1 = self._source_constraint_in_cluster(overconstraint, object1)
        s2 = self._source_constraint_in_cluster(overconstraint, object2)

        if s1 == None:
            consistent = False
        elif s2 == None:
            consistent = False
        elif s1 == s2:
            consistent = True
        else:
            if self._is_atomic(s1) and not self._is_atomic(s2):
                consistent = False
            elif self._is_atomic(s2) and not self._is_atomic(s1):
                consistent = False
            else:
                consistent = True
            #c1to2 = constraits_from_s1_in_s2(s1, s2)
            #if solve(c1to2) contains overconstraint then consistent
            #c2to1 = constraits_from_s1_in_s2(s2, s1)
            #if solve(c2to1) contains overconstraint then consistent
            #raise StandardError, "not yet implemented"

        diag_print("consistent? " + str(consistent), "clsolver")
        return consistent

    def _source_constraint_in_cluster(self, constraint, cluster):
        if not self._contains_constraint(cluster, constraint):
            raise StandardError, "constraint not in cluster"
        elif self._is_atomic(cluster):
            return cluster
        else:
            method = self._determining_method(cluster)
            inputs = method.inputs()
            down = filter(lambda x: self._contains_constraint(x, constraint),
                          inputs)
            if len(down) == 0:
                return cluster
            elif len(down) > 1:
                if method.consistent == True:
                    return self._source_constraint_in_cluster(
                        constraint, down[0])
                else:
                    diag_print("Warning: source is inconsistent", "clsolver")
                    return None
            else:
                return self._source_constraint_in_cluster(constraint, down[0])

    def _is_atomic(self, object):
        method = self._determining_method(object)
        if method == None:
            return True
        #elif isinstance(method, Distance2Rigid) or isinstance(method, Angle2Hog):
        #    return True
        else:
            return False

    def _determining_method(self, object):
        depends = self.find_depends(object)
        methods = filter(lambda x: isinstance(x, Method), depends)
        if len(methods) == 0:
            return None
        elif len(methods) > 1:
            raise "object determined by more than one method"
        else:
            return methods[0]

    def _contains_constraint(self, object, constraint):
        if isinstance(constraint, Distance):
            return self._contains_distance(object, constraint)
        elif isinstance(constraint, Angle):
            return self._contains_angle(object, constraint)
        else:
            raise StandardError, "unexpected case"

    def _contains_distance(self, object, distance):
        if isinstance(object, Rigid):
            return (distance.vars[0] in object.vars
                    and distance.vars[1] in object.vars)
        elif isinstance(object, Distance):
            return (distance.vars[0] in object.vars
                    and distance.vars[1] in object.vars)
        else:
            return False

    def _contains_angle(self, object, angle):
        if isinstance(object, Rigid) or isinstance(object, Balloon):
            return (angle.vars[0] in object.vars
                    and angle.vars[1] in object.vars
                    and angle.vars[2] in object.vars)
        elif isinstance(object, Hedgehog):
            return (angle.vars[1] == object.cvar
                    and angle.vars[0] in object.xvars
                    and angle.vars[2] in object.xvars)
        elif isinstance(object, Angle):
            return (angle.vars[1] == object.vars[1]
                    and angle.vars[0] in object.vars
                    and angle.vars[2] in object.vars)
        else:
            return False

    # --------- special methods ------

    def __str__(self):
        s = ""
        for x in self.distances():
            s += str(x) + "\n"
        for x in self.angles():
            s += str(x) + "\n"
        for x in self.rigids():
            s += str(x) + "\n"
        for x in self.hedgehogs():
            s += str(x) + "\n"
        for x in self.balloons():
            s += str(x) + "\n"
        for x in self.methods():
            s += str(x) + "\n"
        return s

    # ---------- older unused methods, kept for possible future use ---------

    ##def _known_distance(self,a,b):
    ##    """returns Distance or Rigid that contains a and b"""
    ##    # get objects dependend on a and b
    ##    dep_a = self._graph.outgoing_vertices(a)
    ##    dep_b = self._graph.outgoing_vertices(b)
    ##    dependend = []
    ##    for obj in dep_a:
    ##        if obj in dep_b:
    ##            dependend.append(obj)
    ##    # find a Distance
    ##    # distances = filter(lambda x: isinstance(x,Distance), dependend)
    ##    # if len(distances) > 0: return distances[0]
    ##    # or find a Rigid
    ##    clusters = filter(lambda x: isinstance(x,Rigid), dependend)
    ##    clusters = filter(lambda x: self.is_top_level(x), clusters)
    ##    if len(clusters) > 0: return clusters[0]
    ##    # or return None
    ##    return None
    ##

    def _known_angle(self, a, b, c):
        """returns Balloon, Rigid or Hedgehog that contains angle(a, b, c)"""
        if a == b or a == c or b == c:
            raise StandardError, "all vars in angle must be different"
        # get objects dependend on a, b and c
        dep_a = self._graph.outgoing_vertices(a)
        dep_b = self._graph.outgoing_vertices(b)
        dep_c = self._graph.outgoing_vertices(c)
        dependend = []
        for obj in dep_a:
            if obj in dep_b and obj in dep_c:
                dependend.append(obj)
        # find a hedgehog
        hogs = filter(lambda x: isinstance(x, Hedgehog), dependend)
        hogs = filter(lambda hog: hog.cvar == b, hogs)
        hogs = filter(lambda x: self.is_top_level(x), hogs)
        if len(hogs) == 1: return hogs[0]
        if len(hogs) > 1: raise StandardError, "angle in more than one hedgehog"
        # or find a cluster
        clusters = filter(lambda x: isinstance(x, Rigid), dependend)
        clusters = filter(lambda x: self.is_top_level(x), clusters)
        if len(clusters) == 1: return clusters[0]
        if len(clusters) > 1: raise StandardError, "angle in more than one Rigid"
        # or find a balloon
        balloons = filter(lambda x: isinstance(x, Balloon), dependend)
        balloons = filter(lambda x: self.is_top_level(x), balloons)
        if len(balloons) == 1: return balloons[0]
        if len(balloons) > 1: raise StandardError, "error: angle in more than one Balloon"
        # or return None
        return None
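
    # A minimal, hypothetical usage sketch for _known_angle (the Hedgehog
    # constructor signature below is an assumption, not taken from this listing):
    #
    #   if self._known_angle(a, b, c) is None:
    #       hog = Hedgehog(b, [a, c])   # hypothetical constructor signature
    #       ...                         # add it to the solver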
Example No. 7
def test():
    graph = MethodGraph()
    graph.add_variable('a', 1)
    graph.add_variable('b', 2)
    mv_x = MultiVariable('x')
    graph.add_variable(mv_x)
    graph.add_method(SumProdMethod('a','b', mv_x))

    graph.add_variable('p', 3)
    graph.add_variable('q', 4)
    mv_y = MultiVariable('y')
    graph.add_variable(mv_y)
    graph.add_method(SumProdMethod('p','q', mv_y))

    mv_z = MultiVariable('z')
    graph.add_variable(mv_z)
    graph.add_method(SumProdMethod(mv_x,mv_y,mv_z))
    
    print graph.get(mv_z)

    graph.set('a', 100)
    print graph.get(mv_z)
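
# Small addition, not part of the original listing: allow running this
# example directly as a script.
if __name__ == "__main__":
    test()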
Example No. 8
class ClusterSolver(Notifier):
    """ 
    Finds a generic solution for problems formulated by Clusters.

    Clusters are added and removed using the add and remove methods.
    After adding each Cluster, the solver tries to merge it with
    others, resulting in new Clusters.

    The generic solution is a directed acyclic graph of Clusters and Methods. 
    Particular problems and solutions are represented by a Configuration
    for each cluster. 

    For each Cluster a set of Configurations can be set using the
    set method. Configurations are propagated via Methods and can
    be retrieved with the get method. 
    """

    # ------- PUBLIC METHODS --------

    def __init__(self, methodclasses):
        """Create a new solver, using the given subclasses of ClusterMethod."""
        # init superclasses
        Notifier.__init__(self)
        # store arguments
        self._methodclasses = methodclasses
        self._pattern_methods = filter(lambda m: hasattr(m, "patterngraph"),
                                       self._methodclasses)
        self._handcoded_methods = filter(
            lambda m: hasattr(m, "handcoded_match"), self._methodclasses)
        self._incremental_methods = filter(
            lambda m: hasattr(m, "incremental_matcher"), self._methodclasses)
        # init instance vars
        self._graph = Graph()
        #self._graph.add_vertex("_root")
        # self._graph.add_vertex("_toplevel")
        self._graph.add_vertex("_variables")
        self._graph.add_vertex("_clusters")
        self._graph.add_vertex("_methods")
        self._new = []
        self._mg = MethodGraph()
        # add prototype_selection boolean var to method graph
        self._prototype_selection_var = "_prototype_selection_enabled"
        self._mg.add_variable(self._prototype_selection_var)
        self._mg.set(self._prototype_selection_var, True)
        # store map of selection_constraints to SelectionMethod (or None)
        self._selection_method = {}
        # store root cluster (will be assigned when first cluster added)
        self._rootcluster = None
        # an incrementally updated toplevel set
        self._toplevel = MutableSet()
        # incrementally updated set of applicable methods
        self._incremental_matchers = map(
            lambda method: method.incremental_matcher(self),
            self._incremental_methods)
        #print "incremental matchers:",self._incremental_matchers
        self._applicable_methods = Union(*self._incremental_matchers)

    # ------- methods for setting up constraint problems ------------

    def add(self, cluster):
        """Add a cluster"""
        diag_print("add_cluster " + str(cluster), "clsolver")
        self._add_cluster(cluster)
        self._process_new()

    def remove(self, cluster):
        """Remove a cluster. 
           All dependent objects are also removed.
        """
        self._remove(cluster)
        self._process_new()

    def set(self, cluster, configurations):
        """Associate a list of configurations with a cluster"""
        self._mg.set(cluster, configurations)

    def get(self, cluster):
        """Return a set of configurations associated with a cluster"""
        return self._mg.get(cluster)

    def set_root(self, cluster):
        """Set root cluster, used for positionig and orienting the solutions"""
        diag_print("set root " + str(self._rootcluster), "clsolver")
        if self._rootcluster != None:
            oldrootvar = rootname(self._rootcluster)
            self._mg.set(oldrootvar, False)
        newrootvar = rootname(cluster)
        self._mg.set(newrootvar, True)
        self._rootcluster = cluster

    def get_root(self):
        """returns current root cluster or None"""
        return self._rootcluster

    def set_prototype_selection(self, enabled):
        """Enable or disable prototype-based solution selection"""
        self._mg.set(self._prototype_selection_var, enabled)

    def add_selection_constraint(self, con):
        """Add a SelectionConstraint to filter solutions"""
        if con not in self._selection_method:
            selector = self._find_selection_method(con)
            if selector != None:
                selector.add_constraint(con)
                self._mg.execute(selector)
            # store the selector, or None if no suitable SelectionMethod exists yet
            self._selection_method[con] = selector

    def rem_selection_constraint(self, con):
        """Remove a SelectionConstraint"""
        if con in self._selection_method:
            selector = self._selection_method[con]
            if selector != None:
                selector.rem_constraint(con)
                self._mg.execute(selector)
            del self._selection_method[con]
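
    # Hedged sketch of the selection-constraint workflow (SomeSelectionConstraint
    # is a hypothetical subclass of SelectionConstraint, not defined here):
    #
    #   con = SomeSelectionConstraint(...)
    #   solver.add_selection_constraint(con)   # solutions violating con are filtered out
    #   ...
    #   solver.rem_selection_constraint(con)   # removes the filter again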

    # ------- methods for inspecting the state of the solver ------------

    def variables(self):
        """get list of variables"""
        return self._graph.outgoing_vertices("_variables")

    def clusters(self):
        """get list of clusters"""
        return self._graph.outgoing_vertices("_clusters")

    def methods(self):
        """get list of methods"""
        return self._graph.outgoing_vertices("_methods")

    def top_level(self):
        """return IncrementalSet of top-level clusters"""
        return self._toplevel
        # return self._graph.outgoing_vertices("_toplevel")

    def is_top_level(self, object):
        """Returns True iff given cluster is a top-level cluster"""
        #return self._graph.has_edge("_toplevel",object)
        return object in self._toplevel

    def find_dependend(self, object):
        """Return a list of objects that depend on given object directly."""
        l = self._graph.outgoing_vertices(object)
        return filter(lambda x: self._graph.get(object, x) == "dependency", l)

    def find_depends(self, object):
        """Return a list of objects that the given object depends on directly"""
        l = self._graph.ingoing_vertices(object)
        return filter(lambda x: self._graph.get(x, object) == "dependency", l)

    def contains(self, obj):
        return self._graph.has_vertex(obj)

    # ------------ INTERNALLY USED METHODS --------

    # --- dependencies and groups

    def _add_dependency(self, on, dependend):
        """Add a dependence for second object on first object"""
        self._graph.add_edge(on, dependend, "dependency")

    def _add_to_group(self, group, object):
        """Add object to group"""
        self._graph.add_edge(group, object, "contains")

    def _add_needed_by(self, needed, by):
        """Add relation 'needed' object is needed 'by'"""
        self._graph.add_edge(needed, by, "needed_by")

    def _objects_that_need(self, needed):
        """Return objects needed by given object"""
        l = self._graph.outgoing_vertices(needed)
        return filter(lambda x: self._graph.get(needed, x) == "needed_by", l)

    def _objects_needed_by(self, needer):
        """Return objects needed by given object"""
        l = self._graph.ingoing_vertices(needer)
        return filter(lambda x: self._graph.get(x, needer) == "needed_by", l)

    def _add_top_level(self, cluster):
        # self._graph.add_edge("_toplevel",cluster)
        self._new.append(cluster)
        self._toplevel.add(cluster)

    def _rem_top_level(self, object):
        # self._graph.rem_edge("_toplevel",object)
        if object in self._new:
            self._new.remove(object)
        self._toplevel.remove(object)

    def _find_descendend(self, v):
        """find all descendend objects of v (i.e.. directly or indirectly dependend)"""
        front = [v]
        result = {}
        while len(front) > 0:
            x = front.pop()
            if x not in result:
                result[x] = 1
                front += self.find_dependend(x)
        del result[v]
        return list(result)

    # -- add object types

    def _add_variable(self, var):
        if not self._graph.has_vertex(var):
            diag_print("_add_variable " + str(var), "clsolver")
            self._add_to_group("_variables", var)

    def _add_cluster(self, newcluster):
        diag_print("_add_cluster " + str(newcluster), "clsolver")
        # check that the cluster does not already exist
        if self._graph.has_vertex(newcluster):
            raise StandardError, "cluster %s already in clsolver" % (
                str(newcluster))
        # update graph
        self._add_to_group("_clusters", newcluster)
        for var in newcluster.vars:
            self._add_variable(var)
            self._add_dependency(var, newcluster)
        # add to top level
        self._add_top_level(newcluster)
        # add to methodgraph
        self._mg.add_variable(newcluster)
        # add root-variable if needed with default value False
        root = rootname(newcluster)
        if not self._mg.contains(root):
            self._mg.add_variable(root, False)
            self._mg.set(root, False)
            # add root-variable to dependency graph
            self._add_dependency(newcluster, root)
        # if there is no root cluster, this one will be it
        if self.get_root() == None:
            self.set_root(newcluster)
        # notify listeners
        self.send_notify(("add", newcluster))

    def _add_method(self, method):
        diag_print("new " + str(method), "clsolver")
        self._add_to_group("_methods", method)
        for obj in method.inputs():
            self._add_dependency(obj, method)
        for obj in method.outputs():
            self._add_dependency(method, obj)
            self._add_dependency(obj, method)
        self._mg.add_method(method)
        self.send_notify(("add", method))

    # ----- solution selection

    def _add_prototype_selector(self, merge):
        incluster = merge.outputs()[0]
        constraints = merge.prototype_constraints()
        vars = set()
        for con in constraints:
            vars.update(con.variables())
        selclusters = []
        for var in vars:
            clusters = self._graph.outgoing_vertices(var)
            clusters = filter(lambda c: isinstance(c, Rigid), clusters)
            clusters = filter(lambda c: len(c.vars) == 1, clusters)
            if len(clusters) < 1:
                raise StandardError, "no prototype cluster for variable " + str(
                    var)
            elif len(clusters) > 1:
                raise StandardError, "more than one candidate prototype cluster for variable " + str(
                    var)
            selclusters.append(clusters[0])
        outcluster = incluster.copy()
        selector = PrototypeMethod(incluster, selclusters, outcluster,
                                   constraints, self._prototype_selection_var)
        self._add_cluster(outcluster)
        self._add_method(selector)
        self._rem_top_level(incluster)
        return outcluster

    def _add_solution_selector(self, incluster):
        outcluster = incluster.copy()
        selector = SelectionMethod(incluster, outcluster)
        constraints = self._find_selection_constraints(incluster)
        for con in constraints:
            selector.add_constraint(con)
            self._selection_method[con] = selector
        self._add_cluster(outcluster)
        self._add_method(selector)
        self._rem_top_level(incluster)
        return selector

    def _find_selection_method(self, con):
        # find clusters containing all constraints vars
        candidates = None
        for var in con.variables():
            # find clusters
            clusters = set(self.find_dependend(var))
            if candidates == None:
                candidates = clusters
            else:
                candidates = candidates.intersection(clusters)
        # get selection methods of clusters
        methods = []
        for cluster in candidates:
            methods += filter(lambda c: isinstance(c, SelectionMethod),
                              self.find_depends(cluster))
        # get selection method with smallest cluster
        if len(methods) > 0:
            method = min(methods, key=lambda m: len(m.inputs()[0].vars))
            return method
        else:
            return None

        ##slow implementation, better would be to find method via clustering information in graph
        #convars = set(con.variables())
        #selmethods = filter(lambda x: isinstance(x,SelectionMethod), self.methods())
        #for method in selmethods:
        #    incluster = method.inputs()[0]
        #    clvars = set(incluster.vars)
        #    if clvars.intersection(convars) == convars:
        #        return method
        #return None

    def _find_selection_constraints(self, incluster):
        applicable = []
        for con in self._selection_method:
            selector = self._selection_method[con]
            if selector == None:
                convars = set(con.variables())
                clvars = set(incluster.vars)
                if convars.intersection(clvars) == convars:
                    applicable.append(con)
        return applicable

    # --------------
    # isearch methods
    # --------------

    def _process_new(self):
        # alternate between incremental matching and old-style pattern matching
        non_redundant_methods = filter(
            lambda m: not self._is_redundant_method(m),
            self._applicable_methods)
        while len(non_redundant_methods) > 0 or len(self._new) > 0:
            # check incremental matches
            if len(non_redundant_methods) > 0:
                method = iter(non_redundant_methods).next()
                #print "applicable methods:", map(str, self._applicable_methods)
                diag_print("incremental search found:" + str(method),
                           "clsolver._process_new")
                self._add_method_complete(method)
            else:
                newobject = self._new.pop()
                diag_print("search from " + str(newobject), "clsolver")
                success = self._search(newobject)
                if success and self.is_top_level(newobject):
                    # maybe more rules applicable.... push back on stack
                    self._new.append(newobject)
                #endif
            # endif
            non_redundant_methods = filter(
                lambda m: not self._is_redundant_method(m),
                self._applicable_methods)
        # endwhile

    #end def

    def _search(self, newcluster):
        diag_print("search from:" + str(newcluster), "clsolver3D")
        # find all toplevel clusters connected to newcluster
        # via one or more variables
        connected = set()
        for var in newcluster.vars:
            dependend = self.find_dependend(var)
            dependend = filter(lambda x: self.is_top_level(x), dependend)
            connected.update(dependend)
        diag_print("search: connected clusters=" + str(connected),
                   "clsolver3D")

        # first try handcoded matching
        for methodclass in self._handcoded_methods:
            diag_print("trying handcoded match for " + str(methodclass),
                       "clsolver3D")
            matches = methodclass.handcoded_match(self, newcluster, connected)
            if self._try_matches(methodclass, matches):
                return True

        # if incremental matching failed, try full pattern matching
        if self._try_methods(connected):
            return True
        return False

    def _try_methods(self, nlet):
        """finds a possible rewrite rule applications on given set of clusters, applies it 
           and returns True iff successfull
        """
        refgraph = reference2graph(nlet)
        for methodclass in self._pattern_methods:
            diag_print(
                "trying generic pattern matching for " + str(methodclass),
                "clsolver3D")
            matches = gmatch(methodclass.patterngraph, refgraph)
            if self._try_matches(methodclass, matches):
                return True
            # end for match
        # end for method
        return False
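
    # Descriptive note (comments only): _try_methods converts the connected
    # top-level clusters into a reference graph and, for each pattern-based
    # method class, matches its declared patterngraph against that graph with
    # gmatch; the resulting matches are handed to _try_matches below, which
    # instantiates the method class and tries to add it via _add_method_complete.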

    def _try_matches(self, methodclass, matches):
        # print "method="+str(methodclass),"number of matches = "+str(len(matches))
        for s in matches:
            diag_print("try match: " + str(s), "clsolver3D")
            method = methodclass(s)
            success = self._add_method_complete(method)
            if success:
                #raw_input()
                #print "press key"
                return True
            else:  # WARNING: fast bailout, may be incomplete!
                return False
        # end for match
        return False

    def _is_information_increasing(self, merge):
        # check that the method is information increasing (infinc)
        output = merge.outputs()[0]
        infinc = True
        connected = set()
        for var in output.vars:
            dependend = self.find_dependend(var)
            dependend = filter(lambda x: self.is_top_level(x), dependend)
            connected.update(dependend)
        # NOTE 07-11-2007 (while writing the paper): this implementation of
        # information increasing may not be correct. We may need to check that
        # the total sum of the information in the overlapping clusters is
        # equal to the information in the output.
        for cluster in connected:
            if num_constraints(
                    cluster.intersection(output)) >= num_constraints(output):
                infinc = False
                break
        diag_print("information increasing:" + str(infinc), "clsolver")
        return infinc
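
    # Worked reading of the test above (descriptive comment only): the merge
    # is information increasing only if no connected top-level cluster already
    # fixes at least as many constraints on the shared variables as the output
    # cluster itself, i.e. num_constraints(cluster.intersection(output)) must
    # stay below num_constraints(output) for every connected cluster.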

    def _is_cluster_reducing(self, merge):
        # check if method reduces number of clusters (reduc)
        output = merge.outputs()[0]
        nremove = 0
        for cluster in merge.input_clusters():
            if num_constraints(
                    cluster.intersection(output)) >= num_constraints(cluster):
                # will be removed from toplevel
                nremove += 1
        # exception if the method sets the noremove flag
        if hasattr(merge, "noremove") and merge.noremove == True:
            nremove = 0
        reduc = (nremove > 1)
        diag_print("reduce # clusters:" + str(reduc), "clsolver")
        return reduc

    def _is_redundant_method(self, merge):
        # check if the method is redundant (not information increasing and not reducing number of clusters)
        infinc = self._is_information_increasing(merge)
        reduc = self._is_cluster_reducing(merge)
        if not infinc and not reduc:
            diag_print("method is redundant", "clsolver")
            return True
        else:
            diag_print("method is not redundant", "clsolver")
            return False

    def _add_method_complete(self, merge):
        diag_print("add_method_complete " + str(merge), "clsolver")

        # do not add if method is redundant
        if self._is_redundant_method(merge):
            return False

        output = merge.outputs()[0]

        # check consistency and local/global overconstrained
        consistent = True
        local_oc = False
        for i1 in range(0, len(merge.input_clusters())):
            for i2 in range(i1 + 1, len(merge.input_clusters())):
                c1 = merge.input_clusters()[i1]
                c2 = merge.input_clusters()[i2]
                if num_constraints(c1.intersection(c2)) != 0:
                    local_oc = True
                consistent = consistent and self._is_consistent_pair(c1, c2)
        merge.consistent = consistent
        merge.overconstrained = local_oc

        # global overconstrained? (store in output cluster)
        overconstrained = not consistent
        for cluster in merge.input_clusters():
            overconstrained = overconstrained or cluster.overconstrained
        output.overconstrained = overconstrained

        # determine infinc before adding (used later)
        infinc = self._is_information_increasing(merge)

        # add to graph
        self._add_cluster(output)
        self._add_method(merge)

        # remove input clusters from top_level
        merge.restore_toplevel = []  # make restore list in method
        for cluster in merge.input_clusters():
            # do not remove rigids from toplevel if method does not consider root
            if isinstance(cluster, Rigid):
                if hasattr(merge, "noremove") and merge.noremove == True:
                    diag_print("block top-level", "clsolver")
                    break
            # remove input clusters when all its constraints are in output cluster
            if num_constraints(
                    cluster.intersection(output)) >= num_constraints(cluster):
                diag_print("remove from top-level: " + str(cluster),
                           "clsolver")
                self._rem_top_level(cluster)
                merge.restore_toplevel.append(cluster)
            else:
                diag_print("keep top-level: " + str(cluster), "clsolver")

        # add method to determine root-variable
        if hasattr(merge, "noremove") and merge.noremove == True:
            self._add_root_false(merge.outputs()[0])
        else:
            self._add_root_method(merge.input_clusters(), merge.outputs()[0])
        # add solution selection methods, only if information increasing
        if infinc:
            output2 = self._add_prototype_selector(merge)
            output3 = self._add_solution_selector(output2)

        # success
        return True
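
    # Summary of the pipeline above (descriptive comments only):
    #  1. reject redundant merges (_is_redundant_method)
    #  2. flag consistency and local/global overconstrainedness on the method
    #     and its output cluster
    #  3. add the output cluster and the method to the graphs
    #  4. remove fully covered input clusters from the top level (recorded in
    #     merge.restore_toplevel so that _remove can undo this)
    #  5. add a root-determination method and, if information increasing, the
    #     prototype and solution selectors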

    def _add_root_method(self, inclusters, outcluster):
        inroots = []
        for cluster in inclusters:
            inroots.append(rootname(cluster))
        outroot = rootname(outcluster)
        method = OrMethod(inroots, outroot)
        # add method
        self._add_method(method)
        # make sure it is deleted when the cluster is deleted
        self._add_dependency(outcluster, method)

    def _add_root_false(self, outcluster):
        outroot = rootname(outcluster)
        method = SetMethod(outroot, False)
        # add method
        self._add_method(method)
        # make sure it is deleted when the cluster is deleted
        self._add_dependency(outcluster, method)

    # -- removing objects

    def _remove(self, object):
        # find all indirectly dependent objects
        todelete = [object] + self._find_descendend(object)
        torestore = set()
        # remove all objects
        for item in todelete:
            # if merge removed items from toplevel then add them back to top level
            if hasattr(item, "restore_toplevel"):
                for cluster in item.restore_toplevel:
                    torestore.add(cluster)
            # delete it from graph
            diag_print("deleting " + str(item), "clsolver.remove")
            self._graph.rem_vertex(item)
            # remove from _new list
            if item in self._new:
                self._new.remove(item)
            # remove from incremental top_level
            self._toplevel.remove(item)
            # remove from methodgraph
            if isinstance(item, Method):
                # note: method may have been removed because variable removed
                try:
                    self._mg.rem_method(item)
                except:
                    pass
                # restore SelectionConstraints
                if isinstance(item, SelectionMethod):
                    for con in item.iter_constraints():
                        self._selection_method[con] = None
            if isinstance(item, MultiVariable):
                self._mg.rem_variable(item)
            # remove variables with no dependent clusters
            if isinstance(item, Cluster):
                for var in item.vars:
                    if len(self.find_dependend(var)) == 0:
                        self._graph.rem_vertex(var)
            # notify listeners
            self.send_notify(("remove", item))
        # restore toplevel (also added to _new)
        for cluster in torestore:
            if self._graph.has_vertex(cluster):
                self._add_top_level(cluster)

    ##def _contains_root(self, input_cluster):
    ##   """returns True iff input_cluster is root cluster or was determined by
    ##    merging with the root cluster."""
    ##
    ##    # start from root cluster. Follow merges upwards until:
    ##    #  - input cluster found -> True
    ##    #  - no more merges -> False
    ##
    ##    if len(self._graph.outgoing_vertices("_root")) > 1:
    ##        raise StandardError, "more than one root cluster"
    ##    if len(self._graph.outgoing_vertices("_root")) == 1:
    ##        cluster = self._graph.outgoing_vertices("_root")[0]
    ##    else:
    ##        cluster = None
    ##    while (cluster != None):
    ##        if cluster is input_cluster:
    ##            return True
    ##        fr = self._graph.outgoing_vertices(cluster)
    ##        me = filter(lambda x: isinstance(x, Merge), fr)
    ##        me = filter(lambda x: cluster in x.outputs(), me)
    ##        if len(me) > 1:
    ##            raise StandardError, "root cluster merged more than once"
    ##        elif len(me) == 0:
    ##            cluster = None
    ##        elif len(me[0].outputs()) != 1:
    ##            raise StandardError, "a merge with number of outputs != 1"
    ##        else:
    ##            cluster = me[0].outputs()[0]
    ##    #while
    ##    return False
    #def

    # ---- consistency

    def _is_consistent_pair(self, object1, object2):
        diag_print(
            "in is_consistent_pair " + str(object1) + " " + str(object2),
            "clsolver")
        oc = over_constraints(object1, object2)
        diag_print("over_constraints: " + str(map(str, oc)), "clsolver")
        consistent = True
        for con in oc:
            consistent = consistent and self._consistent_overconstraint_in_pair(
                con, object1, object2)
        diag_print("global consistent? " + str(consistent), "clsolver")
        return consistent

    def _consistent_overconstraint_in_pair(self, overconstraint, object1,
                                           object2):
        diag_print(
            "consistent " + str(overconstraint) + " in " + str(object1) +
            " and " + str(object2) + " ?", "clsolver")

        # get sources for constraint in given clusters
        s1 = self._source_constraint_in_cluster(overconstraint, object1)
        s2 = self._source_constraint_in_cluster(overconstraint, object2)

        if s1 == None:
            consistent = False
        elif s2 == None:
            consistent = False
        elif s1 == s2:
            consistent = True
        else:
            if self._is_atomic(s1) and not self._is_atomic(s2):
                consistent = False
            elif self._is_atomic(s2) and not self._is_atomic(s1):
                consistent = False
            else:
                consistent = True
            #c1to2 = constraits_from_s1_in_s2(s1, s2)
            #if solve(c1to2) contains overconstraint then consistent
            #c2to1 = constraits_from_s1_in_s2(s2, s1)
            #if solve(c2to1) contains overconstraint then consistent
            #raise StandardError, "not yet implemented"

        diag_print("consistent? " + str(consistent), "clsolver")
        return consistent

    def _source_constraint_in_cluster(self, constraint, cluster):
        if not self._contains_constraint(cluster, constraint):
            raise StandardError, "constraint not in cluster"
        elif self._is_atomic(cluster):
            return cluster
        else:
            method = self._determining_method(cluster)
            inputs = method.inputs()
            down = filter(lambda x: self._contains_constraint(x, constraint),
                          inputs)
            if len(down) == 0:
                return cluster
            elif len(down) > 1:
                if method.consistent == True:
                    return self._source_constraint_in_cluster(
                        constraint, down[0])
                else:
                    diag_print("Warning: source is inconsistent", "clsolver")
                    return None
            else:
                return self._source_constraint_in_cluster(constraint, down[0])

    def _is_atomic(self, object):
        method = self._determining_method(object)
        if method == None:
            return True
        #elif isinstance(method, Distance2Rigid) or isinstance(method, Angle2Hog):
        #    return True
        else:
            return False

    def _determining_method(self, object):
        depends = self.find_depends(object)
        methods = filter(lambda x: isinstance(x, Method), depends)
        if len(methods) == 0:
            return None
        elif len(methods) > 1:
            raise "object determined by more than one method"
        else:
            return methods[0]

    def _contains_constraint(self, object, constraint):
        if isinstance(constraint, Distance):
            return self._contains_distance(object, constraint)
        elif isinstance(constraint, Angle):
            return self._contains_angle(object, constraint)
        else:
            raise StandardError, "unexpected case"

    def _contains_distance(self, object, distance):
        if isinstance(object, Rigid):
            return (distance.vars[0] in object.vars
                    and distance.vars[1] in object.vars)
        elif isinstance(object, Distance):
            return (distance.vars[0] in object.vars
                    and distance.vars[1] in object.vars)
        else:
            return False

    def _contains_angle(self, object, angle):
        if isinstance(object, Rigid) or isinstance(object, Balloon):
            return (angle.vars[0] in object.vars
                    and angle.vars[1] in object.vars
                    and angle.vars[2] in object.vars)
        elif isinstance(object, Hedgehog):
            return (angle.vars[1] == object.cvar
                    and angle.vars[0] in object.xvars
                    and angle.vars[2] in object.xvars)
        elif isinstance(object, Angle):
            return (angle.vars[1] == object.vars[1]
                    and angle.vars[0] in object.vars
                    and angle.vars[2] in object.vars)
        else:
            return False

    # --------- special methods ------

    def __str__(self):
        s = ""
        s += "Clusters:\n"
        for x in self.clusters():
            s += str(x) + "\n"
        s += "Methods:\n"
        for x in self.methods():
            s += str(x) + "\n"
        return s