def _consistent_overconstraint_in_pair(self, overconstraint, object1, object2):
    diag_print("consistent " + str(overconstraint) + " in " + str(object1) + " and " + str(object2) + " ?", "clsolver")
    # get sources for constraint in given clusters
    s1 = self._source_constraint_in_cluster(overconstraint, object1)
    s2 = self._source_constraint_in_cluster(overconstraint, object2)
    if s1 == None:
        consistent = False
    elif s2 == None:
        consistent = False
    elif s1 == s2:
        consistent = True
    else:
        if self._is_atomic(s1) and not self._is_atomic(s2):
            consistent = False
        elif self._is_atomic(s2) and not self._is_atomic(s1):
            consistent = False
        else:
            consistent = True
        #c1to2 = constraints_from_s1_in_s2(s1, s2)
        #if solve(c1to2) contains overconstraint then consistent
        #c2to1 = constraints_from_s1_in_s2(s2, s1)
        #if solve(c2to1) contains overconstraint then consistent
        #raise StandardError, "not yet implemented"
    diag_print("consistent? " + str(consistent), "clsolver")
    return consistent
def multi_execute(self, inmap):
    diag_print("DeriveDDD.multi_execute called", "clmethods")
    c12 = inmap[self.d_ab]
    c13 = inmap[self.d_ac]
    c23 = inmap[self.d_bc]
    v1 = self.a
    v2 = self.b
    v3 = self.c
    d12 = distance_2p(c12.get(v1), c12.get(v2))
    d31 = distance_2p(c13.get(v1), c13.get(v3))
    d23 = distance_2p(c23.get(v2), c23.get(v3))
    solutions = solve_ddd(v1, v2, v3, d12, d23, d31)
    # transform solutions to align with root input cluster
    isroot_ab = inmap[self.root_ab]
    isroot_ac = inmap[self.root_ac]
    isroot_bc = inmap[self.root_bc]
    for i in range(len(solutions)):
        if isroot_ab:
            solutions[i] = c12.merge(solutions[i])
        elif isroot_ac:
            solutions[i] = c13.merge(solutions[i])
        elif isroot_bc:
            solutions[i] = c23.merge(solutions[i])
    return solutions
def _add_cluster(self, newcluster):
    diag_print("_add_cluster " + str(newcluster), "clsolver")
    # check if not already exists
    if self._graph.has_vertex(newcluster):
        raise StandardError, "cluster %s already in clsolver" % (str(newcluster))
    # update graph
    self._add_to_group("_clusters", newcluster)
    for var in newcluster.vars:
        self._add_variable(var)
        self._add_dependency(var, newcluster)
    # add to top level
    self._add_top_level(newcluster)
    # add to methodgraph
    self._mg.add_variable(newcluster)
    # add root-variable if needed with default value False
    root = rootname(newcluster)
    if not self._mg.contains(root):
        self._mg.add_variable(root, False)
        self._mg.set(root, False)
        # add root-variable to dependency graph
        self._add_dependency(newcluster, root)
    # if there is no root cluster, this one will be it
    if self.get_root() == None:
        self.set_root(newcluster)
    # notify listeners
    self.send_notify(("add", newcluster))
def multi_execute(self, inmap):
    diag_print("MergeSD.multi_execute called", "clmethods")
    c1 = self._inputs[0]
    c2 = self._inputs[1]
    conf1 = inmap[c1]
    conf2 = inmap[c2]
    return [conf1.merge_scale(conf2)]
def add_random_constraint(problem, ratio):
    """add a random constraint to a problem, with a given ratio angles/distances"""
    if random.random() < ratio:
        # add angle
        pointvars = list(problem.cg.variables())
        random.shuffle(pointvars)
        v1 = pointvars[0]
        v2 = pointvars[1]
        v3 = pointvars[2]
        p1 = problem.get_point(v1)
        p2 = problem.get_point(v2)
        p3 = problem.get_point(v3)
        angle = angle_3p(p1, p2, p3)
        con = AngleConstraint(v1, v2, v3, angle)
        diag_print("**Add constraint:" + str(con), "drplan")
        problem.add_constraint(con)
    else:
        # add distance
        pointvars = list(problem.cg.variables())
        random.shuffle(pointvars)
        v1 = pointvars[0]
        v2 = pointvars[1]
        p1 = problem.get_point(v1)
        p2 = problem.get_point(v2)
        dist = distance_2p(p1, p2)
        con = DistanceConstraint(v1, v2, dist)
        diag_print("**Add constraint:" + str(con), "drplan")
        problem.add_constraint(con)
    return
def solve_ddd_3D(v1, v2, v3, d12, d23, d31):
    """returns a list of Configurations of v1,v2,v3 such that distance v1-v2=d12 etc.
        v<x>: name of point variables
        d<xy>: numeric distance values
    """
    diag_print("solve_ddd: %s %s %s %f %f %f" % (v1, v2, v3, d12, d23, d31), "clmethods")
    # solve in 2D
    p1 = vector.vector([0.0, 0.0])
    p2 = vector.vector([d12, 0.0])
    p3s = cc_int(p1, d31, p2, d23)
    solutions = []
    # extend coords to 3D!
    p1.append(0.0)
    p2.append(0.0)
    for p3 in p3s:
        p3.append(0.0)
        solution = Configuration({v1: p1, v2: p2, v3: p3})
        solutions.append(solution)
    # return only one solution (if any)
    if len(solutions) > 0:
        solutions = [solutions[0]]
    diag_print("solve_ddd solutions" + str(solutions), "clmethods")
    return solutions
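# --- Illustration (not part of the solver) ----------------------------------
# A minimal, self-contained sketch of the circle-circle intersection step that
# cc_int performs for solve_ddd / solve_ddd_3D: with two points placed, the
# candidate positions for the third point lie on the intersection of the
# circle around p1 (radius d31) and the circle around p2 (radius d23).
# The helper name and the use of plain tuples are hypothetical; the solver's
# own cc_int and vector types are not used here.
import math

def _circle_circle_intersection_sketch(c1, r1, c2, r2):
    """Return 0, 1 or 2 intersection points of two circles, as (x, y) tuples."""
    dx = c2[0] - c1[0]
    dy = c2[1] - c1[1]
    d = math.hypot(dx, dy)
    if d == 0.0 or d > r1 + r2 or d < abs(r1 - r2):
        return []                      # coincident centers or no intersection
    a = (d * d + r1 * r1 - r2 * r2) / (2.0 * d)
    h2 = r1 * r1 - a * a
    h = math.sqrt(h2) if h2 > 0.0 else 0.0
    # point on the center line, at distance a from c1
    mx = c1[0] + a * dx / d
    my = c1[1] + a * dy / d
    if h == 0.0:
        return [(mx, my)]              # circles touch in a single point
    # offset perpendicular to the center line
    ox = -dy * h / d
    oy = dx * h / d
    return [(mx + ox, my + oy), (mx - ox, my - oy)]

# e.g. a 3-4-5 triangle gives the two mirrored candidates (1.8, +-2.4):
# print(_circle_circle_intersection_sketch((0.0, 0.0), 3.0, (5.0, 0.0), 4.0))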
def multi_execute(self, inmap):
    diag_print("MergeSR.multi_execute called", "clmethods")
    c1 = self._inputs[0]
    c2 = self._inputs[1]
    conf1 = inmap[c1]
    conf2 = inmap[c2]
    return [conf1.merge_scale(conf2)]
def _process_new(self):
    # try incremental matchers and old style matching alternatingly
    non_redundant_methods = filter(lambda m: not self._is_redundant_method(m), self._applicable_methods)
    while len(non_redundant_methods) > 0 or len(self._new) > 0:
        # check incremental matches
        if len(non_redundant_methods) > 0:
            method = iter(non_redundant_methods).next()
            #print "applicable methods:", map(str, self._applicable_methods)
            diag_print("incremental search found:" + str(method), "clsolver._process_new")
            self._add_method_complete(method)
        else:
            newobject = self._new.pop()
            diag_print("search from " + str(newobject), "clsolver")
            succes = self._search(newobject)
            if succes and self.is_top_level(newobject):
                # maybe more rules applicable.... push back on stack
                self._new.append(newobject)
            #endif
        #endif
        non_redundant_methods = filter(lambda m: not self._is_redundant_method(m), self._applicable_methods)
def solve_ada_3D(a, b, c, a_cab, d_ab, a_abc):
    """returns a list of Configurations of a,b,c such that the angle at a is a_cab,
       the distance a-b is d_ab and the angle at b is a_abc.
        a, b, c: names of point variables
        d_ab: numeric distance value
        a_cab, a_abc: numeric angles in radians
    """
    diag_print("solve_ada: %s %s %s %f %f %f" % (a, b, c, a_cab, d_ab, a_abc), "clmethods")
    p_a = vector.vector([0.0, 0.0])
    p_b = vector.vector([d_ab, 0.0])
    dir_ac = vector.vector([math.cos(-a_cab), math.sin(-a_cab)])
    dir_bc = vector.vector([math.cos(math.pi - a_abc), math.sin(math.pi - a_abc)])
    dir_ac[1] = math.fabs(dir_ac[1])
    dir_bc[1] = math.fabs(dir_bc[1])
    if tol_eq(math.sin(a_cab), 0.0) and tol_eq(math.sin(a_abc), 0.0):
        m = d_ab / 2 + math.cos(-a_cab) * d_ab - math.cos(-a_abc) * d_ab
        p_c = vector.vector([m, 0.0])
        # p_c = (p_a + p_b) / 2
        p_a.append(0.0)
        p_b.append(0.0)
        p_c.append(0.0)
        map = {a: p_a, b: p_b, c: p_c}
        cluster = Configuration(map)
        cluster.underconstrained = True
        rval = [cluster]
    else:
        solutions = rr_int(p_a, dir_ac, p_b, dir_bc)
        p_a.append(0.0)
        p_b.append(0.0)
        rval = []
        for p_c in solutions:
            p_c.append(0.0)
            map = {a: p_a, b: p_b, c: p_c}
            rval.append(Configuration(map))
    return rval
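# --- Illustration (not part of the solver) ----------------------------------
# In the non-degenerate branch, solve_ada_3D finds the third point by
# intersecting two rays: one from p_a along dir_ac and one from p_b along
# dir_bc (the rr_int call). A minimal, hypothetical 2D ray-ray intersection
# with plain tuples, independent of the solver's vector type:
def _ray_ray_intersection_sketch(p, d, q, e):
    """Intersect ray p + t*d (t >= 0) with ray q + s*e (s >= 0)."""
    denom = d[0] * e[1] - d[1] * e[0]
    if abs(denom) < 1e-12:
        return []                      # parallel rays: no unique intersection
    rx = q[0] - p[0]
    ry = q[1] - p[1]
    t = (rx * e[1] - ry * e[0]) / denom
    s = (rx * d[1] - ry * d[0]) / denom
    if t < 0.0 or s < 0.0:
        return []                      # crossing point lies behind a ray origin
    return [(p[0] + t * d[0], p[1] + t * d[1])]

# e.g. rays from (0,0) along (1,1) and from (2,0) along (-1,1) meet at (1,1):
# print(_ray_ray_intersection_sketch((0.0, 0.0), (1.0, 1.0), (2.0, 0.0), (-1.0, 1.0)))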
def _process_new(self):
    while len(self._new) > 0:
        newobject = self._new.pop()
        diag_print("search from " + str(newobject), "clsolver")
        succes = self._search(newobject)
        if succes and self.is_top_level(newobject):
            # maybe more rules applicable.... push back on stack
            self._new.append(newobject)
def _add_variable(self, var):
    """Add a variable if not already in system

       arguments:
            var: any hashable object
    """
    if not self._graph.has_vertex(var):
        diag_print("_add_variable " + str(var), "clsolver")
        self._add_to_group("_variables", var)
def add(self, cluster):
    """Add a cluster.

       arguments:
          cluster: A Rigid
    """
    diag_print("add_cluster " + str(cluster), "clsolver")
    self._add_cluster(cluster)
    self._process_new()
def set_root(self, cluster):
    """Set root cluster, used for positioning and orienting the solutions"""
    diag_print("set root " + str(self._rootcluster), "clsolver")
    if self._rootcluster != None:
        oldrootvar = rootname(self._rootcluster)
        self._mg.set(oldrootvar, False)
    newrootvar = rootname(cluster)
    self._mg.set(newrootvar, True)
    self._rootcluster = cluster
def _add_method(self, method):
    diag_print("new " + str(method), "clsolver")
    self._add_to_group("_methods", method)
    for obj in method.inputs():
        self._add_dependency(obj, method)
    for obj in method.outputs():
        self._add_dependency(method, obj)
        self._add_dependency(obj, method)
    self._mg.add_method(method)
    self.send_notify(("add", method))
def _is_redundant_method(self, merge):
    # check if the method is redundant (not information increasing and not reducing number of clusters)
    infinc = self._is_information_increasing(merge)
    reduc = self._is_cluster_reducing(merge)
    if not infinc and not reduc:
        diag_print("method is redundant", "clsolver")
        return True
    else:
        diag_print("method is not redundant", "clsolver")
        return False
def multi_execute(self, inmap):
    diag_print("DeriveAA.multi_execute called", "clmethods")
    c312 = inmap[self.a_cab]
    c123 = inmap[self.a_abc]
    v1 = self.a
    v2 = self.b
    v3 = self.c
    a312 = angle_3p(c312.get(v3), c312.get(v1), c312.get(v2))
    d12 = 1.0
    a123 = angle_3p(c123.get(v1), c123.get(v2), c123.get(v3))
    solutions = solve_ada_3D(v1, v2, v3, a312, d12, a123)
    return solutions
def solve_ddd(v1, v2, v3, d12, d23, d31):
    diag_print("solve_ddd: %s %s %s %f %f %f" % (v1, v2, v3, d12, d23, d31), "clmethods")
    p1 = vector.vector([0.0, 0.0])
    p2 = vector.vector([d12, 0.0])
    p3s = cc_int(p1, d31, p2, d23)
    solutions = []
    for p3 in p3s:
        solution = Configuration({v1: p1, v2: p2, v3: p3})
        solutions.append(solution)
    diag_print("solve_ddd solutions" + str(solutions), "clmethods")
    return solutions
def multi_execute(self, inmap):
    diag_print("MergeAA.multi_execute called", "clmethods")
    c312 = inmap[self.a_cab]
    c123 = inmap[self.a_abc]
    v1 = self.a
    v2 = self.b
    v3 = self.c
    a312 = angle_3p(c312.get(v3), c312.get(v1), c312.get(v2))
    d12 = 1.0
    a123 = angle_3p(c123.get(v1), c123.get(v2), c123.get(v3))
    solutions = solve_ada_3D(v1, v2, v3, a312, d12, a123)
    return solutions
def multi_execute(self, inmap):
    diag_print("MergeDR.multi_execute called", "clmethods")
    c1 = self._inputs[0]
    c2 = self._inputs[1]
    conf1 = inmap[c1]
    conf2 = inmap[c2]
    #res = conf1.merge2D(conf2)
    #return [res]
    if len(c1.vars) == 2:
        return [conf2.copy()]
    else:
        return [conf1.copy()]
def multi_execute(self, inmap):
    diag_print("DeriveDAD.multi_execute called", "clmethods")
    c12 = inmap[self.d_ab]
    c123 = inmap[self.a_abc]
    c23 = inmap[self.d_bc]
    v1 = self.a
    v2 = self.b
    v3 = self.c
    d12 = distance_2p(c12.get(v1), c12.get(v2))
    a123 = angle_3p(c123.get(v1), c123.get(v2), c123.get(v3))
    d23 = distance_2p(c23.get(v2), c23.get(v3))
    solutions = solve_dad(v1, v2, v3, d12, a123, d23)
    return solutions
def multi_execute(self, inmap):
    diag_print("DeriveDAD.multi_execute called", "clmethods")
    c12 = inmap[self.d_ab]
    c123 = inmap[self.a_abc]
    c23 = inmap[self.d_bc]
    v1 = self.a
    v2 = self.b
    v3 = self.c
    d12 = distance_2p(c12.get(v1), c12.get(v2))
    a123 = angle_3p(c123.get(v1), c123.get(v2), c123.get(v3))
    d23 = distance_2p(c23.get(v2), c23.get(v3))
    solutions = solve_dad_3D(v1, v2, v3, d12, a123, d23)
    return solutions
def multi_execute(self, inmap):
    diag_print("DeriveDDD.multi_execute called", "clmethods")
    c12 = inmap[self.d_ab]
    c13 = inmap[self.d_ac]
    c23 = inmap[self.d_bc]
    v1 = self.a
    v2 = self.b
    v3 = self.c
    d12 = distance_2p(c12.get(v1), c12.get(v2))
    d31 = distance_2p(c13.get(v1), c13.get(v3))
    d23 = distance_2p(c23.get(v2), c23.get(v3))
    solutions = solve_ddd_3D(v1, v2, v3, d12, d23, d31)
    return solutions
def multi_execute(self, inmap):
    diag_print("DeriveADD.multi_execute called", "clmethods")
    c312 = inmap[self.a_cab]
    c12 = inmap[self.d_ab]
    c23 = inmap[self.d_bc]
    v1 = self.a
    v2 = self.b
    v3 = self.c
    a312 = angle_3p(c312.get(v3), c312.get(v1), c312.get(v2))
    d12 = distance_2p(c12.get(v1), c12.get(v2))
    d23 = distance_2p(c23.get(v2), c23.get(v3))
    solutions = solve_add_3D(v1, v2, v3, a312, d12, d23)
    return solutions
def multi_execute(self, inmap):
    diag_print("MergeDDD.multi_execute called", "clmethods")
    c12 = inmap[self.d_ab]
    c13 = inmap[self.d_ac]
    c23 = inmap[self.d_bc]
    v1 = self.a
    v2 = self.b
    v3 = self.c
    d12 = distance_2p(c12.get(v1), c12.get(v2))
    d31 = distance_2p(c13.get(v1), c13.get(v3))
    d23 = distance_2p(c23.get(v2), c23.get(v3))
    solutions = solve_ddd_3D(v1, v2, v3, d12, d23, d31)
    return solutions
def multi_execute(self, inmap):
    diag_print("MergeADD.multi_execute called", "clmethods")
    c312 = inmap[self.a_cab]
    c12 = inmap[self.d_ab]
    c23 = inmap[self.d_bc]
    v1 = self.a
    v2 = self.b
    v3 = self.c
    a312 = angle_3p(c312.get(v3), c312.get(v1), c312.get(v2))
    d12 = distance_2p(c12.get(v1), c12.get(v2))
    d23 = distance_2p(c23.get(v2), c23.get(v3))
    solutions = solve_add_3D(v1, v2, v3, a312, d12, d23)
    return solutions
def _search(self, newcluster):
    diag_print("search from: " + str(newcluster), "clsolver3D")
    # find all toplevel clusters connected to newcluster via one or more variables
    connected = set()
    for var in newcluster.vars:
        dependend = self.find_dependend(var)
        dependend = filter(lambda x: self.is_top_level(x), dependend)
        connected.update(dependend)
    diag_print("search: connected clusters=" + str(connected), "clsolver3D")
    # try applying methods
    if self._try_methods(connected):
        return True
    return False
def _try_matches(self, methodclass, matches):
    # print "method="+str(methodclass),"number of matches = "+str(len(matches))
    for s in matches:
        diag_print("try match: " + str(s), "clsolver3D")
        method = apply(methodclass, [s])
        succes = self._add_method_complete(method)
        if succes:
            #raw_input()
            #print "press key"
            return True
        else:
            # WARNING: fast bailout, may be incomplete!
            return False
    # end for match
    return False
def multi_execute(self, inmap):
    diag_print("MergePR.multi_execute called", "clmethods")
    #c1 = self._inputs[0]
    #c2 = self._inputs[1]
    conf1 = inmap[self._inputs[0]]
    conf2 = inmap[self._inputs[1]]
    isroot1 = inmap[self._inputs[2]]
    isroot2 = inmap[self._inputs[3]]
    if isroot1:
        res = conf1.merge(conf2)
    elif isroot2:
        res = conf2.merge(conf1)
    else:
        # cheapest - merge single point with rigid
        res = conf2.merge(conf1)
    return [res]
def multi_execute(self, inmap):
    diag_print("MergePR.multi_execute called", "clmethods")
    #c1 = self._inputs[0]
    #c2 = self._inputs[1]
    conf1 = inmap[self._inputs[0]]
    conf2 = inmap[self._inputs[1]]
    isroot1 = inmap[self._inputs[2]]
    isroot2 = inmap[self._inputs[3]]
    if isroot1:
        res = conf1.merge(conf2)
    elif isroot2:
        res = conf2.merge(conf1)
    else:
        # cheapest - just copy reference
        res = conf2
    return [res]
def _try_methods(self, nlet):
    """finds possible rewrite rule applications on the given set of clusters,
       applies one and returns True iff successful
    """
    refgraph = reference2graph(nlet)
    for methodclass in self._pattern_methods:
        diag_print("trying generic pattern matching for " + str(methodclass), "clsolver3D")
        matches = gmatch(methodclass.patterngraph, refgraph)
        if self._try_matches(methodclass, matches):
            return True
        # end for match
    # end for method
    return False
def multi_execute(self, inmap):
    diag_print("MergeDR.multi_execute called", "clmethods")
    c1 = self._inputs[0]
    c2 = self._inputs[1]
    conf1 = inmap[c1]
    conf2 = inmap[c2]
    isroot1 = inmap[self._inputs[2]]
    isroot2 = inmap[self._inputs[3]]
    if isroot1:
        res = conf1.merge(conf2)
    elif isroot2:
        res = conf2.merge(conf1)
    else:
        # cheapest - merge distance with rigid
        res = conf2.merge(conf1)
    return [res]
def _is_cluster_reducing(self, merge):
    # check if method reduces number of clusters (reduc)
    output = merge.outputs()[0]
    nremove = 0
    for cluster in merge.input_clusters():
        if num_constraints(cluster.intersection(output)) >= num_constraints(cluster):
            # will be removed from toplevel
            nremove += 1
    # exception if method sets noremove flag
    if hasattr(merge, "noremove") and merge.noremove == True:
        nremove = 0
    reduc = (nremove > 1)
    diag_print("reduce # clusters:" + str(reduc), "clsolver")
    return reduc
def _is_information_increasing(self, merge):
    # check that the method is information increasing (infinc)
    output = merge.outputs()[0]
    infinc = True
    connected = set()
    for var in output.vars:
        dependend = self.find_dependend(var)
        dependend = filter(lambda x: self.is_top_level(x), dependend)
        connected.update(dependend)
    # NOTE 07-11-2007 (while writing the paper): this implementation of
    # information increasing may not be correct. We may need to check that
    # the total sum of the information in the overlapping clusters is equal
    # to the information in the output.
    for cluster in connected:
        if num_constraints(cluster.intersection(output)) >= num_constraints(output):
            infinc = False
            break
    diag_print("information increasing:" + str(infinc), "clsolver")
    return infinc
def solve_dad(v1, v2, v3, d12, a123, d23):
    """returns a list of Configurations of v1,v2,v3 such that distance v1-v2=d12 etc.
        v<x>: name of point variables
        d<xy>: numeric distance values
        a<xyz>: numeric angle in radians
    """
    diag_print("solve_dad: %s %s %s %f %f %f" % (v1, v2, v3, d12, a123, d23), "clmethods")
    p2 = vector.vector([0.0, 0.0])
    p1 = vector.vector([d12, 0.0])
    p3s = [vector.vector([d23 * math.cos(a123), d23 * math.sin(a123)])]
    solutions = []
    for p3 in p3s:
        solution = Configuration({v1: p1, v2: p2, v3: p3})
        solutions.append(solution)
    return solutions
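# --- Illustration (not part of the solver) ----------------------------------
# solve_dad fixes p2 at the origin, p1 on the positive x-axis at distance d12,
# and places p3 at angle a123 (measured at p2) and distance d23. The resulting
# distance p1-p3 then follows the law of cosines. A small self-contained check
# of that construction; the helper is hypothetical and uses plain tuples
# instead of the solver's vector/Configuration types.
import math

def _dad_triangle_sketch(d12, a123, d23):
    p2 = (0.0, 0.0)
    p1 = (d12, 0.0)
    p3 = (d23 * math.cos(a123), d23 * math.sin(a123))
    d13 = math.hypot(p1[0] - p3[0], p1[1] - p3[1])
    # law of cosines: d13^2 = d12^2 + d23^2 - 2*d12*d23*cos(a123)
    expected = math.sqrt(d12 ** 2 + d23 ** 2 - 2.0 * d12 * d23 * math.cos(a123))
    return d13, expected

# both values agree, e.g. for a right angle at p2:
# print(_dad_triangle_sketch(3.0, math.pi / 2.0, 4.0))   # -> (5.0, 5.0)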
def multi_execute(self, inmap):
    diag_print("SelectionMethod.multi_execute called", "SelectionMethod.multi_execute")
    incluster = self._inputs[0]
    inconf = inmap[incluster]
    diag_print("input configuration = " + str(inconf), "SelectionMethod.multi_execute")
    sat = True
    for con in self._constraints:
        diag_print("constraint = " + str(con), "SelectionMethod.multi_execute")
        satcon = con.satisfied(inconf.map)
        diag_print("satisfied = " + str(satcon), "SelectionMethod.multi_execute")
        sat = sat and satcon
    diag_print("all satisfied = " + str(sat), "SelectionMethod.multi_execute")
    if sat:
        return [inconf]
    else:
        return []
def multi_execute(self, inmap):
    diag_print("MergeTTD.multi_execute called", "clmethods")
    c123 = inmap[self.t_abc]
    c124 = inmap[self.t_abd]
    c34 = inmap[self.d_cd]
    v1 = self.a
    v2 = self.b
    v3 = self.c
    v4 = self.d
    p1 = c123.get(v1)
    p2 = c123.get(v2)
    p3 = c123.get(v3)
    d14 = distance_2p(c124.get(v1), c124.get(v4))
    d24 = distance_2p(c124.get(v2), c124.get(v4))
    d34 = distance_2p(c34.get(v3), c34.get(v4))
    return solve_3p3d(v1, v2, v3, v4, p1, p2, p3, d14, d24, d34)
def multi_execute(self, inmap):
    diag_print("DeriveTTD.multi_execute called", "clmethods")
    c123 = inmap[self.t_abc]
    c124 = inmap[self.t_abd]
    c34 = inmap[self.d_cd]
    v1 = self.a
    v2 = self.b
    v3 = self.c
    v4 = self.d
    p1 = c123.get(v1)
    p2 = c123.get(v2)
    p3 = c123.get(v3)
    d14 = distance_2p(c124.get(v1), c124.get(v4))
    d24 = distance_2p(c124.get(v2), c124.get(v4))
    d34 = distance_2p(c34.get(v3), c34.get(v4))
    return solve_3p3d(v1, v2, v3, v4, p1, p2, p3, d14, d24, d34)
def _add_hog(self, hog):
    diag_print("_add_hog:" + str(hog), "clsolver")
    # check if not already exists
    if self._graph.has_vertex(hog):
        raise StandardError, "hedgehog already in clsolver"
    # update graph
    self._add_to_group("_hedgehogs", hog)
    for var in list(hog.xvars) + [hog.cvar]:
        self._add_variable(var)
        self._add_dependency(var, hog)
    # add to top level
    self._add_top_level(hog)
    # add to methodgraph
    self._mg.add_variable(hog)
    # notify
    self.send_notify(("add", hog))
def solve_add(a, b, c, a_cab, d_ab, d_bc):
    """returns a list of Configurations of a,b,c such that the angle at a is a_cab,
       the distance a-b is d_ab and the distance b-c is d_bc.
        a, b, c: names of point variables
        d_ab, d_bc: numeric distance values
        a_cab: numeric angle in radians
    """
    diag_print("solve_add: %s %s %s %f %f %f" % (a, b, c, a_cab, d_ab, d_bc), "clmethods")
    p_a = vector.vector([0.0, 0.0])
    p_b = vector.vector([d_ab, 0.0])
    dir = vector.vector([math.cos(-a_cab), math.sin(-a_cab)])
    solutions = cr_int(p_b, d_bc, p_a, dir)
    rval = []
    for p_c in solutions:
        map = {a: p_a, b: p_b, c: p_c}
        rval.append(Configuration(map))
    return rval
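# --- Illustration (not part of the solver) ----------------------------------
# solve_add shoots a ray from p_a in the direction given by the angle at a and
# intersects it with the circle of radius d_bc around p_b (the cr_int call).
# A minimal, hypothetical ray-circle intersection with plain tuples; it is a
# sketch of the geometry, not the solver's cr_int.
import math

def _circle_ray_intersection_sketch(center, radius, origin, direction):
    """Intersect the ray origin + t*direction (t >= 0) with a circle."""
    fx = origin[0] - center[0]
    fy = origin[1] - center[1]
    a = direction[0] ** 2 + direction[1] ** 2
    b = 2.0 * (fx * direction[0] + fy * direction[1])
    c = fx ** 2 + fy ** 2 - radius ** 2
    disc = b * b - 4.0 * a * c
    if disc < 0.0:
        return []                                  # ray misses the circle
    sqrt_disc = math.sqrt(disc)
    ts = [(-b - sqrt_disc) / (2.0 * a)]
    if sqrt_disc > 0.0:
        ts.append((-b + sqrt_disc) / (2.0 * a))
    points = []
    for t in ts:
        if t >= 0.0:                               # keep hits in front of the origin
            points.append((origin[0] + t * direction[0],
                           origin[1] + t * direction[1]))
    return points

# e.g. a ray along the x-axis hits the unit circle around (2,0) at (1,0) and (3,0):
# print(_circle_ray_intersection_sketch((2.0, 0.0), 1.0, (0.0, 0.0), (1.0, 0.0)))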
def multi_execute(self, inmap):
    diag_print("MergeRR.multi_execute called", "clmethods")
    c1 = self._inputs[0]
    c2 = self._inputs[1]
    conf1 = inmap[c1]
    conf2 = inmap[c2]
    isroot1 = inmap[self._inputs[2]]
    isroot2 = inmap[self._inputs[3]]
    if isroot1 and not isroot2:
        res = conf1.merge(conf2)
    elif isroot2 and not isroot1:
        res = conf2.merge(conf1)
    elif len(c1.vars) < len(c2.vars):
        # cheapest - transform smallest config
        res = conf2.merge(conf1)
    else:
        res = conf1.merge(conf2)
    return [res]
def multi_execute(self, inmap):
    diag_print("CheckAR.multi_execute called", "clmethods")
    # get configurations
    hog = inmap[self.hog]
    rigid = inmap[self.rigid]
    xvars = list(self.hog.xvars)
    # test if all angles match
    for i in range(len(self.sharedx) - 1):
        hangle = angle_3p(hog.get(xvars[i]), hog.get(self.hog.cvar), hog.get(xvars[i + 1]))
        rangle = angle_3p(rigid.get(xvars[i]), rigid.get(self.hog.cvar), rigid.get(xvars[i + 1]))
        # angle check failed, return no configuration
        if not tol_eq(hangle, rangle):
            return []
    # all checks passed, return rigid configuration
    return [rigid]
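# --- Illustration (not part of the solver) ----------------------------------
# CheckAR compares each angle x_i - cvar - x_{i+1} in the hedgehog against the
# corresponding angle in the rigid. The helper below is a hypothetical,
# self-contained stand-in for the angle computation, using plain tuples, to
# show what quantity is being compared; it is not the library's angle_3p.
import math

def _angle_3p_sketch(p1, p2, p3):
    """Magnitude of the angle at p2 between the segments p2-p1 and p2-p3, in radians."""
    a1 = math.atan2(p1[1] - p2[1], p1[0] - p2[0])
    a3 = math.atan2(p3[1] - p2[1], p3[0] - p2[0])
    ang = a3 - a1
    # wrap into (-pi, pi] before taking the magnitude
    while ang > math.pi:
        ang -= 2.0 * math.pi
    while ang < -math.pi:
        ang += 2.0 * math.pi
    return abs(ang)

# print(_angle_3p_sketch((1.0, 0.0), (0.0, 0.0), (0.0, 1.0)))   # ~ pi/2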
def _source_constraint_in_cluster(self, constraint, cluster):
    if not self._contains_constraint(cluster, constraint):
        raise StandardError, "constraint not in cluster"
    elif self._is_atomic(cluster):
        return cluster
    else:
        method = self._determining_method(cluster)
        inputs = method.inputs()
        down = filter(lambda x: self._contains_constraint(x, constraint), inputs)
        if len(down) == 0:
            return cluster
        elif len(down) > 1:
            if method.consistent == True:
                return self._source_constraint_in_cluster(constraint, down[0])
            else:
                diag_print("Warning: source is inconsistent", "clsolver")
                return None
        else:
            return self._source_constraint_in_cluster(constraint, down[0])
def _remove(self, object):
    # find all indirectly dependend objects
    todelete = [object] + self._find_descendend(object)
    torestore = set()
    # remove all objects
    for item in todelete:
        # if merge removed items from toplevel then add them back to top level
        if hasattr(item, "restore_toplevel"):
            for cluster in item.restore_toplevel:
                torestore.add(cluster)
        # delete it from graph
        diag_print("deleting " + str(item), "clsolver.remove")
        self._graph.rem_vertex(item)
        # remove from _new list
        if item in self._new:
            self._new.remove(item)
        # remove from incremental top_level
        self._toplevel.remove(item)
        # remove from methodgraph
        if isinstance(item, Method):
            # note: method may have been removed because variable removed
            try:
                self._mg.rem_method(item)
            except:
                pass
            # restore SelectionConstraints
            if isinstance(item, SelectionMethod):
                for con in item.iter_constraints():
                    self._selection_method[con] = None
        if isinstance(item, MultiVariable):
            self._mg.rem_variable(item)
        # remove variables with no dependent clusters
        if isinstance(item, Cluster):
            for var in item.vars:
                if len(self.find_dependend(var)) == 0:
                    self._graph.rem_vertex(var)
        # notify listeners
        self.send_notify(("remove", item))
    # restore toplevel (also added to _new)
    for cluster in torestore:
        if self._graph.has_vertex(cluster):
            self._add_top_level(cluster)