def build_minimum_tree(g, root, terminals, edges, directed=True):
    """Prune ``edges`` down to the subset needed for ``root`` to reach
    every node in ``terminals``, and return the filtered tree.
    """
    # Rebuild a graph over the same vertex set as `g`, containing only
    # the candidate edges.
    tree = Graph(directed=directed)
    for _ in range(g.num_vertices()):
        tree.add_vertex()
    for src, dst in edges:
        tree.add_edge(src, dst)

    # Search from the root (project-specific visitor/BFS helpers) and
    # collect the predecessor-path edges for every terminal; their union
    # is the minimal edge set.
    visitor = init_visitor(tree, root)
    pbfs_search(tree, source=root, terminals=list(terminals), visitor=visitor)
    keep = set()
    for terminal in terminals:
        keep.update(extract_edges_from_pred(tree, root, terminal, visitor.pred))

    # Mask out every edge that is not on some root -> terminal path.
    efilt = tree.new_edge_property('bool')
    efilt.a = False
    for src, dst in keep:
        efilt[src, dst] = True
    tree.set_edge_filter(efilt)
    return filter_nodes_by_edges(tree, keep)
def build_minimum_tree(g, root, terminals, edges, directed=True): """remove redundant edges from `edges` so that root can reach each node in terminals """ # build the tree t = Graph(directed=directed) for _ in range(g.num_vertices()): t.add_vertex() for (u, v) in edges: t.add_edge(u, v) # mask out redundant edges vis = init_visitor(t, root) pbfs_search(t, source=root, terminals=list(terminals), visitor=vis) minimum_edges = {e for u in terminals for e in extract_edges_from_pred(t, root, u, vis.pred)} # print(minimum_edges) efilt = t.new_edge_property('bool') efilt.a = False for u, v in minimum_edges: efilt[u, v] = True t.set_edge_filter(efilt) return filter_nodes_by_edges(t, minimum_edges)
class BaseGraph(object):
    """
    Class representing a graph. We do not use pure graph_tool.Graph for we
    want to be able to easily change this library. Neither we use
    inheritance as graph_tool has inconvenient licence.
    """

    def __init__(self):
        # Underlying graph_tool.Graph; created lazily by init_graph/unpickle.
        self._g = None
        # Maps node name -> BaseNode wrapper (only for nodes added via add_node).
        self._node_dict = {}
        # Lazy caches, built on first use by the corresponding methods below.
        self._syn_to_vertex_map = None
        self._lemma_to_nodes_dict = None
        self._lu_on_vertex_dict = None

    def use_graph_tool(self):
        """
        Returns underlying graph_tool.Graph. It should be avoided at all
        costs.
        """
        return self._g

    def get_node_for_synset_id(self, syn_id):
        """
        Return the node whose synset has identifier `syn_id`, or None.

        The synset-id -> node map is built lazily on the first call and
        reused by all subsequent calls.
        """
        if not self._syn_to_vertex_map:
            self._syn_to_vertex_map = {}
            for node in self.all_nodes():
                if node.synset:
                    synset_id = node.synset.synset_id
                    self._syn_to_vertex_map[synset_id] = node
        return self._syn_to_vertex_map.get(syn_id, None)

    def pickle(self, filename):
        # Persist the underlying graph via graph_tool's own serialization.
        self._g.save(filename)

    def unpickle(self, filename):
        # Load a graph previously written by pickle()/Graph.save().
        self._g = load_graph(filename)

    def init_graph(self, drctd=False):
        """Create a fresh, empty underlying graph (directed if `drctd`)."""
        self._g = Graph(directed=drctd)

    def copy_graph_from(self, g):
        """Replace the underlying graph with a copy of `g`'s graph."""
        self._g = g._g.copy()

    def set_directed(self, drctd):
        self._g.set_directed(drctd)

    def is_directed(self):
        return self._g.is_directed()

    def merge_graphs(self, g1, g2):
        """Set this graph to the union of `g1` and `g2` (properties kept)."""
        self._g = graph_union(g1._g, g2._g, internal_props=True)

    # Node operations:
    def all_nodes(self):
        """Yield every vertex wrapped in a BaseNode."""
        for node in self._g.vertices():
            yield BaseNode(self._g, node)

    def create_node_attribute(self, name, kind, value=None):
        """Create vertex property `name` of type `kind` unless it exists."""
        if not self.has_node_attribute(name):
            node_attr = self._g.new_vertex_property(kind, value)
            self._g.vertex_properties[name] = node_attr

    def create_node_attributes(self, node_attributes_list):
        """Create several vertex properties from (name, kind) pairs."""
        for attr in node_attributes_list:
            if not self.has_node_attribute(attr[0]):
                node_attr = self._g.new_vertex_property(attr[1])
                self._g.vertex_properties[attr[0]] = node_attr

    def has_node_attribute(self, name):
        """ Checks if a node attribute already exists """
        return name in self._g.vertex_properties

    def delete_node_attribute(self, name):
        """ Delete node attribute """
        del self._g.vertex_properties[name]

    def add_node(self, name, node_attributes_list=None):
        """
        Add a vertex registered under `name` with optional
        (attribute_name, value) pairs; return the (possibly pre-existing)
        BaseNode for `name`.
        """
        if node_attributes_list is None:
            node_attributes_list = []
        if name not in self._node_dict:
            new_node = self._g.add_vertex()
            self._node_dict[name] = BaseNode(self._g, new_node)
            for attr in node_attributes_list:
                self._g.vertex_properties[attr[0]][new_node] = attr[1]
        return self._node_dict[name]

    def get_node(self, name):
        return self._node_dict[name]

    def remove_node(self, name):
        # NOTE(review): graph_tool invalidates vertex descriptors above the
        # removed index; other entries of _node_dict may become stale after
        # this call — confirm callers account for that.
        self._g.remove_vertex(self._node_dict[name]._node)
        del self._node_dict[name]

    def nodes_filter(self,
                     nodes_to_filter_set,
                     inverted=False,
                     replace=False,
                     soft=False):
        """
        Filters out nodes from set

        Args:
            nodes_to_filter_set (Iterable): Nodes which fill be filtered out.
            inverted (bool): If True, nodes NOT in set will be filtered out.
                Defaults to False.
            replace (bool): Replace current filter instead of combining the
                two. Defaults to False.
            soft (bool): Hide nodes without removing them so they can be
                restored with reset_nodes_filter. Defaults to False.
        """
        predicate = lambda node: node not in nodes_to_filter_set
        self.nodes_filter_conditional(predicate, inverted, replace, soft)

    def nodes_filter_conditional(self,
                                 predicate,
                                 inverted=False,
                                 replace=False,
                                 soft=False):
        """
        Filters node based on a predicate

        Args:
            predicate (Callable): Predicate returning False for nodes that
                should be filtered out.
            inverted (bool): Invert condition. Defaults to False.
            replace (bool): Replace current filter instead of combining the
                two. Defaults to False.
            soft (bool): Hide nodes without removing them so they can be
                restored with reset_nodes_filter. Defaults to False.
        """
        (old_filter, old_inverted) = self._g.get_vertex_filter()
        new_filter = self._g.new_vertex_property("bool")
        for node in self.all_nodes():
            # A node is kept when the predicate (possibly inverted) holds...
            kept = predicate(node) != inverted
            if not replace and old_filter:
                # ...and, unless replacing, when the previous filter also
                # kept it.
                old_kept = bool(old_filter[node._node]) != old_inverted
                kept = kept and old_kept
            new_filter[node._node] = kept
        self._g.set_vertex_filter(new_filter, False)
        if not soft:
            self.apply_nodes_filter()

    def apply_nodes_filter(self):
        """ Removes nodes that are currently filtered out """
        self._g.purge_vertices()

    def reset_nodes_filter(self):
        """ Clears node filter """
        self._g.set_vertex_filter(None)

    # Edge operations:
    def num_edges(self):
        return self._g.num_edges()

    def all_edges(self):
        """Yield every edge wrapped in a BaseEdge."""
        for e in self._g.edges():
            yield BaseEdge(self._g, e)

    def get_edges_between(self, source, target):
        """
        Return all edges between source and target. Source and target
        can be either BaseNode or integer.
        """
        if isinstance(source, BaseNode):
            source = source._node
        if isinstance(target, BaseNode):
            target = target._node
        for e in self._g.edge(source, target, all_edges=True):
            yield BaseEdge(self._g, e)

    def get_edge(self, source, target, add_missing=False):
        """
        Return some edge between source and target. Source and target
        can be either BaseNode or integer.
        """
        if isinstance(source, BaseNode):
            source = source._node
        if isinstance(target, BaseNode):
            target = target._node
        e = self._g.edge(source, target, add_missing)
        if e is not None:
            return BaseEdge(self._g, e)
        else:
            return None

    def create_edge_attribute(self, name, kind, value=None):
        """Create edge property `name` of type `kind` unless it exists."""
        if not self.has_edge_attribute(name):
            edge_attr = self._g.new_edge_property(kind, value)
            self._g.edge_properties[name] = edge_attr

    def alias_edge_attribute(self, name, alias):
        """Expose the edge property `name` under a second key `alias`."""
        self._g.edge_properties[alias] = self._g.edge_properties[name]

    def create_edge_attributes(self, edge_attributes_list):
        """Create several edge properties from (name, kind) pairs."""
        for attr in edge_attributes_list:
            if not self.has_edge_attribute(attr[0]):
                edge_attr = self._g.new_edge_property(attr[1])
                self._g.edge_properties[attr[0]] = edge_attr

    def has_edge_attribute(self, name):
        """ Checks if an edge attribute already exists """
        return name in self._g.edge_properties

    def delete_edge_attribute(self, name):
        """ Delete edge attribute """
        del self._g.edge_properties[name]

    def add_edge(self, parent, child, edge_attributes_list=None):
        """Add an edge parent -> child with optional (name, value) pairs."""
        if edge_attributes_list is None:
            edge_attributes_list = []
        new_edge = self._g.add_edge(parent._node, child._node)
        for attr in edge_attributes_list:
            self._g.edge_properties[attr[0]][new_edge] = attr[1]
        return BaseEdge(self._g, new_edge)

    def edges_filter(self, edges_to_filter_set):
        """Permanently remove every edge found in `edges_to_filter_set`."""
        edge_filter = self._g.new_edge_property("bool")
        for e in self.all_edges():
            if e in edges_to_filter_set:
                edge_filter[e._edge] = False
            else:
                edge_filter[e._edge] = True
        self._g.set_edge_filter(edge_filter)
        # purge makes the removal permanent (unlike the soft node filter).
        self._g.purge_edges()

    def ungraph_tool(self, thingy, lemma_on_only_synset_node_dict):
        """
        Converts given data structure so that it no longer have any
        graph_tool dependencies.
        """
        logger = logging.getLogger(__name__)
        # NOTE(review): type(x) == dict / == gt.PropertyMap would be more
        # idiomatic (and subclass-tolerant) as isinstance checks.
        if type(thingy) == dict:
            return {
                self.ungraph_tool(k, lemma_on_only_synset_node_dict):
                self.ungraph_tool(thingy[k], lemma_on_only_synset_node_dict)
                for k in thingy
            }
        nodes_to_translate = set()
        for vset in lemma_on_only_synset_node_dict.values():
            for v in vset:
                nodes_to_translate.add(v)
        if type(thingy) == gt.PropertyMap:
            dct = {}
            if thingy.key_type() == 'v':
                # vertex-keyed property map
                for node in nodes_to_translate:
                    dct[node] = thingy[node.use_graph_tool()]
            elif thingy.key_type() == 'e':
                # edge-keyed property map
                for edge in self.all_edges():
                    dct[edge] = thingy[edge.use_graph_tool()]
            else:
                logger.error('Unknown property type %s', thingy.key_type())
                # NOTE(review): bug — this raises "TypeError: exceptions must
                # derive from BaseException"; should be NotImplementedError.
                raise NotImplemented
            return dct

    def generate_lemma_to_nodes_dict_synsets(self):
        """
        This method generates a utility dictionary, which maps lemmas to
        corresponding node objects. It is expensive in means of time needed
        to generate the dictionary. It should therefore be executed at the
        beginning of the runtime and later its results should be reused as
        many times as needed without re-executing the function.
        """
        lemma_to_nodes_dict = defaultdict(set)
        for node in self.all_nodes():
            try:
                lu_set = node.synset.lu_set
            except KeyError:
                # node has no synset data — skip it
                continue
            for lu in lu_set:
                lemma = lu.lemma.lower()
                lemma_to_nodes_dict[lemma].add(node)
        self._lemma_to_nodes_dict = lemma_to_nodes_dict

    def generate_lemma_to_nodes_dict_lexical_units(self):
        """
        This method generates a utility dictionary, which maps lemmas to
        corresponding node objects. It is expensive in means of time needed
        to generate the dictionary. It should therefore be executed at the
        beginning of the runtime and later its results should be reused as
        many times as needed without re-executing the function.
        """
        lemma_to_nodes_dict = defaultdict(set)
        for node in self.all_nodes():
            try:
                lemma = node.lu.lemma.lower()
                lemma_to_nodes_dict[lemma].add(node)
            # NOTE(review): bare except hides everything including
            # KeyboardInterrupt; the sibling method above catches KeyError —
            # confirm and narrow.
            except:
                continue
        self._lemma_to_nodes_dict = lemma_to_nodes_dict

    @property
    def lemma_to_nodes_dict(self):
        # Read-only access to the cache built by the generate_* methods.
        return self._lemma_to_nodes_dict

    def _make_lu_on_v_dict(self):
        """ Makes dictionary lu on vertex """
        lu_on_vertex_dict = defaultdict(set)
        for node in self.all_nodes():
            try:
                nl = node.lu
            except Exception:
                continue
            if nl:
                # NOTE(review): despite defaultdict(set), values are
                # overwritten, not added — each lu_id maps to a single node.
                lu_on_vertex_dict[node.lu.lu_id] = node
        self._lu_on_vertex_dict = lu_on_vertex_dict
class graphtool():
    """Network-topology helper: builds a device graph from the Device
    collection and computes alternative paths between two devices.

    NOTE(review): relies on module-level names (Device, System, Stpdomins,
    passive, supported, dev_type_dict, graph_tool, itertools, groupby,
    copy, pickle) defined elsewhere in this file/project.
    """

    def get_edges(self):
        """Collect inter-device links (non-access ports) as [peer, devid]
        pairs and drop one direction of each duplicated link."""
        self.edges = []
        for dev in Device.objects:
            # NOTE(review): this assignment is dead — `port` is immediately
            # shadowed by the loop variable below.
            port = dev['ports']
            for port in dev['ports']:
                if not port['acc']:
                    self.edges.append([int(port['dev']), int(dev['devid'])])
        # NOTE(review): removing from the same list being iterated skips
        # elements; deduplication of reversed pairs may be incomplete —
        # verify with a link set containing adjacent duplicates.
        for edge in self.edges:
            if edge[::-1] in self.edges:
                self.edges.remove(edge)

    def create_graph(self):
        """Build an undirected graph_tool graph from the collected edges."""
        self.get_edges()
        self.g = Graph(directed=False)
        self.g.add_edge_list(self.edges)

    def load_graph(self):
        # NOTE(review): pickle.loads on stored data — safe only if the
        # System.graph blob is fully trusted.
        self.g = pickle.loads(System.objects.first().graph.read())

    def shortestpath(self, source, dest):
        """Return de-duplicated candidate paths (as devid lists) between two
        device URIs: the shortest path, a second path avoiding the first
        path's first edge, all shortest paths, and — when more than one hop
        touches an STP domain — all paths within that domain."""
        if source == dest:
            # user-facing message (Russian): source and dest must differ
            return ('нужны разные пипишники')
        # ip to id
        source = Device.objects(uri=source)
        dest = Device.objects(uri=dest)
        if len(source) > 0 and len(dest) > 0:
            source = self.g.vertex(source[0].devid)
            dest = self.g.vertex(dest[0].devid)
            # 1) plain shortest path
            result = graph_tool.topology.shortest_path(self.g, source, dest)
            path = [self.g.vertex_index[x] for x in result[0]]
            # 2) shortest path with the first edge of (1) masked out,
            #    to get an alternative route
            filteredge = self.g.new_edge_property('bool')
            filteredge[result[1][0]] = True
            self.g.set_edge_filter(filteredge, inverted=True)
            result = graph_tool.topology.shortest_path(self.g, source, dest)
            second_path = [self.g.vertex_index[x] for x in result[0]]
            self.g.clear_filters()
            # 3) every shortest path
            another_paths = []
            all_shortest = graph_tool.topology.all_shortest_paths(
                self.g, source, dest)
            for i in all_shortest:
                another_paths.append([self.g.vertex_index[j] for j in i])
            # merge and de-duplicate (tuples are hashable, lists are not)
            self.all_paths = [path] + [second_path] + another_paths
            self.all_paths = [tuple(t) for t in self.all_paths]
            self.all_paths = [t for t in self.all_paths if len(t) > 0]
            self.all_paths = list(set(self.all_paths))
            self.all_paths = [list(t) for t in self.all_paths]
            # 4) detect STP domains crossed by the candidate paths
            dev_from_stp = []
            count = 0
            for path in self.all_paths:
                for dev in path:
                    dev = Device.objects(devid=dev).first().uri
                    if Stpdomins.objects(devices__=dev):
                        count += 1
                        # side-effect-only comprehension collects the
                        # domain's devices without duplicates
                        [
                            dev_from_stp.append(x)
                            for x in Stpdomins.objects(
                                devices__=dev).first().devices
                            if x not in dev_from_stp
                        ]
            if len(dev_from_stp) > 0 and count > 1:
                print('stp domains')
                # restrict the graph to the STP domain's devices and add
                # every path between its first and last device
                filtevertex = self.g.new_vertex_property('bool')
                for x in dev_from_stp:
                    filtevertex[self.g.vertex(
                        Device.objects(uri=x).first().devid)] = True
                self.g.set_vertex_filter(filtevertex)
                source = self.g.vertex(
                    Device.objects(uri=dev_from_stp[0]).first().devid)
                dest = self.g.vertex(
                    Device.objects(uri=dev_from_stp[-1]).first().devid)
                result = graph_tool.topology.all_paths(self.g, source, dest)
                for x in result:
                    self.all_paths.append([int(self.g.vertex(i)) for i in x])
                self.g.clear_filters()
            # final de-duplication via sort + groupby
            # NOTE(review): the groupby generator rebinds self.all_paths as
            # its loop variable — it works, but is fragile; verify intent.
            self.all_paths.sort()
            self.all_paths = list(
                self.all_paths
                for self.all_paths, _ in itertools.groupby(self.all_paths))
            self.all_paths = [
                path for path in self.all_paths if len(path) > 0
            ]
            return self.all_paths

    def fancy_shortest(self):
        """Render self.all_paths as [uri, addr, human-readable type]
        triples, skipping passive devices."""
        self.fancy_paths = []
        for path in self.all_paths:
            fancy = []
            for i in path:
                d = Device.objects(devid=i).first()
                if d.devtype not in passive:
                    fancy.append([d.uri, d.addr, dev_type_dict[d.devtype]])
            self.fancy_paths.append(fancy)
        return self.fancy_paths

    def paths_ports(self):
        """For every hop in every path, look up the port used on each side
        and aggregate per device. Returns (pretty, raw) dicts keyed by
        device URI with {'type': ..., 'ports': [...]} values."""
        output = []
        for path in self.all_paths:
            for i, j in zip(path, path[1:]):
                dev = Device.objects(devid=i).first()
                if dev.devtype in supported:
                    ports = [x['num'] for x in dev.ports if x['dev'] == j]
                    if len(ports) == 0:
                        ports = 0
                    else:
                        ports = ports[0]
                    output.append([dev.uri, dev.devtype, ports])
                dev = Device.objects(devid=j).first()
                if dev.devtype in supported:
                    ports = [x['num'] for x in dev.ports if x['dev'] == i]
                    if len(ports) == 0:
                        ports = 0
                    else:
                        ports = ports[0]
                    output.append([dev.uri, dev.devtype, ports])
        g_fancy_output = dict()
        g_output = dict()
        # NOTE(review): itertools.groupby only groups adjacent runs —
        # `output` is not sorted by URI first; the `key in g_output` branch
        # compensates, but confirm this is intended.
        for key, group in groupby(output, lambda x: x[0]):
            ports = []
            for i in group:
                ports.append(i[2])
            if key in g_output:
                # print (g_output[key]['ports'], ports)
                g_output[key]['ports'] = g_output[key]['ports'] + ports
            else:
                g_output[key] = {'type': i[1], 'ports': ports}
        for key in g_output:
            g_output[key]['ports'] = list(set(g_output[key]['ports']))
        # pretty copy with the type code translated to a display name
        g_fancy_output = copy.deepcopy(g_output)
        for i in g_fancy_output:
            g_fancy_output[i]['type'] = dev_type_dict[g_fancy_output[i]
                                                     ['type']]
        return g_fancy_output, g_output
class lqg(object):
    """Labelled quotient graph of a periodic net: edges carry integer
    translation labels; cycle/cocycle bases give an embedding (barycentric
    placement) via numpy linear algebra.

    NOTE(review): relies on module-level names (numpy, Graph,
    min_spanning_tree, shortest_path, topo) imported/defined elsewhere.
    """

    def __init__(self, dim=3):
        # dim: periodicity of the net (3 by default)
        self.dim = dim
        return

    def read_systre_key(self, skey, dim=3):
        """Parse a Systre key string into 0-based edges and their
        translation labels, setting nedges/nvertices."""
        self.dim = dim
        dfac = 2 + self.dim  # tokens per edge: two vertices + dim labels
        skey = skey.split()
        # NOTE(review): Python-2 remnant — under Python 3 this is float
        # division and range(self.nedges) below raises TypeError; should be
        # integer division (//).
        self.nedges = len(skey) / dfac
        self.nvertices = 1
        self.edges = []
        self.labels = []
        for i in range(self.nedges):
            # NOTE(review): map() returns a one-shot iterator on Python 3 —
            # it is consumed by the vertex-count loop before numpy.array
            # sees it, and `label` is stored as an iterator; written for
            # Python 2 where map() returns a list. Verify target version.
            edge = map(int, skey[i * dfac:i * dfac + 2])
            for j in edge:
                if j > self.nvertices:
                    self.nvertices = j
            # Systre keys are 1-based; shift to 0-based vertex indices.
            edge = list(numpy.array(edge) - 1)
            label = map(int, skey[i * dfac + 2:i * dfac + dfac])
            self.edges.append(edge)
            self.labels.append(label)
        return

    def write_systre_pgr(self, id="mfpb"):
        """Serialize edges+labels back to Systre PERIODIC_GRAPH format
        (vertex indices written 1-based)."""
        pgr = "PERIODIC_GRAPH\nID %s\nEDGES\n" % id
        for e, l in zip(self.edges, self.labels):
            entry = (" %s %s" + self.dim * " %1.0f" +
                     "\n") % tuple(list(numpy.array(e) + 1) + l)
            pgr += entry
        pgr += "END"
        return pgr

    def get_lqg_from_topo(self, topo):
        """Extract edges and periodic-connection labels from a topo object
        (each undirected connection counted once via v > i)."""
        # be careful not working for nets where an vertex is connected to
        # itself
        self.dim = 3
        self.nvertices = topo.get_natoms()
        self.nedges = 0
        self.edges = []
        self.labels = []
        for i in range(self.nvertices):
            for j, v in enumerate(topo.conn[i]):
                if v > i:
                    self.nedges += 1
                    self.edges.append([i, v])
                    #pdb.set_trace()
                    self.labels.append(list(topo.pconn[i][j]))
        return

    def get_lqg_from_lists(self, edges, labels, nvertices, dim):
        """Initialize directly from pre-built edge/label lists."""
        assert len(edges) == len(labels)
        self.edges = edges
        self.labels = labels
        self.dim = dim
        self.nedges = len(edges)
        self.nvertices = nvertices
        return

    def build_lqg(self):
        """Build the directed graph_tool graph with per-edge `label`
        (translation vector) and `number` (original edge index) props."""
        # cyclomatic number = size of the cycle basis
        self.nbasevec = self.nedges - self.nvertices + 1
        self.molg = Graph(directed=True)
        self.molg.ep.label = self.molg.new_edge_property("vector<double>")
        self.molg.ep.number = self.molg.new_edge_property("int")
        for i in range(self.nvertices):
            iv = self.molg.add_vertex()
        for i, e in enumerate(self.edges):
            ie = self.molg.add_edge(self.molg.vertex(e[0]),
                                    self.molg.vertex(e[1]))
            self.molg.ep.label[ie] = self.labels[i]
            self.molg.ep.number[ie] = i
        return

    def get_cyclic_basis(self):
        """Build the cycle basis: one fundamental cycle per non-tree edge
        of a minimum spanning tree, with +/-1 signs tracking edge
        orientation along the closing tree path."""
        nbasevec = self.nbasevec
        basis = numpy.zeros([nbasevec, self.nedges], dtype="int")
        self.molg.set_directed(False)
        tree = min_spanning_tree(self.molg)
        i = 0
        for e in self.molg.edges():
            if tree[e] == 0:
                # non-tree edge e closes a cycle: find the tree path from
                # its target back to its source
                self.molg.set_edge_filter(tree)
                vl, el = shortest_path(self.molg,
                                       self.molg.vertex(int(e.target())),
                                       self.molg.vertex(int(e.source())))
                self.molg.set_edge_filter(None)
                basis[i, self.molg.ep.number[e]] = 1
                # walk the path assigning orientation signs relative to the
                # previous edge (`neg` toggles when direction flips)
                neg = False
                for eb in el:
                    idx = self.molg.ep.number[eb]
                    ebt = self.get_edge_with_idx(idx)
                    if ebt.target() == e.target():
                        if neg != True:
                            basis[i, self.molg.ep.number[eb]] = -1
                            neg = True
                        else:
                            basis[i, self.molg.ep.number[eb]] = 1
                            neg = False
                    elif ebt.source() == e.source():
                        if neg != True:
                            basis[i, self.molg.ep.number[eb]] = -1
                            neg = True
                        else:
                            basis[i, self.molg.ep.number[eb]] = 1
                            neg = False
                    elif ebt.source() == e.target():
                        if neg != True:
                            basis[i, self.molg.ep.number[eb]] = 1
                            neg = False
                        else:
                            basis[i, self.molg.ep.number[eb]] = -1
                            neg = True
                    elif ebt.target() == e.source():
                        if neg != True:
                            basis[i, self.molg.ep.number[eb]] = 1
                            neg = False
                        else:
                            basis[i, self.molg.ep.number[eb]] = -1
                            neg = True
                    e = ebt
                i += 1
        self.cyclic_basis = basis
        self.molg.set_directed(True)
        return self.cyclic_basis

    def get_cocycle_basis(self):
        """Build nvertices-1 cocycle (vertex star) vectors: +1 for edges
        leaving the vertex, -1 for edges entering it."""
        n = self.nedges - (self.nedges - self.nvertices + 1)
        cocycles = numpy.zeros([n, self.nedges])
        self.molg.set_directed(False)
        i = 0
        for v in self.molg.vertices():
            el = v.out_edges()
            for eb in el:
                idx = self.molg.ep.number[eb]
                ebt = self.get_edge_with_idx(idx)
                if ebt.source() == v:
                    cocycles[i, idx] = 1
                else:
                    cocycles[i, idx] = -1
            i += 1
            if i == n:
                break
        self.cocycle_basis = cocycles
        return self.cocycle_basis

    def get_ncocycles(self, n):
        """Like get_cocycle_basis but for the first `n` vertices, without
        caching the result on self."""
        self.molg.set_directed(False)
        cocycles = numpy.zeros([n, self.nedges])
        i = 0
        for v in self.molg.vertices():
            el = v.out_edges()
            for eb in el:
                idx = self.molg.ep.number[eb]
                ebt = self.get_edge_with_idx(idx)
                if ebt.source() == v:
                    cocycles[i, idx] = 1
                else:
                    cocycles[i, idx] = -1
            i += 1
            if i == n:
                break
        return cocycles

    def get_B_matrix(self):
        """Stack cycle and cocycle bases into the full basis matrix B."""
        n = self.nedges - (self.nedges - self.nvertices + 1)
        if n > 0:
            self.B = numpy.append(self.cyclic_basis,
                                  self.cocycle_basis,
                                  axis=0)
        else:
            self.B = self.cyclic_basis
        return self.B

    def get_alpha(self):
        """Image of each basis vector under the label map: cycle rows map
        to their net translation; cocycle rows map to zero."""
        vimg = []
        labels = numpy.array(self.labels)
        for i in range(numpy.shape(self.cyclic_basis)[0]):
            img = numpy.sum(self.cyclic_basis[i] * labels.T, axis=1)
            vimg.append(img)
        for i in range(self.nedges - self.nbasevec):
            if self.dim == 2:
                vimg.append([0, 0])
            else:
                vimg.append([0, 0, 0])
        self.alpha = numpy.array(vimg)
        return self.alpha

    def get_image(self, vec):
        """Translation image of an arbitrary edge-space vector."""
        labels = numpy.array(self.labels)
        return numpy.sum(vec * labels.T, axis=1)

    def get_fracs(self):
        """Per-edge fractional displacement vectors: solve B * fracs = alpha."""
        self.fracs = numpy.dot(numpy.linalg.inv(self.B), self.alpha)
        return self.fracs

    def get_lattice_basis(self):
        """Combine the linearly-independent cycle rows so their images form
        the identity — yielding the lattice basis in edge space."""
        idx = self.find_li_vectors(self.alpha)
        latbase = self.alpha[idx]
        Lr = self.cyclic_basis[idx]
        ### we need to orthonormalize the latbase ###
        L = numpy.zeros([self.dim, self.nedges])
        olatbase = numpy.eye(self.dim, self.dim)
        for i in range(self.dim):
            b = numpy.linalg.solve(latbase.T, olatbase[i, :])
            for j in range(self.dim):
                L[i, :] += b[j] * Lr[j, :]
        self.lattice_basis = L
        return self.lattice_basis

    def get_kernel(self):
        """Basis of the label-map kernel: dependent cycle rows with their
        lattice component projected out, plus nvertices-1 cocycle rows."""
        k = numpy.zeros(
            [self.nbasevec - self.dim + self.nvertices - 1, self.nedges])
        idx = self.find_li_vectors(self.alpha)
        latbase = self.alpha[idx]
        counter = 0
        ### TODO: switch to other basis to make it more beautiful
        for i in range(self.nbasevec):
            if i not in idx:
                b = numpy.linalg.solve(latbase.T, self.alpha[i])
                bb = numpy.zeros(self.nedges)
                for j in range(self.dim):
                    bb += b[j] * self.cyclic_basis[idx[j]]
                k[counter] = self.cyclic_basis[i] - bb
                #print(self.get_image(k[counter]))
                counter += 1
        if self.nvertices > 1:
            k[self.nbasevec -
              self.dim:, :] = self.cocycle_basis[0:self.nvertices - 1, :]
        self.kernel = k
        return self.kernel

    def get_cell(self):
        """Unit-cell metric tensor: project the lattice basis onto the
        kernel's orthogonal complement (barycentric embedding)."""
        k = self.kernel
        L = self.lattice_basis
        S = numpy.dot(k, k.T)
        P = numpy.eye(self.nedges, self.nedges) - numpy.dot(
            k.T, numpy.dot(numpy.linalg.inv(S), k))
        self.cell = numpy.dot(L, numpy.dot(P, L.T))
        return self.cell

    def place_vertices(self, first=numpy.array([0.0, 0.0, 0.0])):
        """Propagate fractional coordinates from vertex 0 along zero-label
        edges first (up to 10 sweeps), then across remaining edges."""
        frac_xyz = numpy.zeros([self.nvertices, 3])
        frac_xyz[0, :] = first
        done = [0]
        counter = 0
        while len(done) != self.nvertices:
            for i, e in enumerate(self.edges):
                # only edges inside the home cell in the first pass
                if self.labels[i] == [0, 0, 0]:
                    if ((e[0] in done) and (e[1] not in done)):
                        #print(e, self.fracs[i,:])
                        frac_xyz[e[1], :] = (frac_xyz[e[0], :] +
                                             self.fracs[i, :])
                        done.append(e[1])
                    elif ((e[1] in done) and (e[0] not in done)):
                        nc = (frac_xyz[e[1], :] - self.fracs[i, :])
                        frac_xyz[e[0], :] = nc
                        done.append(e[0])
            counter += 1
            if counter > 10:
                # give up after 10 sweeps; fall through to the second pass
                break
        #frac_xyz = frac_xyz%1
        print(len(done))
        if len(done) != self.nvertices:
            print('proceed')
            # second pass: also follow edges that cross cell boundaries
            for i, e in enumerate(self.edges):
                if ((e[0] in done) and (e[1] not in done)):
                    print(e)
                    frac_xyz[e[1], :] = frac_xyz[e[0], :] + self.fracs[i, :]
                    done.append(e[1])
                elif ((e[1] in done) and (e[0] not in done)):
                    print(e, self.labels[i], self.fracs[i, :])
                    #### problem!!!!!
                    frac_xyz[e[0], :] = frac_xyz[e[1], :] - self.fracs[i, :]
                    done.append(e[0])
        ### perhaps a flooring has to be performe
        self.frac_xyz = frac_xyz
        return self.frac_xyz

    def to_mol(self):
        """Export the placed net as a `topo` object with connectivity and
        periodic-connection vectors (labels negated for the reverse
        direction)."""
        t = topo()
        t.natoms = self.nvertices
        t.set_cell(self.cell)
        t.set_xyz_from_frac(self.frac_xyz)
        t.set_atypes(self.nvertices * ['1'])
        t.set_empty_conn()
        t.set_empty_pconn()
        for i, e in enumerate(self.edges):
            t.conn[e[0]].append(e[1])
            t.conn[e[1]].append(e[0])
            t.pconn[e[0]].append(numpy.array(self.labels[i]))
            t.pconn[e[1]].append(-1 * numpy.array(self.labels[i]))
        #t.wrap_in_box()
        t.set_elems_by_coord_number()
        return t

    def get_edge_with_idx(self, idx):
        """Return the edge whose graph_tool edge_index equals idx (linear
        scan); returns None implicitly if not found."""
        for i in self.molg.edges():
            if self.molg.edge_index[i] == idx:
                return i
        #if self.molg.ep.number[i] == idx: return i

    def find_li_vectors(self, R):
        """Greedy selection of row indices of R forming a linearly
        independent set of size rank(R), using a Cauchy-Schwarz equality
        test (tolerance 1e-4) plus a matrix_rank confirmation."""
        rank = numpy.linalg.matrix_rank(R)
        idx = []
        ### get first non zero vector of R
        fn = numpy.nonzero(R)[0][0]
        idx.append(fn)
        for i in range(fn + 1, R.shape[0]):
            indep = True
            for j in idx:
                if i != j:
                    inner_product = numpy.dot(
                        R[i, :], R[j, :])  #compute the scalar product
                    norm_i = numpy.linalg.norm(R[i, :])  #compute norms
                    norm_j = numpy.linalg.norm(R[j, :])
                    if abs(inner_product - norm_j * norm_i) < 1e-4:
                        # vector i is linear dependent, iterate i
                        indep = False
                        break
            if indep == True:
                idx.append(i)
                # roll back if the candidate did not actually raise the rank
                if numpy.linalg.matrix_rank(R[idx]) != len(idx):
                    idx.pop()
                if len(idx) == rank:
                    break
        return idx

    def vertex_positions(self, edges, used, pos={}):
        """Recursively assign fractional positions by walking the edge
        frontier from already-placed vertices (3D only).

        NOTE(review): mutable default argument `pos={}` is shared across
        calls — a second top-level invocation starts from the previous
        run's positions; confirm callers always pass pos explicitly.
        """
        if self.dim == 2:
            return 'Not yet implemented'
        if len(pos.keys()) == self.nvertices:
            return pos
        self.molg.set_directed(True)
        # grab the first edge of the frontier
        for i, ed in enumerate(edges):
            e = ed
            if i == 0:
                break
        if int(str(e.source())) not in pos.keys() and int(str(
                e.target())) not in pos.keys():
            # neither endpoint placed yet — leave edge for a later pass
            pass
        elif int(str(e.source())) not in pos.keys() or int(str(
                e.target())) not in pos.keys():
            # exactly one endpoint placed: propagate across the edge
            from_v = int(str(e.source())) if int(str(
                e.source())) in pos.keys() else int(str(e.target()))
            to_v = int(str(e.target())) if int(str(
                e.target())) not in pos.keys() else int(str(e.source()))
            # sign depends on whether we traverse the edge forwards
            coeff = 0
            for i, ed in enumerate(self.molg.vertex(from_v).out_edges()):
                if e == ed:
                    coeff = 1
                    break
            if coeff == 0:
                coeff = -1
            index = self.molg.ep.number[e]
            to_pos = coeff * numpy.array(self.fracs)[index] + pos[from_v]
            newedges = []
            # wrap into the unit cell
            to_pos = numpy.array([i % 1 for i in to_pos])
            pos[to_v] = to_pos
            used.append(e)
            self.molg.set_directed(False)
            # extend the frontier with the newly placed vertex's edges
            ee = self.molg.vertex(to_v).out_edges()
            newedges = [i for i in ee if i not in used and i not in edges]
            print(newedges)
            edges = newedges + edges[1:]
        else:
            # both endpoints placed: retire the edge
            used.append(e)
            edges = edges[1:]
        return self.vertex_positions(edges, used, pos)

    def __call__(self):
        """Run the full pipeline: build graph, bases, cell, fractional
        coordinates."""
        self.build_lqg()
        self.get_cyclic_basis()
        self.get_cocycle_basis()
        self.get_B_matrix()
        self.get_alpha()
        self.get_lattice_basis()
        self.get_kernel()
        self.get_cell()
        self.get_fracs()
        self.place_vertices()