def build_graphs(self) -> None:
    """Builds graphs for all modes currently available in the model

    When called, it overwrites all graphs previously created and stored in the
    networks' dictionary of graphs
    """
    curr = self.conn.cursor()
    curr.execute('PRAGMA table_info(links);')
    field_names = curr.fetchall()

    # ogc_fid and geometry can never be part of a numeric graph record
    ignore_fields = ['ogc_fid', 'geometry']
    all_fields = [f[1] for f in field_names if f[1] not in ignore_fields]

    raw_links = curr.execute(f"select {','.join(all_fields)} from links").fetchall()

    # SQLite NULLs become NaN so per-column numpy dtype detection works below
    links = []
    for l in raw_links:
        lk = list(map(lambda x: np.nan if x is None else x, l))
        links.append(lk)

    data = np.core.records.fromrecords(links, names=all_fields)

    valid_fields = []
    removed_fields = []
    for f in all_fields:
        if np.issubdtype(data[f].dtype, np.floating) or np.issubdtype(data[f].dtype, np.integer):
            valid_fields.append(f)
        elif f != 'modes':
            # 'modes' is a string field used below for mode filtering; its
            # exclusion from the numeric records is expected, so it is not
            # reported to the user as a removed field
            removed_fields.append(f)

    # BUG FIX: was `if len(removed_fields) > 1`, which silently skipped the
    # warning when exactly one non-numeric field (besides 'modes') had been
    # dropped. Also fixes the 'form' -> 'from' typo in the message.
    if removed_fields:
        warn(f'Fields were removed from Graph for being non-numeric: {",".join(removed_fields)}')

    curr.execute('select node_id from nodes where is_centroid=1;')
    centroids = np.array([i[0] for i in curr.fetchall()], np.uint32)

    modes = curr.execute('select mode_id from modes;').fetchall()
    modes = [m[0] for m in modes]

    for m in modes:
        # Links that do not allow mode m are neutralized by turning them into
        # self-loops (b_node = a_node), so they never appear on a path
        w = np.core.defchararray.find(data['modes'], m)
        net = np.array(data[valid_fields], copy=True)
        net['b_node'][w < 0] = net['a_node'][w < 0]

        g = Graph()
        g.mode = m
        g.network = net
        g.network_ok = True
        g.status = 'OK'
        g.prepare_graph(centroids)
        g.set_blocked_centroid_flows(True)
        self.graphs[m] = g
def assign_matrix(self, matrix: AequilibraeMatrix, result_name: str):
    """Assigns *matrix* to the Delaunay network and persists the link flows.

    Builds a graph from the project's Delaunay triangulation table, runs an
    all-or-nothing assignment of *matrix* on it, records the run in the
    project ``results`` table, and writes the AB/BA/total flows per class to
    the results database under table *result_name*.

    Args:
        *matrix* (:obj:`AequilibraeMatrix`): demand matrix to assign
        *result_name* (:obj:`str`): name of the output table for link flows
    """
    conn = database_connection()
    # Constant capacity of 1 — with alpha=0 below, congestion plays no role
    sql = f"select link_id, direction, a_node, b_node, distance, 1 capacity from {DELAUNAY_TABLE}"
    df = pd.read_sql(sql, conn)
    # Every node appearing in the Delaunay network is treated as a centroid
    centroids = np.array(np.unique(np.hstack((df.a_node.values, df.b_node.values))), int)
    g = Graph()
    g.mode = 'delaunay'
    g.network = df
    g.prepare_graph(centroids)
    g.set_blocked_centroid_flows(True)

    tc = TrafficClass('delaunay', g, matrix)
    ta = TrafficAssignment()
    ta.set_classes([tc])
    ta.set_time_field('distance')
    ta.set_capacity_field('capacity')
    # alpha=0 degenerates the BPR function to free-flow time, i.e. the
    # assignment is a plain shortest-distance all-or-nothing load
    ta.set_vdf('BPR')
    ta.set_vdf_parameters({"alpha": 0, "beta": 1.0})
    ta.set_algorithm('all-or-nothing')
    ta.execute()

    # Record the procedure in the project database before exporting flows
    report = {"setup": str(ta.info())}
    data = [result_name, "Delaunay assignment", self.procedure_id, str(report), ta.procedure_date, '']
    conn.execute("""Insert into results(table_name, procedure, procedure_id, procedure_report, timestamp,
                                        description) Values(?,?,?,?,?,?)""", data)
    conn.commit()
    conn.close()

    # One _ab/_ba/_tot column triple per matrix class in the computational view
    cols = []
    for x in matrix.view_names:
        cols.extend([f'{x}_ab', f'{x}_ba', f'{x}_tot'])
    df = ta.results()[cols]
    conn = sqlite3.connect(join(environ[ENVIRON_VAR], "results_database.sqlite"))
    df.to_sql(result_name, conn)
    conn.close()
class TestGraph(TestCase):
    """Tests for Graph creation, preparation, persistence and link exclusion.

    NOTE(review): the test methods deliberately chain — each calls its
    predecessor to build up ``self.graph`` state before asserting.
    """

    def test_create_from_geography(self):
        # Builds a graph straight from the test network geography and applies
        # the standard cost/skim configuration reused by the tests below
        self.graph = Graph()
        self.graph.create_from_geography(
            test_network, "link_id", "dir", "distance", centroids=centroids, skim_fields=[],
            anode="A_NODE", bnode="B_NODE",
        )
        self.graph.set_graph(cost_field="distance")
        self.graph.set_blocked_centroid_flows(block_centroid_flows=True)
        self.graph.set_skimming("distance")

    def test_prepare_graph(self):
        self.test_create_from_geography()
        self.graph.prepare_graph(centroids)

        # The freshly-prepared graph must match the reference stored on disk
        reference_graph = Graph()
        reference_graph.load_from_disk(test_graph)
        if not np.array_equal(self.graph.graph, reference_graph.graph):
            self.fail("Reference graph and newly-prepared graph are not equal")

    def test_set_graph(self):
        self.test_prepare_graph()
        self.graph.set_graph(cost_field="distance")
        self.graph.set_blocked_centroid_flows(block_centroid_flows=True)
        # 222 links / 93 nodes are the known sizes of the test network
        if self.graph.num_zones != centroids.shape[0]:
            self.fail("Number of centroids not properly set")
        if self.graph.num_links != 222:
            self.fail("Number of links not properly set")
        if self.graph.num_nodes != 93:
            self.fail("Number of nodes not properly set - " + str(self.graph.num_nodes))

    def test_save_to_disk(self):
        self.test_create_from_geography()
        self.graph.save_to_disk(join(path_test, "aequilibrae_test_graph.aeg"))
        # Remembered so test_load_from_disk can verify round-trip identity
        self.graph_id = self.graph.__id__
        self.graph_version = self.graph.__version__

    def test_load_from_disk(self):
        self.test_save_to_disk()
        reference_graph = Graph()
        reference_graph.load_from_disk(test_graph)

        new_graph = Graph()
        new_graph.load_from_disk(join(path_test, "aequilibrae_test_graph.aeg"))

        # Array-valued attributes, compared with np.array_equal
        comparisons = [
            ("Graph", new_graph.graph, reference_graph.graph),
            ("b_nodes", new_graph.b_node, reference_graph.b_node),
            ("Forward-Star", new_graph.fs, reference_graph.fs),
            ("cost", new_graph.cost, reference_graph.cost),
            ("centroids", new_graph.centroids, reference_graph.centroids),
            ("skims", new_graph.skims, reference_graph.skims),
            ("link ids", new_graph.ids, reference_graph.ids),
            ("Network", new_graph.network, reference_graph.network),
            ("All Nodes", new_graph.all_nodes, reference_graph.all_nodes),
            ("Nodes to indices", new_graph.nodes_to_indices, reference_graph.nodes_to_indices),
        ]
        for comparison, newg, refg in comparisons:
            if not np.array_equal(newg, refg):
                self.fail("Reference %s and %s created and saved to disk are not equal" % (comparison, comparison))

        # Scalar attributes, compared with plain equality
        comparisons = [
            ("nodes", new_graph.num_nodes, reference_graph.num_nodes),
            ("links", new_graph.num_links, reference_graph.num_links),
            ("zones", new_graph.num_zones, reference_graph.num_zones),
            ("block through centroids", new_graph.block_centroid_flows, reference_graph.block_centroid_flows),
            ("Graph ID", new_graph.__id__, self.graph_id),
            ("Graph Version", new_graph.__version__, self.graph_version),
        ]
        for comparison, newg, refg in comparisons:
            if newg != refg:
                self.fail("Reference %s and %s created and saved to disk are not equal" % (comparison, comparison))

    def test_available_skims(self):
        self.test_set_graph()
        if self.graph.available_skims() != ["distance"]:
            self.fail("Skim availability with problems")

    def test_exclude_links(self):
        p = Project()
        p.load(siouxfalls_project)
        p.network.build_graphs()
        g = p.network.graphs['c']  # type: Graph

        # excludes a link before any setting or preparation
        g.exclude_links([12])
        g.set_graph('distance')
        r1 = PathResults()
        r1.prepare(g)
        r1.compute_path(1, 14)
        self.assertEqual(list(r1.path), [2, 6, 10, 34])

        # We exclude one link that we know was part of the last shortest path
        g.exclude_links([10])
        r2 = PathResults()
        r2.prepare(g)
        r2.compute_path(1, 14)
        self.assertEqual(list(r2.path), [2, 7, 36, 34])
        p.conn.close()
class TestAllOrNothing(TestCase):
    """All-or-nothing assignment tests over the reference test graph."""

    def setUp(self) -> None:
        # Loads the reference graph and builds a two-class demand matrix,
        # exported to OMX as well so both storage formats can be assigned
        self.mat_name = AequilibraeMatrix().random_name()
        self.g = Graph()
        self.g.load_from_disk(test_graph)
        self.g.set_graph(cost_field="distance")

        # Creates the matrix for assignment
        args = {
            "file_name": os.path.join(gettempdir(), self.mat_name),
            "zones": self.g.num_zones,
            "matrix_names": ["cars", "trucks"],
            "index_names": ["my indices"],
        }

        matrix = AequilibraeMatrix()
        matrix.create_empty(**args)

        # Uniform demand per class so assignment totals are easy to verify
        matrix.index[:] = self.g.centroids[:]
        matrix.cars.fill(1.1)
        matrix.trucks.fill(2.2)

        # Exports matrix to OMX in order to have two matrices to work with
        matrix.export(os.path.join(gettempdir(), "my_matrix.omx"))
        matrix.close()

    def test_skimming_on_assignment(self):
        matrix = AequilibraeMatrix()
        matrix.load(os.path.join(gettempdir(), self.mat_name))
        matrix.computational_view(["cars"])

        res = AssignmentResults()
        res.prepare(self.g, matrix)

        # With no skim fields configured, the skim result must stay all-zero
        self.g.set_skimming([])
        self.g.set_blocked_centroid_flows(True)

        assig = allOrNothing(matrix, self.g, res)
        assig.execute()

        if res.skims.distance.sum() > 0:
            self.fail("skimming for nothing during assignment returned something different than zero")

        # 2914644.0 is the known distance-skim total for this graph and demand
        self.g.set_skimming("distance")
        res.prepare(self.g, matrix)

        assig = allOrNothing(matrix, self.g, res)
        assig.execute()
        if res.skims.distance.sum() != 2914644.0:
            self.fail("skimming during assignment returned the wrong value")
        matrix.close()

    def test_execute(self):
        # Loads and prepares the graph
        car_loads = []
        two_class_loads = []

        # The same demand read from OMX and AEM must yield identical loads
        for extension in ["omx", "aem"]:
            matrix = AequilibraeMatrix()
            if extension == 'omx':
                mat_name = os.path.join(gettempdir(), "my_matrix." + extension)
            else:
                mat_name = self.mat_name
            matrix.load(mat_name)
            matrix.computational_view(["cars"])

            # Performs assignment
            res = AssignmentResults()
            res.prepare(self.g, matrix)

            assig = allOrNothing(matrix, self.g, res)
            assig.execute()
            car_loads.append(res.link_loads)
            res.save_to_disk(os.path.join(gettempdir(), "link_loads_{}.aed".format(extension)))
            res.save_to_disk(os.path.join(gettempdir(), "link_loads_{}.csv".format(extension)))

            # Full computational view: both classes assigned together
            matrix.computational_view()
            # Performs assignment
            res = AssignmentResults()
            res.prepare(self.g, matrix)

            assig = allOrNothing(matrix, self.g, res)
            assig.execute()
            two_class_loads.append(res.link_loads)
            res.save_to_disk(os.path.join(gettempdir(), "link_loads_2_classes_{}.aed".format(extension)))
            res.save_to_disk(os.path.join(gettempdir(), "link_loads_2_classes_{}.csv".format(extension)))
            matrix.close()

        # Loads must agree between formats within a tight float tolerance
        load_diff = two_class_loads[0] - two_class_loads[1]
        if load_diff.max() > 0.0000000001 or load_diff.max() < -0.0000000001:
            self.fail("Loads for two classes differ for OMX and AEM matrix types")

        load_diff = car_loads[0] - car_loads[1]
        if load_diff.max() > 0.0000000001 or load_diff.max() < -0.0000000001:
            self.fail("Loads for a single class differ for OMX and AEM matrix types")
class TestGraph(TestCase):
    """Tests for Graph creation, preparation and disk persistence.

    NOTE(review): the test methods deliberately chain — each calls its
    predecessor to build up ``self.graph`` state before asserting.
    """

    def test_create_from_geography(self):
        # Builds a graph straight from the test network geography and applies
        # the standard cost/skim configuration reused by the tests below
        self.graph = Graph()
        self.graph.create_from_geography(
            test_network, "link_id", "dir", "distance", centroids=centroids, skim_fields=[],
            anode="A_NODE", bnode="B_NODE",
        )
        self.graph.set_graph(cost_field="distance")
        self.graph.set_blocked_centroid_flows(block_centroid_flows=True)
        self.graph.set_skimming("distance")

    def test_prepare_graph(self):
        self.test_create_from_geography()
        self.graph.prepare_graph(centroids)

        # The freshly-prepared graph must match the reference stored on disk
        reference_graph = Graph()
        reference_graph.load_from_disk(test_graph)
        if not np.array_equal(self.graph.graph, reference_graph.graph):
            self.fail("Reference graph and newly-prepared graph are not equal")

    def test_set_graph(self):
        self.test_prepare_graph()
        self.graph.set_graph(cost_field="distance")
        self.graph.set_blocked_centroid_flows(block_centroid_flows=True)
        # 222 links / 93 nodes are the known sizes of the test network
        if self.graph.num_zones != centroids.shape[0]:
            self.fail("Number of centroids not properly set")
        if self.graph.num_links != 222:
            self.fail("Number of links not properly set")
        if self.graph.num_nodes != 93:
            self.fail("Number of nodes not properly set - " + str(self.graph.num_nodes))

    def test_save_to_disk(self):
        self.test_create_from_geography()
        self.graph.save_to_disk(join(path_test, "aequilibrae_test_graph.aeg"))
        # Remembered so test_load_from_disk can verify round-trip identity
        self.graph_id = self.graph.__id__
        self.graph_version = self.graph.__version__

    def test_load_from_disk(self):
        self.test_save_to_disk()
        reference_graph = Graph()
        reference_graph.load_from_disk(test_graph)

        new_graph = Graph()
        new_graph.load_from_disk(join(path_test, "aequilibrae_test_graph.aeg"))

        # Array-valued attributes, compared with np.array_equal
        comparisons = [
            ("Graph", new_graph.graph, reference_graph.graph),
            ("b_nodes", new_graph.b_node, reference_graph.b_node),
            ("Forward-Star", new_graph.fs, reference_graph.fs),
            ("cost", new_graph.cost, reference_graph.cost),
            ("centroids", new_graph.centroids, reference_graph.centroids),
            ("skims", new_graph.skims, reference_graph.skims),
            ("link ids", new_graph.ids, reference_graph.ids),
            ("Network", new_graph.network, reference_graph.network),
            ("All Nodes", new_graph.all_nodes, reference_graph.all_nodes),
            ("Nodes to indices", new_graph.nodes_to_indices, reference_graph.nodes_to_indices),
        ]
        for comparison, newg, refg in comparisons:
            if not np.array_equal(newg, refg):
                self.fail("Reference %s and %s created and saved to disk are not equal" % (comparison, comparison))

        # Scalar attributes, compared with plain equality
        comparisons = [
            ("nodes", new_graph.num_nodes, reference_graph.num_nodes),
            ("links", new_graph.num_links, reference_graph.num_links),
            ("zones", new_graph.num_zones, reference_graph.num_zones),
            ("block through centroids", new_graph.block_centroid_flows, reference_graph.block_centroid_flows),
            ("Graph ID", new_graph.__id__, self.graph_id),
            ("Graph Version", new_graph.__version__, self.graph_version),
        ]
        for comparison, newg, refg in comparisons:
            if newg != refg:
                self.fail("Reference %s and %s created and saved to disk are not equal" % (comparison, comparison))

    def test_available_skims(self):
        self.test_set_graph()
        if self.graph.available_skims() != ["distance"]:
            self.fail("Skim availability with problems")
def build_graphs(self, fields: list = None, modes: list = None) -> None:
    """Builds graphs for all modes currently available in the model

    When called, it overwrites all graphs previously created and stored in the
    networks' dictionary of graphs

    Args:
        *fields* (:obj:`list`, optional): When working with very large graphs with large number of fields in
                                          the database, it may be useful to specify which fields to use
        *modes* (:obj:`list`, optional): When working with very large graphs with large number of fields in
                                         the database, it may be useful to generate only those we need

    To use the *fields* parameter, a minimalistic option is the following
    ::

        p = Project()
        p.open(nm)
        fields = ['distance']
        p.network.build_graphs(fields, modes = ['c', 'w'])
    """
    curr = self.conn.cursor()

    if fields is None:
        curr.execute("PRAGMA table_info(links);")
        field_names = curr.fetchall()

        ignore_fields = ["ogc_fid", "geometry"]
        all_fields = [f[1] for f in field_names if f[1] not in ignore_fields]
    else:
        # BUG FIX: previously the caller's list was mutated in place with
        # fields.extend(...). We now build a fresh list so the argument is
        # left untouched; the mandatory graph columns are still guaranteed
        # and duplicates removed, exactly as before.
        all_fields = list(set(fields) | {"link_id", "a_node", "b_node", "direction", "modes"})

    if modes is None:
        modes = curr.execute("select mode_id from modes;").fetchall()
        modes = [m[0] for m in modes]
    elif isinstance(modes, str):
        # A single mode id may be passed as a bare string
        modes = [modes]

    sql = f"select {','.join(all_fields)} from links"

    # fillna keeps NULLs as NaN so numeric dtype detection works per column
    df = pd.read_sql(sql, self.conn).fillna(value=np.nan)
    # 'modes' is kept even though it is a string field — it drives filtering
    valid_fields = list(df.select_dtypes(np.number).columns) + ["modes"]
    curr.execute("select node_id from nodes where is_centroid=1 order by node_id;")
    centroids = np.array([i[0] for i in curr.fetchall()], np.uint32)

    data = df[valid_fields]
    for m in modes:
        net = pd.DataFrame(data, copy=True)
        # Links that do not allow mode m become self-loops (b_node = a_node)
        # so they can never be part of a path for that mode
        net.loc[~net.modes.str.contains(m), "b_node"] = net.loc[~net.modes.str.contains(m), "a_node"]
        g = Graph()
        g.mode = m
        g.network = net
        g.prepare_graph(centroids)
        g.set_blocked_centroid_flows(True)
        self.graphs[m] = g
class DesireLinesProcedure(WorkerThread):
    """QGIS worker thread that builds a desire-lines or Delaunay-lines memory
    layer from an AequilibraE matrix and a zone/centroid vector layer.

    Progress and status are reported through the ``desire_lines`` signal
    (presumably declared on WorkerThread or a subclass — not visible here).
    """

    def __init__(self, parentThread, layer: str, id_field: int, matrix: AequilibraeMatrix, matrix_hash: dict,
                 dl_type: str) -> None:
        WorkerThread.__init__(self, parentThread)
        self.layer = layer            # name of the zone/centroid layer to load
        self.id_field = id_field      # layer field holding the zone id
        self.matrix = matrix
        self.dl_type = dl_type        # "DesireLines" or "DelaunayLines"
        self.error = None
        self.matrix_hash = matrix_hash
        self.report = []
        self.logger = logging.getLogger('aequilibrae')
        self.nodes_to_indices = {matrix.index[x]: x for x in range(matrix.zones)}

        # 32 or 64 — used to pick the right scipy Delaunay attribute name
        self.python_version = (8 * struct.calcsize("P"))

        # NOTE(review): `error` is read from enclosing module scope —
        # presumably a flag set at import time when scipy/numpy are missing;
        # confirm against the module header (not visible in this chunk)
        if error:
            self.error = 'Scipy and/or Numpy not installed'
            self.report.append(self.error)
        self.procedure = "ASSIGNMENT"

    def doWork(self):
        if self.error is None:
            # In case we have only one class
            unnasigned = 0  # running total of flow that cannot be mapped
            # NOTE(review): `classes` is computed but never used below
            classes = self.matrix.matrix_view.shape[2]
            layer = get_vector_layer_by_name(self.layer)
            idx = layer.dataProvider().fieldNameIndex(self.id_field)
            feature_count = layer.featureCount()
            self.desire_lines.emit(('job_size_dl', feature_count))

            # Collect the centroid coordinate of every zone feature
            all_centroids = {}
            for P, feat in enumerate(layer.getFeatures()):
                geom = feat.geometry()
                if geom is not None:
                    point = list(geom.centroid().asPoint())
                    centroid_id = feat.attributes()[idx]
                    all_centroids[centroid_id] = point
                self.desire_lines.emit(('jobs_done_dl', P))
                self.desire_lines.emit(('text_dl', "Loading Layer Features: " + str(P) + "/" + str(feature_count)))

            # Creating resulting layer
            EPSG_code = int(layer.crs().authid().split(":")[1])

            desireline_layer = QgsVectorLayer("LineString?crs=epsg:" + str(EPSG_code), self.dl_type, "memory")
            dlpr = desireline_layer.dataProvider()

            # Attributes common to both output types
            base_dl_fields = [
                QgsField("link_id", QVariant.Int),
                QgsField("A_Node", QVariant.Int),
                QgsField("B_Node", QVariant.Int),
                QgsField("direct", QVariant.Int),
                QgsField("distance", QVariant.Double)
            ]

            if self.dl_type == "DesireLines":
                # Dense lookup table of coordinates indexed by zone id
                max_zone = self.matrix.index[:].max().astype(np.int64) + 1
                items = [(i, j[0], j[1]) for i, j in all_centroids.items() if i < max_zone]
                coords = np.array(items)
                coord_index = np.zeros((max_zone, 2))
                coord_index[coords[:, 0].astype(np.int64), 0] = coords[:, 1]
                coord_index[coords[:, 0].astype(np.int64), 1] = coords[:, 2]

                self.desire_lines.emit(('text_dl', "Manipulating matrix indices"))
                # Flattened OD pair ids: one (from, to) per matrix cell
                zones = self.matrix.index[:].shape[0]
                a = np.array(self.matrix.index[:], np.int64)
                ij, ji = np.meshgrid(a, a, sparse=False, indexing='ij')
                ij = ij.flatten()
                ji = ji.flatten()
                arrays = [ij, ji]

                self.desire_lines.emit(('text_dl', "Collecting all matrices"))
                self.desire_lines.emit(('job_size_dl', len(self.matrix.view_names)))
                total_mat = np.zeros((zones, zones), np.float64)
                for i, mat in enumerate(self.matrix.view_names):
                    m = self.matrix.get_matrix(mat)
                    total_mat += m
                    arrays.append(m.flatten())
                    self.desire_lines.emit(('jobs_done_dl', i + 1))

                # Eliminates the cells for which we don't have geography
                self.desire_lines.emit(('text_dl', "Filtering zones with no geography available"))
                zones_with_no_geography = [x for x in self.matrix.index[:] if x not in all_centroids]
                if zones_with_no_geography:
                    self.desire_lines.emit(('job_size_dl', len(zones_with_no_geography)))
                    for k, z in enumerate(zones_with_no_geography):
                        i = self.matrix.matrix_hash[z]
                        t = np.nansum(total_mat[i, :]) + np.nansum(total_mat[:, i])
                        unnasigned += t
                        self.report.append('Zone {} does not have a corresponding centroid/zone. Total flow {}'
                                           .format(z, t))
                        total_mat[i, :] = 0
                        total_mat[:, i] = 0
                        self.desire_lines.emit(('jobs_done_dl', k + 1))

                self.desire_lines.emit(('text_dl', "Filtering down to OD pairs with flows"))
                field_names = [x for x in self.matrix.view_names]
                # Keep only OD pairs with a non-zero combined flow
                nonzero = np.nonzero(total_mat.flatten())
                arrays = np.vstack(arrays).transpose()
                arrays = arrays[nonzero, :]
                arrays = arrays.reshape(arrays.shape[1], arrays.shape[2])

                # Structured-array dtypes for the AB and BA direction tables
                base_types = [(x, np.float64) for x in ['from', 'to']]
                base_types = base_types + [(x + '_AB', np.float64) for x in field_names]

                dtypes_ab = [(x, np.int64) for x in ['from', 'to']] + [(x + '_AB', float) for x in field_names]
                # BA swaps the from/to columns so the join below aligns pairs
                dtypes_ba = [(x, np.int64) for x in ['to', 'from']] + [(x + '_BA', float) for x in field_names]

                ab_mat = np.array(arrays[arrays[:, 0] > arrays[:, 1], :])
                ba_mat = np.array(arrays[arrays[:, 0] < arrays[:, 1], :])

                flows_ab = ab_mat.view(base_types)
                flows_ab = flows_ab.reshape(flows_ab.shape[:-1])
                flows_ab = flows_ab.astype(dtypes_ab)

                flows_ba = ba_mat.view(base_types)
                flows_ba = flows_ba.reshape(flows_ba.shape[:-1])
                flows_ba = flows_ba.astype(dtypes_ba)

                # Zero-fill defaults for pairs present in only one direction
                defaults1 = {x + '_AB': 0.0 for x in field_names}
                defaults = {x + '_BA': 0.0 for x in field_names}
                defaults = {**defaults, **defaults1}

                self.desire_lines.emit(('text_dl', "Concatenating AB & BA flows"))
                flows = rfn.join_by(['from', 'to'], flows_ab, flows_ba, jointype='outer', defaults=defaults,
                                    usemask=True, asrecarray=True)
                flows = flows.filled()
                # Release the intermediate tables
                flows_ab = 0
                flows_ba = 0

                # One layer attribute per flow column (skipping from/to)
                for f in flows.dtype.names[2:]:
                    base_dl_fields.extend([QgsField(f, QVariant.Double)])

                dlpr.addAttributes(base_dl_fields)
                desireline_layer.updateFields()

                self.desire_lines.emit(('text_dl', "Creating Desire Lines"))
                self.desire_lines.emit(('job_size_dl', flows.shape[0]))

                all_features = []
                for i, rec in enumerate(flows):
                    a_node = rec[0]
                    b_node = rec[1]
                    a_point = QgsPointXY(*all_centroids[a_node])
                    b_point = QgsPointXY(*all_centroids[b_node])
                    dist = QgsGeometry().fromPointXY(a_point).distance(QgsGeometry().fromPointXY(b_point))
                    feature = QgsFeature()
                    feature.setGeometry(QgsGeometry.fromPolylineXY([a_point, b_point]))
                    attrs = [i + 1, int(a_node), int(b_node), 0, dist]
                    attrs.extend([float(x) for x in list(rec)[2:]])
                    feature.setAttributes(attrs)
                    all_features.append(feature)
                    self.desire_lines.emit(('jobs_done_dl', i))

                if unnasigned > 0:
                    self.report.append('Total non assigned flows (not counting intrazonals):' + str(unnasigned))

                if flows.shape[0] > 1:
                    a = dlpr.addFeatures(all_features)
                    self.result_layer = desireline_layer
                else:
                    self.report.append('Nothing to show')

            elif self.dl_type == "DelaunayLines":
                # One _ab/_ba/_tot attribute triple per matrix class
                for f in self.matrix.view_names:
                    base_dl_fields.extend([
                        QgsField(f + '_ab', QVariant.Double),
                        QgsField(f + '_ba', QVariant.Double),
                        QgsField(f + '_tot', QVariant.Double)
                    ])
                dlpr.addAttributes(base_dl_fields)
                desireline_layer.updateFields()

                self.desire_lines.emit(('text_dl', "Building Delaunay dataset"))
                # Sequential Delaunay point index -> original zone id
                points = []
                node_id_in_delaunay_results = {}
                i = 0
                self.desire_lines.emit(('job_size_dl', len(all_centroids)))
                for k, v in all_centroids.items():
                    self.desire_lines.emit(('jobs_done_dl', i))
                    points.append(v)
                    node_id_in_delaunay_results[i] = k
                    i += 1

                self.desire_lines.emit(('text_dl', "Computing Delaunay Triangles"))
                tri = Delaunay(np.array(points))

                # We process all the triangles to only get each edge once
                self.desire_lines.emit(('text_dl', "Building Delaunay Network: Collecting Edges"))
                edges = []
                # NOTE(review): attribute name differs by build; `vertices`
                # is the deprecated alias for `simplices` in older scipy
                if self.python_version == 32:
                    all_edges = tri.vertices
                else:
                    all_edges = tri.simplices
                self.desire_lines.emit(('job_size_dl', len(all_edges)))
                for j, triangle in enumerate(all_edges):
                    self.desire_lines.emit(('jobs_done_dl', j))
                    links = list(itertools.combinations(triangle, 2))
                    for i in links:
                        # Normalize edge orientation so duplicates collapse
                        edges.append([min(i[0], i[1]), max(i[0], i[1])])

                self.desire_lines.emit(('text_dl', "Building Delaunay Network: Getting unique edges"))
                # De-duplicate edges while preserving first-seen order
                edges = OrderedDict((str(x), x) for x in edges).values()

                # Writing Delaunay layer
                self.desire_lines.emit(('text_dl', "Building Delaunay Network: Assembling Layer"))
                desireline_link_id = 1
                data = []
                dl_ids_on_links = {}
                self.desire_lines.emit(('job_size_dl', len(edges)))
                for j, edge in enumerate(edges):
                    self.desire_lines.emit(('jobs_done_dl', j))
                    a_node = node_id_in_delaunay_results[edge[0]]
                    a_point = all_centroids[a_node]
                    a_point = QgsPointXY(a_point[0], a_point[1])
                    b_node = node_id_in_delaunay_results[edge[1]]
                    b_point = all_centroids[b_node]
                    b_point = QgsPointXY(b_point[0], b_point[1])
                    dist = QgsGeometry().fromPointXY(a_point).distance(QgsGeometry().fromPointXY(b_point))
                    # link_id, a_node, b_node, dist_ab, dist_ba, direction
                    line = []
                    line.append(desireline_link_id)
                    line.append(a_node)
                    line.append(b_node)
                    line.append(dist)
                    line.append(dist)
                    line.append(0)
                    data.append(line)
                    dl_ids_on_links[desireline_link_id] = [a_node, b_node, 0, dist]
                    desireline_link_id += 1

                self.desire_lines.emit(('text_dl', "Building graph"))
                network = np.asarray(data)
                del data

                # types for the network
                self.graph = Graph()
                itype = self.graph.default_types('int')
                ftype = self.graph.default_types('float')
                all_types = [itype, itype, itype, ftype, ftype, np.int8]
                all_titles = ['link_id', 'a_node', 'b_node', 'distance_ab', 'distance_ba', 'direction']
                dt = [(t, d) for t, d in zip(all_titles, all_types)]

                self.graph.network = np.zeros(network.shape[0], dtype=dt)

                for k, t in enumerate(dt):
                    self.graph.network[t[0]] = network[:, k].astype(t[1])
                del network

                self.graph.type_loaded = 'NETWORK'
                self.graph.status = 'OK'
                self.graph.network_ok = True
                self.graph.prepare_graph(self.matrix.index.astype(np.int64))
                self.graph.set_graph(cost_field="distance")
                self.graph.set_blocked_centroid_flows(False)

                self.results = AssignmentResults()
                self.results.prepare(self.graph, self.matrix)

                self.desire_lines.emit(('text_dl', "Assigning demand"))
                self.desire_lines.emit(('job_size_dl', self.matrix.index.shape[0]))
                assigner = allOrNothing(self.matrix, self.graph, self.results)
                assigner.execute()
                self.report = assigner.report
                # NOTE(review): stray debug print — consider removing or
                # routing through self.logger
                print(self.results.link_loads)
                self.desire_lines.emit(('text_dl', "Collecting results"))

                self.desire_lines.emit(('text_dl', "Building resulting layer"))
                features = []
                max_edges = len(edges)
                self.desire_lines.emit(('job_size_dl', max_edges))
                link_loads = self.results.get_load_results()
                for i, link_id in enumerate(link_loads.index):
                    self.desire_lines.emit(('jobs_done_dl', i))
                    a_node, b_node, direct, dist = dl_ids_on_links[link_id]
                    attr = [int(link_id), a_node, b_node, direct, dist]
                    a_point = all_centroids[a_node]
                    a_point = QgsPointXY(a_point[0], a_point[1])
                    b_point = all_centroids[b_node]
                    b_point = QgsPointXY(b_point[0], b_point[1])
                    feature = QgsFeature()
                    feature.setGeometry(QgsGeometry.fromPolylineXY([a_point, b_point]))
                    for c in self.matrix.view_names:
                        attr.extend([
                            float(link_loads.data[c + '_ab'][i]),
                            float(link_loads.data[c + '_ba'][i]),
                            float(link_loads.data[c + '_tot'][i])
                        ])
                    feature.setAttributes(attr)
                    features.append(feature)

                a = dlpr.addFeatures(features)
                self.result_layer = desireline_layer

        # Always signal completion, even when nothing was produced
        self.desire_lines.emit(('finished_desire_lines_procedure', 0))