def init_dep_graph(strokes_topology):
    """Build the initial DependencyGraph from depth-assigned straight strokes.

    Every stroke that is a straight line (``primitive_type == 0``) and
    already carries a depth assignment becomes a DependencyNode holding a
    single fixed candidate (candidate 0) registered under sketch version 0.

    Parameters
    ----------
    strokes_topology : sequence of dict
        Per-stroke records; reads "primitive_type", "depth_assigned" and
        "stroke_3d".

    Returns
    -------
    DependencyGraph
        Graph with version-0 references/scores, per-candidate 3D stroke
        lengths, and the stroke_id -> dependency-node-id lookup filled in.
    """
    dep_graph = DependencyGraph()
    dep_graph.sketch_versions_reference[0] = []
    dep_graph.sketch_versions_scores[0] = []
    for s_id, s in enumerate(strokes_topology):
        # only straight strokes that already have an assigned depth
        if s["primitive_type"] != 0 or not s["depth_assigned"]:
            continue
        dep_node = DependencyNode(stroke_id=s_id,
                                  in_edges=[],
                                  out_edges=[],
                                  is_curve=False)
        # the guard above guarantees depth_assigned is True, so the node is
        # marked assigned unconditionally (the original re-checked redundantly)
        dep_node.is_assigned = True
        candidate = CandidateNode(candidate_nb=0)
        candidate.geometry = s["stroke_3d"]
        candidate.sketch_versions = [0]
        dep_node.insert_candidate_node(candidate)
        dep_graph.sketch_versions_reference[0].append([0, [], ScoreContainer()])
        # key uses the index this node will receive once appended below
        dep_graph.stroke_lengths_3d[(len(dep_graph.dependency_nodes), 0)] = \
            tools_3d.line_3d_length(candidate.geometry)
        dep_graph.dependency_nodes.append(dep_node)

    scores = [score_container.total_score for (_, _, score_container) in
              dep_graph.sketch_versions_reference[0]]
    dep_graph.sketch_versions_scores[0] = np.sum(scores)
    # np.int was deprecated in NumPy 1.20 and removed in 1.24; the builtin
    # int is the documented replacement and yields the same default dtype
    dep_graph.stroke_id_to_dep_node_id = np.zeros(len(strokes_topology), dtype=int)
    for dep_node_id, dep_node in enumerate(dep_graph.dependency_nodes):
        dep_graph.stroke_id_to_dep_node_id[dep_node.stroke_id] = dep_node_id

    return dep_graph
# Exemplo n.º 2
# 0
def filter_out_foreshortened_lines(candidate_curves, intersections_sets,
                                   dep_graph, s_id, strokes_topology,
                                   median_length_2d, bbox):
    """Drop candidate curves that are overly foreshortened or leave the bbox.

    A candidate is rejected when its 3D length relative to the sketch's
    median 3D length exceeds twice the stroke's 2D length relative to the
    median 2D length, or when any point of its geometry falls outside
    ``bbox`` (minimum corner in bbox[:3], maximum corner in bbox[3:]).
    If every candidate would be rejected, deep copies of the inputs are
    returned unchanged instead.

    Returns the (deep-copied) filtered candidate list and the matching
    intersection sets.
    """
    curves_copy = deepcopy(candidate_curves)
    inters_copy = deepcopy(intersections_sets)

    # loop-invariant: relative 2D length threshold of this stroke
    length_threshold_2d = 2.0 * strokes_topology[s_id]["linestring"].length \
        / median_length_2d

    rejected = [
        idx for idx, curve in enumerate(candidate_curves)
        if (tools_3d.line_3d_length(curve.geometry)
            / dep_graph.median_length_3d > length_threshold_2d)
        or np.any(curve.geometry < bbox[:3])
        or np.any(curve.geometry > bbox[3:])
    ]

    # rejecting everything would leave no candidates: keep them all
    if len(rejected) == len(candidate_curves):
        return curves_copy, inters_copy

    # delete back-to-front so earlier indices stay valid
    for idx in sorted(rejected, reverse=True):
        del curves_copy[idx]
        del inters_copy[idx]
    return curves_copy, inters_copy
# Exemplo n.º 3
# 0
def get_candidate_curves(s_id, intersections_3d, strokes_topology, camera,
                         bbox_diag):
    """Generate, filter and cluster candidate 3D curves for stroke ``s_id``.

    The stroke's 2D polyline is lifted onto hypothesis planes anchored at
    reconstructed 3D intersections, from three sources:
      1. the three axis-aligned planes through each intersection,
      2. scaffold planes of the straight stroke on the other side of the
         intersection (non-ellipse strokes only),
      3. planes spanned by triplets of intersections (non-ellipse only).
    Each candidate then collects the ids of all 3D intersections lying
    within merging distance of it; candidates with no intersections are
    dropped, and near-duplicate candidates are clustered.

    Returns (clustered_candidate_curves, clustered_intersection_sets).
    """

    candidate_curves = []
    # for each candidate line, include at least the intersections which were used
    # to create the candidate_line
    creation_intersections = []
    # per-intersection record of plane normals already used at that anchor,
    # so near-parallel duplicate planes are not generated twice
    intersection_normals = [[] for i in range(len(intersections_3d))]

    #print("s_id: ", s_id)
    #start_time = time.clock()
    for inter_id, inter in enumerate(intersections_3d):
        if inter.coords_3d is None:
            continue
        # include the three major planes
        for i in range(3):
            cand_curve = tools_3d.Curve3D()
            plane_point = inter.coords_3d
            plane_normal = np.zeros(3)
            plane_normal[i] = 1.0
            #geometry = camera.lift_polyline_to_plane(strokes_topology[s_id]["stroke"],
            #										 plane_point, plane_normal)
            #print("geometry comparison")
            #print(np.array(geometry))
            intersection_normals[inter_id].append(plane_normal)
            geometry = camera.lift_polyline_to_plane_vectorized(
                strokes_topology[s_id]["stroke"], plane_point, plane_normal)
            #print(geometry)
            #sys.exit()
            #if len(geometry) == 0:
            #	continue
            cand_curve.geometry = geometry
            cand_curve.plane_point = plane_point
            cand_curve.plane_normal = plane_normal
            candidate_curves.append(cand_curve)
            creation_intersections.append([inter_id])

        if strokes_topology[s_id]["is_ellipse"]:
            continue
        # locate this stroke inside the intersection's stroke pair; the
        # other entry is the intersecting stroke
        curr_s_id = np.argwhere(
            np.array(inter.stroke_ids) == s_id).flatten()[0]
        other_s = strokes_topology[inter.stroke_ids[1 - curr_s_id]]
        if other_s["primitive_type"] == 0:
            # add scaffold planes
            for plane in other_s["planes"]:
                plane_point = plane["plane_point"]
                plane_normal = plane["plane_normal"]
                if np.isclose(np.linalg.norm(plane_normal), 0.0):
                    continue

                # skip planes nearly parallel to one already used at this
                # intersection (angular tolerance baked into the constant)
                used_normals = np.array(intersection_normals[inter_id])
                if len(used_normals) > 0:
                    used_dot = 1.0 - np.abs(np.dot(used_normals, plane_normal))
                    if np.any(used_dot < np.deg2rad(0.1) / np.pi):
                        continue
                intersection_normals[inter_id].append(plane_normal)

                geometry = camera.lift_polyline_to_plane_vectorized(
                    strokes_topology[s_id]["stroke"], plane_point,
                    plane_normal)
                if len(geometry) == 0:
                    continue
                cand_curve = tools_3d.Curve3D()
                cand_curve.geometry = geometry
                cand_curve.plane_point = plane_point
                cand_curve.plane_normal = plane_normal
                candidate_curves.append(cand_curve)
                creation_intersections.append([inter_id])

    #print("len(candidate_curves)")
    #print(len(candidate_curves))
    # finally, add planes formed by triplets of intersections
    for comb in distinct_combinations(range(len(intersections_3d)), 3):
        if strokes_topology[s_id]["is_ellipse"]:
            continue
        if intersections_3d[comb[0]].coords_3d is None or \
          intersections_3d[comb[1]].coords_3d is None or \
          intersections_3d[comb[2]].coords_3d is None:
            continue
        # plane through the three intersection points; coincident or
        # collinear triplets are rejected by the norm checks below
        plane_point = np.array(intersections_3d[comb[0]].coords_3d)
        vec_1 = np.array(intersections_3d[comb[1]].coords_3d) - plane_point
        if np.isclose(np.linalg.norm(vec_1), 0.0):
            continue
        vec_1 /= np.linalg.norm(vec_1)
        vec_2 = np.array(intersections_3d[comb[2]].coords_3d) - plane_point
        if np.isclose(np.linalg.norm(vec_2), 0.0):
            continue
        vec_2 /= np.linalg.norm(vec_2)
        plane_normal = np.cross(vec_1, vec_2)
        if np.isclose(np.linalg.norm(plane_normal), 0.0):
            continue
        plane_normal /= np.linalg.norm(plane_normal)

        # check if similar normal already used by one of the 3 intersections
        normal_already_used = False
        for i in comb:
            used_normals = np.array(intersection_normals[i])
            if len(used_normals) > 0:
                used_dot = 1.0 - np.abs(np.dot(used_normals, plane_normal))
                if np.any(used_dot < np.deg2rad(0.1) / np.pi):
                    normal_already_used = True
                    break
        if normal_already_used:
            continue
        else:
            for i in comb:
                intersection_normals[i].append(plane_normal)

        geometry = camera.lift_polyline_to_plane_vectorized(
            strokes_topology[s_id]["stroke"], plane_point, plane_normal)
        if len(geometry) == 0:
            continue
        cand_curve = tools_3d.Curve3D()
        cand_curve.geometry = geometry
        cand_curve.plane_point = plane_point
        cand_curve.plane_normal = plane_normal
        candidate_curves.append(cand_curve)
        creation_intersections.append([comb[0], comb[1], comb[2]])

    #print(len(candidate_curves))
    # per-candidate bounding boxes: cheap pre-filter for the distance test
    cand_curve_bbox = [
        tools.bbox_3d_single_stroke(cand_curve.geometry)
        for cand_curve in candidate_curves
    ]
    #print("collect_planes time: " + str(
    #	(time.clock() - start_time) / 60.0) + " min")
    #for cand_curve in candidate_curves:
    #	if np.all(np.isclose(cand_curve.plane_normal, 0.0)):
    #		print("cand_curve.plane_normal")
    #		print(cand_curve.plane_normal)
    # get intersection sets for all candidate lines: all intersections which are
    # within 0.1*length(cand_line)
    #start_time = time.clock()
    intersection_sets = []
    empty_intersection_sets = []
    for cand_curve_id, cand_curve in enumerate(candidate_curves):
        intersection_set = creation_intersections[cand_curve_id]
        line_length = tools_3d.line_3d_length(cand_curve.geometry)
        merge_dist = min(0.02 * bbox_diag, 0.1 * line_length)
        #merge_dist = min(0.005 * bbox_diag, 0.05 * line_length)
        for inter_3d_id, inter_3d in enumerate(intersections_3d):
            if inter_3d_id in intersection_set or inter_3d.coords_3d is None:
                continue
            # cheap reject: a point farther than merge_dist from the bbox
            # cannot be within merge_dist of the polyline itself
            if tools_3d.distance_to_bbox(
                    inter_3d.coords_3d,
                    cand_curve_bbox[cand_curve_id]) > merge_dist:
                continue
            #dist_old = tools_3d.distance_point_to_polyline(inter_3d.coords_3d,
            #										   cand_curve.geometry)

            dist = tools_3d.distance_point_to_polyline_vectorized(
                inter_3d.coords_3d, cand_curve.geometry)
            # -1.0 appears to be the helper's failure sentinel — TODO confirm
            if np.isclose(dist, -1.0):
                continue
            #dist = tools_3d.distance_point_to_polyline(inter_3d.coords_3d,
            #										   cand_curve.geometry)
            #sys.exit()

            if dist < merge_dist:
                #if dist < 0.1 * line_length:
                #if dist < 0.02 * bbox_diag and dist < 0.1 * line_length:
                intersection_set.append(inter_3d_id)
        if len(intersection_set) > 0:
            intersection_sets.append(intersection_set)
        else:
            empty_intersection_sets.append(cand_curve_id)
    # remove empty candidate lines
    for del_id in sorted(empty_intersection_sets, reverse=True):
        del candidate_curves[del_id]

    #print("collect_intersections time: " + str(
    #	(time.clock() - start_time) / 60.0) + " min")

    #start_time = time.clock()
    clustered_candidate_curves, clustered_intersection_sets = \
     cluster_candidate_curves_v2(candidate_curves, intersection_sets, dep_node_id=s_id,
            intersections_3d=intersections_3d)
    #print("clustering time: " + str(
    #	(time.clock() - start_time) / 60.0) + " min")

    #for cand_curve in clustered_candidate_curves:
    #	if np.all(np.isclose(cand_curve.plane_normal, 0.0)):
    #		print("clustered_cand_curve.plane_normal")
    #		print(cand_curve.plane_normal)
    return clustered_candidate_curves, clustered_intersection_sets
# Exemplo n.º 4
# 0
def merge_over_contexts_straight_lines(all_context_lines,
                                       bbox_diag,
                                       dep_node_id=-1):
    """Merge geometrically near-identical straight context lines.

    Each ``all_context_lines`` entry is a [curve, intersection_set,
    version_set] triple.  Lines whose two endpoints coincide within a merge
    distance and whose (normalized) plane normals are nearly parallel are
    clustered; each cluster's geometries are merged via
    ``tools_3d.merge_n_curves``, and identical sketch-version sets within a
    merged line are collapsed with their intersections deduplicated by
    ``inter_id``.

    Parameters
    ----------
    all_context_lines : list
        Triples of (Curve3D-like, intersections, sketch-version ids).
    bbox_diag : float
        Sketch bounding-box diagonal; scales the endpoint merge distance.
    dep_node_id : int, optional
        Only referenced by commented-out debugging code.

    Returns
    -------
    list
        Merged [geometry, inter_set_clusters, version_set_clusters] triples.
    """
    # debug guard: an empty version set on input indicates an upstream bug
    for c in all_context_lines:
        if len(c[-1]) == 0:
            print("beginning: EMPTY_VERSION SET")
    merged_context_lines = []
    clustered_geom_lines = []
    for context_line_id, context_line in enumerate(all_context_lines):
        found_cluster = False
        for cluster_id, cluster in enumerate(clustered_geom_lines):
            for geom_line_id in cluster:
                shorter_length = min(
                    tools_3d.line_3d_length(
                        all_context_lines[geom_line_id][0].geometry),
                    tools_3d.line_3d_length(
                        all_context_lines[context_line_id][0].geometry))
                merge_dist = min(0.005 * bbox_diag, 0.05 * shorter_length)
                # NOTE(review): the assignment above is immediately
                # overwritten — only the looser threshold below takes effect
                merge_dist = min(0.02 * bbox_diag, 0.1 * shorter_length)
                # same-orientation endpoint match only (the commented-out
                # branch below also matched reversed lines)
                if (np.linalg.norm(all_context_lines[geom_line_id][0].geometry[0] -
                     all_context_lines[context_line_id][0].geometry[
                      0]) < merge_dist) and \
                 (np.linalg.norm(all_context_lines[geom_line_id][0].geometry[-1] -
                     all_context_lines[context_line_id][0].geometry[
                      -1]) < merge_dist):#) or \
                    #((np.linalg.norm(all_context_lines[geom_line_id][0].geometry[0] -
                    #				 all_context_lines[context_line_id][0].geometry[
                    #					 -1]) < 0.1 * shorter_length) and
                    # (np.linalg.norm(
                    #	 all_context_lines[geom_line_id][0].geometry[-1] -
                    #	 all_context_lines[context_line_id][0].geometry[
                    #		 0]) < 0.1 * shorter_length)):
                    plane_normal_1 = all_context_lines[geom_line_id][
                        0].plane_normal
                    plane_normal_2 = all_context_lines[context_line_id][
                        0].plane_normal
                    if np.isclose(np.linalg.norm(plane_normal_1), 0.0) or \
                      np.isclose(np.linalg.norm(plane_normal_2), 0.0):
                        continue
                    # in-place normalization mutates the stored normals
                    plane_normal_1 /= np.linalg.norm(plane_normal_1)
                    plane_normal_2 /= np.linalg.norm(plane_normal_2)
                    if 1.0 - np.abs(
                            np.dot(plane_normal_1,
                                   plane_normal_2)) < np.deg2rad(5) / np.pi:
                        found_cluster = True
                        clustered_geom_lines[cluster_id].append(
                            context_line_id)
                        break
            if found_cluster:
                break
        if not found_cluster:
            clustered_geom_lines.append([context_line_id])

    #print("len(clustered_geom_lines)")
    #print(len(clustered_geom_lines))
    # merge each cluster's geometries and gather their inter/version sets
    for cluster_id, cluster in enumerate(clustered_geom_lines):
        #print("len(cluster): ", len(cluster))
        new_cluster = []
        clustered_curves = [
            all_context_lines[geom_line_id][0] for geom_line_id in cluster
        ]
        #clustered_intersections = []
        #for inter_set in [all_context_lines[geom_line_id][1] for geom_line_id in cluster]:
        #	for inter in inter_set:
        #		clustered_intersections.append(inter)
        #geom = tools_3d.merge_n_curves(
        #	[all_context_lines[geom_line_id][0] for geom_line_id in cluster])
        #if dep_node_id == 42:
        #	geom = tools_3d.merge_n_curves(
        #			clustered_curves, VERBOSE=True, intersections=clustered_intersections)
        #else:
        geom = tools_3d.merge_n_curves(clustered_curves)
        new_cluster.append(geom)
        new_cluster.append(
            [all_context_lines[geom_line_id][1] for geom_line_id in cluster])
        new_cluster.append(
            [all_context_lines[geom_line_id][2] for geom_line_id in cluster])
        merged_context_lines.append(new_cluster)

    # cluster identical sketch_version_sets
    for context_lined_id, (_, inter_sets,
                           version_sets) in enumerate(merged_context_lines):
        version_set_clusters = []
        inter_set_clusters = []
        cluster_ids = []
        for version_set_id, version_set in enumerate(version_sets):
            if len(version_set) == 0:
                print("EMPTY VERSION SET!")
            found_cluster = False
            for version_set_cluster_id, version_set_cluster in enumerate(
                    version_set_clusters):
                # equal length + full membership overlap => identical set
                if len(version_set_cluster) == len(version_set) and \
                  np.sum(
                   np.in1d(version_set_cluster, version_set)) == len(
                 version_set):
                    found_cluster = True
                    cluster_ids[version_set_cluster_id].append(version_set_id)
                    inter_sets[version_set_cluster_id] += inter_sets[
                        version_set_id]
                if found_cluster:
                    break
            if not found_cluster:
                version_set_clusters.append(version_set)
                inter_set_clusters.append(inter_sets[version_set_id])
                cluster_ids.append([version_set_id])

            # kick out identical intersections
            # NOTE(review): this dedup and the result write-back run on
            # every version_set iteration, not once after the loop —
            # idempotent but presumably meant to be dedented; confirm
            for inter_set_cluster_id, inter_set_cluster in enumerate(
                    inter_set_clusters):
                inter_ids = [inter.inter_id for inter in inter_set_cluster]
                u, inter_ids_unique = np.unique(inter_ids, return_index=True)
                inter_set_clusters[inter_set_cluster_id] = [
                    inter_set_cluster[inter_id]
                    for inter_id in inter_ids_unique
                ]
            merged_context_lines[context_lined_id][1] = inter_set_clusters
            merged_context_lines[context_lined_id][2] = version_set_clusters
            if len(version_set_clusters) == 0:
                print("EMPTY VERSION CLUSTER!")

    # merged_context_lines = []
    return merged_context_lines
    def update_dep_graph(self, merged_context_lines, dep_node_id):
        """Insert the merged candidate lines for ``dep_node_id`` into the graph.

        Each context line may be compatible with several sketch versions; if
        a version has already been claimed by an earlier context line of this
        stroke, the version's whole state is duplicated into a fresh version
        id so the two candidates can diverge.  Every context line then
        becomes a CandidateNode on the dependency node, its intersections are
        back-propagated onto the prior strokes of each version, and the node
        is marked assigned when only a single candidate remains.
        """
        versions_used_by_stroke = []
        # go through all versions of all context_lines
        # if a version has already been used used by a previous context line,
        # create a new sketch_version

        # first: duplicate data when new branching occurs
        for context_line in merged_context_lines:
            for version_set_id, version_set in enumerate(context_line[2]):
                #print(version_set)
                for version in version_set:
                    if version in versions_used_by_stroke:
                        # create new version
                        self.sketch_version_counter += 1
                        self.sketch_versions_reference[self.sketch_version_counter] = \
                            deepcopy(self.sketch_versions_reference[version])
                        # update sketch_versions of candidate_nodes
                        for dep_node_id_tmp, (cand_id, _, _) in \
                                enumerate(self.sketch_versions_reference[version]):
                            self.dependency_nodes[dep_node_id_tmp].\
                                candidate_nodes[cand_id].sketch_versions.\
                                append(self.sketch_version_counter)
                        versions_used_by_stroke.append(self.sketch_version_counter)
                    else:
                        versions_used_by_stroke.append(version)

        #print("versions_used_by_stroke")
        #print(versions_used_by_stroke)
        # next: update data
        # version_counter walks versions_used_by_stroke in the exact order
        # the duplication pass above appended them
        version_counter = 0
        for context_line_id, context_line in enumerate(merged_context_lines):
            versions_used_by_context_line = []
            for version_set_id, version_set in enumerate(context_line[2]):
                inter_set = context_line[1][version_set_id]
                for old_version in version_set:
                    version = versions_used_by_stroke[version_counter]
                    versions_used_by_context_line.append(version)
                    version_counter += 1
                    # update self.sketch_versions_reference
                    self.sketch_versions_reference[version].append([context_line_id, deepcopy(inter_set), ScoreContainer()])
                    # add intersections to prior strokes
                    for inter in inter_set:
                        prev_stroke_id = inter.stroke_ids[1 - np.argwhere(np.array(inter.stroke_ids) == self.dependency_nodes[dep_node_id].stroke_id).flatten()[0]]
                        self.sketch_versions_reference[version][self.stroke_id_to_dep_node_id[prev_stroke_id]][1].append(inter)
            cand = CandidateNode(candidate_nb=context_line_id)
            cand.geometry = context_line[0].geometry
            cand.plane_point = context_line[0].plane_point
            cand.plane_normal = context_line[0].plane_normal

            if (not self.dependency_nodes[dep_node_id].is_curve) and \
                    self.dependency_nodes[dep_node_id].axis_label < 3:
                # add axis-alignment score to candidate line
                cand.axis_alignment = tools_3d.compute_axis_alignment(cand.geometry,
                                                                      self.dependency_nodes[dep_node_id].axis_label)

            cand.sketch_versions = versions_used_by_context_line
            self.dependency_nodes[dep_node_id].insert_candidate_node(cand)
            self.stroke_lengths_3d[(dep_node_id, cand.candidate_nb)] = tools_3d.line_3d_length(cand.geometry)

        # a single surviving candidate means the stroke's 3D position is fixed
        if len(list(self.dependency_nodes[dep_node_id].candidate_nodes.keys())) == 1:
            self.dependency_nodes[dep_node_id].is_assigned = True
    def display_sketch_versions(self):
        # remove old displays
        ps.remove_all_structures()
        #for i in self.sketch_version_counter:
        # display in total_score order
        sketch_versions = self.get_n_best_sketch_versions(len(self.sketch_versions_reference.keys()))
        #sketch_versions = list(self.sketch_versions_reference.keys())[:10]
        #sketch_versions = self.get_n_best_sketch_versions(1)
        for sketch_version_id, sketch_version in enumerate(sketch_versions):
            # collect 3D lines
            lines = []
            scores = []
            line_coverages = []
            axis_alignments = []
            orthogonalities = []
            tangentialities = []
            planarities = []
            foreshortenings = []
            curve_geoms = []
            circularities = []
            is_assigned = []
            dep_node_ids = []

            for dep_node_id, (cand_id, _, score_container) in enumerate(self.sketch_versions_reference[sketch_version]):
                if len(self.dependency_nodes[dep_node_id].candidate_nodes) == 0:
                    continue
                dep_node_ids.append(dep_node_id)
                lines.append(self.dependency_nodes[dep_node_id].candidate_nodes[cand_id].geometry)
                #if dep_node_id == 26:
                #    print("dep_node_id: ", dep_node_id)
                #    print(lines[-1])
                scores.append(score_container.total_score)
                line_coverages.append(score_container.line_coverage)
                axis_alignments.append(score_container.axis_alignment)
                orthogonalities.append(score_container.orthogonality)
                tangentialities.append(score_container.tangentiality)
                planarities.append(score_container.planarity)
                foreshortenings.append(score_container.foreshortening)
                curve_geoms.append(score_container.curve_geom)
                circularities.append(score_container.circularity)
                is_assigned.append(self.dependency_nodes[dep_node_id].is_assigned)
            nodes = []
            edge_counter = 0
            edges = []
            enabled = False
            if sketch_version_id == 0:
                enabled = True

            line_ids = []
            for line_id, line in enumerate(lines):
                for p in line:
                    nodes.append(p)
                for p_id in range(len(line)-1):
                    edges.append([edge_counter, edge_counter+1])
                    line_ids.append(line_id)
                    edge_counter += 1
                edge_counter += 1

                #edge_counter += 1
                #nodes.append(line[0])
                #nodes.append(line[-1])
                #edges.append([edge_counter, edge_counter+1])
                #edge_counter += 2
            sketch_3d = ps.register_curve_network("sketch_version: "+str(sketch_version),
                                                  nodes=np.array(nodes), edges=np.array(edges),
                                                  enabled=enabled)
            sketch_3d.add_scalar_quantity("dep_node_ids", np.array(dep_node_ids)[line_ids], defined_on="edges",
                                          enabled=False, cmap="reds")
            sketch_3d.add_scalar_quantity("line_coverage", np.array(line_coverages)[line_ids], defined_on="edges",
                                          enabled=True, cmap="jet", vminmax=(0., 1.))
            sketch_3d.add_scalar_quantity("axis_alignment", np.array(axis_alignments)[line_ids], defined_on="edges",
                                          enabled=True, cmap="jet", vminmax=(0., 1.))
            sketch_3d.add_scalar_quantity("orthogonality", np.array(orthogonalities)[line_ids], defined_on="edges",
                                          enabled=True, cmap="jet", vminmax=(0., 1.))
            sketch_3d.add_scalar_quantity("tangentiality", np.array(tangentialities)[line_ids], defined_on="edges",
                                          enabled=True, cmap="jet", vminmax=(0., 1.))
            sketch_3d.add_scalar_quantity("planarity", np.array(planarities)[line_ids], defined_on="edges",
                                          enabled=True, cmap="jet", vminmax=(0., 1.))
            sketch_3d.add_scalar_quantity("foreshortening", np.array(foreshortenings)[line_ids], defined_on="edges",
                                          enabled=True, cmap="jet", vminmax=(0., 1.))
            sketch_3d.add_scalar_quantity("curve_geom", np.array(curve_geoms)[line_ids], defined_on="edges",
                                          enabled=True, cmap="jet", vminmax=(0., 1.))
            sketch_3d.add_scalar_quantity("circularity", np.array(circularities)[line_ids], defined_on="edges",
                                          enabled=True, cmap="jet", vminmax=(0., 1.))
            sketch_3d.add_scalar_quantity("is_assigned", np.array(is_assigned)[line_ids], defined_on="edges",
                                          enabled=True, cmap="jet", vminmax=(0., 1.))
            sketch_3d.add_scalar_quantity("total_score", np.array(scores)[line_ids], defined_on="edges",
                                          enabled=True, cmap="jet", vminmax=(0., 1.))
            #print("sketch_version: "+str(sketch_version))
            #print("score: "+str(np.sum(scores)))
            #print(scores[-1])

            # collect 3D intersections
            points = []
            angles = []
            dep_node_ids = []
            distances = []
            curve_length = []
            for dep_node_id, (line_id, inter_set, _) in enumerate(self.sketch_versions_reference[sketch_version]):
                line = self.dependency_nodes[dep_node_id].candidate_nodes[line_id]
                line_length = tools_3d.line_3d_length(line.geometry)
                for inter in inter_set:
                    points.append(inter.coords_3d)
                    angles.append(inter.tangents_angle_3d)
                    dep_node_ids.append(self.stroke_id_to_dep_node_id[inter.stroke_ids])
                    dist = tools_3d.distance_point_to_polyline_vectorized(inter.coords_3d,
                                                               line.geometry)
                    distances.append(dist)
                    curve_length.append(line_length)
            if len(points) > 0:
                inter_cloud = ps.register_point_cloud("sketch_version: "+str(sketch_version),
                                        points=np.array(points), enabled=enabled, radius=0.01)
                inter_cloud.add_scalar_quantity("tangents_angle_3d", np.array(angles),
                                                enabled=True, cmap="jet", vminmax=(0., 90.))
                inter_cloud.add_scalar_quantity("first_stroke", np.array(dep_node_ids)[:, 0])
                inter_cloud.add_scalar_quantity("snd_stroke", np.array(dep_node_ids)[:, 1])
                inter_cloud.add_scalar_quantity("distance", np.array(distances))
                inter_cloud.add_scalar_quantity("line_length", np.array(curve_length))