Example #1
    def cluster(self):
        """
        Cluster a vector field.

        Notes
        -----
        It sets `self._clustered_field`, `self._labels`, `self._centers`, and `self._loss`.
        Returns `None`.
        """
        # convert list of vectors to an array
        X = np.array(self.vector_field.to_sequence())

        # perform the clustering
        r = self._cluster(X, self.seeds, self.distance_func, self.iters,
                          self.tol, False)
        labels, centers, losses = r

        # fetch assigned centroid to each entry in the vector field
        clusters = centers[labels]

        # create a new vector field
        clustered_field = VectorField()
        clustered_labels = {}

        for index, fkey in enumerate(self.vector_field.keys()):
            vector = clusters[index, :].tolist()
            clustered_field.add_vector(fkey, vector)
            clustered_labels[fkey] = labels[index]

        # store data into attributes
        self._clustered_field = clustered_field  # clustered vector field
        self._labels = clustered_labels  # face labels
        self._centers = {idx: center for idx, center in enumerate(centers)}
        self._loss = losses[-1]
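For reference, a minimal sketch of the assignment/update loop that a cosine k-means helper such as `self._cluster` could implement. The function below is hypothetical and only illustrates the contract (labels, centers, losses); the library's actual helper may differ.

import numpy as np

def cosine_kmeans_sketch(X, seeds, iters=100, tol=1e-6):
    """Toy cosine k-means returning labels, centers, and the loss per iteration."""
    centers = np.array(seeds, dtype=float)
    losses = []
    for _ in range(iters):
        # cosine similarity between every vector and every center
        Xn = X / np.linalg.norm(X, axis=1, keepdims=True)
        Cn = centers / np.linalg.norm(centers, axis=1, keepdims=True)
        similarity = Xn @ Cn.T
        labels = np.argmax(similarity, axis=1)
        # loss: mean cosine distance to the assigned center
        losses.append(np.mean(1.0 - similarity[np.arange(len(X)), labels]))
        # update: every center becomes the mean of the vectors assigned to it
        for k in range(len(centers)):
            members = X[labels == k]
            if len(members):
                centers[k] = members.mean(axis=0)
        if len(losses) > 1 and abs(losses[-2] - losses[-1]) < tol:
            break
    return labels, centers, losses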
Example #2
def my_vector_field():
    """
    A vector field compatible with the mesh above.
    """
    field = VectorField()

    field.add_vector(0, [0.0, 1.0, 2.0])
    field.add_vector(1, [1.0, 1.0, 1.0])

    return field
Example #3
def vector_field(vectors):
    """
    A vector field with three vectors.
    """
    vf = VectorField()

    for key, vector in enumerate(vectors):
        vf.add_vector(key, vector)

    return vf
Example #4
def vector_field():
    """
    A vector field with two vectors.
    """
    field = VectorField()

    field.add_vector(0, [0.0, 1.0, 2.0])
    field.add_vector(1, [1.0, 1.0, 1.0])

    return field
Example #5
def vector_field():
    """
    """
    vectors = {0: [1.0, 0.0, 0.0], 1: [1.0, -1.0, 0.0], 2: [1.0, 1.0, 2.0]}

    v_field = VectorField()

    for key, vector in vectors.items():
        v_field.add_vector(key, vector)

    return v_field
Example #6
def vectors_dict_to_array(vector_field, num_faces):
    """
    Converts a vector field into an array.

    Parameters
    ----------
    vector_field : `directional_clustering.fields.VectorField()`
        A vector field.
    num_faces : `int`
        The number of faces in a mesh.

    Returns
    -------
    vectors_array : `np.array`, shape (num_faces, 3)
        An array of vectors, one row per face.
    """
    if not isinstance(vector_field, VectorField):
        raise TypeError("vector_field must be a VectorField instance")

    # convert vectors dictionary into a numpy array
    vectors_array = zeros((num_faces, 3))
    for fkey, vec in vector_field.items():
        vectors_array[fkey, :] = vec

    return vectors_array
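A brief usage sketch for the helper above, assuming `zeros` comes from numpy (e.g. `from numpy import zeros`) and that face keys run from 0 to `num_faces - 1`:

field = VectorField()
field.add_vector(0, [1.0, 0.0, 0.0])
field.add_vector(1, [0.0, 1.0, 0.0])

array = vectors_dict_to_array(field, num_faces=2)
print(array.shape)  # (2, 3)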
Example #7
def test_vector_field_from_sequence(vectors_3d):
    """
    Checks that vectors are added in the order of the sequence.
    """
    vector_field = VectorField.from_sequence(vectors_3d)
    assert vector_field.vector(0) == vectors_3d[0], vector_field.keys()
    assert vector_field.vector(1) == vectors_3d[1]
    assert vector_field.size() == len(vectors_3d)
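For context, a plausible (hypothetical) shape of the constructor exercised by this test; the real `VectorField.from_sequence` classmethod may be implemented differently:

def from_sequence_sketch(sequence):
    # keys follow the order of the sequence, which is what the test asserts
    field = VectorField()
    for key, vector in enumerate(sequence):
        field.add_vector(key, vector)
    return field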
Example #8
def transformed_stress_vector_fields(mesh, vector_fields, stress_type,
                                     ref_vector):
    """
    Rescales a pair of orthogonal vector fields based on a plane stress transformation.
    """
    vf1, vf2 = vector_fields

    # TODO: mapping is not robust! depends on naming convention
    stress_components = {
        "bending": {
            "names": ["mx", "my", "mxy"],
            "ps": "m_1"
        },
        "axial": {
            "names": ["nx", "ny", "nxy"],
            "ps": "n_1"
        }
    }

    stress_names = stress_components[stress_type]["names"]
    vf_ps = mesh.vector_field(stress_components[stress_type]["ps"])

    vf1_transf = VectorField()
    vf2_transf = VectorField()

    for fkey in mesh.faces():
        # query stress components
        sx, sy, sxy = mesh.face_attributes(fkey, names=stress_names)

        # generate principal stresses and angles
        s1a, s1 = principal_stresses_and_angles(sx, sy, sxy)
        s1, angle1 = s1a

        vector_ps = vf_ps[fkey]
        vector1 = vf1[fkey]
        vector2 = vf2[fkey]

        # compute delta between reference vector and principal bending vector
        # TODO: will take m1 as reference. does this always hold?

        delta = angle1 - angle_vectors(vector_ps, ref_vector)
        # add delta to angles of the vector field to transform
        theta = delta + angle_vectors(vector1, ref_vector)

        # transform stresses - this becomes the scale of the vectors
        s1, s2, _ = transformed_stresses(sx, sy, sxy, theta)

        vf1_transf.add_vector(fkey, scale_vector(normalize_vector(vector1),
                                                 s1))
        vf2_transf.add_vector(fkey, scale_vector(normalize_vector(vector2),
                                                 s2))

    return vf1_transf, vf2_transf
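For reference, a hedged sketch of the plane stress rotation that a helper like `transformed_stresses` presumably performs; the library's sign and angle conventions may differ:

from math import cos, sin

def transformed_stresses_sketch(sx, sy, sxy, theta):
    # standard plane stress transformation by an angle theta (in radians)
    c, s = cos(theta), sin(theta)
    sx_t = sx * c ** 2 + sy * s ** 2 + 2.0 * sxy * s * c
    sy_t = sx * s ** 2 + sy * c ** 2 - 2.0 * sxy * s * c
    sxy_t = (sy - sx) * s * c + sxy * (c ** 2 - s ** 2)
    return sx_t, sy_t, sxy_t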
Example #9
def comb_vector_field(vector_field, mesh):
    """
    Combs a vector field defined on a reference triangular mesh.

    Parameters
    ----------
    vector_field : `directional_clustering.fields.VectorField`
        A vector field.
    mesh : `directional_clustering.mesh.MeshPlus`
        The reference triangular mesh.

    Notes
    -----
    This function uses numpy and libigl to comb a field.
    The mesh must be composed only of triangular faces.
    """
    # mesh information
    F = []
    for fkey in mesh.faces():
        face_indices = mesh.face_vertices(fkey)
        assert len(face_indices) == 3
        F.append(face_indices)

    VE = []
    for vkey in sorted(list(mesh.vertices())):
        VE.append(mesh.vertex_coordinates(vkey))

    # numpify mesh information
    F = np.reshape(np.array(F), (-1, 3))
    VE = np.reshape(np.array(VE), (-1, 3))

    VF = [vector_field[fkey] for fkey in mesh.faces()]
    VF = np.reshape(np.array(VF), (-1, 3))

    combed_vf = comb_line_field(VE, F, VF)
    vf = VectorField()
    for idx, fkey in enumerate(mesh.faces()):
        vf.add_vector(fkey, combed_vf[idx, :].tolist())

    return vf
Example #10
    def cluster(self):
        """
        Cluster a vector field.

        Notes
        -----
        It sets `self._clustered_field`, `self._labels`, `self._centers`, and `self._loss`.
        Returns `None`.
        """
        # do clustering
        cluster_log = k_means(self._initial_clusters, self._faces, self.iters,
                              self.merge_split)

        # last chunk in the cluster log
        final_clusters = cluster_log.pop()

        # create a new vector field
        clustered_field = VectorField()
        clustered_labels = {}
        centers = {}

        # fill arrays with results
        # TODO: Refactor this block!
        loss = 0
        for i, cluster in final_clusters.items():
            centroid = cluster.proxy
            centers[i] = centroid

            loss += cluster.distortion

            for fkey in cluster.faces_keys:
                clustered_field.add_vector(fkey, centroid)
                clustered_labels[fkey] = cluster.id

        # assign arrays as attributes
        self._clustered_field = clustered_field
        self._labels = clustered_labels
        self._centers = centers
        self._loss = loss
Example #11
    def vector_field(self, name, vector_field=None):
        """
        Gets or sets a vector field that lives on the mesh.

        Parameters
        -----------
        name  : `str`
            The name of the vector field to get or to set.
        vector_field  : `directional_clustering.fields.VectorField`, optional.
            The vector field to store. Defaults to `None`.

        Returns
        --------
        vector_field : `directional_clustering.fields.VectorField`
            The fetched vector field if only a `name` was input.
            Returns `None` if the attribute does not exist or does not store a vector field.

        Notes
        -----
        Vector fields are stored as face attributes of a mesh.
        Refer to `compas.datastructures.face_attribute()` for more details.
        """
        if vector_field is None:
            vector_field = VectorField()
            for fkey in self.faces():
                try:
                    vector = self.face_attribute(fkey, name)
                    if type(vector) is list:
                        vector_field.add_vector(fkey, vector)
                    else:
                        raise ValueError
                except ValueError:
                    return None  # the attribute doesn't exist or it is not a vector field
            return vector_field
        else:
            msg = "The vector field to add is incompatible with the mesh"
            assert vector_field.size() == self.number_of_faces(), msg
            for vkey in vector_field.keys():
                self.face_attribute(vkey, name, vector_field.vector(vkey))
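A hypothetical round trip with the accessor above; the mesh file and the field name are placeholders:

mesh = MeshPlus.from_json("some_mesh.json")  # placeholder path

field = VectorField()
for fkey in mesh.faces():
    field.add_vector(fkey, [1.0, 0.0, 0.0])

mesh.vector_field("my_field", field)        # setter: stored as face attributes
same_field = mesh.vector_field("my_field")  # getter: rebuilt from face attributes
assert same_field.size() == mesh.number_of_faces()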
Example #12
def directional_clustering(
        filenames,
        vf_name,
        algo_name="cosine_kmeans",
        n_init=5,
        n_clusters_max=10,
        eps=0.02,  # loss value threshold
        iters=100,
        tol=1e-6,
        stop_early=False,
        comb_vectors=False,
        align_vectors=False,
        alignment_ref=[1.0, 0.0, 0.0],
        smooth_iters=0,
        damping=0.5,
        plot_loss=True,
        draw_colorbar=True,
        save_json=False,
        save_img=False,
        draw_faces=False):  # if false, it will draw dots
    """
    Clusters a vector field that has been defined on a mesh. Exports a JSON file.

    Parameters
    ----------
    filenames : `str` or `list` of `str`
        The name(s) of the JSON file(s) that encode a mesh.
        All JSON files must reside in this repo's data/json folder.
    """

    # ==========================================================================
    # Make a plot
    # ==========================================================================

    fig, ax = plt.subplots(figsize=(9, 6))

    # ==========================================================================
    # Set directory of input JSON files
    # ==========================================================================

    # Relative path to the JSON file that stores the vector fields and the mesh info
    # The JSON files must be stored in the data/json_files folder

    if isinstance(filenames, str):
        filenames = [filenames]

    if isinstance(smooth_iters, int):
        smooth_iters = [smooth_iters] * len(filenames)

    for filename, smooth_iter in zip(filenames, smooth_iters):

        print("\nWorking now with: {}".format(filename))

        name_in = filename + ".json"
        json_in = os.path.abspath(os.path.join(JSON, name_in))

        # ==========================================================================
        # Import a mesh as an instance of MeshPlus
        # ==========================================================================

        mesh = MeshPlus.from_json(json_in)

        # ==========================================================================
        # Extract vector field from mesh for clustering
        # ==========================================================================

        vectors = mesh.vector_field(vf_name)
        vectors_raw = mesh.vector_field(vf_name)

        # ==========================================================================
        # Align vector field to a reference vector
        # ==========================================================================

        if align_vectors:
            align_vector_field(vectors, alignment_ref)

    # ==========================================================================
    # Comb the vector field -- remember the hair ball theorem (seams exist)
    # ==========================================================================

        if comb_vectors:
            vectors = comb_vector_field(vectors, mesh)

    # ==========================================================================
    # Apply smoothing to the vector field
    # ==========================================================================

        if smooth_iter:
            print(
                "Smoothing vector field for {} iterations".format(smooth_iter))
            smoothen_vector_field(vectors, mesh.face_adjacency(), smooth_iter,
                                  damping)

    # ==========================================================================
    # Do K-means Clustering
    # ==========================================================================

        errors = []
        reached = False

        for i in range(1, n_clusters_max + 1):
            print()
            print("Clustering started...")
            print("Generating {} clusters".format(i))

            # perform n different initializations because of random seeding
            error_best = np.inf
            clusterer_best = None

            for _ in range(n_init):

                # Create an instance of a clustering algorithm from ClusteringFactory
                clustering_algo = ClusteringFactory.create(algo_name)
                clusterer = clustering_algo(mesh, vectors, i, iters, tol)
                clusterer.cluster()
                clustered_field = clusterer.clustered_field

                # ==========================================================================
                # Compute "loss" of clustering
                # ==========================================================================

                field_errors = np.zeros(mesh.number_of_faces())
                for fkey in mesh.faces():
                    error = distance_cosine(clustered_field.vector(fkey),
                                            vectors_raw.vector(fkey))
                    field_errors[fkey] = error
                error = np.mean(field_errors)

                # pick best contender
                if error < error_best:
                    error_best = error
                    clusterer_best = clusterer

            print("Clustering ended!")
            print(
                "Clustered Field Mean Error (Cosine Distances) after {} init: {}"
                .format(n_init, error_best))

            # ==========================================================================
            # Store data
            # ==========================================================================

            # record best error after n initializations
            errors.append(error_best)

            # store results in clustered_field and labels
            clustered_field = clusterer_best.clustered_field
            labels = clusterer_best.labels

            # ==========================================================================
            # Plot cluster labels
            # ==========================================================================

            # plot image
            if save_img:
                plot_mesh_clusters(mesh, labels, draw_faces, draw_colorbar,
                                   filename, save_img)

            if i < 2:
                continue

            # delta_error = (errors[-2] - errors[-1]) / errors[-1]
            # print("Delta error: {}".format(delta_error))

            if fabs(error_best) <= eps:
                print("Convergence threshold of {} reached".format(eps))

                if not reached:
                    k_best = i
                    k_best_error = error_best
                    clustered_field_best = clustered_field
                    labels_best = labels
                    reached = True

                if stop_early:
                    print("Stopping early...")
                    break

    # ==========================================================================
    # Plot errors
    # ==========================================================================

        if plot_loss:

            plot_label = r"\_".join(filename.split("_"))
            plot = ax.plot(errors, label=plot_label, zorder=1)
            c = plot[0].get_color()
            ax.scatter(k_best - 1,
                       k_best_error,
                       marker='D',
                       s=100,
                       color=c,
                       zorder=2)

    # ==========================================================================
    # Variable reassignment for convenience
    # ==========================================================================

        labels = labels_best
        clustered_field = clustered_field_best

        # ==========================================================================
        # Assign cluster labels to mesh
        # ==========================================================================

        mesh.cluster_labels("cluster", labels)

        # ==========================================================================
        # Generate field orthogonal to the clustered field
        # ==========================================================================

        # add perpendicular field that preserves magnitude
        # assumes that vector field name has format foo_bar_1 or baz_2
        vf_name_parts = vf_name.split("_")
        for idx, entry in enumerate(vf_name_parts):
            # record the index and value of the numeric entry (expects exactly one)
            if entry.isnumeric():
                dir_idx = idx
                direction = entry

        # flip the direction index: "1" becomes 2 and "2" becomes 1
        n_90 = 2
        if int(direction) == n_90:
            n_90 = 1

        vf_name_parts[dir_idx] = str(n_90)
        vf_name_90 = "_".join(vf_name_parts)

        vectors_90 = mesh.vector_field(vf_name_90)
        clustered_field_90 = VectorField()

        for fkey, _ in clustered_field.items():
            # the cross product with the global z axis assumes a planar mesh in XY
            cvec_90 = cross_vectors(clustered_field[fkey], [0, 0, 1])

            scale = length_vector(vectors_90[fkey])

            # flip the sign if the rotated vector points away from the original
            # perpendicular field
            if dot_vectors(cvec_90, vectors_90[fkey]) < 0:
                scale *= -1.0

            cvec_90 = scale_vector(cvec_90, scale)
            clustered_field_90.add_vector(fkey, cvec_90)

    # ==========================================================================
    # Scale fields based on stress transformations
    # ==========================================================================

        args = [
            mesh, (clustered_field, clustered_field_90), "bending",
            [1.0, 0.0, 0.0]
        ]
        clustered_field, clustered_field_90 = transformed_stress_vector_fields(
            *args)

        # ==========================================================================
        # Assign clustered fields to mesh
        # ==========================================================================

        clustered_field_name = vf_name + "_k"
        mesh.vector_field(clustered_field_name, clustered_field)

        clustered_field_name_90 = vf_name_90 + "_k"
        mesh.vector_field(clustered_field_name_90, clustered_field_90)

        # ==========================================================================
        # Export new JSON file for further processing
        # ==========================================================================

        if save_json:
            name_out = "{}_k{}_{}_eps_{}_smooth_{}.json".format(
                filename, k_best, vf_name, eps, smooth_iter)
            json_out = os.path.abspath(
                os.path.join(JSON, "clustered", algo_name, name_out))
            mesh.to_json(json_out)
            print("Exported clustered vector field with mesh to: {}".format(
                json_out))

    # ==========================================================================
    # Customize plot
    # ==========================================================================

    if plot_loss:

        plt.title(r"Best number of clusters $\hat{k}$", size=30)
        ax.grid(which='major', axis='both', linestyle='--')

        max_clusters = n_clusters_max

        ax.set_xticks(ticks=list(range(0, max_clusters)))
        ax.set_xticklabels(labels=list(range(1, max_clusters + 1)))

        ax.set_xlabel(r"Number of Clusters $k$", size=25)
        ax.set_ylabel(r"Loss $\mathcal{L}$", size=25)

        ax.legend()

        # ==========================================================================
        # Save the plot
        # ==========================================================================

        dt = datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
        img_name = "k_best" + "_" + dt + ".png"
        img_path = os.path.abspath(os.path.join(DATA, "images", img_name))
        plt.tight_layout()
        plt.savefig(img_path, bbox_inches='tight', pad_inches=0.1, dpi=600)
        print("Saved image to : {}".format(img_path))

        # ==========================================================================
        # Show the plot
        # ==========================================================================

        plt.show()
Example #13
def directional_clustering(filename,
                           algo_name="cosine_kmeans",
                           n_clusters=4,
                           iters=100,
                           tol=1e-6,
                           comb_vectors=False,
                           align_vectors=False,
                           alignment_ref=[1.0, 0.0, 0.0],
                           smooth_iters=0,
                           damping=0.5,
                           stress_transf_ref=[1.0, 0.0, 0.0]):
    """
    Clusters a vector field that has been defined on a mesh. Exports a JSON file.

    Parameters
    ----------
    filename : `str`
        The name of the JSON file that encodes a mesh.
        All JSON files must reside in this repo's data/json folder.

    algo_name : `str`
        The name of the algorithm to cluster the vector field.
        \nSupported options are `cosine_kmeans` and `variational_kmeans`.

    n_clusters : `int`
        The number of clusters to generate.

    iters : `int`
        The number of iterations to run the clustering algorithm for.

    tol : `float`
        A small threshold value that marks clustering convergence.
        \nDefaults to 1e-6.

    align_vectors : `bool`
        Flag to align vectors relative to a reference vector.
        \nDefaults to False.

    alignment_ref : `list` of `float`
        The reference vector for alignment.
        \nDefaults to [1.0, 0.0, 0.0].

    smooth_iters : `int`
        The number of iterations of Laplacian smoothing to apply to the vector field.
        \nIf set to 0, no smoothing will take place.
        Defaults to 0.

    damping : `float`
        A value between 0.0 and 1.0 to control the intensity of the smoothing.
        \nZero technically means no smoothing. One means maximum smoothing.
        Defaults to 0.5.
    """

    # ==========================================================================
    # Set directory of input JSON files
    # ==========================================================================

    # Relative path to the JSON file that stores the vector fields and the mesh info
    # The JSON files must be stored in the data/json_files folder

    name_in = filename + ".json"
    json_in = os.path.abspath(os.path.join(JSON, name_in))

    # ==========================================================================
    # Import a mesh as an instance of MeshPlus
    # ==========================================================================

    mesh = MeshPlus.from_json(json_in)

    # ==========================================================================
    # Search for supported vector field attributes and take one choice from user
    # ==========================================================================

    # supported vector field attributes
    available_vf = mesh.vector_fields()
    print("Avaliable vector fields on the mesh are:\n", available_vf)

    # the name of the vector field to cluster.
    while True:
        vf_name = input("Please choose one vector field to cluster:")
        if vf_name in available_vf:
            break
        else:
            print("This vector field is not available. Please try again.")

    # ==========================================================================
    # Extract vector field from mesh for clustering
    # ==========================================================================

    vectors = mesh.vector_field(vf_name)

    # ==========================================================================
    # Align vector field to a reference vector
    # ==========================================================================

    # the output of the FEA creates vector fields that are oddly oriented.
    # Eventually, what we want is to create "lines" from this vector
    # field that can be materialized into reinforcement bars, beams, or pipes which
    # do not really care about where the vectors are pointing to.
    # concretely, a vector can be pointing to [1, 1] or to [-1, -1], but for
    # architectural and structural reasons this would be the same, because both
    # versions are collinear.
    #
    # in short, mitigating directional duplicity is something we are kind of
    # sorting out with a heuristic. this will eventually improve the quality of the
    # clustering.
    #
    # how to pick the reference vector is arbitrary ("user-defined") and something
    # where more work remains to be done. in the meantime, I've used the global
    # x and global y vectors as references, which have worked ok for my purposes.

    if align_vectors:
        align_vector_field(vectors, alignment_ref)
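
    # For illustration, a hedged guess at what an alignment helper could do
    # (flip any vector whose dot product with the reference is negative);
    # the actual align_vector_field may work differently:
    #
    #   for fkey in vectors.keys():
    #       if dot_vectors(vectors.vector(fkey), alignment_ref) < 0:
    #           vectors.add_vector(fkey, scale_vector(vectors.vector(fkey), -1.0))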

    # ==========================================================================
    # Comb the vector field -- remember the hair ball theorem (seams exist)
    # ==========================================================================

    if comb_vectors:
        vectors = comb_vector_field(vectors, mesh)

    # ==========================================================================
    # Apply smoothing to the vector field
    # ==========================================================================

    # moreover, depending on the quality of the initial mesh, the FEA-produced
    # vector field will be very noisy, especially around "singularities".
    # this means there can be drastic orientation jumps/flips between two vectors
    # which will affect the quality of the clustering.
    # to mitigate this, we apply laplacian smoothing which helps to soften and
    # preserve continuity between adjacent vectors.
    # what this basically does is go through every face in the mesh,
    # query its neighboring faces, and then average together the vectors stored
    # on each of them. the "intensity" of this
    # operation is controlled with the number of smoothing iterations and the
    # damping coefficient.
    # too much smoothing, however, will actually distort the initial field to the
    # point that it no longer "represents" the original vector field,
    # so smoothing is something to use with care.

    if smooth_iters:
        smoothen_vector_field(vectors, mesh.face_adjacency(), smooth_iters,
                              damping)
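
    # For illustration only, one damped smoothing pass could look roughly like
    # the sketch below (hypothetical; smoothen_vector_field may differ):
    #
    #   for fkey, nbrs in mesh.face_adjacency().items():
    #       average = centroid_points([vectors.vector(n) for n in nbrs])
    #       step = scale_vector(subtract_vectors(average, vectors.vector(fkey)), damping)
    #       vectors.add_vector(fkey, add_vectors(vectors.vector(fkey), step))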

    # ==========================================================================
    # Do K-means Clustering
    # ==========================================================================

    # now we need to put the vector field into numpy arrays to carry out clustering
    # current limitation: at the moment this only works in planar 2d meshes!
    # other clustering methods, like variational clustering, can help working
    # directly on 3d by leveraging the mesh datastructure
    # other ideas relate to actually "reparametrizing" (squishing) a 3d mesh into a
    # 2d mesh, carrying out clustering directly in 2d, and then reconstructing
    # the results back into the 3d mesh ("reparametrizing it back")

    # One of the key differences of this work is that we use cosine distance as
    # the base metric for clustering. this is in contrast to numpy/scipy,
    # whose implementations, as far as I remember, support other types of distances
    # like euclidean or manhattan in their kmeans implementations. this would not
    # work for the vector fields used here, but maybe this has changed now.
    # in any case, this "limitation" led me to write our own version of kmeans
    # which can do clustering based either on cosine or euclidean similarity.

    # kmeans is sensitive to initialization.
    # there are a bunch of approaches that go around it, like kmeans++.
    # I use here another heuristic which iteratively finds the initial seeds
    # using a furthest point strategy, which basically picks as a new seed the
    # vector that is the "most distant" at a given iteration, using kmeans itself.
    # These seeds will be used later on as input to start the final kmeans run.
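    #
    # a hypothetical sketch of such a furthest-point seeding strategy
    # (the clusterer's actual initialization may differ; candidate_vectors is a placeholder):
    #
    #   seeds = [candidate_vectors[0]]
    #   while len(seeds) < n_clusters:
    #       dist = [min(distance_cosine(v, s) for s in seeds) for v in candidate_vectors]
    #       seeds.append(candidate_vectors[int(np.argmax(dist))])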

    print("Clustering started...")

    # Create an instance of a clustering algorithm from ClusteringFactory
    clustering_algo = ClusteringFactory.create(algo_name)
    clusterer = clustering_algo(mesh, vectors, n_clusters, iters, tol)

    # do kmeans clustering
    # labels contains the cluster index assigned to every vector in the vector field
    # centers contains the centroid of every cluster (the average of all vectors in
    # a cluster), and losses stores the losses generated per epoch.
    # the loss is simply the mean squared error of the cosine distance between
    # every vector and the centroid of the cluster it is assigned to
    # the goal of kmeans is to minimize this loss function

    clusterer.cluster()

    print("Loss Clustering: {}".format(clusterer.loss))
    print("Clustering ended!")

    # store results in clustered_field and labels
    clustered_field = clusterer.clustered_field
    labels = clusterer.labels

    # ==========================================================================
    # Compute mean squared error "loss" of clustering
    # ==========================================================================

    # probably would be better to encapsulate this in a function or in a Loss object
    # clustering_error = MeanSquaredError(vector_field, clustered_field)
    # clustering_error = MeanAbsoluteError(vector_field, clustered_field)

    errors = np.zeros(mesh.number_of_faces())
    for fkey in mesh.faces():
        # for every face compute difference between clustering output and
        # aligned+smoothed vector, might be better to compare against the
        # raw vector
        # difference_vector = subtract_vectors(clustered_field.vector(fkey), vectors.vector(fkey))
        # errors[fkey] = length_vector_sqrd(difference_vector)
        error = distance_cosine(clustered_field.vector(fkey),
                                vectors.vector(fkey))
        errors[fkey] = error

    mse = np.mean(errors)
    print("Clustered Field Mean Error (Cosine Distances): {}".format(mse))

    # ==========================================================================
    # Assign cluster labels to mesh
    # ==========================================================================

    mesh.cluster_labels("cluster", labels)

    # ==========================================================================
    # Generate field orthogonal to the clustered field
    # ==========================================================================

    # add perpendicular field that preserves magnitude
    # assumes that vector field name has format foo_bar_1 or baz_2
    vf_name_parts = vf_name.split("_")
    for idx, entry in enumerate(vf_name_parts):
        # record the index and value of the numeric entry (expects exactly one)
        if entry.isnumeric():
            dir_idx = idx
            direction = entry

    # flip the direction index: "1" becomes 2 and "2" becomes 1
    n_90 = 2
    if int(direction) == n_90:
        n_90 = 1

    vf_name_parts[dir_idx] = str(n_90)
    vf_name_90 = "_".join(vf_name_parts)

    vectors_90 = mesh.vector_field(vf_name_90)
    clustered_field_90 = VectorField()

    for fkey, _ in clustered_field.items():
        # the cross product with the global z axis assumes a planar mesh in XY
        cvec_90 = cross_vectors(clustered_field[fkey], [0, 0, 1])

        scale = length_vector(vectors_90[fkey])

        # flip the sign if the rotated vector points away from the original
        # perpendicular field
        if dot_vectors(cvec_90, vectors_90[fkey]) < 0:
            scale *= -1.0

        cvec_90 = scale_vector(cvec_90, scale)
        clustered_field_90.add_vector(fkey, cvec_90)

    # ==========================================================================
    # Scale fields based on stress transformations
    # ==========================================================================

    while True:
        stress_type = input(
            "What stress type are we looking at, bending or axial? ")

        if stress_type in ["bending", "axial"]:
            break
        else:
            print("Hmm...That's neither axial nor bending. Please try again.")

    args = [
        mesh, (clustered_field, clustered_field_90), stress_type,
        stress_transf_ref
    ]
    clustered_field, clustered_field_90 = transformed_stress_vector_fields(
        *args)

    # ==========================================================================
    # Assign clustered fields to mesh
    # ==========================================================================

    clustered_field_name = vf_name + "_k"
    mesh.vector_field(clustered_field_name, clustered_field)

    clustered_field_name_90 = vf_name_90 + "_k"
    mesh.vector_field(clustered_field_name_90, clustered_field_90)

    # ==========================================================================
    # Export new JSON file for further processing
    # ==========================================================================

    name_out = "{}_k{}_{}_smooth{}.json".format(filename, n_clusters, vf_name,
                                                smooth_iters)
    json_out = os.path.abspath(
        os.path.join(JSON, "clustered", algo_name, name_out))
    mesh.to_json(json_out)
    print("Exported clustered vector field with mesh to: {}".format(json_out))
Example #14
def test_vector_field_from_mesh_faces_fails(mesh):
    """
    Attempts to create a vector field from a nonexistent face attribute.
    """
    with pytest.raises(AssertionError):
        VectorField.from_mesh_faces(mesh, "unexisting_field")
Example #15
def test_vector_field_from_mesh_faces(mesh):
    """
    Adds all the vectors stored as face attributes in a mesh.
    """
    vector_field = VectorField.from_mesh_faces(mesh, "my_vector_field")
    assert vector_field.size() == mesh.number_of_faces()
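For context, a plausible (hypothetical) shape of the constructor exercised by the last two tests; the real `VectorField.from_mesh_faces` classmethod may behave differently, though the `AssertionError` expected above suggests it asserts that the attribute exists:

def from_mesh_faces_sketch(mesh, name):
    # collect one vector per face from the mesh's face attributes
    field = VectorField()
    for fkey in mesh.faces():
        vector = mesh.face_attribute(fkey, name)
        assert vector is not None, "No such vector field stored on the mesh faces"
        field.add_vector(fkey, vector)
    return field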