Example #1
def test_align_vectors_scaled_weights():
    rng = np.random.RandomState(0)
    c = Rotation.random(random_state=rng)
    b = rng.normal(size=(5, 3))
    a = c.apply(b)

    est1, rmsd1, cov1 = Rotation.align_vectors(a, b, np.ones(5), True)
    est2, rmsd2, cov2 = Rotation.align_vectors(a, b, 2 * np.ones(5), True)

    assert_allclose(est1.as_matrix(), est2.as_matrix())
    assert_allclose(np.sqrt(2) * rmsd1, rmsd2)
    assert_allclose(cov1, cov2)
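The scaled-weights check above works because the returned rmsd is the square root of the weighted sum of squared residuals (the unit-weight case is asserted explicitly at the end of the noise test in Example #6). A minimal sketch of that identity, with a little noise added so the residual is nonzero; it assumes a SciPy version where `weights` is accepted as a keyword argument:

import numpy as np
from scipy.spatial.transform import Rotation

rng = np.random.RandomState(0)
b = rng.normal(size=(5, 3))
a = Rotation.random(random_state=rng).apply(b) + 0.01 * rng.normal(size=(5, 3))
w = np.ones(5)

est1, rmsd1 = Rotation.align_vectors(a, b, weights=w)
est2, rmsd2 = Rotation.align_vectors(a, b, weights=2 * w)

# rmsd is sqrt(sum_i w_i * ||a_i - est.apply(b_i)||^2), so doubling every weight
# multiplies it by sqrt(2) while leaving the optimal rotation unchanged.
manual = np.sqrt(np.sum(w * np.sum((a - est1.apply(b)) ** 2, axis=1)))
assert np.isclose(rmsd1, manual)
assert np.isclose(rmsd2, np.sqrt(2) * rmsd1)
assert np.allclose(est1.as_matrix(), est2.as_matrix())
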
def get_predicted_distance(coordinates, noise=0):
    X = (coordinates - coordinates[0])
    x_true, y_true, z_true = X.transpose()

    D = distance_matrix(coordinates, coordinates)

    D2 = D ** 2

    M = np.zeros_like(D2)
    for i in range(D2.shape[0]):
        for j in range(D2.shape[0]):
            M[i, j] = (D2[0, j] + D2[i, 0] - D2[i, j]) / 2
            M[j, i] = M[i, j]

    noise = noise * np.random.randn(*M.shape)
    noise = (noise + noise.transpose()) / 2
    w, v = np.linalg.eigh(M + noise)

    size = len(w)
    nd = coordinates.shape[1]
    vevs = np.arange(size - nd, size)[::-1]

    x_v = v[:, vevs]

    sqrt_s = np.expand_dims(np.sqrt(w[vevs]), 0)
    x_v = (sqrt_s * x_v)
    x, y, z = x_v.transpose()

    R, loss = Rotation.align_vectors(x_v, X)
    x_new, y_new, z_new = R.apply(x_v, inverse=True).transpose()

    return x_true, y_true, x_new, y_new, D, D2, loss
def get_predicted(coordinates, noise=.2):
    X = (coordinates - coordinates[0])
    x_true, y_true, z_true = X.transpose()

    D = distance_matrix(coordinates, coordinates)

    M = np.inner(X, X)
    noise = noise * np.random.randn(*M.shape)
    noise = (noise + noise.transpose()) / 2
    w, v = np.linalg.eigh(M + noise)

    size = len(w)
    nd = coordinates.shape[1]
    vevs = np.arange(size - nd, size)[::-1]

    x_v = v[:, vevs]

    sqrt_s = np.expand_dims(np.sqrt(w[vevs]), 0)
    x_v = (sqrt_s * x_v)
    x, y, z = x_v.transpose()

    R, loss = Rotation.align_vectors(x_v, X)

    x_new, y_new, z_new = R.apply(x_v, inverse=True).transpose()

    return x_true, y_true, x_new, y_new, loss
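Both helpers above recover coordinates from pairwise distances before fixing the orientation with `Rotation.align_vectors`: the double loop in `get_predicted_distance` builds the Gram matrix of coordinates relative to point 0, whose eigendecomposition yields the coordinates up to an orthogonal transform. A minimal check of that Gram-matrix identity on synthetic data (NumPy/SciPy only, nothing from the helpers above is required):

import numpy as np
from scipy.spatial import distance_matrix

rng = np.random.RandomState(0)
X = rng.normal(size=(10, 3))
X0 = X - X[0]                                   # coordinates relative to point 0
D2 = distance_matrix(X, X) ** 2

# Law of cosines: <x_i - x_0, x_j - x_0> = (D2[i, 0] + D2[0, j] - D2[i, j]) / 2,
# which is exactly what the double loop above computes entry by entry.
M = (D2[:, [0]] + D2[[0], :] - D2) / 2
assert np.allclose(M, X0 @ X0.T)
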
Example #4
def test_align_vectors_no_rotation():
    x = np.array([[1, 2, 3], [4, 5, 6]])
    y = x.copy()

    r, rmsd = Rotation.align_vectors(x, y)
    assert_array_almost_equal(r.as_matrix(), np.eye(3))
    assert_allclose(rmsd, 0, atol=1e-6)
def extract_planes_and_polygons_from_mesh(tri_mesh, avg_peaks,
                                          polylidar_kwargs=dict(alpha=0.0, lmax=0.1, min_triangles=2000,
                                                                z_thresh=0.1, norm_thresh=0.95, norm_thresh_min=0.95, min_hole_vertices=50, task_threads=4),
                                          filter_polygons=True, pl_=None, optimized=False,
                                          postprocess=dict(filter=dict(hole_area=dict(min=0.025, max=100.0), hole_vertices=dict(min=6), plane_area=dict(min=0.05)),
                                                           positive_buffer=0.00, negative_buffer=0.00, simplify=0.0)):

    if pl_ is not None:
        pl = pl_
    else:
        pl = Polylidar3D(**polylidar_kwargs)

    avg_peaks_mat = MatrixDouble(avg_peaks)
    t0 = time.perf_counter()
    if optimized:
        all_planes, all_polygons = pl.extract_planes_and_polygons_optimized(tri_mesh, avg_peaks_mat)
    else:
        all_planes, all_polygons = pl.extract_planes_and_polygons(tri_mesh, avg_peaks_mat)
    t1 = time.perf_counter()

    # tri_set = pl.extract_tri_set(tri_mesh, avg_peaks_mat)
    # planes_tri_set = [np.argwhere(np.asarray(tri_set) == i)  for i in range(1, 2)]
    # o3d_mesh_painted = paint_planes(o3d_mesh, planes_tri_set)

    polylidar_time = (t1 - t0) * 1000
    # logging.info("Polygon Extraction - Took (ms): %.2f", polylidar_time)
    all_planes_shapely = []
    all_obstacles_shapely = []
    time_filter = []
    time_geometric_planes = []
    # all_poly_lines = []
    geometric_planes = []
    # all_polygons = [[NORMAL_0_POLY_1, NORMAL_0_POLY_2], [NORMAL_1_POLY_1]]
    if filter_polygons:
        vertices = np.asarray(tri_mesh.vertices)
        for i in range(avg_peaks.shape[0]):
            avg_peak = avg_peaks[i, :]
            rm, _ = R.align_vectors([[0, 0, 1]], [avg_peak])  # rotation that maps the dominant plane normal onto +Z
            polygons_for_normal = all_polygons[i]
            planes = all_planes[i]
            # print(polygons_for_normal)
            if len(polygons_for_normal) > 0:
                planes_shapely, obstacles_shapely, planes_indices, filter_time = filter_and_create_polygons(
                    vertices, polygons_for_normal, rm=rm, postprocess=postprocess)

                t3 = time.perf_counter()
                geometric_planes_for_normal = [extract_geometric_plane(plane_poly[0], planes[plane_idx], tri_mesh, avg_peak) for (
                    plane_poly, plane_idx) in zip(planes_shapely, planes_indices)]
                geometric_planes.append(geometric_planes_for_normal)
                t4 = time.perf_counter()
                time_geometric_planes.append((t4 - t3) * 1000)

                all_planes_shapely.extend(planes_shapely)
                all_obstacles_shapely.extend(obstacles_shapely)
                time_filter.append(filter_time)
                # all_poly_lines.extend(poly_lines)

    timings = dict(t_polylidar_planepoly=polylidar_time, t_polylidar_filter=np.array(time_filter).mean(), t_geometric_planes=np.array(time_geometric_planes).sum())
    # all_planes_shapely, all_obstacles_shapely, all_poly_lines, timings
    return all_planes_shapely, all_obstacles_shapely, geometric_planes, timings
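Each per-normal branch above calls `R.align_vectors([[0, 0, 1]], [avg_peak])` to rotate that dominant plane into the XY plane before the 2D polygon filtering. A self-contained sketch of just that projection step, with a made-up unit normal standing in for `avg_peak` (no Polylidar3D required):

import warnings
import numpy as np
from scipy.spatial.transform import Rotation as R

normal = np.array([0.0, 1.0, 1.0]) / np.sqrt(2.0)      # stand-in for one avg_peak
rng = np.random.default_rng(0)
raw = rng.normal(size=(50, 3))
pts = raw - np.outer(raw @ normal, normal)              # points lying on the plane normal . x = 0

with warnings.catch_warnings():
    warnings.simplefilter("ignore")                     # single-pair alignment warns that the rotation is not unique
    rm, _ = R.align_vectors([[0, 0, 1]], [normal])      # maps `normal` onto +Z

flat = rm.apply(pts)
assert np.allclose(flat[:, 2], 0.0)                     # plane now lies in z = 0; polygons can be filtered in 2D
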
Example #6
def test_align_vectors_noise():
    rnd = np.random.RandomState(0)
    n_vectors = 100
    rot = Rotation.random(random_state=rnd)
    vectors = rnd.normal(size=(n_vectors, 3))
    result = rot.apply(vectors)

    # The paper adds noise as independently distributed angular errors
    sigma = np.deg2rad(1)
    tolerance = 1.5 * sigma
    noise = Rotation.from_rotvec(rnd.normal(size=(n_vectors, 3), scale=sigma))

    # Attitude errors must preserve norm. Hence apply individual random
    # rotations to each vector.
    noisy_result = noise.apply(result)

    est, rmsd, cov = Rotation.align_vectors(noisy_result,
                                            vectors,
                                            return_sensitivity=True)

    # Use rotation compositions to find out closeness
    error_vector = (rot * est.inv()).as_rotvec()
    assert_allclose(error_vector[0], 0, atol=tolerance)
    assert_allclose(error_vector[1], 0, atol=tolerance)
    assert_allclose(error_vector[2], 0, atol=tolerance)

    # Check error bounds using covariance matrix
    cov *= sigma
    assert_allclose(cov[0, 0], 0, atol=tolerance)
    assert_allclose(cov[1, 1], 0, atol=tolerance)
    assert_allclose(cov[2, 2], 0, atol=tolerance)

    assert_allclose(rmsd, np.sum((noisy_result - est.apply(vectors))**2)**0.5)
Example #7
def extract_planes_and_polygons_from_classified_mesh(
    tri_mesh,
    avg_peaks,
    polylidar_kwargs=dict(alpha=0.0,
                          lmax=0.1,
                          min_triangles=2000,
                          z_thresh=0.1,
                          norm_thresh=0.95,
                          norm_thresh_min=0.95,
                          min_hole_vertices=50,
                          task_threads=4),
    filter_polygons=True,
    pl_=None,
    optimized=True,
    postprocess=dict(filter=dict(hole_area=dict(min=0.025, max=100.0),
                                 hole_vertices=dict(min=6),
                                 plane_area=dict(min=0.05)),
                     positive_buffer=0.00,
                     negative_buffer=0.00,
                     simplify=0.0)):

    if pl_ is not None:
        pl = pl_
    else:
        pl = Polylidar3D(**polylidar_kwargs)

    avg_peaks_mat = MatrixDouble(avg_peaks)
    t0 = time.perf_counter()
    all_planes, all_polygons = pl.extract_planes_and_polygons_optimized_classified(
        tri_mesh, avg_peaks_mat)
    t1 = time.perf_counter()

    polylidar_time = (t1 - t0) * 1000
    all_planes_shapely = []
    all_obstacles_shapely = []
    time_filter = []
    # all_poly_lines = []
    if filter_polygons:
        vertices = np.asarray(tri_mesh.vertices)
        for i in range(avg_peaks.shape[0]):
            avg_peak = avg_peaks[i, :]
            rm, _ = R.align_vectors([[0, 0, 1]], [avg_peak])
            polygons_for_normal = all_polygons[i]
            # print(polygons_for_normal)
            if len(polygons_for_normal) > 0:
                planes_shapely, obstacles_shapely, filter_time = filter_and_create_polygons(
                    vertices,
                    polygons_for_normal,
                    rm=rm,
                    postprocess=postprocess)
                all_planes_shapely.extend(planes_shapely)
                all_obstacles_shapely.extend(obstacles_shapely)
                time_filter.append(filter_time)
                # all_poly_lines.extend(poly_lines)

    timings = dict(t_polylidar_planepoly=polylidar_time,
                   t_polylidar_filter=np.array(time_filter).sum())
    # all_planes_shapely, all_obstacles_shapely, all_poly_lines, timings
    return all_planes_shapely, all_obstacles_shapely, timings
Example #8
def test_align_vectors_no_noise():
    rnd = np.random.RandomState(0)
    c = Rotation.random(random_state=rnd)
    b = rnd.normal(size=(5, 3))
    a = c.apply(b)

    est, rmsd = Rotation.align_vectors(a, b)
    assert_allclose(c.as_quat(), est.as_quat())
    assert_allclose(rmsd, 0, atol=1e-7)
Example #9
def test_align_vectors_invalid_input():
    with pytest.raises(ValueError, match="Expected input `a` to have shape"):
        Rotation.align_vectors([1, 2, 3], [[1, 2, 3]])

    with pytest.raises(ValueError, match="Expected input `b` to have shape"):
        Rotation.align_vectors([[1, 2, 3]], [1, 2, 3])

    with pytest.raises(ValueError, match="Expected inputs `a` and `b` "
                                         "to have same shapes"):
        Rotation.align_vectors([[1, 2, 3],[4, 5, 6]], [[1, 2, 3]])

    with pytest.raises(ValueError,
                       match="Expected `weights` to be 1 dimensional"):
        Rotation.align_vectors([[1, 2, 3]], [[1, 2, 3]], weights=[[1]])

    with pytest.raises(ValueError,
                       match="Expected `weights` to have number of values"):
        Rotation.align_vectors([[1, 2, 3]], [[1, 2, 3]], weights=[1, 2])
Example #10
def test_align_vectors_no_noise():
    np.random.seed(0)
    c = Rotation.from_quat(np.random.normal(size=4))
    b = np.random.normal(size=(5, 3))
    a = c.apply(b)

    est, rmsd = Rotation.align_vectors(a, b)
    assert_allclose(c.as_quat(), est.as_quat())
    assert_allclose(rmsd, 0, atol=1e-7)
Example #11
def test_align_vectors_improper_rotation():
    # Tests correct logic for issue #10444
    x = np.array([[0.89299824, -0.44372674, 0.0752378],
                  [0.60221789, -0.47564102, -0.6411702]])
    y = np.array([[0.02386536, -0.82176463, 0.5693271],
                  [-0.27654929, -0.95191427, -0.1318321]])

    est, rmsd = Rotation.align_vectors(x, y)
    assert_allclose(x, est.apply(y), atol=1e-6)
    assert_allclose(rmsd, 0, atol=1e-7)
Example #12
def extract_all_dominant_planes(tri_mesh,
                                vertices,
                                polylidar_kwargs,
                                config_pp,
                                ds=50,
                                min_samples=10000):
    ga = GaussianAccumulatorS2(level=4, max_phi=180)
    ico = IcoCharts(level=4)

    triangle_normals = np.asarray(tri_mesh.triangle_normals)
    num_normals = triangle_normals.shape[0]
    triangle_normals_ds = down_sample_normals(triangle_normals)

    # Get the data
    t0 = time.perf_counter()
    ga.integrate(MatX3d(triangle_normals_ds))
    t1 = time.perf_counter()
    avg_peaks, _, _, timings = get_image_peaks(ico,
                                               ga,
                                               level=4,
                                               with_o3d=False)
    timings['t_fastga_integrate'] = (t1 - t0) * 1000
    timings['t_fastga_total'] = timings['t_fastga_integrate'] + timings[
        't_fastga_peak']

    logging.info("Processing mesh with %d triangles", num_normals)
    logging.info("Dominant Plane Normals")
    print(avg_peaks)

    avg_peaks_selected = np.copy(avg_peaks[[0, 1, 2, 3], :])
    pl = Polylidar3D(**polylidar_kwargs)
    avg_peaks_mat = MatrixDouble(avg_peaks_selected)

    # debugging purposes, ignore
    tri_set = pl.extract_tri_set(tri_mesh, avg_peaks_mat)

    t0 = time.perf_counter()
    all_planes, all_polygons = pl.extract_planes_and_polygons_optimized(
        tri_mesh, avg_peaks_mat)
    t1 = time.perf_counter()
    timings['t_polylidar_planepoly'] = (t1 - t0) * 1000

    all_poly_lines = []
    for i in range(avg_peaks_selected.shape[0]):
        avg_peak = avg_peaks[i, :]
        rm, _ = R.align_vectors([[0, 0, 1]], [avg_peak])
        polygons_for_normal = all_polygons[i]
        # print(polygons_for_normal)
        if len(polygons_for_normal) > 0:
            poly_lines, _ = filter_and_create_open3d_polygons(
                vertices, polygons_for_normal, rm=rm, config_pp=config_pp)
            all_poly_lines.extend(poly_lines)

    return all_planes, tri_set, all_poly_lines, timings
Example #13
    def computeReference(self):

        B = np.array([sin(self.inclination), 0,
                      cos(self.inclination)],
                     dtype=np.float64)
        A = np.array([0, 0, 1], dtype=np.float64)

        r, f = Rotation.align_vectors(np.array([A, B]),
                                      np.array([self.accel, self.mag]),
                                      self.weight)

        self.referenceOrientation = r.as_euler("xyz")
        self.r = r.as_quat()
Example #14
def sample_sphere_cap(n=100, deg=10, normal=[1, 0, 0]):
    min_z = np.cos(np.radians(deg))
    z = np.random.uniform(min_z, 1.0, n)
    r = np.sqrt(1 - z * z)
    theta = np.random.uniform(0, 1.0, n) * 2 * np.pi
    x = r * np.cos(theta)
    y = r * np.sin(theta)

    rm, _, = R.align_vectors(np.array([normal]), np.array([[0.0, 0.0, 1.0]]))
    points = np.column_stack([x, y, z])
    points = rm.apply(points)

    return points
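A small usage sketch for `sample_sphere_cap`, assuming the function and its imports are available as above: every sample should lie within `deg` degrees of the requested cap normal.

import numpy as np

pts = sample_sphere_cap(n=1000, deg=10, normal=[1, 0, 0])
cosines = np.clip(pts @ np.array([1.0, 0.0, 0.0]), -1.0, 1.0)
assert np.degrees(np.arccos(cosines)).max() <= 10.0 + 1e-6
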
Example #15
def extract_all_dominant_planes(tri_mesh,
                                vertices,
                                polylidar_kwargs,
                                ds=50,
                                min_samples=10000):
    ga = GaussianAccumulatorS2(level=4, max_phi=180)
    triangle_normals = np.asarray(tri_mesh.triangle_normals)
    num_normals = triangle_normals.shape[0]

    # Downsample, TODO improve this
    ds_normals = int(num_normals / ds)
    to_sample = max(min([num_normals, min_samples]), ds_normals)
    ds_step = int(num_normals / to_sample)

    triangle_normals_ds = np.ascontiguousarray(
        triangle_normals[:num_normals:ds_step, :])
    # A copy occurs here for triangle_normals......if done in c++ there would be no copy
    ga.integrate(MatX3d(triangle_normals_ds))
    gaussian_normals = np.asarray(ga.get_bucket_normals())
    accumulator_counts = np.asarray(ga.get_normalized_bucket_counts())
    _, _, avg_peaks, _ = find_peaks_from_accumulator(gaussian_normals,
                                                     accumulator_counts)
    logging.info("Processing mesh with %d triangles", num_normals)
    logging.info("Dominant Plane Normals")
    print(avg_peaks)

    avg_peaks_selected = np.copy(avg_peaks[[0, 1, 2, 3], :])
    pl = Polylidar3D(**polylidar_kwargs)
    avg_peaks_mat = MatrixDouble(avg_peaks_selected)

    tri_set = pl.extract_tri_set(tri_mesh, avg_peaks_mat)
    t0 = time.perf_counter()

    all_planes, all_polygons = pl.extract_planes_and_polygons_optimized(
        tri_mesh, avg_peaks_mat)
    t1 = time.perf_counter()
    polylidar_time = (t1 - t0) * 1000

    all_poly_lines = []
    for i in range(avg_peaks_selected.shape[0]):
        avg_peak = avg_peaks[i, :]
        rm, _ = R.align_vectors([[0, 0, 1]], [avg_peak])
        polygons_for_normal = all_polygons[i]
        # print(polygons_for_normal)
        if len(polygons_for_normal) > 0:
            poly_lines, _ = filter_and_create_open3d_polygons(
                vertices, polygons_for_normal, rm=rm)
            all_poly_lines.extend(poly_lines)

    return all_planes, tri_set, all_poly_lines, polylidar_time
def create_samples(normal=np.array([0, 1, 0]), std=0.01, size=50):
    # create 2D samples
    # import ipdb; ipdb.set_trace()
    np.random.seed(4)
    cov = np.identity(2) * std
    samples_2d = np.random.multivariate_normal([0.0, 0.0], cov, size=size)
    samples_3d = np.append(samples_2d, np.ones((samples_2d.shape[0], 1)), 1)
    samples_3d, _ = normalized(samples_3d)
    # create rotation matrix
    default_normal = np.array([0, 0, 1])
    rm, _, = Rotation.align_vectors([default_normal], [normal])
    normals_3d = rm.apply(samples_3d)
    # TODO normalize
    return normals_3d
Example #17
def GetPrincipalAxis(coordinates):
    cl = np.array(coordinates)
    n, m = cl.shape
    mu = cl.mean(axis=0)
    cl = cl - mu
    A = np.dot(cl.T, cl) / (n - 1)
    eigVecs = np.linalg.eig(A)[1]
    eigVals = np.diag(np.linalg.eig(A)[0])
    imax = eigVals.argmax() % 3
    L = np.argsort(-eigVals.flatten()) % 3
    qalign = R.align_vectors([eigVecs[:, L[0]]], [[0, 1, 0]])
    #qalign = R.align_vectors([eigVecs[:,L[0]],eigVecs[:,L[1]]],[[0,1,0],[1,0,0]])
    #qalign = R.align_vectors([eigVecs[:,L[0]],eigVecs[:,L[1]],eigVecs[:,L[2]]],[[0,1,0],[1,0,0],[0,0,1]])
    #r = R.from_matrix([eigVecs[L[0]],eigVecs[L[1]],eigVecs[L[2]]])
    # stage.animationControls.rotate(new NGL.Quaternion(  -0.44752089, -0.3509433 , -0.50757132, -0.64725205), 0);
    return qalign  #r.as_quat(), r.inv().as_euler('zxy', degrees=True)
def extract_all_dominant_planes(tri_mesh,
                                vertices,
                                polylidar_kwargs,
                                ds=50,
                                min_samples=10000):
    ga = GaussianAccumulatorS2Beta(level=4)
    ico = IcoCharts(level=4)

    fast_ga_kwargs = dict(find_peaks_kwargs=dict(threshold_abs=15,
                                                 min_distance=1,
                                                 exclude_border=False,
                                                 indices=False),
                          cluster_kwargs=dict(t=0.28, criterion='distance'),
                          average_filter=dict(min_total_weight=0.1))

    avg_peaks, _, _, _, alg_timings = extract_all_dominant_plane_normals(
        tri_mesh, ga_=ga, ico_chart_=ico, **fast_ga_kwargs)
    logging.info("Dominant Plane Normals")
    print(avg_peaks)

    avg_peaks_selected = np.copy(avg_peaks[[0, 1, 2, 3, 4], :])
    pl = Polylidar3D(**polylidar_kwargs)
    avg_peaks_mat = MatrixDouble(avg_peaks_selected)

    tri_set = pl.extract_tri_set(tri_mesh, avg_peaks_mat)
    t0 = time.perf_counter()

    all_planes, all_polygons = pl.extract_planes_and_polygons_optimized(
        tri_mesh, avg_peaks_mat)
    t1 = time.perf_counter()
    polylidar_time = (t1 - t0) * 1000
    all_poly_lines = []
    for i in range(avg_peaks_selected.shape[0]):
        avg_peak = avg_peaks[i, :]
        rm, _ = R.align_vectors([[0, 0, 1]], [avg_peak])
        polygons_for_normal = all_polygons[i]
        # print(polygons_for_normal)
        if len(polygons_for_normal) > 0:
            poly_lines, _ = filter_and_create_open3d_polygons(
                vertices, polygons_for_normal, rm=rm)
            all_poly_lines.extend(poly_lines)

    return all_planes, tri_set, all_poly_lines, polylidar_time
Example #19
def hyperNEB(embedder,
             coords,
             atomnos,
             ids,
             constrained_indexes,
             title='temp'):
    '''
    Turn a geometry close to TS to a proper TS by getting
    reagents and products and running a climbing image NEB calculation through ASE.
    '''

    reagents = get_reagent(embedder,
                           coords,
                           atomnos,
                           ids,
                           constrained_indexes,
                           method=embedder.options.theory_level)
    products = get_product(embedder,
                           coords,
                           atomnos,
                           ids,
                           constrained_indexes,
                           method=embedder.options.theory_level)
    # get reagents and products for this reaction

    reagents -= np.mean(reagents, axis=0)
    products -= np.mean(products, axis=0)
    # centering both structures on the centroid of reactive atoms

    alignment_rotation, _ = R.align_vectors(reagents, products)
    products = alignment_rotation.apply(products)
    # rotating the two structures to minimize differences

    ts_coords, ts_energy, success = ase_neb(embedder,
                                            reagents,
                                            products,
                                            atomnos,
                                            title=title)
    # Use these structures plus the TS guess to run a NEB calculation through ASE

    return ts_coords, ts_energy, success
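The alignment step inside `hyperNEB` is essentially a Kabsch-style superposition of two conformations. A self-contained sketch of just that step with stand-in coordinates (the embedder/ASE machinery is omitted):

import numpy as np
from scipy.spatial.transform import Rotation as R

rng = np.random.RandomState(0)
reagents = rng.normal(size=(12, 3))                     # stand-in coordinates, same atom ordering
products = R.random(random_state=rng).apply(reagents) + 0.05 * rng.normal(size=(12, 3))

reagents -= reagents.mean(axis=0)                       # center both structures
products -= products.mean(axis=0)

rot, rssd = R.align_vectors(reagents, products)         # rotation that best maps products onto reagents
products = rot.apply(products)                          # rotated copy; residual differences are minimized
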
Example #20
    def calc_node_orientation(self,
                              unchanged='z',
                              clean_unchanged=True,
                              return_normal=False,
                              flip_normal=1):
        valid_unchanged = ['x', 'y', 'z']
        if unchanged not in valid_unchanged:
            raise ValueError('unchanged must be one of {}.'.format(valid_unchanged))

        cell_normals = self.calc_shell_normals() * flip_normal

        if clean_unchanged:
            cell_normals[:, valid_unchanged.index(unchanged)] = 0

        cell_normals = normalize_vectors(cell_normals)

        node_normals = np.zeros((self.number_of_nodes(), 3))

        for i, vertices in enumerate(self.shells):
            node_normals[vertices, :] += cell_normals[i, :]

        node_normals = normalize_vectors(node_normals)

        # TODO: which coordinate points outwards
        global_frame = np.zeros((2, 3))
        global_frame[0, 1] = 1
        global_frame[1, 2] = 1

        local_frame = global_frame.copy()

        orientation = np.zeros((self.number_of_nodes(), 3))

        for i, normal in enumerate(node_normals):
            local_frame[0, :] = normal
            rotation, _ = R.align_vectors(global_frame, local_frame)
            orientation[i, :] = rotation.as_euler('xyz')

        self.node_orientations = orientation

        if return_normal:
            return orientation, node_normals
        return orientation
Example #21
def _save_aligned_conformers(rdmol, log):

    elements = [a.GetSymbol() for a in rdmol.GetAtoms()]
    ref_coords = rdmol.GetConformer(0).GetPositions()

    xyz_block = ""
    for conformer in rdmol.GetConformers():
        # Get the coordinates of the conformer
        coords = conformer.GetPositions()
        # Translate atom 0 to the same position
        translate = coords[0] - ref_coords[0]
        coords = coords - translate
        # Compute two vectors -- atom 1 - atom 0, atom2 - atom0
        vec1_ref = ref_coords[1] - ref_coords[0]
        vec1_ref = vec1_ref / np.linalg.norm(vec1_ref)
        vec1 = coords[1] - coords[0]
        vec1 = vec1 / np.linalg.norm(vec1)
        vec2_ref = ref_coords[2] - ref_coords[0]
        vec2_ref = vec2_ref / np.linalg.norm(vec2_ref)
        vec2 = coords[2] - coords[0]
        vec2 = vec2 / np.linalg.norm(vec2)
        # Rotate the molecule to best align these two vectors
        a = np.array([vec1_ref, vec2_ref], dtype=np.float64)
        b = np.array([vec1, vec2], dtype=np.float64)
        R, rmsd = Rotation.align_vectors(a, b)
        coords = R.apply(coords)

        xyz_block += f"{len(elements)}\n\n"
        for element, coord in zip(elements, coords):
            x, y, z = coord
            xyz_block += f"{element:5s}{x:12.6f}{y:12.6f}{z:12.6f}\n"

    fpath = Path("structures/aligned_conformers.xyz")
    with fpath.open("w") as f:
        f.write(xyz_block)
    log.log("Wrote all aligned to 'structures/aligned_conformers.xyz'.")
Example #22
def extract_planes_and_polygons_from_mesh(tri_mesh, avg_peaks,
                                          polylidar_kwargs=dict(alpha=0.0, lmax=0.1, min_triangles=2000,
                                                                z_thresh=0.1, norm_thresh=0.95, norm_thresh_min=0.95, min_hole_vertices=50, task_threads=4),
                                          filter_polygons=True, pl_=None, optimized=True, **kwargs):

    if pl_ is not None:
        pl = pl_
    else:
        pl = Polylidar3D(**polylidar_kwargs)

    avg_peaks_mat = MatrixDouble(avg_peaks)
    t0 = time.perf_counter()
    if optimized:
        all_planes, all_polygons = pl.extract_planes_and_polygons_optimized(tri_mesh, avg_peaks_mat)
    else:
        all_planes, all_polygons = pl.extract_planes_and_polygons(tri_mesh, avg_peaks_mat)

    t1 = time.perf_counter()

    polylidar_time = (t1 - t0) * 1000
    logger.info("Polygon Extraction - Took (ms): %.2f", polylidar_time)
    all_poly_lines = []
    if filter_polygons:
        vertices = np.asarray(tri_mesh.vertices)
        for i in range(avg_peaks.shape[0]):
            avg_peak = avg_peaks[i, :]
            rm, _ = R.align_vectors([[0, 0, 1]], [avg_peak])
            polygons_for_normal = all_polygons[i]
            # print(polygons_for_normal)
            if len(polygons_for_normal) > 0:
                poly_lines, _ = filter_and_create_open3d_polygons(vertices, polygons_for_normal, rm=rm, **kwargs)
                all_poly_lines.extend(poly_lines)

    timings = dict(polylidar=polylidar_time)

    return all_planes, all_polygons, all_poly_lines, timings
Example #23
def align_positions(
    positions_from: Positions,
    positions_to: Positions,
    rot_axes: Literal["x", "y", "z", "xyz"],
    rsmd_threshold=0.4,
) -> Transform:
    """
    Let positions_from be fixed local coordinate frame, and positions_to be some other
    fixed coordinate system. Further, let the relationship between the two reference
    systems be described as

    positions_from = rotation.apply(positions_to) + translation

    This function finds the rotation and translation by matching the two
    coordinate systems, and represent the transformation through a Transform object.
    For robustness it is advised to use more than 2 positions in alignment and using
    positions with some distance to each other.
    :param positions_from: Coordinates in a fixed frame
    :param positions_to: Coordinates in a fixed frame
    :param rot_axes: Axis of rotation. For rotations in the xy plane (most common),
        this is set to 'z'
    :param rsmd_threshold: The root mean square distance threshold, for the coordinate
        fitting error in matching the two coordinate systems.
    """
    if len(positions_from.positions) != len(positions_to.positions):
        raise ValueError(
            f"Expected inputs 'positions_from' and 'positions_to' to have the same shapes"
            + f" got {len(positions_from.positions)} and {len(positions_to.positions)}, respectively"
        )
    if len(positions_from.positions) < 2:
        raise ValueError(
            f" Expected at least 2 positions, got {len(positions_from.positions)}"
        )
    if len(positions_from.positions) < 3 and rot_axes == "xyz":
        raise ValueError(
            f" Expected at least 3 positions, got {len(positions_from.positions)}"
        )

    try:
        edges_1, edges_2 = _get_edges(positions_from, positions_to, rot_axes)
    except Exception as e:
        raise ValueError(e)

    rotation, rmsd_rot, sensitivity = Rotation.align_vectors(
        edges_2, edges_1, return_sensitivity=True
    )

    translations: Translation = Translation.from_array(
        np.mean(
            positions_to.to_array() - rotation.apply(positions_from.to_array()),
            axis=0,  # type:ignore
        ),
        from_=positions_from.frame,
        to_=positions_to.frame,
    )  # type: ignore
    transform = Transform(
        from_=positions_from.frame,
        to_=positions_to.frame,
        translation=translations,
        rotation=rotation,
    )

    try:
        _check_rsme_treshold(transform, positions_to, positions_from, rsmd_threshold)
    except Exception as e:
        raise ValueError(e)

    return transform
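For reference, a stripped-down sketch of the same fit using plain `(N, 3)` arrays in place of the `Positions`/`Transform`/`Translation` types and the `_get_edges` helper assumed above: position differences are translation-free, so they determine the rotation, and the residual offset then gives the translation.

import numpy as np
from scipy.spatial.transform import Rotation

rng = np.random.RandomState(0)
p_from = rng.normal(size=(6, 3))                        # positions in the "from" frame
true_rot = Rotation.from_euler("z", 30, degrees=True)
true_t = np.array([1.0, -2.0, 0.5])
p_to = true_rot.apply(p_from) + true_t                  # the same positions in the "to" frame

edges_from = p_from[1:] - p_from[0]                     # differences are translation-free
edges_to = p_to[1:] - p_to[0]
rot, rssd = Rotation.align_vectors(edges_to, edges_from)

t = np.mean(p_to - rot.apply(p_from), axis=0)           # residual offset is the translation
assert np.allclose(rot.apply(p_from) + t, p_to)
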
Example #24
def test_align_vectors_single_vector():
    with pytest.warns(UserWarning, match="Optimal rotation is not"):
        r_estimate, rmsd = Rotation.align_vectors([[1, -1, 1]], [[1, 1, -1]])
        assert_allclose(rmsd, 0, atol=1e-16)
Example #25
    def calc_initial_position(self):
        """Position the molecules in the initial position."""
        # calculate the geometric center of the molecule and of the restraints

        ga_log.info('Positioning molecules in starting conformation')
        r_c = np.array(self.receptor_coord)
        l_c = np.array(self.ligand_coord)

        r_rest_c = np.array(self.receptor_rest_coord)
        l_rest_c = np.array(self.ligand_rest_coord)

        r_center = r_c.mean(axis=0)
        l_center = l_c.mean(axis=0)

        # Move both to origin
        r_c -= r_center
        l_c -= l_center

        r_rest_c -= r_center
        l_rest_c -= l_center

        # get center of interface and molecule
        r_center = r_c.mean(axis=0)
        l_center = l_c.mean(axis=0)

        r_rest_center_c = r_rest_c.mean(axis=0)
        l_rest_center_c = l_rest_c.mean(axis=0)

        # ====
        # Align the vectors between molecule center and interface

        a = np.array([r_center, r_rest_center_c])
        b = np.array([l_center, l_rest_center_c])

        mat, _ = Rotation.align_vectors(a, b)

        rot_l_c = mat.apply(l_c)
        rot_l_rest_c = mat.apply(l_rest_c)

        rot_l_center = rot_l_c.mean(axis=0)
        rot_l_rest_center = rot_l_rest_c.mean(axis=0)

        # Move them apart
        a = np.array([r_center, r_rest_center_c])
        b = np.array([rot_l_center, rot_l_rest_center])

        _, b_i = np.add(a, b + 5)  # +5 to keep them further apart

        rot_l_c += b_i
        rot_l_rest_c += b_i

        # Rotate so that they face each other
        # Move to origin and rotate again
        c = rot_l_c.mean(axis=0)
        rot_l_c -= c
        rot_l_rest_c -= c

        rotation_radians = np.radians(180)
        rotation_axis = np.array([0, 0, 1])
        rotation_vector = rotation_radians * rotation_axis
        rotation = Rotation.from_rotvec(rotation_vector)

        l_c = rotation.apply(rot_l_c)
        l_rest_c = rotation.apply(rot_l_rest_c)

        l_c += c
        l_rest_c += c

        self.ligand_coord = l_c
        self.receptor_coord = r_c
Example #26
    def calibrate(self, cam):
        cam.compute_relative_RT()
        print(len(cam.img_T), len(self.RPY))
        u0_arr = []
        u1_arr = []
        w_arr = []
        R0_arr = []
        R1_arr = []
        T0_arr = []
        T1_arr = []
        for i in range(0, len(self.RPY) - 1):
            if (i >= len(cam.img_T)): break
            for j in range(i + 1, len(self.RPY)):
                if (j >= len(cam.img_T)): break
                R_, t_ = cam.get_Rt(i, j)
                if (R_ is None or t_ is None): continue

                R, t = self.get_Rt(self.T[i], self.RPY[i], self.T[j],
                                   self.RPY[j])
                if (R is None or t is None): continue

                u0 = R.as_rotvec()
                u1 = R_.as_rotvec()
                e = np.abs(np.linalg.norm(u0) - np.linalg.norm(u1))
                if (e > 0.001): continue
                if (np.linalg.norm(u0) < 0.2): continue
                w_arr.append(e / np.linalg.norm(u0))
                u0 /= np.linalg.norm(u0)
                u1 /= np.linalg.norm(u1)

                print(i, j, ':', u0, np.linalg.norm(R.as_rotvec()), u1,
                      np.linalg.norm(R_.as_rotvec()))
                print(t.transpose()[0],
                      t_.transpose()[0],
                      np.linalg.norm(t) - np.linalg.norm(t_))

                u0_arr.append(u0)
                u1_arr.append(u1)
                R0_arr.append(R)
                R1_arr.append(R_)
                T0_arr.append(t)
                T1_arr.append(t_)

        u0 = np.array(u0_arr)
        u1 = np.array(u1_arr)
        w = np.array(w_arr)  # * 0 + 1
        w /= np.linalg.norm(w)

        R, rmsd = Rotation.align_vectors(u0, u1, weights=w)
        uerr = np.linalg.norm(u0 - (R.as_matrix() @ u1.transpose()).transpose(),
                              axis=1)
        merr = np.median(uerr)
        print("Initial R:", R.as_euler('xyz'), "; median error:", merr)
        w[uerr > merr] = 0
        R, rmsd = Rotation.align_vectors(u0, u1, weights=w)
        print("Refined R:", R.as_euler('xyz'))

        a = np.zeros(shape=(0, 3), dtype=float)
        b = np.zeros(shape=(0, 1), dtype=float)
        for i in range(len(R0_arr)):
            R0 = R0_arr[i]
            R1 = R1_arr[i]
            T0 = T0_arr[i]
            T1 = T1_arr[i]

            a = np.vstack((a, R0.as_matrix() - np.eye(3)))
            b = np.vstack((b, R.as_matrix() @ T1 - T0))

        print(a.shape, b.shape)
        T, res, rnk, s = np.linalg.lstsq(a, b, rcond=None)

        print(R.as_matrix(), rmsd)
        print()
        print('R:', R.inv().as_rotvec())
        print(T.transpose()[0], res)
        print((R.as_matrix() @ T).transpose()[0])
        print((R.inv().as_matrix() @ T).transpose()[0])
        print((R.as_matrix() @ (-1 * T)).transpose()[0])
        print((R.inv().as_matrix() @ (-1 * T)).transpose()[0])
        print()
        print("x-y-z-R-P-Y:")
        print(np.hstack((T.transpose()[0], R.as_euler('xyz'))))
        return R.as_matrix(), T
def rotate_data_planar(points, normal, inverse=False):
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        rm, _ = R.align_vectors([[0, 0, 1]],
                                [normal])  # rotation mapping `normal` onto +Z (the filter above silences the single-pair "not unique" warning)
    return rm.apply(points, inverse=inverse)
Example #28
  def get_mapping(self, env: Dict[str, Any]) -> Dict[int, int]:
    """
    Returns mapping of sites from input to this object

    Pymatgen molecule_matcher does not work unfortunately as it needs to be
    a reasonably physical molecule.
    Here, the graph is constructed by connecting the nearest neighbor, and
    isomorphism is performed to find matches, then kabsch algorithm is
    performed to make sure it is a match. NetworkX is used for portability.

    Parameters
    ----------

    env : Dict[str, Any]
      dictionary that contains information of local environment of a
      site in datum. See _get_SiteEnvironments definition in the class
      _SiteEnvironments for what this variable should be.

    Returns
    -------
    dict : Dict[int, int]
      Atom mapping from Primitive cell to data point.
    """
    try:
      import networkx.algorithms.isomorphism as iso
    except ImportError:
      raise ImportError("This class requires networkx to be installed.")
    # construct graph

    G = self._construct_graph(env['pos'], env['sitetypes'])
    if len(self.G.nodes) != len(G.nodes):
      s = 'Number of nodes is not equal.\n'
      raise ValueError(s)
    elif len(self.G.edges) != len(G.edges):
      logging.warning("Expected the number of edges to be equal: %d vs %d",
                      len(self.G.edges), len(G.edges))
      s = 'Number of edges is not equal.\n'
      s += "- Is the data point's cell a redefined lattice of primitive cell?\n"
      s += '- If relaxed structure is used, you may want to check structure or increase Gatol\n'
      raise ValueError(s)
    GM = iso.GraphMatcher(self.G, G, self._nm, self._em)
    # Gets the isomorphic mapping. Also the most time consuming part of the code
    ams = list(GM.isomorphisms_iter())

    if not ams:
      s = 'No isomorphism found.\n'
      s += "- Is the data point's cell a redefined lattice of primitive cell?\n"
      s += '- If relaxed structure is used, you may want to check structure or increase rtol\n'
      raise ValueError(s)

    rmsd = []
    for am in ams:  # Loop over isomorphism
      # put the data-point positions into reference order using the candidate mapping
      xyz = np.zeros((len(self.pos), 3))
      for i in am:
        xyz[i, :] = env['pos'][am[i], :]
      rotation, _ = Rotation.align_vectors(self.pos, xyz)
      R = rotation.as_matrix()
      # RMSD
      rmsd.append(
          np.sqrt(
              np.mean(np.linalg.norm(np.dot(self.pos, R) - xyz, axis=1)**2)))
    mini = np.argmin(rmsd)
    minrmsd = rmsd[mini]
    if minrmsd < self.tol:
      return ams[mini]
    else:
      s = 'No isomorphism found.\n'
      s += '-Consider increasing neighbor finding tolerance'
      raise ValueError(s)
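A minimal sketch of the RMSD check described in the docstring, with a synthetic rotation and site permutation standing in for the graph-isomorphism machinery:

import numpy as np
from scipy.spatial.transform import Rotation

rng = np.random.RandomState(0)
ref = rng.normal(size=(8, 3))                           # reference site positions (plays the role of self.pos)
true_rot = Rotation.random(random_state=rng)
perm = rng.permutation(8)                               # how the data point reorders the sites
data = true_rot.apply(ref)[perm]                        # rotated, reordered copy (plays the role of env['pos'])

mapping = np.argsort(perm)                              # candidate isomorphism: reference index -> data index
xyz = data[mapping]                                     # data positions put back into reference order

rot, _ = Rotation.align_vectors(ref, xyz)
rmsd = np.sqrt(np.mean(np.linalg.norm(rot.apply(xyz) - ref, axis=1) ** 2))
assert rmsd < 1e-10                                     # a correct mapping leaves (numerically) zero RMSD
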
import os
import sys
if sys.path[0] != os.getcwd():
    sys.path.insert(0, os.getcwd())
lib_local_path = os.path.normpath(r'C:\WORKSPACE\DEV\mocaplib')
if os.path.exists(lib_local_path):
    if sys.path[1] != lib_local_path:
        sys.path.insert(1, lib_local_path)
from mocaplib import btkapp as ba
import numpy as np
from scipy.spatial.transform import Rotation as R
#%%
cwd = os.path.dirname(__file__)

c3d_sample_dir_path = os.path.join(cwd, r'..\Samples_C3D\Sample00\Vicon Motion Systems')
src_c3d_path = os.path.join(c3d_sample_dir_path, 'pyCGM2 lower limb CGM24 Walking01.c3d')
trc_path = os.path.join(cwd, 'Test02_result.trc')
mot_path = os.path.join(cwd, 'Test02_result.mot')

acq = ba.open_c3d(src_c3d_path)
dict_pts = ba.get_dict_points(acq, tgt_types=['Marker'])
mkr_names = [x for x in list(dict_pts['LABELS']) if len(x)<=4]
axes_src = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
axes_tgt = [[0, -1, 0], [0, 0, 1], [-1, 0, 0]]
rot_ret = R.align_vectors(a=axes_src, b=axes_tgt)
rot_obj = rot_ret[0]
trf = rot_obj.as_matrix()

ba.export_trc(acq, trc_path, rot_mat=trf, filt_fc=20.0, tgt_mkr_names=mkr_names)
ret = ba.export_mot(acq, mot_path, rot_mat=trf, filt_fc=20.0, threshold=15.0)
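A quick sanity check on the fitted rotation (reusing the imports above): because both axis sets are orthonormal right-handed frames, `rot_obj` maps `axes_tgt` exactly onto `axes_src`.

print(np.allclose(rot_obj.apply(axes_tgt), axes_src))   # True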
Example #30
    def rotateInDirection(self, position_matrix, direction, axis):
        estimated_rotation, rmsd = R.align_vectors([direction], [axis])
        rotation_matrix = estimated_rotation.as_matrix()
        rotated_position_matrix = np.dot(rotation_matrix, position_matrix)
        return rotated_position_matrix
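
A usage sketch for the method above (as a standalone call, ignoring `self`): note that `position_matrix` is expected with points stored as columns, shape `(3, N)`, since it is left-multiplied by the rotation matrix, and that aligning a single vector pair makes SciPy warn that the rotation is not uniquely determined.

import numpy as np
from scipy.spatial.transform import Rotation as R

axis = np.array([0.0, 0.0, 1.0])
direction = np.array([1.0, 0.0, 0.0])
rot, _ = R.align_vectors([direction], [axis])            # warns: single-pair rotation is not unique
position_matrix = np.array([[0.0, 0.0, 1.0],             # two points ...
                            [0.0, 1.0, 0.0]]).T          # ... stored as columns, shape (3, 2)
rotated = rot.as_matrix() @ position_matrix
print(np.allclose(rotated[:, 0], direction))             # True: the point along `axis` now lies along `direction`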