Example #1
0
 def test_similarity_transform_inverse_known_values(self):
     """
     SimilarityTransform.inverse applied to known similarity parameters
     should map each known destination pointset back onto the source.
     """
     # The destination pointset of the first entry doubles as the common
     # source pointset for every known-value case.
     expected_src = SIMILARITY_KNOWN_VALUES[0][-1]
     for angle, (factor, _), _, offset, target in SIMILARITY_KNOWN_VALUES:
         inverse_tform = SimilarityTransform(
             rotation=angle, scale=factor, translation=offset).inverse()
         numpy.testing.assert_array_almost_equal(
             expected_src, inverse_tform.apply(target))
Example #2
0
 def test_similarity_transform_from_pointset_known_values(self):
     """
     SimilarityTransform.from_pointset should recover the known rotation,
     scale and translation for each known similarity pointset pair.
     """
     # The destination pointset of the first entry doubles as the common
     # source pointset for every known-value case.
     src = SIMILARITY_KNOWN_VALUES[0][-1]
     for angle, factor, _, offset, dst in SIMILARITY_KNOWN_VALUES:
         estimated = SimilarityTransform.from_pointset(src, dst)
         self.assertAlmostEqual(0., _angle_diff(angle, estimated.rotation))
         self.assertAlmostEqual(factor, estimated.scale)
         numpy.testing.assert_array_almost_equal(offset,
                                                 estimated.translation)
Example #3
0
 def test_similarity_transform_from_pointset_umeyama(self):
     """
     SimilarityTransform.from_pointset should reproduce the worked example
     from the paper by Umeyama (least-squares estimation of transformation
     parameters between two point patterns).
     """
     source = numpy.array([(0., 0.), (1., 0.), (0., 2.)])
     target = numpy.array([(0., 0.), (-1., 0.), (0., 2.)])
     estimated = SimilarityTransform.from_pointset(source, target)
     # Rotation matrix and scale are quoted to 3 decimals in the paper.
     expected_rotation = numpy.array([(0.832, 0.555),
                                      (-0.555, 0.832)])
     numpy.testing.assert_array_almost_equal(estimated.rotation_matrix,
                                             expected_rotation,
                                             decimal=3)
     self.assertAlmostEqual(estimated.scale, 0.721, places=3)
     # The translation (-0.8, 0.4) is exact for this input, so the default
     # tolerance is used here.
     numpy.testing.assert_array_almost_equal(estimated.translation,
                                             numpy.array([-0.800, 0.400]))
Example #4
0
def FindGridSpots(image, repetition, spot_size=18, method=GRID_AFFINE):
    """
    Find the coordinates of a grid of spots in an image. And find the
    corresponding transformation to transform a grid centered around the origin
    to the spots in an image.

    Parameters
    ----------
    image : array like
        Data array containing the greyscale image.
    repetition : tuple of ints
        Number of expected spots in (X, Y). Where the total number of expected spots must be at least 6.
    spot_size : int
        A length in pixels somewhat larger than a typical spot.
    method : GRID_AFFINE or GRID_SIMILARITY
        The transformation method used to get the returned grid of spots.
        If the similarity method is used the returned grid has 90 degree angles with equal scaling in x and y.
        If the affine method is used the returned grid contains a shear component, therefore the angles in the grid
        do not have to be 90 degrees. The grid can also have different scaling in x and y.

    Returns
    -------
    spot_coordinates : array like
        A 2D array of shape (N, 2) containing the coordinates of the spots,
        with respect to the top left of the image.
    translation : tuple of two floats
        Translation from the origin to the center of the grid in image space,
        origin is top left of the image. Primary axis points right and the
        secondary axis points down.
    scaling : tuple of two floats or float
        Scaling factors for primary and secondary axis when the affine method is used.
        Single scaling factor when the similarity method is used.
    rotation : float
        Rotation in image space, positive rotation is clockwise.
    shear : float
        Horizontal shear factor. A positive shear factor transforms a coordinate
        in the positive x direction parallel to the x axis. The shear is None
        when similarity method is used.

    Raises
    ------
    ValueError
        If fewer than 6 spots are expected, or if `method` is unknown.
    """
    # Total number of spots expected in the grid; used several times below.
    n_expected_spots = repetition[0] * repetition[1]
    if n_expected_spots < 6:
        raise ValueError(
            "Need at least 6 expected points to properly find the grid.")
    # Find the center coordinates of the spots in the image.
    spot_positions = MaximaFind(image,
                                n_expected_spots,
                                len_object=spot_size)
    if len(spot_positions) < n_expected_spots:
        logging.warning(
            'Not enough spots found, returning only the found spots.')
        return spot_positions, None, None, None, None
    # Estimate the two most common (orthogonal) directions in the grid of spots, defined in the image coordinate system.
    lattice_constants = EstimateLatticeConstant(spot_positions)
    # Each row in the lattice_constants array corresponds to one direction. By transposing the array the direction
    # vectors are on the columns of the array. This allows us to directly use them as a transformation matrix.
    transformation_matrix = numpy.transpose(lattice_constants)

    # Translation is the mean of the spots, which is the distance from the origin to the center of the grid of spots.
    translation = numpy.mean(spot_positions, axis=0)
    transform_to_spot_positions = AffineTransform(transformation_matrix,
                                                  translation)
    # Iterative closest point algorithm - single iteration, to fit a grid to the found spot positions
    grid = GridPoints(*repetition)
    spot_grid = transform_to_spot_positions.apply(grid)
    tree = KDTree(spot_positions)
    # Only the matched indices are needed; the distances are discarded.
    _, ii = tree.query(spot_grid, k=1)
    # Sort the original spot positions by mapping them to the order of the GridPoints.
    pos_sorted = spot_positions[ii.ravel(), :]
    # Find the transformation from a grid centered around the origin to the sorted positions.
    if method == GRID_AFFINE:
        transformation = AffineTransform.from_pointset(grid, pos_sorted)
        scale, rotation, shear = alt_transformation_matrix_to_implicit(
            transformation.matrix, "RSU")
    elif method == GRID_SIMILARITY:
        transformation = SimilarityTransform.from_pointset(grid, pos_sorted)
        scale, rotation, _ = alt_transformation_matrix_to_implicit(
            transformation.matrix, "RSU")
        shear = None  # The similarity transform does not have a shear component.
    else:
        raise ValueError(
            "Method: %s is unknown, should be 'affine' or 'similarity'." %
            method)
    spot_coordinates = transformation.apply(grid)
    return spot_coordinates, translation, scale, rotation, shear
Example #5
0
 def test_similarity_transform_matrix_known_values(self):
     """
     The transformation matrix of a SimilarityTransform should equal the
     rotation matrix of the known angle scaled by the known scale factor.
     """
     for angle, factor, _, _, _ in SIMILARITY_KNOWN_VALUES:
         tform = SimilarityTransform(rotation=angle, scale=factor)
         expected = factor * _rotation_matrix_from_angle(angle)
         numpy.testing.assert_array_almost_equal(
             expected, tform.transformation_matrix)