def solve(self,
          world_points,
          image_features,
          pixel_scale=None,
          annotate_image=None):
    """
    Recover a camera orientation from world/feature correspondences.

    world_points: dict mapping names to world (x, y, z) triples; its key
                  set must cover image_features' keys.
    image_features: dict mapping names to ellipse-like features exposing
                  get_centre() and get_axes() (major axis first).
    pixel_scale: required scale factor (the default of None is rejected).
    annotate_image: accepted for interface compatibility; unused here.

    Returns (R, T, pixel_scale) where R, T come from
    util.orientation_from_correspondences.
    """
    assert pixel_scale is not None
    assert set(world_points.keys()) >= set(image_features.keys())

    names = sorted(list(image_features.keys()))

    # Stack the named world points into one 3xN matrix, column per name.
    world_mat = hstack([matrix(list(world_points[n])).T for n in names])

    def feature_point(feat):
        # Turn an ellipse feature into a 3D point: x/y from the centre,
        # depth from the apparent major-axis size (smaller axis => farther).
        centre = feat.get_centre()
        axes = feat.get_axes()
        assert axes[0] >= axes[1]

        x = centre[0] * 10.0 / axes[0]
        y = centre[1] * 10.0 / axes[0]
        z = 10.0 * pixel_scale / axes[0]
        return matrix([[x], [y], [z]])

    projected_mat = hstack([feature_point(image_features[n]) for n in names])

    R, T = util.orientation_from_correspondences(world_mat, projected_mat)
    return R, T, pixel_scale
# Example #2
def solve(world_points_in, image_points_in, pixel_scale, annotate_image=None):
    """
    Find a camera's orientation given a set of world coordinates and
    corresponding set of camera coordinates.

    The structure (control points, a coefficient matrix, a measurement
    matrix M whose null-space vectors are combined via beta coefficients)
    appears to follow an EPnP-style approach — confirm against the
    make_M / calc_beta_case_* helpers.

    world_points_in: Dict mapping point names to triples corresponding with
                  world x, y, z coordinates.
    image_points_in: Dict mapping point names to pairs corresponding with
                  camera x, y coordinates. Coordinates are translated such
                  that 0, 0 corresponds with the centre of the image.
    pixel_scale: Focal scale; appended as a z coordinate and then divided
                  out so image rays are expressed at unit focal length.
    annotate_image: Optional image; when given, the chosen pose is used to
                  reproject all world points and draw them onto it.

    Return: (R, offs) — the rotation matrix and translation column vector
            with the lowest reprojection error among the candidate betas.
    """

    # Every image point must have a known world position; at least 4
    # correspondences are needed for the pose to be determined.
    assert set(world_points_in.keys()) >= set(image_points_in.keys())
    keys = sorted(list(image_points_in.keys()))
    assert len(keys) >= 4
    # 3xN world matrix and homogeneous 3xN image matrix (z = pixel_scale),
    # normalised so image points lie on the z = 1 plane.
    world_points = hstack([matrix(list(world_points_in[k])).T for k in keys])
    image_points = hstack([matrix(list(image_points_in[k]) + [pixel_scale]).T for k in keys])
    image_points = image_points / pixel_scale

    # Express each world point as a combination of 4 control points, then
    # build the linear system M whose (near-)null space encodes the control
    # points' camera-frame coordinates.
    control_indices = choose_control_points(world_points)
    C = make_coeff_matrix(world_points, control_indices)
    M = make_M(image_points, C)

    # Columns of V are eigenvectors of M^T M ordered by ascending
    # eigenvalue, so V[:, :k] spans the k smallest-eigenvalue directions.
    eig_vals, eig_vecs = numpy.linalg.eig(M.T * M)
    V = eig_vecs.T[eig_vals.argsort()].T

    # Solve for the beta mixing coefficients under three null-space
    # dimension hypotheses (1, 2 and 3 basis vectors).
    world_ctrl_points = util.col_slice(world_points, control_indices)
    b1 = calc_beta_case_1(V[:, :1], world_ctrl_points)
    b2 = calc_beta_case_2(V[:, :2], world_ctrl_points)
    b3 = calc_beta_case_3(V[:, :3], world_ctrl_points)

    # For each hypothesis, reconstruct the camera-frame control points,
    # fit a rigid transform, and keep the reprojection error.
    outs = []
    errs = []
    for b in [b1, b2, b3]:
        x = V[:, :b.shape[1]] * b.T
        x = x.reshape((4, 3))  # 4 control points, 3 coords each

        R, offs = util.orientation_from_correspondences(world_ctrl_points, x.T)
        outs.append((R, offs))

        e = calc_reprojection_error(R, offs, world_points, image_points)
        print "Reprojection error = %f" % e
        errs.append(e)

    # Keep the candidate pose with the smallest reprojection error.
    R, offs = outs[array(errs).argmin()]

    if annotate_image:
        # Reproject every known world point with the chosen pose and
        # perform the perspective divide, then draw the results.
        P = hstack([R, offs])
        all_keys = list(world_points_in.keys())
        world_points_mat = hstack([matrix(list(world_points_in[k]) + [1.0]).T for k in all_keys])
        image_points_mat = P * world_points_mat
        image_points_mat = matrix([[r[0,0]/r[0,2], r[0,1]/r[0,2]] for r in image_points_mat.T]).T
        util.draw_points(annotate_image,
                         dict(zip(all_keys, list(image_points_mat.T))))

    return R, offs
# Example #3
def align_image(image_stars, ast_db):
    image_star_db = stardb.StarDatabase(image_stars)

    best_scores = []
    for image_star in itertools.islice(
            sorted(image_star_db, key=lambda s: s.mag),
            0, 50):
        print "Trying {}".format(image_star)

        scores = collections.defaultdict(int)
        
        for query_ast in asterisms_for_star(image_star, image_star_db,
                                            num_neighbours=4):
            closest = ast_db.search(query_ast)[0].main_star
            scores[closest] += 1

        import pdb; pdb.set_trace()

        if scores:
            best_star, score = max(scores.iteritems(), key=(lambda x: x[1]))
            best_scores.append((score, image_star, best_star))
            
    for score, image_star, best_star in sorted(best_scores):
        print "Best match for %s: %s (score %s)" % (image_star.coords, best_star.id, score)

    best_scores = [x for x in best_scores if x[0] >= SCORE_THRESHOLD]
    if len(best_scores) == 0:
        raise CouldNotAlignError()

    import pdb; pdb.set_trace()

    camera_points = hstack([image_star.vec for score, image_star, best_star in best_scores])
    world_points = hstack([best_star.vec for score, image_star, best_star in best_scores])

    R, T = util.orientation_from_correspondences(camera_points, world_points)

    return stardb.vec_to_angles(R[:, 2]) + (R,)