Code Example #1
def add_point_constraints(ra, reconstruction_shots, reconstruction_name):
    connections = connected_reconstructions(reconstruction_shots)
    for connection in connections:

        i1, (r1, g1) = load_reconstruction(connection[0].submodel_path,
                                           connection[0].index)
        i2, (r2, g2) = load_reconstruction(connection[1].submodel_path,
                                           connection[1].index)

        rec_name1 = reconstruction_name(connection[0])
        rec_name2 = reconstruction_name(connection[1])

        scale_treshold = 1.3
        treshold_in_meter = 0.3
        minimum_inliers = 20
        status, T, inliers = reconstruction.resect_reconstruction(
            r1, r2, g1, g2, treshold_in_meter, minimum_inliers)
        if not status:
            continue

        s, R, t = multiview.decompose_similarity_transform(T)
        if s > scale_treshold or s < (1.0/scale_treshold) or \
                len(inliers) < minimum_inliers:
            continue

        for t1, t2 in inliers:
            c1 = r1.points[t1].coordinates
            c2 = r2.points[t2].coordinates

            ra.add_common_point_constraint(rec_name1, c1[0], c1[1], c1[2],
                                           rec_name2, c2[0], c2[1], c2[2],
                                           1e-1)
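
Note the acceptance band on the recovered scale in the function above: a candidate alignment is dropped unless s lies between 1/1.3 and 1.3 (and unless it keeps at least 20 inliers). A minimal standalone sketch of that symmetric scale check, using illustrative names that are not taken from OpenSfM:

def scale_within_band(s, band=1.3):
    """Return True when the similarity scale s lies in [1/band, band]."""
    return (1.0 / band) <= s <= band

assert scale_within_band(1.0)        # identity scale is accepted
assert scale_within_band(1.25)       # mild enlargement is accepted
assert not scale_within_band(1.4)    # too much enlargement
assert not scale_within_band(0.7)    # too much shrinkage (0.7 < 1/1.3)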
Code Example #2
def merge_two_reconstructions(r1, r2, config, threshold=1):
    ''' Merge two reconstructions with common tracks
    '''
    t1, t2 = r1['points'], r2['points']
    common_tracks = list(set(t1) & set(t2))

    # print 'Number of common tracks between two reconstructions: {0}'.format(len(common_tracks))
    if len(common_tracks) > 6:

        # Estimate similarity transform
        p1 = np.array([t1[t]['coordinates'] for t in common_tracks])
        p2 = np.array([t2[t]['coordinates'] for t in common_tracks])

        T, inliers = multiview.fit_similarity_transform(p1,
                                                        p2,
                                                        max_iterations=1000,
                                                        threshold=threshold)

        if len(inliers) >= 10:
            s, A, b = multiview.decompose_similarity_transform(T)
            r1p = r1
            apply_similarity(r1p, s, A, b)
            r = r2
            r['shots'].update(r1p['shots'])
            r['points'].update(r1p['points'])
            align_reconstruction(r, config)
            return [r]
        else:
            return [r1, r2]
    else:
        return [r1, r2]
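
This older variant operates on plain-dict reconstructions rather than reconstruction objects. Roughly, the shape it expects looks like the sketch below; only 'shots', 'points' and per-point 'coordinates' are actually read by the function, so treat every other detail as an assumption inferred from the code:

# Inferred shape of the dict-style reconstructions consumed by this variant.
r_example = {
    "shots": {
        "img_0001.jpg": {},               # per-shot pose/camera data (elided)
    },
    "points": {
        "track_42": {"coordinates": [1.0, 2.0, 3.0]},
    },
}

A merge is only attempted when the two dicts share more than six track ids and the fitted similarity keeps at least ten inliers; otherwise both inputs are returned unchanged.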
Code Example #3
File: reconstruction.py Project: BobDeng1974/compv-1
def merge_two_reconstructions(r1, r2, config, threshold=1):
    """Merge two reconstructions with common tracks."""
    t1, t2 = r1.points, r2.points
    common_tracks = list(set(t1) & set(t2))

    if len(common_tracks) > 6:

        # Estimate similarity transform
        p1 = np.array([t1[t].coordinates for t in common_tracks])
        p2 = np.array([t2[t].coordinates for t in common_tracks])

        T, inliers = multiview.fit_similarity_transform(p1,
                                                        p2,
                                                        max_iterations=1000,
                                                        threshold=threshold)

        if len(inliers) >= 10:
            s, A, b = multiview.decompose_similarity_transform(T)
            r1p = r1
            align.apply_similarity(r1p, s, A, b)
            r = r2
            r.shots.update(r1p.shots)
            r.points.update(r1p.points)
            align.align_reconstruction(r, None, config)
            return [r]
        else:
            return [r1, r2]
    else:
        return [r1, r2]
Code Example #4
File: reconstruction.py Project: imclab/OpenSfM
def merge_two_reconstructions(r1, r2, config, threshold=1):
    ''' Merge two reconstructions with common tracks
    '''
    t1, t2 = r1.points, r2.points
    common_tracks = list(set(t1) & set(t2))

    # print 'Number of common tracks between two reconstructions: {0}'.format(len(common_tracks))
    if len(common_tracks) > 6:

        # Estimate similarity transform
        p1 = np.array([t1[t].coordinates for t in common_tracks])
        p2 = np.array([t2[t].coordinates for t in common_tracks])

        T, inliers = multiview.fit_similarity_transform(p1, p2, max_iterations=1000, threshold=threshold)

        if len(inliers) >= 10:
            s, A, b = multiview.decompose_similarity_transform(T)
            r1p = r1
            apply_similarity(r1p, s, A, b)
            r = r2
            r.shots.update(r1p.shots)
            r.points.update(r1p.points)
            align_reconstruction(r, config)
            return [r]
        else:
            return [r1, r2]
    else:
        return [r1, r2]
Code Example #5
File: tools.py Project: mapillary/OpenSfM
def add_point_constraints(ra, reconstruction_shots, reconstruction_name):
    connections = connected_reconstructions(reconstruction_shots)
    for connection in connections:

        i1, (r1, g1) = load_reconstruction(
            connection[0].submodel_path, connection[0].index)
        i2, (r2, g2) = load_reconstruction(
            connection[1].submodel_path, connection[1].index)

        rec_name1 = reconstruction_name(connection[0])
        rec_name2 = reconstruction_name(connection[1])

        scale_treshold = 1.3
        treshold_in_meter = 0.3
        minimum_inliers = 20
        status, T, inliers = reconstruction.resect_reconstruction(
            r1, r2, g1, g2, treshold_in_meter, minimum_inliers)
        if not status:
            continue

        s, R, t = multiview.decompose_similarity_transform(T)
        if s > scale_treshold or s < (1.0/scale_treshold) or \
                len(inliers) < minimum_inliers:
            continue

        for t1, t2 in inliers:
            c1 = r1.points[t1].coordinates
            c2 = r2.points[t2].coordinates

            ra.add_common_point_constraint(
                rec_name1, c1[0], c1[1], c1[2],
                rec_name2, c2[0], c2[1], c2[2],
                1e-1)
Code Example #6
File: reconstruction.py Project: weisui-ad/OpenSfM
def merge_two_reconstructions(r1, r2, config, threshold=1):
    """Merge two reconstructions with common tracks IDs."""
    common_tracks = list(set(r1.points) & set(r2.points))
    worked, T, inliers = align_two_reconstruction(r1, r2, common_tracks, threshold)

    if worked and len(inliers) >= 10:
        s, A, b = multiview.decompose_similarity_transform(T)
        r1p = r1
        apply_similarity(r1p, s, A, b)
        r = r2
        r.shots.update(r1p.shots)
        r.points.update(r1p.points)
        align_reconstruction(r, None, config)
        return [r]
    else:
        return [r1, r2]
Code Example #7
File: reconstruction.py Project: mapillary/OpenSfM
def merge_two_reconstructions(r1, r2, config, threshold=1):
    """Merge two reconstructions with common tracks IDs."""
    common_tracks = list(set(r1.points) & set(r2.points))
    worked, T, inliers = align_two_reconstruction(
        r1, r2, common_tracks, threshold)

    if worked and len(inliers) >= 10:
        s, A, b = multiview.decompose_similarity_transform(T)
        r1p = r1
        apply_similarity(r1p, s, A, b)
        r = r2
        r.shots.update(r1p.shots)
        r.points.update(r1p.points)
        align_reconstruction(r, None, config)
        return [r]
    else:
        return [r1, r2]
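
For context, one way such a merge might be driven from a script. This is a hedged sketch, not an official OpenSfM recipe: it assumes OpenSfM is installed, that this function is exposed as opensfm.reconstruction.merge_two_reconstructions in your version, and that a dataset directory with at least two partial reconstructions exists; paths and loader APIs vary between versions.

# Hedged usage sketch: merge the first two partial reconstructions of a dataset.
from opensfm import dataset, reconstruction

data = dataset.DataSet("path/to/dataset")   # hypothetical dataset directory
recs = data.load_reconstruction()           # list of partial reconstructions
if len(recs) >= 2:
    merged = reconstruction.merge_two_reconstructions(
        recs[0], recs[1], data.config, threshold=1)
    print("merged into %d reconstruction(s)" % len(merged))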
Code Example #8
def test_absolute_pose_generalized_shot():
    """Whole reconstruction resection (generalized pose) on a toy
    reconstruction with 0.01 meter point noise and zero outliers."""
    noise = 0.01
    parameters = config.default_config()
    scene, tracks = synthetic_reconstruction()
    cluster1, cluster2 = split_synthetic_reconstruction(
        scene, tracks, 3, noise)
    cluster2, translation, scale = move_and_scale_cluster(cluster2)

    status, T, inliers = reconstruction.\
        resect_reconstruction(cluster1, cluster2,
                              tracks, tracks,
                              2*noise,
                              parameters['resection_min_inliers'])

    assert status is True
    s, A, b = multiview.decompose_similarity_transform(T)
    np.testing.assert_almost_equal(scale, s, 2)
    np.testing.assert_almost_equal(np.eye(3), A, 2)
    np.testing.assert_almost_equal(translation, b, 2)
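
The assertions above check the scale, rotation and translation recovered by multiview.decompose_similarity_transform. As a self-contained illustration of what that decomposition amounts to (plain NumPy, not the OpenSfM implementation), a 4x4 homogeneous similarity T = [[s*R, t], [0, 1]] can be split back into its parts like this:

import numpy as np

# Illustrative sketch: recover s, R, t from a 4x4 homogeneous similarity
# transform whose upper-left 3x3 block is s*R.
def decompose_similarity(T):
    A, b = T[:3, :3], T[:3, 3]
    s = np.linalg.det(A) ** (1.0 / 3.0)   # det(s*R) = s**3 for a rotation R
    return s, A / s, b

# Compose a known transform and verify the parts are recovered.
s_true = 1.5
R_true = np.array([[0.0, -1.0, 0.0],
                   [1.0,  0.0, 0.0],
                   [0.0,  0.0, 1.0]])     # 90-degree rotation about the z axis
t_true = np.array([0.1, -0.2, 0.3])
T = np.eye(4)
T[:3, :3] = s_true * R_true
T[:3, 3] = t_true

s, R, t = decompose_similarity(T)
np.testing.assert_almost_equal(s, s_true)
np.testing.assert_almost_equal(R, R_true)
np.testing.assert_almost_equal(t, t_true)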