Example #1
def check_prior(data: dataset.DataSet, output_rec_path: str):
    reconstruction = data.load_reconstruction()  # load old reconstruction
    prior_rec = data.load_reconstruction(output_rec_path)
    for shot_id, shot in reconstruction[0].shots.items():
        utils.assert_shots_equal(shot, prior_rec[0].shots[shot_id])

    assert len(prior_rec[0].points) > 1000
Example #2
def run_dataset(
    data: DataSet,
    proj: str,
    transformation: bool,
    image_positions: bool,
    reconstruction: bool,
    dense: bool,
    output: str,
) -> None:
    """Export reconstructions in geographic coordinates

    Args:
        proj: PROJ.4 projection string
        transformation: print the coordinate transformation matrix
        image_positions: export image positions
        reconstruction: export reconstruction.json
        dense: export dense point cloud (depthmaps/merged.ply)
        output: path of the output file relative to the dataset

    """

    if not (transformation or image_positions or reconstruction or dense):
        logger.info("Nothing to do. At least on of the options: ")
        logger.info(" --transformation, --image-positions, --reconstruction, --dense")

    reference = data.load_reference()

    projection = pyproj.Proj(proj)
    t = _get_transformation(reference, projection)

    if transformation:
        output = output or "geocoords_transformation.txt"
        output_path = os.path.join(data.data_path, output)
        _write_transformation(t, output_path)

    if image_positions:
        reconstructions = data.load_reconstruction()
        output = output or "image_geocoords.tsv"
        output_path = os.path.join(data.data_path, output)
        _transform_image_positions(reconstructions, t, output_path)

    if reconstruction:
        reconstructions = data.load_reconstruction()
        for r in reconstructions:
            _transform_reconstruction(r, t)
        output = output or "reconstruction.geocoords.json"
        data.save_reconstruction(reconstructions, output)

    if dense:
        output = output or "undistorted/depthmaps/merged.geocoords.ply"
        output_path = os.path.join(data.data_path, output)
        udata = data.undistorted_dataset()
        _transform_dense_point_cloud(udata, t, output_path)
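
A minimal invocation sketch for the example above. The project path "./project" and the UTM PROJ.4 string are assumptions, not part of the original; output="" falls back to the default file names shown in the function body.

from opensfm.dataset import DataSet  # assuming OpenSfM's DataSet class

data = DataSet("./project")  # hypothetical, already reconstructed project
run_dataset(
    data,
    proj="+proj=utm +zone=10 +datum=WGS84 +units=m +no_defs",
    transformation=True,
    image_positions=False,
    reconstruction=False,
    dense=False,
    output="",  # empty: use the default output file names
)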
Example #3
def run_dataset(data: DataSet, list_path, bundle_path, undistorted):
    """Export reconstruction to bundler format.

    Args:
        list_path: txt list of images to export
        bundle_path : output path
        undistorted : export undistorted reconstruction

    """

    udata = data.undistorted_dataset()

    default_path = os.path.join(data.data_path, "bundler")
    list_file_path = list_path if list_path else default_path
    bundle_file_path = bundle_path if bundle_path else default_path

    if undistorted:
        reconstructions = udata.load_undistorted_reconstruction()
        track_manager = udata.load_undistorted_tracks_manager()
        images = reconstructions[0].shots.keys()
    else:
        reconstructions = data.load_reconstruction()
        track_manager = data.load_tracks_manager()
        images = data.images()

    io.export_bundler(images, reconstructions, track_manager, bundle_file_path,
                      list_file_path)
Example #4
def run_dataset(data: DataSet, diagram_max_points=-1):
    """Compute various staistics of a datasets and write them to 'stats' folder

    Args:
        data: dataset object
        diagram_max_points: if positive, decimate point clouds to at most this many points before rendering the heatmap and topview

    """
    reconstructions = data.load_reconstruction()
    tracks_manager = data.load_tracks_manager()

    output_path = os.path.join(data.data_path, "stats")
    data.io_handler.mkdir_p(output_path)

    stats_dict = stats.compute_all_statistics(data, tracks_manager, reconstructions)

    stats.save_residual_grids(
        data, tracks_manager, reconstructions, output_path, data.io_handler
    )
    stats.save_matchgraph(
        data, tracks_manager, reconstructions, output_path, data.io_handler
    )
    stats.save_residual_histogram(stats_dict, output_path, data.io_handler)

    if diagram_max_points > 0:
        stats.decimate_points(reconstructions, diagram_max_points)

    stats.save_heatmap(
        data, tracks_manager, reconstructions, output_path, data.io_handler
    )
    stats.save_topview(
        data, tracks_manager, reconstructions, output_path, data.io_handler
    )

    with data.io_handler.open_wt(os.path.join(output_path, "stats.json")) as fout:
        io.json_dump(stats_dict, fout)
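
A minimal invocation sketch. The project path and the point cap are assumptions for illustration only.

from opensfm.dataset import DataSet  # assuming OpenSfM's DataSet class

data = DataSet("./project")  # hypothetical, already reconstructed project
run_dataset(data, diagram_max_points=100000)  # decimate plots above ~100k points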
Example #5
def run_dataset(data: DataSet, reconstruction, reconstruction_index, tracks,
                output):
    """Export reconstruction to NVM_V3 format from VisualSfM

    Args:
        reconstruction: reconstruction to undistort
        reconstruction_index: index of the reconstruction component to undistort
        tracks: tracks graph of the reconstruction
        output: name of the output folder for the undistorted data, relative to the dataset

    """
    undistorted_data_path = os.path.join(data.data_path, output)
    udata = dataset.UndistortedDataSet(data,
                                       undistorted_data_path,
                                       io_handler=data.io_handler)
    reconstructions = data.load_reconstruction(reconstruction)
    if data.tracks_exists(tracks):
        tracks_manager = data.load_tracks_manager(tracks)
    else:
        tracks_manager = None

    if reconstructions:
        r = reconstructions[reconstruction_index]
        undistort.undistort_reconstruction_and_images(tracks_manager, r, data,
                                                      udata)
Example #6
def run_dataset(
    data: DataSet,
    reconstruction: Optional[str] = None,
    reconstruction_index: int = 0,
    tracks: Optional[str] = None,
    output: str = "undistorted",
    skip_images: bool = False,
) -> None:
    """Export reconstruction to NVM_V3 format from VisualSfM

    Args:
        reconstruction: reconstruction to undistort
        reconstruction_index: index of the reconstruction component to undistort
        tracks: tracks graph of the reconstruction
        output: name of the output folder for the undistorted data, relative to the dataset
        skip_images: do not undistort images
    """
    undistorted_data_path = os.path.join(data.data_path, output)
    udata = dataset.UndistortedDataSet(data,
                                       undistorted_data_path,
                                       io_handler=data.io_handler)
    reconstructions = data.load_reconstruction(reconstruction)
    if data.tracks_exists(tracks):
        tracks_manager = data.load_tracks_manager(tracks)
    else:
        tracks_manager = None

    if reconstructions:
        r = reconstructions[reconstruction_index]
        undistort.undistort_reconstruction_with_images(tracks_manager, r, data,
                                                       udata, skip_images)
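
A minimal invocation sketch for the example above. The project path is an assumption; the remaining arguments keep the defaults from the signature.

from opensfm.dataset import DataSet  # assuming OpenSfM's DataSet class

data = DataSet("./project")  # hypothetical project path
run_dataset(data, output="undistorted", skip_images=False)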
Example #7
def run_dataset(data: DataSet, points, image_list, output, undistorted):
    """Export reconstruction to PLY format

    Args:
        points: export points
        image_list: export only the shots included in this file (path to .txt file)
        output: output pmvs directory
        undistorted: export the undistorted reconstruction

    """

    udata = data.undistorted_dataset()

    base_output_path = output if output else os.path.join(
        data.data_path, "pmvs")
    io.mkdir_p(base_output_path)
    logger.info("Converting dataset [%s] to PMVS dir [%s]" %
                (data.data_path, base_output_path))

    if undistorted:
        reconstructions = udata.load_undistorted_reconstruction()
    else:
        reconstructions = data.load_reconstruction()

    # load tracks for vis.dat
    tracks_manager = None
    try:
        if undistorted:
            tracks_manager = udata.load_undistorted_tracks_manager()
        else:
            tracks_manager = data.load_tracks_manager()
        image_graph = tracking.as_weighted_graph(tracks_manager)
    except IOError:
        image_graph = None

    export_only = None
    if image_list:
        export_only = {}
        with open(image_list, "r") as f:
            for image in f:
                export_only[image.strip()] = True

    for h, reconstruction in enumerate(reconstructions):
        export(
            reconstruction,
            h,
            image_graph,
            tracks_manager,
            base_output_path,
            data,
            undistorted,
            udata,
            points,
            export_only,
        )
Example #8
    def check_merge_partial_reconstructions(self):
        if self.reconstructed():
            data = DataSet(self.opensfm_project_path)
            reconstructions = data.load_reconstruction()
            tracks_manager = data.load_tracks_manager()

            if len(reconstructions) > 1:
                log.ODM_WARNING(
                    "Multiple reconstructions detected (%s), this might be an indicator that some areas did not have sufficient overlap"
                    % len(reconstructions))
                log.ODM_INFO("Attempting merge")

                merged = Reconstruction()
                merged.set_reference(reconstructions[0].reference)

                for ix_r, rec in enumerate(reconstructions):
                    if merged.reference != rec.reference:
                        # Should never happen
                        continue

                    log.ODM_INFO("Merging reconstruction %s" % ix_r)

                    for camera in rec.cameras.values():
                        merged.add_camera(camera)

                    for point in rec.points.values():
                        try:
                            new_point = merged.create_point(
                                point.id, point.coordinates)
                            new_point.color = point.color
                        except RuntimeError as e:
                            log.ODM_WARNING("Cannot merge shot id %s (%s)" %
                                            (shot.id, str(e)))
                            continue

                    for shot in rec.shots.values():
                        merged.add_shot(shot)
                        try:
                            obsdict = tracks_manager.get_shot_observations(
                                shot.id)
                        except RuntimeError:
                            log.ODM_WARNING(
                                "Shot id %s missing from tracks_manager!" %
                                shot.id)
                            continue
                        for track_id, obs in obsdict.items():
                            if track_id in merged.points:
                                merged.add_observation(shot.id, track_id, obs)

                data.save_reconstruction([merged])
Example #9
def run_dataset(data: DataSet, no_cameras, no_points, depthmaps,
                point_num_views):
    """Export reconstruction to PLY format

    Args:
        no_cameras: do not save camera positions
        no_points: do not save points
        depthmaps: export per-image depthmaps as pointclouds
        point_num_views: Export the number of views associated with each point

    """

    reconstructions = data.load_reconstruction()
    tracks_manager = data.load_tracks_manager()

    if reconstructions:
        data.save_ply(reconstructions[0], tracks_manager, None, no_cameras,
                      no_points, point_num_views)

    if depthmaps:
        udata = dataset.UndistortedDataSet(data)
        urec = udata.load_undistorted_reconstruction()[0]
        for shot in urec.shots.values():
            rgb = udata.load_undistorted_image(shot.id)
            for t in ("clean", "raw"):
                path_depth = udata.depthmap_file(shot.id, t + ".npz")
                if not os.path.exists(path_depth):
                    continue
                depth = np.load(path_depth)["depth"]
                rgb = scale_down_image(rgb, depth.shape[1], depth.shape[0])
                ply = depthmap_to_ply(shot, depth, rgb)
                with io.open_wt(udata.depthmap_file(shot.id,
                                                    t + ".ply")) as fout:
                    fout.write(ply)
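
A minimal invocation sketch. The project path is an assumption; this writes the first reconstruction as a PLY with cameras, points and per-point view counts, and skips depthmap export.

from opensfm.dataset import DataSet  # assuming OpenSfM's DataSet class

data = DataSet("./project")  # hypothetical project path
run_dataset(data, no_cameras=False, no_points=False, depthmaps=False,
            point_num_views=True)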
Example #10
def check_reconstruction(data: dataset.DataSet):
    reconstruction = data.load_reconstruction()
    assert len(reconstruction[0].shots) == 3
    assert len(reconstruction[0].points) > 1000