def init_chunk(doc: ms.Document, label: str):
    """
    Initialise a chunk.

    Add the chunk's photos, set the appropriate settings, and align the images.

    param: doc: The Metashape document instance.
    param: label: What label to assign the chunk.
    """
    big_print("Importing survey: {}".format(label))

    # Create the chunk's temporary folder
    os.makedirs(os.path.join(PROCESSING_FOLDER, label), exist_ok=True)

    chunk = doc.addChunk()
    chunk.label = label

    # Add the chunk's images
    photo_dir = os.path.join("input/surveys", label, "images")
    photos = [
        os.path.join(photo_dir, photo) for photo in os.listdir(photo_dir)
    ]
    with no_stdout():
        chunk.addPhotos(photos)

    # Set the x/y/z location accuracy
    chunk.camera_location_accuracy = [2.5] * 3

    # Convert the camera coordinates to a projected coordinate system (CH1903 / LV03)
    out_crs = ms.CoordinateSystem("EPSG::21781")
    for cam in chunk.cameras:
        if cam.reference.location is None:
            continue
        cam.reference.location = ms.CoordinateSystem.transform(
            cam.reference.location, chunk.crs, out_crs)
    chunk.crs = out_crs

    # Remove cameras at a low altitude (for the initial ascent and final descent).
    # Cameras without reference locations are skipped, and a separate list is
    # built so cameras can be removed while looping.
    located_cameras = [camera for camera in chunk.cameras
                       if camera.reference.location is not None]
    min_altitude = min(camera.reference.location.z
                       for camera in located_cameras)
    for camera in located_cameras:
        if camera.reference.location.z < (min_altitude +
                                          CONSTANTS.low_height_threshold):
            chunk.remove([camera])

    # Remove the rotation information as it seems to be erroneous
    for cam in chunk.cameras:
        cam.reference.rotation = None

    # Enable rolling shutter compensation on all sensors.
    for sensor in chunk.sensors:
        sensor.rolling_shutter = True

    # Align the cameras
    with no_stdout():
        chunk.matchPhotos()
        chunk.alignCameras()

    print(f"Finished aligning: {label}")
    log(f"Finished aligning: {label}")
def save(doc, filename=None):
    """Save the document."""
    if filename is None:
        with no_stdout():
            doc.save()
        print("Saved project")
    else:
        doc.save(filename)


def generate_dem(chunk: ms.Chunk, redo: bool = False):
    """
    Generate a DEM using PDAL.

    Generating the DEM in PDAL rather than in Metashape is preferable since the
    grid size can be specified explicitly and no interpolation is applied.

    param: chunk: The input chunk.
    param: redo: Whether to redo the analysis even if it is partially completed.
    """
    extent = processing_tools.calculate_dem_extent(chunk)

    dense_cloud_path = os.path.join(PROCESSING_FOLDER, chunk.label,
                                    "dense_cloud.ply")

    if not os.path.isfile(dense_cloud_path) or redo:
        with no_stdout():
            chunk.exportPoints(dense_cloud_path,
                               source_data=ms.DataSource.DenseCloudData,
                               crs=chunk.crs,
                               save_confidence=True)

    output_raster_path = os.path.join(os.path.dirname(dense_cloud_path),
                                      "dem.tif")

    dem_pipeline = '''
    [
        "DENSE_CLOUD_PATH",
        {
            "type": "filters.range",
            "limits": "confidence[2:]"
        },
        {
            "type": "writers.gdal",
            "resolution": GRID_SIZE,
            "bounds": "([MIN_X, MAX_X],[MIN_Y, MAX_Y])",
            "output_type": "mean",
            "filename": "OUTPUT_RASTER_PATH"
        }
    ]
    '''
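
    # Pipeline stages: the reader type is inferred from the .ply extension,
    # filters.range keeps points with confidence >= 2, and the final stage
    # (writers.gdal) rasterises the mean point elevation per cell.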

    # Run PDAL, substituting the placeholders in the pipeline template
    print("Running PDAL")
    processing_tools.run_pdal_pipeline(
        pipeline=dem_pipeline,
        parameters={
            "DENSE_CLOUD_PATH": dense_cloud_path,
            "GRID_SIZE": str(CONSTANTS.dem_gridsize),
            "MIN_X": extent[0],
            "MAX_X": extent[1],
            "MIN_Y": extent[2],
            "MAX_Y": extent[3],
            "OUTPUT_RASTER_PATH": output_raster_path,
        })

    # Import the raster
    chunk.importRaster(path=output_raster_path, crs=chunk.crs)
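

# Hedged sketch (an assumption, not the original implementation) of what
# processing_tools.run_pdal_pipeline is taken to do: substitute the UPPERCASE
# placeholders into the pipeline template and feed the result to the pdal CLI.
def _run_pdal_pipeline_sketch(pipeline: str, parameters: dict) -> None:
    """Run a PDAL pipeline template after placeholder substitution."""
    import subprocess

    for placeholder, value in parameters.items():
        pipeline = pipeline.replace(placeholder, str(value))
    # "pdal pipeline --stdin" reads the pipeline JSON from standard input.
    subprocess.run(["pdal", "pipeline", "--stdin"],
                   input=pipeline.encode(),
                   check=True)
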
def build_dense_cloud(chunk: ms.Chunk, point_confidence: bool = False):
    """
    Build a dense cloud for the selected chunk.

    param: chunk: The chunk to be processed.
    param: point_confidence: Whether to calculate point confidences.
    """
    with no_stdout():
        chunk.buildDepthMaps(downscale=CONSTANTS.depth_map_downscaling,
                             filter_mode=ms.FilterMode.AggressiveFiltering)
        chunk.buildDenseCloud(point_confidence=point_confidence)
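

# Hedged sketch of the CONSTANTS object referenced throughout this module. The
# attribute names come from their uses here; the values are illustrative
# placeholders, not the original configuration.
def _example_constants():
    from types import SimpleNamespace
    return SimpleNamespace(
        low_height_threshold=30.0,    # metres above the lowest camera (init_chunk)
        depth_map_downscaling=4,      # buildDepthMaps downscale factor
        dem_gridsize=1.0,             # DEM cell size in CRS units (generate_dem)
        orthomosaic_resolution=0.25,  # orthomosaic cell size (build_orthomosaic)
    )
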
def save_point_cloud(chunk: ms.Chunk) -> None:
    """
    Save a sparse point cloud in the chunk's temp folder.

    param: chunk: What chunk to export the point cloud from.
    """
    with no_stdout():
        chunk.exportPoints(os.path.join(PROCESSING_FOLDER, chunk.label,
                                        "point_cloud.las"),
                           source_data=ms.DataSource.PointCloudData,
                           save_normals=False,
                           save_colors=False,
                           save_classes=False,
                           crs=chunk.crs)
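

# Optional sanity-check sketch, assuming the third-party laspy (>= 2.0) package,
# which is not used elsewhere here: read the exported cloud back and count its
# points.
def _count_exported_points(chunk: ms.Chunk) -> int:
    import laspy  # hypothetical extra dependency, only for this check

    las = laspy.read(os.path.join(PROCESSING_FOLDER, chunk.label,
                                  "point_cloud.las"))
    return len(las.points)
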
def export_dense_cloud(chunk: ms.Chunk, filename: str) -> None:
    """
    Export a chunk's dense cloud.

    param: chunk: The chunk to be processed.
    param: filename: The filename to give the point cloud within the chunk's processing directory.
    """
    try:
        with no_stdout():
            chunk.exportPoints(os.path.join(PROCESSING_FOLDER, chunk.label,
                                            filename),
                               source_data=ms.DataSource.DenseCloudData,
                               crs=chunk.crs)
    except Exception as exception:
        # A missing ("Null") dense cloud is expected for some chunks and is
        # silently skipped; anything else is re-raised.
        if "Null dense cloud" not in str(exception):
            raise exception
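

# Hedged usage sketch mirroring the dense cloud loop in main() below: build,
# export, and save for every chunk in a document.
def _build_and_export_all(doc: ms.Document) -> None:
    for chunk in doc.chunks:
        build_dense_cloud(chunk, point_confidence=True)
        export_dense_cloud(chunk, "dense_cloud.ply")
        save(doc)
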
def import_camera_reference(chunk: ms.Chunk, filepath: str) -> None:
    """
    Import camera reference data from a CSV.

    It is assumed that the entries are only for cameras and that they share the chunk's CRS.

    param: chunk: The chunk to import the reference to.
    param: filepath: The input filename.
    """
    with no_stdout():
        chunk.importReference(path=filepath,
                              delimiter=",",
                              columns="nxyz",
                              create_markers=False,
                              crs=chunk.crs,
                              items=ms.ReferenceItemsCameras)
        chunk.updateTransform()
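

# Hedged companion sketch: write a CSV in the layout import_camera_reference
# expects, one "label,x,y,z" row per camera (matching columns="nxyz"), in the
# chunk's CRS. The output path is whatever the caller chooses.
def _write_camera_reference_csv(chunk: ms.Chunk, filepath: str) -> None:
    with open(filepath, "w") as outfile:
        for camera in chunk.cameras:
            location = camera.reference.location
            if location is None:
                continue
            outfile.write("{},{},{},{}\n".format(
                camera.label, location.x, location.y, location.z))
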
def build_orthomosaic(chunk: ms.Chunk) -> None:
    """Build an orthomosaic."""
    with no_stdout():
        chunk.buildOrthomosaic(surface_data=ms.DataSource.ElevationData,
                               resolution=CONSTANTS.orthomosaic_resolution)
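

# Hedged follow-up sketch: export the orthomosaic built above to the chunk's
# processing folder. exportRaster and DataSource.OrthomosaicData follow the
# Metashape 1.6+ API; older versions used different call names.
def _export_orthomosaic(chunk: ms.Chunk) -> None:
    with no_stdout():
        chunk.exportRaster(os.path.join(PROCESSING_FOLDER, chunk.label,
                                        "orthomosaic.tif"),
                           source_data=ms.DataSource.OrthomosaicData)
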
def main(redo=False):
    """
    Run the entire processing pipeline.

    1. Import the images in separate chunks and set appropriate settings
    2. Align the images
    3. Align the chunks to a reference chunk using ICP
    4. Generate dense clouds, DEMs and orthomosaics
    """
    if redo:
        big_print("Redo flag set to True. Redoing steps that already seem to exist")
        # Remove all temporary results since the analysis should be redone
        shutil.rmtree(PROCESSING_FOLDER)
        os.mkdir(PROCESSING_FOLDER)
    else:
        big_print("Redo flag set to False. Not redoing steps that already seem to exist")

    # Instantiate a Metashape document
    doc = ms.Document()
    # Set a fitting name (with its full path)
    document_name = os.path.join(os.getcwd(), PROCESSING_FOLDER,
                                 "Illgraben.psx")
    # Check that an input folder is present
    if not os.path.isdir("input"):
        raise ValueError("No input/ folder in the working directory!")

    # Load an already existing document if it shouldn't be redone
    if os.path.isfile(document_name) and not redo:
        doc.open(document_name)
        log("Existing document loaded")
    # Otherwise, make a new one
    else:
        metashape_tools.save(doc, document_name)
        log("New document created")

    # Check that the document is not in readonly mode
    assert not doc.read_only, "Document is in read-only mode."

    # Load the reference chunk label
    with open("input/reference.txt") as infile:
        reference_chunk_label = infile.read().strip()

    # Load/create the reference chunk and a list of chunks to be aligned
    reference_chunk, chunks_to_be_aligned = metashape_tools.initialise_chunks(
        doc, reference_chunk_label)

    # Check that a reference chunk exists
    assert reference_chunk is not None, "Reference chunk {} could not be found".format(
        reference_chunk_label)

    metashape_tools.save(doc)

    # Generate low-resolution dense point clouds for fine-grained ICP
    # TODO: Dense clouds are currently all created at the same resolution
    for chunk in doc.chunks:
        # Skip the dense cloud step if a dense cloud with the proper depth map
        # resolution already exists
        if chunk.dense_cloud is not None and (
                chunk.dense_cloud.meta["BuildDepthMaps/downscale"]
                == str(CONSTANTS.depth_map_downscaling)):
            continue
        big_print("Generating small dense cloud for {}".format(chunk.label))
        # TODO: Change to build smaller dense clouds?
        try:
            metashape_tools.build_dense_cloud(chunk, point_confidence=True)
        except Exception as exception:
            if "Zero resolution" in str(exception):
                continue
            if "Assertion 23910910127 failed" in str(exception):
                continue

            raise exception
        metashape_tools.export_dense_cloud(chunk, "dense_cloud_for_ICP.ply")
        metashape_tools.save(doc)

    big_print("Running fine grained ICP on stable ground features")
    for chunk in chunks_to_be_aligned:
        # Check if an automatic ICP exists.
        if "auto_ICP_000" in (marker.label for marker in chunk.markers):
            print("auto_ICPs already seem to exist. Skipping {}".format(
                chunk.label))
            continue
        # Create automatic ICP tie points
        metashape_tools.align_stable_ground_locations(reference_chunk, chunk)
        with no_stdout():
            chunk.optimizeCameras()
        metashape_tools.save(doc)

    # Generate dense point clouds
    if not redo:
        big_print("Checking for dense clouds")
    for chunk in doc.chunks:
        # Rebuild the dense cloud unless one with the proper depth map
        # resolution already exists
        if chunk.dense_cloud is not None and (
                chunk.dense_cloud.meta["BuildDepthMaps/downscale"]
                == str(CONSTANTS.depth_map_downscaling)):
            continue
        big_print("Generating dense cloud for {}".format(chunk.label))

        try:
            metashape_tools.build_dense_cloud(chunk, point_confidence=True)
        except Exception as exception:
            if "Zero resolution" not in str(exception):
                raise exception
        metashape_tools.save(doc)

    # Generate DEMs
    if not redo:
        big_print("Checking for DEMs")
    for chunk in doc.chunks:
        if chunk.elevation is not None:
            continue
        big_print("Generating DEM for {}".format(chunk.label))
        metashape_tools.generate_dem(chunk, redo=redo)
        metashape_tools.save(doc)

    # Generate orthomosaics
    if not redo:
        big_print("Checking for orthomosaics")
    for chunk in doc.chunks:
        if chunk.orthomosaic is not None:
            continue
        big_print("Generating orthomosaic for {}".format(chunk.label))
        metashape_tools.build_orthomosaic(chunk)
        metashape_tools.save(doc)
    notify("Finished processing")
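

# The original snippet does not show how main() is invoked; a conventional
# entry point guard would be:
if __name__ == "__main__":
    main()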