def add_correction_marker(chunk: ms.Chunk, initial_position: tuple[float, float, float],
                          corrected_position: tuple[float, float, float], name: str) -> None:
    """
    Add a marker from its assumed position to its actual position.

    param: chunk: The chunk to analyse.
    param: initial_position: The initial X/Y/Z position of the point.
    param: corrected_position: The corrected X/Y/Z position of the point.
    param: name: The name to label the marker.
    """
    # Transform the initial position to local coordinates
    local_initial_position = chunk.transform.matrix.inv().mulp(
        chunk.crs.unproject(initial_position))

    # Remove any marker with the same name (if the analysis is run on a preexisting chunk).
    # Collect the matches first: removing items while iterating chunk.markers mutates the
    # collection being iterated and can silently skip entries.
    stale_markers = [marker for marker in chunk.markers if marker.label == name]
    for stale_marker in stale_markers:
        chunk.remove(stale_marker)

    # Add the marker with the local initial (assumed) position to estimate projections from
    marker = chunk.addMarker(local_initial_position)

    # Pin the projections (to consistently keep them where they are)
    for camera in marker.projections.keys():
        marker.projections[camera].pinned = True

    marker.label = name
    # The reference location drives the correction: Metashape will pull the pinned
    # projections toward this corrected position on the next optimization.
    marker.reference.location = corrected_position
    marker.reference.enabled = True
def generate_dem(chunk: ms.Chunk, redo: bool = False):
    """
    Generate a DEM using PDAL.

    Generating a DEM in PDAL is better than in Metashape since the grid size
    can be specified and interpolation is off.

    param: chunk: The input chunk.
    param: redo: Whether to redo the analysis even if it is partially completed
    """
    # Output bounds for the raster, as (min_x, max_x, min_y, max_y) — see usage below.
    extent = processing_tools.calculate_dem_extent(chunk)

    dense_cloud_path = os.path.join(PROCESSING_FOLDER, chunk.label, "dense_cloud.ply")
    # NOTE: `redo` only forces a fresh dense-cloud export; the PDAL rasterization
    # below always runs regardless.
    if not os.path.isfile(dense_cloud_path) or redo:
        with no_stdout():
            # Confidence values are saved so the pipeline can filter on them.
            chunk.exportPoints(dense_cloud_path, source_data=ms.DataSource.DenseCloudData,
                               crs=chunk.crs, save_confidence=True)

    output_raster_path = os.path.join(os.path.dirname(dense_cloud_path), "dem.tif")

    # PDAL pipeline template; the ALL-CAPS placeholders are substituted by
    # run_pdal_pipeline using the `parameters` mapping below.
    # - filters.range with "confidence[2:]" keeps only points with confidence >= 2.
    # - The final (writer) stage has no explicit "type"; presumably PDAL infers
    #   writers.gdal from the .tif filename after substitution — TODO confirm.
    dem_pipeline = '''
    [
        "DENSE_CLOUD_PATH",
        {
            "type": "filters.range",
            "limits": "confidence[2:]"
        },
        {
            "resolution": GRID_SIZE,
            "bounds": "([MIN_X, MAX_X],[MIN_Y, MAX_Y])",
            "output_type": "mean",
            "filename": "OUTPUT_RASTER_PATH"
        }
    ]
    '''
    # Run PDAL
    print("Running PDAL")
    processing_tools.run_pdal_pipeline(pipeline=dem_pipeline, parameters={
        "DENSE_CLOUD_PATH": dense_cloud_path,
        "GRID_SIZE": str(CONSTANTS.dem_gridsize),
        "MIN_X": extent[0],
        "MAX_X": extent[1],
        "MIN_Y": extent[2],
        "MAX_Y": extent[3],
        "OUTPUT_RASTER_PATH": output_raster_path
    })

    # Import the raster
    chunk.importRaster(path=output_raster_path, crs=chunk.crs)
def build_dense_cloud(chunk: ms.Chunk, point_confidence: bool = False):
    """
    Build a dense cloud for the selected chunk.

    param: chunk: The chunk to be processed.
    param: point_confidence: Whether to calculate point confidences.
    """
    # Depth maps are a prerequisite for the dense cloud; both steps run with
    # Metashape's console output suppressed.
    with no_stdout():
        chunk.buildDepthMaps(
            downscale=CONSTANTS.depth_map_downscaling,
            filter_mode=ms.FilterMode.AggressiveFiltering,
        )
        chunk.buildDenseCloud(point_confidence=point_confidence)
def save_point_cloud(chunk: ms.Chunk) -> None:
    """
    Save a sparse point cloud in the chunk's temp folder.

    param: chunk: What chunk to export the point cloud from.
    """
    output_path = os.path.join(PROCESSING_FOLDER, chunk.label, "point_cloud.las")
    # Export only the geometry: normals, colours and classes are all skipped.
    with no_stdout():
        chunk.exportPoints(
            output_path,
            source_data=ms.DataSource.PointCloudData,
            save_normals=False,
            save_colors=False,
            save_classes=False,
            crs=chunk.crs,
        )
def export_dense_cloud(chunk: ms.Chunk, filename: str) -> None:
    """
    Export a chunk's dense cloud.

    param: chunk: The chunk to be processed.
    param: filename: The name to give the point cloud in its appropriate chunk
        processing directory.
    """
    try:
        with no_stdout():
            chunk.exportPoints(os.path.join(PROCESSING_FOLDER, chunk.label, filename),
                               source_data=ms.DataSource.DenseCloudData, crs=chunk.crs)
    except Exception as exception:
        # A chunk without a dense cloud is tolerated (best-effort export);
        # any other failure propagates.
        if "Null dense cloud" not in str(exception):
            # Bare raise preserves the original traceback, unlike `raise exception`.
            raise
def import_camera_reference(chunk: ms.Chunk, filepath: str) -> None:
    """
    Import camera reference data from a CSV.

    It is assumed that the entries are only for cameras and that they share
    the chunk crs.

    param: chunk: The chunk to import the reference to.
    param: filepath: The input filename.
    """
    # Columns are label, x, y, z ("nxyz"); no markers are created from the rows.
    with no_stdout():
        chunk.importReference(
            path=filepath,
            delimiter=",",
            columns="nxyz",
            create_markers=False,
            crs=chunk.crs,
            items=ms.ReferenceItemsCameras,
        )
        # Re-align the chunk transform to the freshly imported reference.
        chunk.updateTransform()
def build_orthomosaic(chunk: ms.Chunk) -> None:
    """Build an orthomosaic."""
    # Project onto the chunk's elevation model at the configured resolution,
    # with Metashape's console output suppressed.
    with no_stdout():
        chunk.buildOrthomosaic(
            resolution=CONSTANTS.orthomosaic_resolution,
            surface_data=ms.DataSource.ElevationData,
        )