Example no. 1
def mve_dense_recon(undist_image_max_size, mve_file_path, max_concurrency):

    depthmap_resolution = 640
    max_pixels = depthmap_resolution * depthmap_resolution

    if undist_image_max_size * undist_image_max_size <= max_pixels:
        mve_output_scale = 0
    else:
        ratio = float(
            undist_image_max_size * undist_image_max_size) / float(max_pixels)
        mve_output_scale = int(math.ceil(math.log(ratio) / math.log(4.0)))

    dmrecon_config = [
        "-s%s" % mve_output_scale,
        "--progress=fancy",
        "--local-neighbors=2",
        # "--filter-width=3",
    ]

    retry_count = 1
    while retry_count < 10:
        try:
            system.run('%s %s "%s"' %
                       (context.dmrecon_path, ' '.join(dmrecon_config),
                        mve_file_path),
                       env_vars={'OMP_NUM_THREADS': max_concurrency})
            break
        except Exception as e:
            if str(e) == "Child returned 134" or str(e) == "Child returned 1":
                retry_count += 1
                log.ODM_WARNING("Caught error code, retrying attempt #%s" %
                                retry_count)
            else:
                raise e
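
The scale computation above picks the smallest power-of-four downscaling that fits the undistorted image within the depthmap pixel budget (each MVE scale level halves both image dimensions). A standalone sketch of just that arithmetic; the helper name and the example values are illustrative, not part of MVE:

import math

def compute_output_scale(undist_image_max_size, depthmap_resolution=640):
    # Each scale level divides the pixel count by 4, so the required
    # level is ceil(log4(pixels / budget)).
    max_pixels = depthmap_resolution * depthmap_resolution
    pixels = undist_image_max_size * undist_image_max_size
    if pixels <= max_pixels:
        return 0
    return int(math.ceil(math.log(pixels / float(max_pixels)) / math.log(4.0)))

# e.g. a 2048px image against a 640px budget: ratio ~10.24, log4 ~1.68 -> scale 2
assert compute_output_scale(2048) == 2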
Example no. 2
def get_projection(image_type):
    '''
    Extract GPS EXIF data from the images and derive the UTM zone and hemisphere.
    :param image_type: image file extension (e.g. jpg)
    :return: dict with utm_zone and hemisphere
    '''
    Image = namedtuple('Image', ['image', 'point', 'altitude'])

    kwargs = {'image_type': image_type}

    system.run('exiftool -filename -gpslongitude -gpslatitude -gpsaltitude '
               '-T -n *.{image_type} > imageEXIF.txt'.format(**kwargs))

    with open('imageEXIF.txt', 'r') as f:
        lines = (l.split('\t') for l in f.readlines())
        coords = [
            Image(image=l[0].strip(),
                  point=Point(float(l[1]), float(l[2])),
                  altitude=l[3].strip()) for l in lines
        ]

    p = coords[0].point
    u = utm.from_latlon(p.y, p.x)
    utm_zone = u[2]
    hemisphere = "north" if p.y > 0 else "south"

    log.MM_INFO('UTM - %s' % utm_zone)
    log.MM_INFO('Hemisphere - %s' % hemisphere)

    return {'utm_zone': utm_zone, 'hemisphere': hemisphere}
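
For reference, exiftool's -T flag emits one tab-separated row per image, in the order the tags were requested (filename, longitude, latitude, altitude here), so the parsing step can be exercised without running exiftool at all. The sample row and the namedtuple Point stand-in below are made up for illustration:

from collections import namedtuple

Image = namedtuple('Image', ['image', 'point', 'altitude'])
Point = namedtuple('Point', ['x', 'y'])  # stand-in for the Point type used above

sample = "IMG_0001.JPG\t-122.4194\t37.7749\t12.3"
fields = sample.split('\t')
img = Image(image=fields[0].strip(),
            point=Point(float(fields[1]), float(fields[2])),
            altitude=fields[3].strip())
# x holds longitude and y latitude, which is why the function above calls
# utm.from_latlon(p.y, p.x) in that order.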
Example no. 3
def build_entwine(input_point_cloud_files,
                  tmpdir,
                  output_path,
                  max_concurrency=8):
    kwargs = {
        'threads': max_concurrency,
        'tmpdir': tmpdir,
        'all_inputs': "-i " + " ".join(map(quote, input_point_cloud_files)),
        'outputdir': output_path
    }

    # Run scan to compute dataset bounds
    system.run(
        'entwine scan --threads {threads} --tmp "{tmpdir}" {all_inputs} -o "{outputdir}"'
        .format(**kwargs))
    scan_json = os.path.join(output_path, "scan.json")

    if os.path.exists(scan_json):
        kwargs['input'] = scan_json
        for _ in range(len(input_point_cloud_files)):
            # One at a time
            system.run(
                'entwine build --threads {threads} --tmp "{tmpdir}" -i "{input}" -o "{outputdir}" --run 1'
                .format(**kwargs))
    else:
        log.ODM_WARNING("%s does not exist, no point cloud will be built." %
                        scan_json)
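
A hypothetical invocation, assuming two LAZ tiles and the same opendm imports the snippet relies on; the scan pass writes scan.json into the output directory, which the build loop then consumes one input file at a time:

build_entwine(["tile_0.laz", "tile_1.laz"],
              tmpdir="/tmp/entwine-tmp",
              output_path="/data/project/entwine_pointcloud",
              max_concurrency=4)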
Example no. 4
def split(input_point_cloud, outdir, filename_template, capacity, dims=None):
    log.ODM_INFO(
        "Splitting point cloud into chunks of {} vertices".format(capacity))

    if not os.path.exists(input_point_cloud):
        log.ODM_ERROR(
            "{} does not exist, cannot split point cloud. The program will now exit."
            .format(input_point_cloud))
        sys.exit(1)

    if not os.path.exists(outdir):
        system.mkdir_p(outdir)

    if len(os.listdir(outdir)) != 0:
        log.ODM_ERROR(
            "{} already contains some files. The program will now exit."
            .format(outdir))
        sys.exit(1)

    cmd = 'pdal split -i "%s" -o "%s" --capacity %s ' % (
        input_point_cloud, os.path.join(outdir, filename_template), capacity)

    if filename_template.endswith(".ply"):
        cmd += ("--writers.ply.sized_types=false "
                "--writers.ply.storage_mode='little endian' ")
    if dims is not None:
        cmd += '--writers.ply.dims="%s"' % dims
    system.run(cmd)

    return [os.path.join(outdir, f) for f in os.listdir(outdir)]
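
A hypothetical call; pdal split uses the template to name its outputs, typically inserting a running number before the extension inside outdir (paths and dims illustrative):

chunks = split("pointcloud.ply", "chunks", "chunk.ply",
               capacity=500000, dims="X,Y,Z,Red,Green,Blue")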
Example no. 5
def gdal_translate(proj_str, src, dst):
    '''
    Execute gdal_translate
    :param proj_str: projection string
    :param src: input tif
    :param dst: output tif
    :return:
    '''
    kwargs = {
        'tiled': '-co TILED=yes',
        'compress': 'LZW',
        'predictor': '-co PREDICTOR=2',
        'proj': proj_str,
        'bigtiff': 'YES',
        'src': src,
        'dst': dst,
        'max_memory': 2048,
        'threads': args.max_concurrency
    }

    system.run('gdal_translate '
               '{tiled} '
               '-co BIGTIFF={bigtiff} '
               '-co COMPRESS={compress} '
               '{predictor} '
               '-co BLOCKXSIZE=512 '
               '-co BLOCKYSIZE=512 '
               '-co NUM_THREADS={threads} '
               '-a_srs \"{proj}\" '
               '--config GDAL_CACHEMAX {max_memory} '
               '{src} {dst}'.format(**kwargs))
Example no. 6
def build(input_point_cloud_files,
          output_path,
          max_concurrency=8,
          rerun=False):
    if len(input_point_cloud_files) == 0:
        log.ODM_WARNING("No input point cloud files to process")
        return

    tmpdir = io.related_file_path(output_path, postfix="-tmp")

    if rerun and io.dir_exists(output_path):
        log.ODM_WARNING("Removing previous EPT directory: %s" % output_path)
        shutil.rmtree(output_path)

    kwargs = {
        'threads': max_concurrency,
        'tmpdir': tmpdir,
        'input': "-i " + " ".join(map(quote, input_point_cloud_files)),
        'outputdir': output_path
    }

    system.run(
        "entwine build --threads {threads} --tmp {tmpdir} {input} -o {outputdir}"
        .format(**kwargs))

    if os.path.exists(tmpdir):
        shutil.rmtree(tmpdir)
Example no. 7
    def create_bounds_shapefile(self, pointcloud_path, buffer_distance=0, decimation_step=40, outlier_radius=20):
        """
        Compute a buffered polygon around the data extents (not just a bounding box)
        of the given point cloud.
        
        @return path to a Shapefile containing the polygon
        """
        if not os.path.exists(pointcloud_path):
            log.ODM_WARNING('Point cloud does not exist, cannot generate shapefile bounds {}'.format(pointcloud_path))
            return ''

        bounds_geojson_path = self.create_bounds_geojson(pointcloud_path, buffer_distance, decimation_step, outlier_radius)

        summary_file_path = os.path.join(self.storage_dir, '{}.summary.json'.format(self.files_prefix))
        run('pdal info --summary {0} > {1}'.format(pointcloud_path, summary_file_path))
        
        pc_proj4 = None
        with open(summary_file_path, 'r') as f:
            json_f = json.loads(f.read())
            pc_proj4 = json_f['summary']['srs']['proj4']

        if pc_proj4 is None: raise RuntimeError("Could not determine point cloud proj4 declaration")

        bounds_shapefile_path = os.path.join(self.storage_dir, '{}.bounds.shp'.format(self.files_prefix))

        # Convert bounds to Shapefile
        kwargs = {
            'input': bounds_geojson_path,
            'output': bounds_shapefile_path,
            'proj4': pc_proj4
        }

        run('ogr2ogr -overwrite -a_srs "{proj4}" {output} {input}'.format(**kwargs))

        return bounds_shapefile_path
Example no. 8
def export_to_bounds_files(boundary, proj4, bounds_json_file, bounds_gpkg_file):
    with open(bounds_json_file, "w") as f:
        f.write(json.dumps({
            "type": "FeatureCollection",
            "name": "bounds",
            "features": [{
                "type": "Feature",
                "properties": {},
                "geometry": {
                    "type": "Polygon",
                    "coordinates": [boundary]
                }
            }]
        }))
    
    if os.path.isfile(bounds_gpkg_file):
        os.remove(bounds_gpkg_file)
    
    kwargs = {
        'proj4': proj4,
        'input': double_quote(bounds_json_file),
        'output': double_quote(bounds_gpkg_file)
    }

    system.run('ogr2ogr -overwrite -f GPKG -a_srs "{proj4}" {output} {input}'.format(**kwargs))
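
A hypothetical call with a tiny square boundary in projected coordinates; note that GeoJSON polygon rings must be closed, so the first vertex is repeated at the end (the proj4 string and paths are illustrative):

boundary = [[0, 0], [100, 0], [100, 100], [0, 100], [0, 0]]
export_to_bounds_files(boundary,
                       "+proj=utm +zone=10 +datum=WGS84 +units=m +no_defs",
                       "bounds.json", "bounds.gpkg")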
Example no. 9
def post_point_cloud_steps(args, tree):
    # XYZ point cloud output
    if args.pc_csv:
        log.ODM_INFO("Creating CSV file (XYZ format)")
        
        system.run("pdal translate -i \"{}\" "
            "-o \"{}\" "
            "--writers.text.format=csv "
            "--writers.text.order=\"X,Y,Z\" "
            "--writers.text.keep_unspecified=false ".format(
                tree.odm_georeferencing_model_laz,
                tree.odm_georeferencing_xyz_file))

    # LAS point cloud output
    if args.pc_las:
        log.ODM_INFO("Creating LAS file")
        
        system.run("pdal translate -i \"{}\" "
            "-o \"{}\" ".format(
                tree.odm_georeferencing_model_laz,
                tree.odm_georeferencing_model_las))

    # EPT point cloud output
    if args.pc_ept:
        log.ODM_INFO("Creating Entwine Point Tile output")
        entwine.build([tree.odm_georeferencing_model_laz], tree.entwine_pointcloud, max_concurrency=args.max_concurrency, rerun=False)
Example no. 10
def generate_png(orthophoto_file, output_file=None, outsize=None):
    if output_file is None:
        base, ext = os.path.splitext(orthophoto_file)
        output_file = base + '.png'

    # See if we need to select top three bands
    bandparam = ""

    gtif = gdal.Open(orthophoto_file)
    if gtif.RasterCount > 4:
        bands = []
        for idx in range(1, gtif.RasterCount + 1):
            bands.append(gtif.GetRasterBand(idx).GetColorInterpretation())
        bands = dict(zip(bands, range(1, len(bands) + 1)))

        try:
            red = bands.get(gdal.GCI_RedBand)
            green = bands.get(gdal.GCI_GreenBand)
            blue = bands.get(gdal.GCI_BlueBand)
            if red is None or green is None or blue is None:
                raise Exception("Cannot find bands")

            bandparam = "-b %s -b %s -b %s -a_nodata 0" % (red, green, blue)
        except Exception:
            bandparam = "-b 1 -b 2 -b 3 -a_nodata 0"
    gtif = None

    osparam = ""
    if outsize is not None:
        osparam = "-outsize %s 0" % outsize

    system.run(
        'gdal_translate -of png "%s" "%s" %s %s '
        '--config GDAL_CACHEMAX %s%% ' %
        (orthophoto_file, output_file, osparam, bandparam, get_max_memory()))
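
The band lookup above inverts GDAL's per-band color interpretations into a mapping from interpretation to 1-based band index. The same idea in isolation, with plain strings standing in for the gdal.GCI_* constants (values illustrative):

interps = ['gray', 'red', 'green', 'blue', 'alpha']  # one entry per raster band
lookup = dict(zip(interps, range(1, len(interps) + 1)))
assert (lookup.get('red'), lookup.get('green'), lookup.get('blue')) == (2, 3, 4)
# A missing interpretation comes back as None, which is what triggers
# the fallback to bands 1, 2, 3 in generate_png.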
Example no. 11
def build_copc(input_point_cloud_files, output_file):
    if len(input_point_cloud_files) == 0:
        log.ODM_WARNING("Cannot build COPC, no input files")
        return

    base_path, ext = os.path.splitext(output_file)
    tmpdir = io.related_file_path(base_path, postfix="-tmp")
    if os.path.exists(tmpdir):
        log.ODM_WARNING("Removing previous directory %s" % tmpdir)
        shutil.rmtree(tmpdir)

    kwargs = {
        'tmpdir': tmpdir,
        'files':
        "--files " + " ".join(map(double_quote, input_point_cloud_files)),
        'output': output_file
    }

    # Run untwine
    system.run(
        'untwine --temp_dir "{tmpdir}" {files} -o "{output}" --single_file'.
        format(**kwargs))

    if os.path.exists(tmpdir):
        shutil.rmtree(tmpdir)
Example no. 12
    def export_bundler(self, destination_bundle_file, rerun=False):
        if not io.file_exists(destination_bundle_file) or rerun:
            # convert back to bundler's format
            system.run('%s/bin/export_bundler %s' %
                       (context.opensfm_path, self.opensfm_project_path))
        else:
            log.ODM_WARNING('Found a valid Bundler file in: %s' % destination_bundle_file)
Example no. 13
    def create_bounds_gpkg(self, pointcloud_path, buffer_distance=0, decimation_step=40):
        """
        Compute a buffered polygon around the data extents (not just a bounding box)
        of the given point cloud.
        
        @return path to a GeoPackage containing the polygon
        """
        if not os.path.exists(pointcloud_path):
            log.ODM_WARNING('Point cloud does not exist, cannot generate GPKG bounds {}'.format(pointcloud_path))
            return ''

        bounds_geojson_path = self.create_bounds_geojson(pointcloud_path, buffer_distance, decimation_step)

        summary_file_path = os.path.join(self.storage_dir, '{}.summary.json'.format(self.files_prefix))
        export_summary_json(pointcloud_path, summary_file_path)
        
        pc_proj4 = None
        with open(summary_file_path, 'r') as f:
            json_f = json.loads(f.read())
            pc_proj4 = json_f['summary']['srs']['proj4']

        if pc_proj4 is None: raise RuntimeError("Could not determine point cloud proj4 declaration")

        bounds_gpkg_path = os.path.join(self.storage_dir, '{}.bounds.gpkg'.format(self.files_prefix))

        # Convert bounds to GPKG
        kwargs = {
            'input': bounds_geojson_path,
            'output': bounds_gpkg_path,
            'proj4': pc_proj4
        }

        run('ogr2ogr -overwrite -f GPKG -a_srs "{proj4}" {output} {input}'.format(**kwargs))

        return bounds_gpkg_path
Example no. 14
def build(input_point_cloud_files,
          output_path,
          max_concurrency=8,
          rerun=False):
    num_files = len(input_point_cloud_files)
    if num_files == 0:
        log.ODM_WARNING("No input point cloud files to process")
        return

    tmpdir = io.related_file_path(output_path, postfix="-tmp")

    if rerun and io.dir_exists(output_path):
        log.ODM_WARNING("Removing previous EPT directory: %s" % output_path)
        shutil.rmtree(output_path)

    kwargs = {
        # 'threads': max_concurrency,
        'tmpdir': tmpdir,
        'files': "--files " + " ".join(map(quote, input_point_cloud_files)),
        'outputdir': output_path
    }

    # Run untwine
    system.run(
        'untwine --temp_dir "{tmpdir}" {files} --output_dir "{outputdir}"'.
        format(**kwargs))

    # Cleanup
    if os.path.exists(tmpdir):
        shutil.rmtree(tmpdir)
Example no. 15
File: mesh.py Project: Dhyani01/ODM
def screened_poisson_reconstruction(inPointCloud,
                                    outMesh,
                                    depth=8,
                                    samples=1,
                                    maxVertexCount=100000,
                                    pointWeight=4,
                                    threads=context.num_cores,
                                    verbose=False):

    mesh_path, mesh_filename = os.path.split(outMesh)
    # mesh_path = path/to
    # mesh_filename = odm_mesh.ply

    basename, ext = os.path.splitext(mesh_filename)
    # basename = odm_mesh
    # ext = .ply

    outMeshDirty = os.path.join(mesh_path, "{}.dirty{}".format(basename, ext))

    poissonReconArgs = {
        'bin': context.poisson_recon_path,
        'outfile': outMeshDirty,
        'infile': inPointCloud,
        'depth': depth,
        'samples': samples,
        'pointWeight': pointWeight,
        'threads': threads,
        'verbose': '--verbose' if verbose else ''
    }

    # Run PoissonRecon
    system.run('{bin} --in {infile} '
               '--out {outfile} '
               '--depth {depth} '
               '--pointWeight {pointWeight} '
               '--samplesPerNode {samples} '
               '--threads {threads} '
               '--linearFit '
               '{verbose}'.format(**poissonReconArgs))

    # Cleanup and reduce vertex count if necessary
    cleanupArgs = {
        'reconstructmesh': context.omvs_reconstructmesh_path,
        'outfile': outMesh,
        'infile': outMeshDirty,
        'max_faces': maxVertexCount * 2,
        'verbose': '-verbose' if verbose else ''
    }

    system.run('{reconstructmesh} --mesh-file "{infile}" '
               '-o "{outfile}" '
               '--remove-spikes 0 --remove-spurious 0 --smooth 0 '
               '--target-face-num {max_faces} '
               '{verbose} '.format(**cleanupArgs))

    # Delete intermediate results
    os.remove(outMeshDirty)

    return outMesh
Example no. 16
def compute_cutline(orthophoto_file, crop_area_file, destination, max_concurrency=1, tmpdir=None, scale=1):
    if io.file_exists(orthophoto_file) and io.file_exists(crop_area_file):
        from opendm.grass_engine import grass
        log.ODM_DEBUG("Computing cutline")

        if tmpdir and not io.dir_exists(tmpdir):
            system.mkdir_p(tmpdir)

        scale = max(0.0001, min(1, scale))
        scaled_orthophoto = None

        if scale < 1:
            log.ODM_DEBUG("Scaling orthophoto to %s%% to compute cutline" % (scale * 100))

            scaled_orthophoto = os.path.join(tmpdir, os.path.basename(io.related_file_path(orthophoto_file, postfix=".scaled")))
            # Scale orthophoto before computing cutline
            system.run("gdal_translate -outsize {}% 0 "
                "-co NUM_THREADS={} "
                "--config GDAL_CACHEMAX {}% "
                "{} {}".format(
                scale * 100,
                max_concurrency,
                concurrency.get_max_memory(),
                orthophoto_file,
                scaled_orthophoto
            ))
            orthophoto_file = scaled_orthophoto

        try:
            ortho_width,ortho_height = get_image_size.get_image_size(orthophoto_file, fallback_on_error=False)
            log.ODM_DEBUG("Orthophoto dimensions are %sx%s" % (ortho_width, ortho_height))
            number_lines = int(max(8, math.ceil(min(ortho_width, ortho_height) / 256.0)))
        except Exception:
            log.ODM_DEBUG("Cannot compute orthophoto dimensions, setting arbitrary number of lines.")
            number_lines = 32
        
        log.ODM_DEBUG("Number of lines: %s" % number_lines)

        gctx = grass.create_context({'auto_cleanup' : False, 'tmpdir': tmpdir})
        gctx.add_param('orthophoto_file', orthophoto_file)
        gctx.add_param('crop_area_file', crop_area_file)
        gctx.add_param('number_lines', number_lines)
        gctx.add_param('max_concurrency', max_concurrency)
        gctx.add_param('memory', int(concurrency.get_max_memory_mb(300)))
        gctx.set_location(orthophoto_file)

        cutline_file = gctx.execute(os.path.join("opendm", "grass", "compute_cutline.grass"))
        if cutline_file != 'error':
            if io.file_exists(cutline_file):
                shutil.move(cutline_file, destination)
                log.ODM_INFO("Generated cutline file: %s --> %s" % (cutline_file, destination))
                gctx.cleanup()
                return destination
            else:
                log.ODM_WARNING("Unexpected script result: %s. No cutline file has been generated." % cutline_file)
        else:
            log.ODM_WARNING("Could not generate orthophoto cutline. An error occured when running GRASS. No orthophoto will be generated.")
    else:
        log.ODM_WARNING("We've been asked to compute cutline, but either %s or %s is missing. Skipping..." % (orthophoto_file, crop_area_file))
Example no. 17
def generate_png(orthophoto_file):
    log.ODM_INFO("Generating PNG")
    base, ext = os.path.splitext(orthophoto_file)
    orthophoto_png = base + '.png'

    system.run('gdal_translate -of png "%s" "%s" '
               '--config GDAL_CACHEMAX %s%% ' %
               (orthophoto_file, orthophoto_png, get_max_memory()))
Example no. 18
    def crop(gpkg_path, geotiff_path, gdal_options, keep_original=True):
        if not os.path.exists(gpkg_path) or not os.path.exists(geotiff_path):
            log.ODM_WARNING(
                "Either {} or {} does not exist, will skip cropping.".format(
                    gpkg_path, geotiff_path))
            return geotiff_path

        log.ODM_INFO("Cropping %s" % geotiff_path)

        # Rename original file
        # path/to/odm_orthophoto.tif --> path/to/odm_orthophoto.original.tif

        path, filename = os.path.split(geotiff_path)
        # path = path/to
        # filename = odm_orthophoto.tif

        basename, ext = os.path.splitext(filename)
        # basename = odm_orthophoto
        # ext = .tif

        original_geotiff = os.path.join(path,
                                        "{}.original{}".format(basename, ext))
        os.rename(geotiff_path, original_geotiff)

        try:
            kwargs = {
                'gpkg_path': gpkg_path,
                'geotiffInput': original_geotiff,
                'geotiffOutput': geotiff_path,
                'options': ' '.join(
                    map(lambda k: '-co {}={}'.format(k, gdal_options[k]),
                        gdal_options)),
                'max_memory': get_max_memory()
            }

            run('gdalwarp -cutline {gpkg_path} '
                '-crop_to_cutline '
                '{options} '
                '{geotiffInput} '
                '{geotiffOutput} '
                '--config GDAL_CACHEMAX {max_memory}%'.format(**kwargs))

            if not keep_original:
                os.remove(original_geotiff)

        except Exception as e:
            log.ODM_WARNING('Something went wrong while cropping: {}'.format(
                str(e)))

            # Revert rename
            os.rename(original_geotiff, geotiff_path)

        return geotiff_path
Example no. 19
def dem_to_mesh_gridded(inGeotiff, outMesh, maxVertexCount, verbose=False, maxConcurrency=1):
    log.ODM_INFO('Creating mesh from DSM: %s' % inGeotiff)

    mesh_path, mesh_filename = os.path.split(outMesh)
    # mesh_path = path/to
    # mesh_filename = odm_mesh.ply

    basename, ext = os.path.splitext(mesh_filename)
    # basename = odm_mesh
    # ext = .ply

    outMeshDirty = os.path.join(mesh_path, "{}.dirty{}".format(basename, ext))

    # This should work without issues most of the time,
    # but just in case we lower maxConcurrency if it fails.
    while True:
        try:
            kwargs = {
                'bin': context.dem2mesh_path,
                'outfile': outMeshDirty,
                'infile': inGeotiff,
                'maxVertexCount': maxVertexCount,
                'maxConcurrency': maxConcurrency,
                'verbose': '-verbose' if verbose else ''
            }
            system.run('"{bin}" -inputFile "{infile}" '
                '-outputFile "{outfile}" '
                '-maxTileLength 2000 '
                '-maxVertexCount {maxVertexCount} '
                '-maxConcurrency {maxConcurrency} '
                ' {verbose} '.format(**kwargs))
            break
        except Exception as e:
            maxConcurrency = math.floor(maxConcurrency / 2)
            if maxConcurrency >= 1:
                log.ODM_WARNING("dem2mesh failed, retrying with lower concurrency (%s) in case this is a memory issue" % maxConcurrency)
            else:
                raise e


    # Cleanup and reduce vertex count if necessary 
    # (as dem2mesh cannot guarantee that we'll have the target vertex count)
    cleanupArgs = {
        'reconstructmesh': context.omvs_reconstructmesh_path,
        'outfile': outMesh,
        'infile': outMeshDirty,
        'max_faces': maxVertexCount * 2
    }

    system.run('"{reconstructmesh}" -i "{infile}" '
         '-o "{outfile}" '
         '--remove-spikes 0 --remove-spurious 0 --smooth 0 '
         '--target-face-num {max_faces} '.format(**cleanupArgs))

    # Delete intermediate results
    os.remove(outMeshDirty)

    return outMesh
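
The retry loop above is a simple memory-pressure back-off: halve the concurrency after each failure and give up only once it drops below 1. The same pattern extracted into a generic helper; run_fn is a placeholder for any callable that may fail under memory pressure:

import math

def run_with_backoff(run_fn, max_concurrency):
    while True:
        try:
            return run_fn(max_concurrency)
        except Exception:
            max_concurrency = math.floor(max_concurrency / 2)
            if max_concurrency < 1:
                raise  # nothing left to reduce; propagate the failure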
Example no. 20
def convert_to_cogeo(src_path, blocksize=256, max_workers=1):
    """
    Guarantee that the .tif passed as an argument is a Cloud Optimized GeoTIFF (cogeo).
    The file is converted in place; if the conversion fails, the original file is left unchanged.
    :param src_path: path to GeoTIFF
    :return: True on success
    """

    if not os.path.isfile(src_path):
        log.ODM_WARNING("Cannot convert to cogeo: %s (file does not exist)" %
                        src_path)
        return False

    log.ODM_INFO("Optimizing %s as Cloud Optimized GeoTIFF" % src_path)

    tmpfile = io.related_file_path(src_path, postfix='_cogeo')
    swapfile = io.related_file_path(src_path, postfix='_cogeo_swap')

    kwargs = {
        'threads': max_workers if max_workers else 'ALL_CPUS',
        'blocksize': blocksize,
        'max_memory': get_max_memory(),
        'src_path': src_path,
        'tmpfile': tmpfile,
    }

    try:
        system.run("gdal_translate "
                   "-of COG "
                   "-co NUM_THREADS={threads} "
                   "-co BLOCKSIZE={blocksize} "
                   "-co COMPRESS=deflate "
                   "-co BIGTIFF=IF_SAFER "
                   "-co RESAMPLING=NEAREST "
                   "--config GDAL_CACHEMAX {max_memory}% "
                   "--config GDAL_NUM_THREADS {threads} "
                   "\"{src_path}\" \"{tmpfile}\" ".format(**kwargs))
    except Exception as e:
        log.ODM_WARNING("Cannot create Cloud Optimized GeoTIFF: %s" % str(e))

    if os.path.isfile(tmpfile):
        shutil.move(src_path, swapfile)  # Move to swap location

        try:
            shutil.move(tmpfile, src_path)
        except IOError as e:
            log.ODM_WARNING("Cannot move %s to %s: %s" %
                            (tmpfile, src_path, str(e)))
            shutil.move(swapfile, src_path)  # Attempt to restore

        if os.path.isfile(swapfile):
            os.remove(swapfile)

        return True
    else:
        return False
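
The tmpfile/swapfile dance above is a conservative in-place replacement: write the new file beside the original, park the original at a swap location, promote the new file, and restore the original if the promotion fails. A minimal sketch of the same pattern (names hypothetical):

import os
import shutil

def replace_in_place(src_path, new_file, swap_path):
    shutil.move(src_path, swap_path)       # park the original
    try:
        shutil.move(new_file, src_path)    # promote the new file
    except IOError:
        shutil.move(swap_path, src_path)   # restore the original
        raise
    os.remove(swap_path)                   # success: drop the parked copy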
Example no. 21
def build_overviews(orthophoto_file):
    log.ODM_INFO("Building Overviews")
    kwargs = {'orthophoto': orthophoto_file}

    # Run gdaladdo
    system.run('gdaladdo -ro -r average '
               '--config BIGTIFF_OVERVIEW IF_SAFER '
               '--config COMPRESS_OVERVIEW JPEG '
               '{orthophoto} 2 4 8 16'.format(**kwargs))
Example no. 22
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM Meshing Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree
        verbose = '-verbose' if self.params.verbose else ''

        # define paths and create working directories
        system.mkdir_p(tree.odm_meshing)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'odm_meshing') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'odm_meshing' in args.rerun_from)

        if not io.file_exists(tree.odm_mesh) or rerun_cell:
            log.ODM_DEBUG('Writing ODM Mesh file in: %s' % tree.odm_mesh)

            kwargs = {
                'bin': context.odm_modules_path,
                'outfile': tree.odm_mesh,
                'log': tree.odm_meshing_log,
                'max_vertex': self.params.max_vertex,
                'oct_tree': self.params.oct_tree,
                'samples': self.params.samples,
                'solver': self.params.solver,
                'verbose': verbose
            }
            if not args.use_pmvs:
                kwargs['infile'] = tree.opensfm_model
            else:
                kwargs['infile'] = tree.pmvs_model

            # run meshing binary
            system.run(
                '{bin}/odm_meshing -inputFile {infile} '
                '-outputFile {outfile} -logFile {log} '
                '-maxVertexCount {max_vertex} -octreeDepth {oct_tree} {verbose} '
                '-samplesPerNode {samples} -solverDivide {solver}'.format(
                    **kwargs))
        else:
            log.ODM_WARNING('Found a valid ODM Mesh file in: %s' %
                            tree.odm_mesh)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Meshing')

        log.ODM_INFO('Running ODM Meshing Cell - Finished')
        return ecto.OK if args.end_with != 'odm_meshing' else ecto.QUIT
Example no. 23
def filter(pointCloudPath, standard_deviation=2.5, meank=16, verbose=False):
    """
    Filters a point cloud in place (it will replace the input file with the filtered result).
    """
    if standard_deviation <= 0 or meank <= 0:
        log.ODM_INFO("Skipping point cloud filtering")
        return

    log.ODM_INFO(
        "Filtering point cloud (statistical, meanK {}, standard deviation {})".
        format(meank, standard_deviation))

    if not os.path.exists(pointCloudPath):
        log.ODM_ERROR(
            "{} does not exist, cannot filter point cloud. The program will now exit."
            .format(pointCloudPath))
        sys.exit(1)

    filter_program = os.path.join(context.odm_modules_path, 'odm_filterpoints')
    if not os.path.exists(filter_program):
        log.ODM_WARNING(
            "{} program not found. Will skip filtering, but this installation should be fixed."
            .format(filter_program))
        return

    pc_path, pc_filename = os.path.split(pointCloudPath)
    # pc_path = path/to
    # pc_filename = pointcloud.ply

    basename, ext = os.path.splitext(pc_filename)
    # basename = pointcloud
    # ext = .ply

    tmpPointCloud = os.path.join(pc_path, "{}.tmp{}".format(basename, ext))

    filterArgs = {
        'bin': filter_program,
        'inputFile': pointCloudPath,
        'outputFile': tmpPointCloud,
        'sd': standard_deviation,
        'meank': meank,
        'verbose': '--verbose' if verbose else '',
    }

    system.run('{bin} -inputFile {inputFile} '
               '-outputFile {outputFile} '
               '-sd {sd} '
               '-meank {meank} {verbose} '.format(**filterArgs))

    # Remove input file, swap temp file
    if os.path.exists(tmpPointCloud):
        os.remove(pointCloudPath)
        os.rename(tmpPointCloud, pointCloudPath)
    else:
        log.ODM_WARNING(
            "{} not found, filtering has failed.".format(tmpPointCloud))
Example no. 24
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM Texturing Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree

        # define paths and create working directories
        system.mkdir_p(tree.odm_texturing)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'odm_texturing') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'odm_texturing' in args.rerun_from)

        if not io.file_exists(tree.odm_textured_model_obj) or rerun_cell:
            log.ODM_DEBUG('Writing ODM Textured file in: %s' %
                          tree.odm_textured_model_obj)

            # odm_texturing definitions
            kwargs = {
                'bin': context.odm_modules_path,
                'out_dir': tree.odm_texturing,
                'bundle': tree.opensfm_bundle,
                'imgs_path': tree.dataset_resize,
                'imgs_list': tree.opensfm_bundle_list,
                'model': tree.odm_mesh,
                'log': tree.odm_texuring_log,
                'resize': self.params.resize,
                'resolution': self.params.resolution,
                'size': self.params.size
            }

            # run texturing binary
            system.run(
                '{bin}/odm_texturing -bundleFile {bundle} '
                '-imagesPath {imgs_path} -imagesListPath {imgs_list} '
                '-inputModelPath {model} -outputFolder {out_dir}/ '
                '-textureResolution {resolution} -bundleResizedTo {resize} '
                '-textureWithSize {size} -logFile {log}'.format(**kwargs))
        else:
            log.ODM_WARNING('Found a valid ODM Texture file in: %s' %
                            tree.odm_textured_model_obj)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Texturing')

        log.ODM_INFO('Running ODM Texturing Cell - Finished')
        return ecto.OK if args.end_with != 'odm_texturing' else ecto.QUIT
Example no. 25
def screened_poisson_reconstruction(inPointCloud, outMesh, depth=8, samples=1, maxVertexCount=100000, pointWeight=4, threads=context.num_cores, verbose=False):

    mesh_path, mesh_filename = os.path.split(outMesh)
    # mesh_path = path/to
    # mesh_filename = odm_mesh.ply

    basename, ext = os.path.splitext(mesh_filename)
    # basename = odm_mesh
    # ext = .ply

    outMeshDirty = os.path.join(mesh_path, "{}.dirty{}".format(basename, ext))
    
    # PoissonRecon appears to have a race condition on ppc64le; forcing
    # single-threaded operation works around it.
    if platform.machine() == 'ppc64le':
        log.ODM_WARNING("ppc64le platform detected, forcing single-threaded operation for PoissonRecon")
        threads = 1
    
    poissonReconArgs = {
      'bin': context.poisson_recon_path,
      'outfile': outMeshDirty,
      'infile': inPointCloud,
      'depth': depth,
      'samples': samples,
      'pointWeight': pointWeight,
      'threads': threads,
      'verbose': '--verbose' if verbose else ''
    }

    # Run PoissonRecon
    system.run('"{bin}" --in "{infile}" '
             '--out "{outfile}" '
             '--depth {depth} '
             '--pointWeight {pointWeight} '
             '--samplesPerNode {samples} '
             '--threads {threads} '
             '--linearFit '
             '{verbose}'.format(**poissonReconArgs))

    # Cleanup and reduce vertex count if necessary
    cleanupArgs = {
        'reconstructmesh': context.omvs_reconstructmesh_path,
        'outfile': outMesh,
        'infile':outMeshDirty,
        'max_faces': maxVertexCount * 2
    }

    system.run('"{reconstructmesh}" -i "{infile}" '
         '-o "{outfile}" '
         '--remove-spikes 0 --remove-spurious 0 --smooth 0 '
         '--target-face-num {max_faces} '.format(**cleanupArgs))

    # Delete intermediate results
    os.remove(outMeshDirty)

    return outMesh
Example no. 26
    def process_local(self):
        log.ODM_INFO("=============================")
        log.ODM_INFO("Local Toolchain %s" % self)
        log.ODM_INFO("=============================")

        submodel_name = os.path.basename(self.project_path)
        submodels_path = os.path.abspath(self.path(".."))
        project_name = os.path.basename(os.path.abspath(os.path.join(submodels_path, "..")))
        argv = get_submodel_argv(project_name, submodels_path, submodel_name)

        # Re-run the ODM toolchain on the submodel
        system.run(" ".join(map(quote, argv)), env_vars=os.environ.copy())
Example no. 27
def generate_kmz(orthophoto_file, output_file=None, outsize=None):
    if output_file is None:
        base, ext = os.path.splitext(orthophoto_file)
        output_file = base + '.kmz'
    
    # See if we need to select top three bands
    bandparam = ""
    gtif = gdal.Open(orthophoto_file)
    if gtif.RasterCount > 4:
        bandparam = "-b 1 -b 2 -b 3 -a_nodata 0"

    system.run('gdal_translate -of KMLSUPEROVERLAY -co FORMAT=JPEG "%s" "%s" %s '
               '--config GDAL_CACHEMAX %s%% ' % (orthophoto_file, output_file, bandparam, get_max_memory()))    
Example no. 28
def screened_poisson_reconstruction(inPointCloud, outMesh, depth=8, samples=1, maxVertexCount=100000, pointWeight=4, threads=context.num_cores, verbose=False):

    mesh_path, mesh_filename = os.path.split(outMesh)
    # mesh_path = path/to
    # mesh_filename = odm_mesh.ply

    basename, ext = os.path.splitext(mesh_filename)
    # basename = odm_mesh
    # ext = .ply

    outMeshDirty = os.path.join(mesh_path, "{}.dirty{}".format(basename, ext))

    poissonReconArgs = {
      'bin': context.poisson_recon_path,
      'outfile': outMeshDirty,
      'infile': inPointCloud,
      'depth': depth,
      'samples': samples,
      'pointWeight': pointWeight,
      'threads': threads,
      'verbose': '--verbose' if verbose else ''
    }

    # Run PoissonRecon
    system.run('{bin} --in {infile} '
             '--out {outfile} '
             '--depth {depth} '
             '--pointWeight {pointWeight} '
             '--samplesPerNode {samples} '
             '--threads {threads} '
             '--linearFit '
             '{verbose}'.format(**poissonReconArgs))

    # Cleanup and reduce vertex count if necessary
    cleanupArgs = {
        'bin': context.odm_modules_path,
        'outfile': outMesh,
        'infile': outMeshDirty,
        'max_vertex': maxVertexCount,
        'verbose': '-verbose' if verbose else ''
    }

    system.run('{bin}/odm_cleanmesh -inputFile {infile} '
         '-outputFile {outfile} '
         '-removeIslands '
         '-decimateMesh {max_vertex} {verbose} '.format(**cleanupArgs))

    # Delete intermediate results
    os.remove(outMeshDirty)

    return outMesh
Example no. 29
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM PMVS Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'pmvs') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'pmvs' in args.rerun_from)

        if not io.file_exists(tree.pmvs_model) or rerun_cell:
            log.ODM_DEBUG('Creating dense pointcloud in: %s' % tree.pmvs_model)

            kwargs = {
                'bin': context.cmvs_opts_path,
                'prefix': tree.pmvs_rec_path,
                'level': self.params.level,
                'csize': self.params.csize,
                'thresh': self.params.thresh,
                'wsize': self.params.wsize,
                'min_imgs': self.params.min_imgs,
                'cores': self.params.cores
            }

            # generate pmvs2 options
            system.run('{bin} {prefix}/ {level} {csize} {thresh} {wsize} '
                       '{min_imgs} {cores}'.format(**kwargs))

            # run pmvs2
            system.run('%s %s/ option-0000' %
                       (context.pmvs2_path, tree.pmvs_rec_path))

        else:
            log.ODM_WARNING('Found a valid PMVS file in %s' % tree.pmvs_model)

        outputs.reconstruction = inputs.reconstruction
        
        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'PMVS')

        log.ODM_INFO('Running ODM PMVS Cell - Finished')
        return ecto.OK if args.end_with != 'pmvs' else ecto.QUIT
Example no. 30
def merge_point_clouds(input_files, output_file, verbose=False):
    if len(input_files) == 0:
        log.ODM_WARNING("Cannot merge point clouds, no point clouds to merge.")
        return

    cmd = [
        'pdal',
        'merge',
        ' '.join(map(quote, input_files + [output_file])),
    ]

    if verbose:
        log.ODM_INFO(' '.join(cmd))

    system.run(' '.join(cmd))
Example no. 31
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM PMVS Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'pmvs') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'pmvs' in args.rerun_from)

        if not io.file_exists(tree.pmvs_model) or rerun_cell:
            log.ODM_DEBUG('Creating dense pointcloud in: %s' % tree.pmvs_model)

            kwargs = {
                'bin': context.cmvs_opts_path,
                'prefix': tree.pmvs_rec_path,
                'level': self.params.level,
                'csize': self.params.csize,
                'thresh': self.params.thresh,
                'wsize': self.params.wsize,
                'min_imgs': self.params.min_imgs,
                'cores': self.params.cores
            }

            # generate pmvs2 options
            system.run('{bin} {prefix}/ {level} {csize} {thresh} {wsize} '
                       '{min_imgs} {cores}'.format(**kwargs))

            # run pmvs2
            system.run('%s %s/ option-0000' %
                       (context.pmvs2_path, tree.pmvs_rec_path))

        else:
            log.ODM_WARNING('Found a valid PMVS file in %s' % tree.pmvs_model)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'PMVS')

        log.ODM_INFO('Running ODM PMVS Cell - Finished')
        return ecto.OK if args.end_with != 'pmvs' else ecto.QUIT
Example no. 32
def merge(input_point_cloud_files, output_file, rerun=False):
    num_files = len(input_point_cloud_files)
    if num_files == 0:
        log.ODM_WARNING("No input point cloud files to process")
        return

    if io.file_exists(output_file):
        log.ODM_WARNING("Removing previous point cloud: %s" % output_file)
        os.remove(output_file)

    kwargs = {
        'all_inputs': " ".join(map(quote, input_point_cloud_files)),
        'output': output_file
    }

    system.run('lasmerge -i {all_inputs} -o "{output}"'.format(**kwargs))
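
A hypothetical call, assuming LAStools' lasmerge is on the PATH and the submodel outputs exist (filenames illustrative):

merge(["submodel_0000.laz", "submodel_0001.laz"], "merged.laz")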
Example no. 33
def merge_ply(input_point_cloud_files, output_file, dims=None):
    num_files = len(input_point_cloud_files)
    if num_files == 0:
        log.ODM_WARNING("No input point cloud files to process")
        return

    cmd = [
        'pdal',
        'merge',
        '--writers.ply.sized_types=false',
        '--writers.ply.storage_mode="little endian"',
        ('--writers.ply.dims="%s"' % dims) if dims is not None else '',
        ' '.join(map(quote, input_point_cloud_files + [output_file])),
    ]

    system.run(' '.join(cmd))
Example no. 34
    def crop(shapefile_path, geotiff_path, gdal_options, keep_original=True):
        if not os.path.exists(shapefile_path) or not os.path.exists(geotiff_path):
            log.ODM_WARNING("Either {} or {} does not exist, will skip cropping.".format(shapefile_path, geotiff_path))
            return geotiff_path

        # Rename original file
        # path/to/odm_orthophoto.tif --> path/to/odm_orthophoto.original.tif
        
        path, filename = os.path.split(geotiff_path)
        # path = path/to
        # filename = odm_orthophoto.tif

        basename, ext = os.path.splitext(filename)
        # basename = odm_orthophoto
        # ext = .tif

        original_geotiff = os.path.join(path, "{}.original{}".format(basename, ext))
        os.rename(geotiff_path, original_geotiff)

        try:
            kwargs = {
                'shapefile_path': shapefile_path,
                'geotiffInput': original_geotiff,
                'geotiffOutput': geotiff_path,
                'options': ' '.join(map(lambda k: '-co {}={}'.format(k, gdal_options[k]), gdal_options)),
                'max_memory': get_max_memory()
            }

            run('gdalwarp -cutline {shapefile_path} '
                '-crop_to_cutline '
                '{options} '
                '{geotiffInput} '
                '{geotiffOutput} '
                '--config GDAL_CACHEMAX {max_memory}%'.format(**kwargs))

            if not keep_original:
                os.remove(original_geotiff)

        except Exception as e:
            log.ODM_WARNING('Something went wrong while cropping: {}'.format(str(e)))
            
            # Revert rename
            os.rename(original_geotiff, geotiff_path)

        return geotiff_path
Example no. 35
def dem_to_mesh_gridded(inGeotiff, outMesh, maxVertexCount, verbose=False):
    log.ODM_INFO('Creating mesh from DSM: %s' % inGeotiff)

    mesh_path, mesh_filename = os.path.split(outMesh)
    # mesh_path = path/to
    # mesh_filename = odm_mesh.ply

    basename, ext = os.path.splitext(mesh_filename)
    # basename = odm_mesh
    # ext = .ply

    outMeshDirty = os.path.join(mesh_path, "{}.dirty{}".format(basename, ext))

    kwargs = {
        'bin': context.dem2mesh_path,
        'outfile': outMeshDirty,
        'infile': inGeotiff,
        'maxVertexCount': maxVertexCount,
        'verbose': '-verbose' if verbose else ''
    }

    system.run('{bin} -inputFile {infile} '
         '-outputFile {outfile} '
         '-maxVertexCount {maxVertexCount} '
         ' {verbose} '.format(**kwargs))

    # Cleanup and reduce vertex count if necessary 
    # (as dem2mesh cannot guarantee that we'll have the target vertex count)
    cleanupArgs = {
        'bin': context.odm_modules_path,
        'outfile': outMesh,
        'infile': outMeshDirty,
        'max_vertex': maxVertexCount,
        'verbose': '-verbose' if verbose else ''
    }

    system.run('{bin}/odm_cleanmesh -inputFile {infile} '
         '-outputFile {outfile} '
         '-removeIslands '
         '-decimateMesh {max_vertex} {verbose} '.format(**cleanupArgs))

    # Delete intermediate results
    os.remove(outMeshDirty)

    return outMesh
Example no. 36
def dem_to_points(inGeotiff, outPointCloud, verbose=False):
    log.ODM_INFO('Sampling points from DSM: %s' % inGeotiff)

    kwargs = {
        'bin': context.dem2points_path,
        'outfile': outPointCloud,
        'infile': inGeotiff,
        'verbose': '-verbose' if verbose else ''
    }

    system.run('{bin} -inputFile {infile} '
         '-outputFile {outfile} '
         '-skirtHeightThreshold 1.5 '
         '-skirtIncrements 0.2 '
         '-skirtHeightCap 100 '
         ' {verbose} '.format(**kwargs))

    return outPointCloud
Example no. 37
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()
        
        log.ODM_INFO('Running ODM CMVS Cell')

        # get inputs 
        args = self.inputs.args
        tree = self.inputs.tree

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'cmvs') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'cmvs' in args.rerun_from)

        if not io.file_exists(tree.pmvs_bundle) or rerun_cell:
            log.ODM_DEBUG('Writing CMVS vis in: %s' % tree.pmvs_bundle)

            # copy bundle file to pmvs dir
            from shutil import copyfile
            copyfile(tree.opensfm_bundle, 
                     tree.pmvs_bundle)

            kwargs = {
                'bin': context.cmvs_path,
                'prefix': self.inputs.tree.pmvs_rec_path,
                'max_images': self.params.max_images,
                'cores': self.params.cores
            }

            # run cmvs
            system.run('{bin} {prefix}/ {max_images} {cores}'.format(**kwargs))
        else:
            log.ODM_WARNING('Found a valid CMVS file in: %s' % 
                            tree.pmvs_bundle)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'CMVS')

        log.ODM_INFO('Running ODM CMVS Cell - Finished')
        return ecto.OK if args.end_with != 'cmvs' else ecto.QUIT
Example no. 38
    def process(self, inputs, outputs):
        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM Georeferencing Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree
        gcpfile = io.join_paths(tree.root_path, self.params.gcp_file)

        # define paths and create working directories
        system.mkdir_p(tree.odm_georeferencing)

        # In case a GCP file is not provided, try to generate it using
        # image metadata. Internally calls jhead.
        if not self.params.use_gcp and \
           not io.file_exists(tree.odm_georeferencing_coords):
            
            log.ODM_WARNING('Warning: No coordinates file. '
                            'Generating coordinates file in: %s'
                            % tree.odm_georeferencing_coords)
            try:
                # odm_georeference definitions
                kwargs = {
                    'bin': context.odm_modules_path,
                    'imgs': tree.dataset_raw,
                    'imgs_list': tree.opensfm_bundle_list,
                    'coords': tree.odm_georeferencing_coords,
                    'log': tree.odm_georeferencing_utm_log
                }

                # run UTM extraction binary
                system.run('{bin}/odm_extract_utm -imagesPath {imgs}/ '
                           '-imageListFile {imgs_list} -outputCoordFile {coords} '
                           '-logFile {log}'.format(**kwargs))

            except Exception as e:
                log.ODM_ERROR('Could not generate GCP file from images metadata. '
                              'Consider rerunning with argument --odm_georeferencing-useGcp'
                              ' and provide a proper GCP file')
                log.ODM_ERROR(e)
                return ecto.QUIT
Example no. 39
def filter(input_point_cloud, output_point_cloud, standard_deviation=2.5, meank=16, confidence=None, verbose=False):
    """
    Filters a point cloud
    """
    if standard_deviation <= 0 or meank <= 0:
        log.ODM_INFO("Skipping point cloud filtering")
        return

    log.ODM_INFO("Filtering point cloud (statistical, meanK {}, standard deviation {})".format(meank, standard_deviation))
    if confidence:
        log.ODM_INFO("Keeping only points with > %s confidence" % confidence)

    if not os.path.exists(input_point_cloud):
        log.ODM_ERROR("{} does not exist, cannot filter point cloud. The program will now exit.".format(input_point_cloud))
        sys.exit(1)

    filter_program = os.path.join(context.odm_modules_path, 'odm_filterpoints')
    if not os.path.exists(filter_program):
        log.ODM_WARNING("{} program not found. Will skip filtering, but this installation should be fixed.")
        shutil.copy(input_point_cloud, output_point_cloud)
        return

    filterArgs = {
      'bin': filter_program,
      'inputFile': input_point_cloud,
      'outputFile': output_point_cloud,
      'sd': standard_deviation,
      'meank': meank,
      'verbose': '-verbose' if verbose else '',
      'confidence': '-confidence %s' % confidence if confidence else '',
    }

    system.run('{bin} -inputFile {inputFile} '
         '-outputFile {outputFile} '
         '-sd {sd} '
         '-meank {meank} {confidence} {verbose} '.format(**filterArgs))

    # Verify that filtering produced the output file
    if not os.path.exists(output_point_cloud):
        log.ODM_WARNING("{} not found, filtering has failed.".format(output_point_cloud))
Example no. 40
def run_pipeline(json, verbose=False):
    """ Run PDAL Pipeline with provided JSON """
    if verbose:
        json_print(json)

    # write to temp file
    f, jsonfile = tempfile.mkstemp(suffix='.json')
    if verbose:
        print('Pipeline file: %s' % jsonfile)
    os.write(f, jsonlib.dumps(json).encode('utf-8'))
    os.close(f)

    cmd = [
        'pdal',
        'pipeline',
        '-i %s' % jsonfile
    ]
    if verbose:
        out = system.run(' '.join(cmd))
    else:
        out = system.run(' '.join(cmd) + ' > /dev/null 2>&1')
    os.remove(jsonfile)
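
For context, a PDAL pipeline is an ordered JSON array of stages, with readers and writers inferred from the filenames. A minimal pipeline dict that could be handed to the function above (filenames illustrative; the outlier filter options are standard PDAL ones):

pipeline = {
    "pipeline": [
        "input.laz",
        {"type": "filters.outlier", "method": "statistical",
         "mean_k": 16, "multiplier": 2.5},
        "output.laz"
    ]
}
run_pipeline(pipeline, verbose=True)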
Example no. 41
def run_pdaltranslate_smrf(fin, fout, scalar, slope, threshold, window, verbose=False):
    """ Run PDAL translate  """
    cmd = [
        'pdal',
        'translate',
        '-i %s' % fin,
        '-o %s' % fout,
        'smrf',
        '--filters.smrf.scalar=%s' % scalar,
        '--filters.smrf.slope=%s' % slope,
        '--filters.smrf.threshold=%s' % threshold,
        '--filters.smrf.window=%s' % window,
    ]

    if verbose:
        print(' '.join(cmd))

    out = system.run(' '.join(cmd))
    if verbose:
        print(out)
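
A hypothetical invocation with values close to PDAL's documented SMRF defaults (scalar 1.25, slope 0.15, threshold 0.5, window 18), which classifies ground points in the output file (filenames illustrative):

run_pdaltranslate_smrf("dense.laz", "ground_classified.laz",
                       scalar=1.25, slope=0.15, threshold=0.5, window=18.0,
                       verbose=True)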
Example no. 42
def get_extent(input_point_cloud):
    fd, json_file = tempfile.mkstemp(suffix='.json')
    os.close(fd)
    
    # Get point cloud extent
    fallback = False

    # We know PLY files do not have --summary support
    if input_point_cloud.lower().endswith(".ply"):
        fallback = True
        run('pdal info {0} > {1}'.format(input_point_cloud, json_file))

    try:
        if not fallback:
            run('pdal info --summary {0} > {1}'.format(input_point_cloud, json_file))
    except Exception:
        fallback = True
        run('pdal info {0} > {1}'.format(input_point_cloud, json_file))

    bounds = {}
    with open(json_file, 'r') as f:
        result = json.loads(f.read())
        
        if not fallback:
            summary = result.get('summary')
            if summary is None: raise Exception("Cannot compute summary for %s (summary key missing)" % input_point_cloud)
            bounds = summary.get('bounds')
        else:
            stats = result.get('stats')
            if stats is None: raise Exception("Cannot compute bounds for %s (stats key missing)" % input_point_cloud)
            bbox = stats.get('bbox')
            if bbox is None: raise Exception("Cannot compute bounds for %s (bbox key missing)" % input_point_cloud)
            native = bbox.get('native')
            if native is None: raise Exception("Cannot compute bounds for %s (native key missing)" % input_point_cloud)
            bounds = native.get('bbox')

        if bounds is None: raise Exception("Cannot compute bounds for %s (bounds key missing)" % input_point_cloud)
        
        if bounds.get('maxx', None) is None or \
            bounds.get('minx', None) is None or \
            bounds.get('maxy', None) is None or \
            bounds.get('miny', None) is None or \
            bounds.get('maxz', None) is None or \
            bounds.get('minz', None) is None:
            raise Exception("Cannot compute bounds for %s (invalid keys) %s" % (input_point_cloud, str(bounds)))
            
    os.remove(json_file)
    return bounds    
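
A short usage sketch for get_extent above; the input path is hypothetical:

bounds = get_extent('odm_georeferenced_model.laz')
width = bounds['maxx'] - bounds['minx']    # extent width in CRS units
height = bounds['maxy'] - bounds['miny']   # extent height in CRS units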
Example n. 45
0
            if io.file_exists(tif_file):
                ortho_tifs[folder_number] = tif_file

    kwargs = {
            'f_out': out_tif,
            'files': ' '.join(ortho_tifs.values()),
            'clusters': voronoi_file
        }

    if io.file_exists(kwargs['f_out']) and not args.overwrite:
        log.ODM_ERROR("File {f_out} exists, use --overwrite to force overwrite of file.".format(**kwargs))
    else:
        # use bounds as cutlines (blending)
        system.run('gdal_merge.py -o {f_out} '
                   '-createonly '
                   '-co "BIGTIFF=YES" '
                   '-co "BLOCKXSIZE=512" '
                   '-co "BLOCKYSIZE=512" {files}'.format(**kwargs)
                   )

        for tif in ortho_tifs:
            kwargs['name'] = '0' if tif == '0000' else tif.lstrip('0')  # tif is a zero-padded folder number string, e.g. '0000'
            kwargs['file'] = ortho_tifs[tif]
            system.run('gdalwarp -cutline {clusters} '
                       '-cwhere "submodel = \'{name}\'" '
                       '-r lanczos -multi -wo NUM_THREADS=ALL_CPUS '
                       ' {file} {f_out}'.format(**kwargs)
            )

        log.ODM_INFO("Building Overviews")
        kwargs = {
            'orthophoto': out_tif,
Example n. 46
0
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM OpenSfM Cell')

        # get inputs
        tree = self.inputs.tree
        args = self.inputs.args
        photos = self.inputs.photos

        if not photos:
            log.ODM_ERROR('Not enough photos in photos array to start OpenSfM')
            return ecto.QUIT

        # create working directories     
        system.mkdir_p(tree.opensfm)
        system.mkdir_p(tree.pmvs)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'opensfm') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'opensfm' in args.rerun_from)

        # check if reconstruction was done before

        if not io.file_exists(tree.opensfm_reconstruction) or rerun_cell:
            # create file list
            list_path = io.join_paths(tree.opensfm, 'image_list.txt')
            with open(list_path, 'w') as fout:
                for photo in photos:
                    fout.write('%s\n' % photo.path_file)

            # create config file for OpenSfM
            config = [
                "use_exif_size: %s" % ('no' if not self.params.use_exif_size else 'yes'),
                "feature_process_size: %s" % self.params.feature_process_size,
                "feature_min_frames: %s" % self.params.feature_min_frames,
                "processes: %s" % self.params.processes,
                "matching_gps_neighbors: %s" % self.params.matching_gps_neighbors
            ]

            if args.matcher_distance > 0:
                config.append("matching_gps_distance: %s" % self.params.matching_gps_distance)

            # write config file
            config_filename = io.join_paths(tree.opensfm, 'config.yaml')
            with open(config_filename, 'w') as fout:
                fout.write("\n".join(config))

            # run OpenSfM reconstruction
            system.run('PYTHONPATH=%s %s/bin/run_all %s' %
                       (context.pyopencv_path, context.opensfm_path, tree.opensfm))
        else:
            log.ODM_WARNING('Found a valid OpenSfM file in: %s' %
                            tree.opensfm_reconstruction)

        # check if reconstruction was exported to bundler before

        if not io.file_exists(tree.opensfm_bundle_list) or rerun_cell:
            # convert back to bundler's format
            system.run('PYTHONPATH=%s %s/bin/export_bundler %s' %
                       (context.pyopencv_path, context.opensfm_path, tree.opensfm))
        else:
            log.ODM_WARNING('Found a valid Bundler file in: %s' %
                            tree.opensfm_reconstruction)

        # check if reconstruction was exported to pmvs before

        if not io.file_exists(tree.pmvs_visdat) or rerun_cell:
            # run PMVS converter
            system.run('PYTHONPATH=%s %s/bin/export_pmvs %s --output %s' %
                       (context.pyopencv_path, context.opensfm_path, tree.opensfm, tree.pmvs))
        else:
            log.ODM_WARNING('Found a valid CMVS file in: %s' % tree.pmvs_visdat)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'OpenSfM')

        log.ODM_INFO('Running ODM OpenSfM Cell - Finished')
        return ecto.OK if args.end_with != 'opensfm' else ecto.QUIT
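
The rerun_cell expression above recurs in nearly every cell; a standalone sketch of the same predicate (should_rerun is a hypothetical helper name, not part of ODM):

def should_rerun(args, cell_name):
    # True when the user asked to rerun this cell, all cells,
    # or a range of cells that includes this one.
    return (args.rerun is not None and args.rerun == cell_name) or \
           args.rerun_all or \
           (args.rerun_from is not None and cell_name in args.rerun_from)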
Example n. 47
0
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM Orthophoto Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree

        # define paths and create working directories
        system.mkdir_p(tree.odm_orthophoto)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'odm_orthophoto') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'odm_orthophoto' in args.rerun_from)

        if not io.file_exists(tree.odm_orthophoto_file) or rerun_cell:

            # odm_orthophoto definitions
            kwargs = {
                'bin': context.odm_modules_path,
                'model_geo': tree.odm_georeferencing_model_obj_geo,
                'log': tree.odm_orthophoto_log,
                'ortho': tree.odm_orthophoto_file,
                'corners': tree.odm_orthophoto_corners,
                'res': self.params.resolution
            }

            # run odm_orthophoto
            system.run('{bin}/odm_orthophoto -inputFile {model_geo} '
                       '-logFile {log} -outputFile {ortho} -resolution {res} '
                       '-outputCornerFile {corners}'.format(**kwargs))

            # Create georeferenced GeoTiff
            geotiffcreated = False
            georef = types.ODM_GeoRef()
            # creates the coord refs # TODO I don't want to have to do this twice- after odm_georef
            georef.parse_coordinate_system(tree.odm_georeferencing_coords)

            if georef.epsg and georef.utm_east_offset and georef.utm_north_offset:
                ulx = uly = lrx = lry = 0.0
                with open(tree.odm_orthophoto_corners) as f:
                    for lineNumber, line in enumerate(f):
                        if lineNumber == 0:
                            tokens = line.split(' ')
                            if len(tokens) == 4:
                                ulx = float(tokens[0]) + \
                                    float(georef.utm_east_offset)
                                lry = float(tokens[1]) + \
                                    float(georef.utm_north_offset)
                                lrx = float(tokens[2]) + \
                                    float(georef.utm_east_offset)
                                uly = float(tokens[3]) + \
                                    float(georef.utm_north_offset)
                log.ODM_INFO('Creating GeoTIFF')

                kwargs = {
                    'ulx': ulx,
                    'uly': uly,
                    'lrx': lrx,
                    'lry': lry,
                    'epsg': georef.epsg,
                    'png': tree.odm_orthophoto_file,
                    'tiff': tree.odm_orthophoto_tif,
                    'log': tree.odm_orthophoto_tif_log
                }

                system.run('gdal_translate -a_ullr {ulx} {uly} {lrx} {lry} '
                           '-a_srs \"EPSG:{epsg}\" {png} {tiff} > {log}'.format(**kwargs))
                geotiffcreated = True
            if not geotiffcreated:
                log.ODM_WARNING('No geo-referenced orthophoto created due '
                                'to missing geo-referencing or corner coordinates.')

        else:
            log.ODM_WARNING('Found a valid orthophoto in: %s' % tree.odm_orthophoto_file)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Orthophoto')

        log.ODM_INFO('Running ODM Orthophoto Cell - Finished')
        return ecto.OK if args.end_with != 'odm_orthophoto' else ecto.QUIT
Example n. 48
0
def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56'], gapfill=True,
                outdir='', resolution=0.1, max_workers=1, max_tile_size=2048,
                verbose=False, decimation=None):
    """ Create DEM from multiple radii, and optionally gapfill """
    global error
    error = None

    start = datetime.now()

    if not os.path.exists(outdir):
        log.ODM_INFO("Creating %s" % outdir)
        os.mkdir(outdir)

    extent = point_cloud.get_extent(input_point_cloud)
    log.ODM_INFO("Point cloud bounds are [minx: %s, maxx: %s] [miny: %s, maxy: %s]" % (extent['minx'], extent['maxx'], extent['miny'], extent['maxy']))
    ext_width = extent['maxx'] - extent['minx']
    ext_height = extent['maxy'] - extent['miny']

    final_dem_resolution = (int(math.ceil(ext_width / float(resolution))),
                            int(math.ceil(ext_height / float(resolution))))
    final_dem_pixels = final_dem_resolution[0] * final_dem_resolution[1]

    num_splits = int(max(1, math.ceil(math.log(math.ceil(final_dem_pixels / float(max_tile_size * max_tile_size)))/math.log(2))))
    num_tiles = num_splits * num_splits
    log.ODM_INFO("DEM resolution is %s, max tile size is %s, will split DEM generation into %s tiles" % (final_dem_resolution, max_tile_size, num_tiles))

    tile_bounds_width = ext_width / float(num_splits)
    tile_bounds_height = ext_height / float(num_splits)

    tiles = []

    for r in radiuses:
        minx = extent['minx']

        for x in range(num_splits):
            miny = extent['miny']
            if x == num_splits - 1:
                maxx = extent['maxx']
            else:
                maxx = minx + tile_bounds_width

            for y in range(num_splits):
                if y == num_splits - 1:
                    maxy = extent['maxy']
                else:
                    maxy = miny + tile_bounds_height

                filename = os.path.join(os.path.abspath(outdir), '%s_r%s_x%s_y%s.tif' % (dem_type, r, x, y))

                tiles.append({
                    'radius': r,
                    'bounds': {
                        'minx': minx,
                        'maxx': maxx,
                        'miny': miny,
                        'maxy': maxy 
                    },
                    'filename': filename
                })

                miny = maxy
            minx = maxx

    # Sort tiles by increasing radius
    tiles.sort(key=lambda t: float(t['radius']), reverse=True)

    def process_one(q):
        log.ODM_INFO("Generating %s (%s, radius: %s, resolution: %s)" % (q['filename'], output_type, q['radius'], resolution))
        
        d = pdal.json_gdal_base(q['filename'], output_type, q['radius'], resolution, q['bounds'])

        if dem_type == 'dsm':
            d = pdal.json_add_classification_filter(d, 2, equality='max')
        elif dem_type == 'dtm':
            d = pdal.json_add_classification_filter(d, 2)

        if decimation is not None:
            d = pdal.json_add_decimation_filter(d, decimation)

        pdal.json_add_readers(d, [input_point_cloud])
        pdal.run_pipeline(d, verbose=verbose)

    def worker():
        global error

        while True:
            (num, q) = pq.get()
            if q is None or error is not None:
                pq.task_done()
                break

            try:
                process_one(q)
            except Exception as e:
                error = e
            finally:
                pq.task_done()

    if max_workers > 1:
        use_single_thread = False
        pq = queue.PriorityQueue()
        threads = []
        for i in range(max_workers):
            t = threading.Thread(target=worker)
            t.start()
            threads.append(t)

        for pri, t in enumerate(tiles):
            pq.put((pri, t.copy()))

        def stop_workers():
            for i in range(len(threads)):
                pq.put((-1, None))
            for t in threads:
                t.join()

        # block until all tasks are done
        try:
            while pq.unfinished_tasks > 0:
                time.sleep(0.5)
        except KeyboardInterrupt:
            print("CTRL+C terminating...")
            stop_workers()
            sys.exit(1)

        stop_workers()

        if error is not None:
            # Try to reprocess using a single thread
            # in case this was a memory error
            log.ODM_WARNING("DEM processing failed with multiple threads, let's retry with a single thread...")
            use_single_thread = True
    else:
        use_single_thread = True

    if use_single_thread:
        # Boring, single thread processing
        for q in tiles:
            process_one(q)

    output_file = "%s.tif" % dem_type
    output_path = os.path.abspath(os.path.join(outdir, output_file))

    # Verify tile results
    for t in tiles: 
        if not os.path.exists(t['filename']):
            raise Exception("Error creating %s, %s failed to be created" % (output_file, t['filename']))
    
    # Create virtual raster
    vrt_path = os.path.abspath(os.path.join(outdir, "merged.vrt"))
    run('gdalbuildvrt "%s" "%s"' % (vrt_path, '" "'.join(map(lambda t: t['filename'], tiles))))

    geotiff_tmp_path = os.path.abspath(os.path.join(outdir, 'merged.tmp.tif'))
    geotiff_path = os.path.abspath(os.path.join(outdir, 'merged.tif'))

    # Build GeoTIFF
    kwargs = {
        'max_memory': get_max_memory(),
        'threads': max_workers if max_workers else 'ALL_CPUS',
        'vrt': vrt_path,
        'geotiff': geotiff_path,
        'geotiff_tmp': geotiff_tmp_path
    }

    if gapfill:
        # Sometimes, for some reason gdal_fillnodata.py
        # behaves strangely when reading data directly from a .VRT
        # so we need to convert to GeoTIFF first.
        run('gdal_translate '
                '-co NUM_THREADS={threads} '
                '--config GDAL_CACHEMAX {max_memory}% '
                '{vrt} {geotiff_tmp}'.format(**kwargs))

        run('gdal_fillnodata.py '
            '-co NUM_THREADS={threads} '
            '--config GDAL_CACHEMAX {max_memory}% '
            '-b 1 '
            '-of GTiff '
            '{geotiff_tmp} {geotiff}'.format(**kwargs))
    else:
        run('gdal_translate '
                '-co NUM_THREADS={threads} '
                '--config GDAL_CACHEMAX {max_memory}% '
                '{vrt} {geotiff}'.format(**kwargs))

    post_process(geotiff_path, output_path)
    os.remove(geotiff_path)

    if os.path.exists(geotiff_tmp_path): os.remove(geotiff_tmp_path)
    if os.path.exists(vrt_path): os.remove(vrt_path)
    for t in tiles:
        if os.path.exists(t['filename']): os.remove(t['filename'])
    
    log.ODM_INFO('Completed %s in %s' % (output_file, datetime.now() - start))
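
A worked sketch of the tile math in create_dem above, with illustrative numbers: a 1000 m x 1000 m extent at resolution=0.1 yields a 10000 x 10000 px DEM.

import math

final_dem_pixels = 10000 * 10000   # 1e8 pixels (illustrative)
max_tile_size = 2048               # 4194304 pixels per tile
num_splits = int(max(1, math.ceil(
    math.log(math.ceil(final_dem_pixels / float(max_tile_size * max_tile_size))) / math.log(2))))
num_tiles = num_splits * num_splits  # -> 5 splits, 25 tiles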
Example n. 49
0
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM Texturing Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree
        verbose = '-verbose' if self.params.verbose else ''

        # define paths and create working directories
        system.mkdir_p(tree.odm_texturing)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'odm_texturing') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'odm_texturing' in args.rerun_from)

        # Undistort the images (correct radial distortion)
        if not os.path.isdir(tree.odm_texturing_undistorted_image_path) or rerun_cell:
            system.run(' '.join([
                'cd {} &&'.format(tree.opensfm),
                'PYTHONPATH={}:{}'.format(context.pyopencv_path,
                                          context.opensfm_path),
                'python',
                os.path.join(context.odm_modules_src_path,
                             'odm_slam/src/undistort_radial.py'),
                '--output',
                tree.odm_texturing_undistorted_image_path,
                tree.opensfm,
            ]))

            system.run(
                'PYTHONPATH=%s %s/bin/export_bundler %s' %
                (context.pyopencv_path, context.opensfm_path, tree.opensfm))
        else:
            log.ODM_WARNING(
                'Found a valid Bundler file in: %s' %
                (tree.opensfm_reconstruction))

        if not io.file_exists(tree.odm_textured_model_obj) or rerun_cell:
            log.ODM_DEBUG('Writing ODM Textured file in: %s'
                          % tree.odm_textured_model_obj)

            # odm_texturing definitions
            kwargs = {
                'bin': context.odm_modules_path,
                'out_dir': tree.odm_texturing,
                'bundle': tree.opensfm_bundle,
                'imgs_path': tree.odm_texturing_undistorted_image_path,
                'imgs_list': tree.opensfm_bundle_list,
                'model': tree.odm_mesh,
                'log': tree.odm_texuring_log,
                'resize': self.params.resize,
                'resolution': self.params.resolution,
                'size': self.params.size,
                'verbose': verbose
            }

            # run texturing binary
            system.run('{bin}/odm_texturing -bundleFile {bundle} '
                       '-imagesPath {imgs_path} -imagesListPath {imgs_list} '
                       '-inputModelPath {model} -outputFolder {out_dir}/ '
                       '-textureResolution {resolution} -bundleResizedTo {resize} {verbose} '
                       '-textureWithSize {size} -logFile {log}'.format(**kwargs))
        else:
            log.ODM_WARNING('Found a valid ODM Texture file in: %s'
                            % tree.odm_textured_model_obj)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Texturing')

        log.ODM_INFO('Running ODM Texturing Cell - Finished')
        return ecto.OK if args.end_with != 'odm_texturing' else ecto.QUIT
Example n. 50
0
    def create_bounds_geojson(self, pointcloud_path, buffer_distance = 0, decimation_step=40, outlier_radius=20):
        """
        Compute a buffered polygon around the data extents (not just a bounding box)
        of the given point cloud.

        @return filename to GeoJSON containing the polygon
        """
        if not os.path.exists(pointcloud_path):
            log.ODM_WARNING('Point cloud does not exist, cannot generate shapefile bounds {}'.format(pointcloud_path))
            return ''

        # Do basic outlier removal prior to extracting boundary information
        filtered_pointcloud_path = self.path('filtered.las')

        run("pdal translate -i \"{}\" "
            "-o \"{}\" "
            "decimation outlier range "
            "--filters.decimation.step={} "
            "--filters.outlier.method=radius "
            "--filters.outlier.radius={} "
            "--filters.outlier.min_k=2 "
            "--filters.range.limits='Classification![7:7]'".format(pointcloud_path, filtered_pointcloud_path, decimation_step, outlier_radius))

        if not os.path.exists(filtered_pointcloud_path):
            log.ODM_WARNING('Could not filter point cloud, cannot generate shapefile bounds {}'.format(filtered_pointcloud_path))
            return ''

        # Use PDAL to dump boundary information
        # then read the information back

        boundary_file_path = self.path('boundary.json')

        run('pdal info --boundary --filters.hexbin.edge_length=1 --filters.hexbin.threshold=0 {0} > {1}'.format(filtered_pointcloud_path,  boundary_file_path))
        
        pc_geojson_boundary_feature = None

        with open(boundary_file_path, 'r') as f:
            json_f = json.loads(f.read())
            pc_geojson_boundary_feature = json_f['boundary']['boundary_json']

        if pc_geojson_boundary_feature is None: raise RuntimeError("Could not determine point cloud boundaries")

        # Write bounds to GeoJSON
        bounds_geojson_path = self.path('bounds.geojson')
        with open(bounds_geojson_path, "w") as f:
            f.write(json.dumps({
                "type": "FeatureCollection",
                "features": [{
                    "type": "Feature",
                    "geometry": pc_geojson_boundary_feature
                }]
            }))

        # Create a convex hull around the boundary
        # so as to encompass the entire area (no holes)
        driver = ogr.GetDriverByName('GeoJSON')
        ds = driver.Open(bounds_geojson_path, 0) # read-only
        layer = ds.GetLayer()

        # Collect all Geometry
        geomcol = ogr.Geometry(ogr.wkbGeometryCollection)
        for feature in layer:
            geomcol.AddGeometry(feature.GetGeometryRef())

        # Calculate convex hull
        convexhull = geomcol.ConvexHull()

        # If a buffer distance is specified,
        # buffer twice: first shrink by
        # N + 3, then expand the result by 3,
        # so that we get smooth corners. \m/
        BUFFER_SMOOTH_DISTANCE = 3

        if buffer_distance > 0:
            convexhull = convexhull.Buffer(-(buffer_distance + BUFFER_SMOOTH_DISTANCE))
            convexhull = convexhull.Buffer(BUFFER_SMOOTH_DISTANCE)

        # Save to a new file
        bounds_geojson_path = self.path('bounds.geojson')
        if os.path.exists(bounds_geojson_path):
            driver.DeleteDataSource(bounds_geojson_path)

        out_ds = driver.CreateDataSource(bounds_geojson_path)
        layer = out_ds.CreateLayer("convexhull", geom_type=ogr.wkbPolygon)

        feature_def = layer.GetLayerDefn()
        feature = ogr.Feature(feature_def)
        feature.SetGeometry(convexhull)
        layer.CreateFeature(feature)
        feature = None

        # Save and close data sources
        out_ds = ds = None

        # Remove filtered point cloud
        if os.path.exists(filtered_pointcloud_path):
            os.remove(filtered_pointcloud_path)

        return bounds_geojson_path
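
The shrink-then-expand buffering above is a standard trick for rounding corners; a self-contained sketch of the same OGR calls on a hypothetical square:

from osgeo import ogr

poly = ogr.CreateGeometryFromWkt(
    'POLYGON ((0 0, 100 0, 100 100, 0 100, 0 0))')
buffer_distance = 10
SMOOTH = 3
hull = poly.ConvexHull()
hull = hull.Buffer(-(buffer_distance + SMOOTH))  # shrink past the target distance
hull = hull.Buffer(SMOOTH)                       # expand back; corners come out rounded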
Example n. 51
0
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM OpenSfM Cell')

        # get inputs
        tree = inputs.tree
        args = inputs.args
        reconstruction = inputs.reconstruction
        photos = reconstruction.photos

        if not photos:
            log.ODM_ERROR('Not enough photos in photos array to start OpenSfM')
            return ecto.QUIT

        # create working directories
        system.mkdir_p(tree.opensfm)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'opensfm') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'opensfm' in args.rerun_from)

        if args.fast_orthophoto:
            output_file = io.join_paths(tree.opensfm, 'reconstruction.ply')
        elif args.use_opensfm_dense:
            output_file = tree.opensfm_model
        else:
            output_file = tree.opensfm_reconstruction

        # check if reconstruction was done before
        if not io.file_exists(output_file) or rerun_cell:
            # create file list
            list_path = io.join_paths(tree.opensfm, 'image_list.txt')
            has_alt = True
            with open(list_path, 'w') as fout:
                for photo in photos:
                    if not photo.altitude:
                        has_alt = False
                    fout.write('%s\n' % io.join_paths(tree.dataset_raw, photo.filename))

            # create config file for OpenSfM
            config = [
                "use_exif_size: %s" % ('no' if not self.params.use_exif_size else 'yes'),
                "feature_process_size: %s" % self.params.feature_process_size,
                "feature_min_frames: %s" % self.params.feature_min_frames,
                "processes: %s" % self.params.processes,
                "matching_gps_neighbors: %s" % self.params.matching_gps_neighbors,
                "depthmap_method: %s" % args.opensfm_depthmap_method,
                "depthmap_resolution: %s" % args.depthmap_resolution,
                "depthmap_min_patch_sd: %s" % args.opensfm_depthmap_min_patch_sd,
                "depthmap_min_consistent_views: %s" % args.opensfm_depthmap_min_consistent_views,
                "optimize_camera_parameters: %s" % ('no' if self.params.fixed_camera_params else 'yes')
            ]

            if has_alt:
                log.ODM_DEBUG("Altitude data detected, enabling it for GPS alignment")
                config.append("use_altitude_tag: yes")
                config.append("align_method: naive")
            else:
                config.append("align_method: orientation_prior")
                config.append("align_orientation_prior: vertical")

            if args.use_hybrid_bundle_adjustment:
                log.ODM_DEBUG("Enabling hybrid bundle adjustment")
                config.append("bundle_interval: 100")          # Bundle after adding 'bundle_interval' cameras
                config.append("bundle_new_points_ratio: 1.2")  # Bundle when (new points) / (bundled points) > bundle_new_points_ratio
                config.append("local_bundle_radius: 1")        # Max image graph distance for images to be included in local bundle adjustment

            if args.matcher_distance > 0:
                config.append("matching_gps_distance: %s" % self.params.matching_gps_distance)

            if tree.odm_georeferencing_gcp:
                config.append("bundle_use_gcp: yes")
                io.copy(tree.odm_georeferencing_gcp, tree.opensfm)

            # write config file
            log.ODM_DEBUG(config)
            config_filename = io.join_paths(tree.opensfm, 'config.yaml')
            with open(config_filename, 'w') as fout:
                fout.write("\n".join(config))

            # run OpenSfM reconstruction
            matched_done_file = io.join_paths(tree.opensfm, 'matching_done.txt')
            if not io.file_exists(matched_done_file) or rerun_cell:
                system.run('PYTHONPATH=%s %s/bin/opensfm extract_metadata %s' %
                           (context.pyopencv_path, context.opensfm_path, tree.opensfm))
                system.run('PYTHONPATH=%s %s/bin/opensfm detect_features %s' %
                           (context.pyopencv_path, context.opensfm_path, tree.opensfm))
                system.run('PYTHONPATH=%s %s/bin/opensfm match_features %s' %
                           (context.pyopencv_path, context.opensfm_path, tree.opensfm))
                with open(matched_done_file, 'w') as fout:
                    fout.write("Matching done!\n")
            else:
                log.ODM_WARNING('Found a feature matching done progress file in: %s' %
                                matched_done_file)

            if not io.file_exists(tree.opensfm_tracks) or rerun_cell:
                system.run('PYTHONPATH=%s %s/bin/opensfm create_tracks %s' %
                           (context.pyopencv_path, context.opensfm_path, tree.opensfm))
            else:
                log.ODM_WARNING('Found a valid OpenSfM tracks file in: %s' %
                                tree.opensfm_tracks)

            if not io.file_exists(tree.opensfm_reconstruction) or rerun_cell:
                system.run('PYTHONPATH=%s %s/bin/opensfm reconstruct %s' %
                           (context.pyopencv_path, context.opensfm_path, tree.opensfm))
            else:
                log.ODM_WARNING('Found a valid OpenSfM reconstruction file in: %s' %
                                tree.opensfm_reconstruction)

            # Check that a reconstruction file has been created
            if not io.file_exists(tree.opensfm_reconstruction):
                log.ODM_ERROR("The program could not process this dataset using the current settings. "
                                "Check that the images have enough overlap, "
                                "that there are enough recognizable features "
                                "and that the images are in focus. "
                                "You could also try to increase the --min-num-features parameter."
                                "The program will now exit.")
                sys.exit(1)


            # Always export VisualSFM's reconstruction and undistort images
            # as we'll use these for texturing (after GSD estimation and resizing)
            if not args.ignore_gsd:
                image_scale = gsd.image_scale_factor(args.orthophoto_resolution, tree.opensfm_reconstruction)
            else:
                image_scale = 1.0

            if not io.file_exists(tree.opensfm_reconstruction_nvm) or rerun_cell:
                system.run('PYTHONPATH=%s %s/bin/opensfm export_visualsfm --image_extension png --scale_focal %s %s' %
                            (context.pyopencv_path, context.opensfm_path, image_scale, tree.opensfm))
            else:
                log.ODM_WARNING('Found a valid OpenSfM NVM reconstruction file in: %s' %
                                tree.opensfm_reconstruction_nvm)

            # These will be used for texturing
            system.run('PYTHONPATH=%s %s/bin/opensfm undistort --image_format png --image_scale %s %s' %
                        (context.pyopencv_path, context.opensfm_path, image_scale, tree.opensfm))

            # Skip dense reconstruction if necessary and export
            # sparse reconstruction instead
            if args.fast_orthophoto:
                system.run('PYTHONPATH=%s %s/bin/opensfm export_ply --no-cameras %s' %
                        (context.pyopencv_path, context.opensfm_path, tree.opensfm))
            elif args.use_opensfm_dense:
                # Undistort images at full scale in JPG
                # (TODO: we could compare the size of the PNGs if they are < than depthmap_resolution
                # and use those instead of re-exporting full resolution JPGs)
                system.run('PYTHONPATH=%s %s/bin/opensfm undistort %s' %
                        (context.pyopencv_path, context.opensfm_path, tree.opensfm))
                system.run('PYTHONPATH=%s %s/bin/opensfm compute_depthmaps %s' %
                        (context.pyopencv_path, context.opensfm_path, tree.opensfm))
        else:
            log.ODM_WARNING('Found a valid OpenSfM reconstruction file in: %s' %
                            tree.opensfm_reconstruction)

        # check if reconstruction was exported to bundler before
        if not io.file_exists(tree.opensfm_bundle_list) or rerun_cell:
            # convert back to bundler's format
            system.run('PYTHONPATH=%s %s/bin/export_bundler %s' %
                       (context.pyopencv_path, context.opensfm_path, tree.opensfm))
        else:
            log.ODM_WARNING('Found a valid Bundler file in: %s' %
                            tree.opensfm_reconstruction)

        if reconstruction.georef:
            system.run('PYTHONPATH=%s %s/bin/opensfm export_geocoords %s --transformation --proj \'%s\'' %
                       (context.pyopencv_path, context.opensfm_path, tree.opensfm, reconstruction.georef.projection.srs))

        outputs.reconstruction = reconstruction

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'OpenSfM')

        log.ODM_INFO('Running ODM OpenSfM Cell - Finished')
        return ecto.OK if args.end_with != 'opensfm' else ecto.QUIT
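
For reference, a config.yaml produced by the block above might look like the following; the values are illustrative defaults, not prescriptive:

use_exif_size: no
feature_process_size: 2048
feature_min_frames: 8000
processes: 4
matching_gps_neighbors: 8
depthmap_method: PATCH_MATCH
depthmap_resolution: 640
depthmap_min_patch_sd: 1
depthmap_min_consistent_views: 3
optimize_camera_parameters: yes
use_altitude_tag: yes
align_method: naive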
Example n. 52
0
    def process(self, inputs, outputs):
        """Run the cell."""
        log.ODM_INFO('Running ODM Slam Cell')

        # get inputs
        tree = self.inputs.tree
        args = self.inputs.args
        video = os.path.join(tree.root_path, args.video)
        slam_config = os.path.join(tree.root_path, args.slam_config)

        if not video:
            log.ODM_ERROR('No video provided')
            return ecto.QUIT

        # create working directories
        system.mkdir_p(tree.opensfm)
        system.mkdir_p(tree.pmvs)

        vocabulary = os.path.join(context.orb_slam2_path,
                                  'Vocabulary/ORBvoc.txt')
        orb_slam_cmd = os.path.join(context.odm_modules_path, 'odm_slam')
        trajectory = os.path.join(tree.opensfm, 'KeyFrameTrajectory.txt')
        map_points = os.path.join(tree.opensfm, 'MapPoints.txt')

        # check if we rerun cell or not
        rerun_cell = args.rerun == 'slam'

        # check if slam was run before
        if not io.file_exists(trajectory) or rerun_cell:
            # run slam binary
            system.run(' '.join([
                'cd {} &&'.format(tree.opensfm),
                orb_slam_cmd,
                vocabulary,
                slam_config,
                video,
            ]))
        else:
            log.ODM_WARNING('Found a valid slam trajectory in: {}'.format(
                trajectory))

        # check if trajectory was exported to opensfm before
        if not io.file_exists(tree.opensfm_reconstruction) or rerun_cell:
            # convert slam to opensfm
            system.run(' '.join([
                'cd {} &&'.format(tree.opensfm),
                'PYTHONPATH={}:{}'.format(context.pyopencv_path,
                                          context.opensfm_path),
                'python',
                os.path.join(context.odm_modules_src_path,
                             'odm_slam/src/orb_slam_to_opensfm.py'),
                video,
                trajectory,
                map_points,
                slam_config,
            ]))
            # link opensfm images to resized images
            os.symlink(tree.opensfm + '/images', tree.dataset_resize)
        else:
            log.ODM_WARNING('Found a valid OpenSfM file in: {}'.format(
                tree.opensfm_reconstruction))

        # check if reconstruction was exported to bundler before
        if not io.file_exists(tree.opensfm_bundle_list) or rerun_cell:
            # convert back to bundler's format
            system.run(
                'PYTHONPATH={} {}/bin/export_bundler {}'.format(
                    context.pyopencv_path, context.opensfm_path, tree.opensfm))
        else:
            log.ODM_WARNING(
                'Found a valid Bundler file in: {}'.format(
                    tree.opensfm_reconstruction))

        # check if reconstruction was exported to pmvs before
        if not io.file_exists(tree.pmvs_visdat) or rerun_cell:
            # run PMVS converter
            system.run(
                'PYTHONPATH={} {}/bin/export_pmvs {} --output {}'.format(
                    context.pyopencv_path, context.opensfm_path, tree.opensfm,
                    tree.pmvs))
        else:
            log.ODM_WARNING('Found a valid CMVS file in: {}'.format(
                tree.pmvs_visdat))

        log.ODM_INFO('Running ODM Slam Cell - Finished')
        return ecto.OK if args.end_with != 'odm_slam' else ecto.QUIT
Example n. 53
0
    def process(self, inputs, outputs):
        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM DEM Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree
        las_model_found = io.file_exists(tree.odm_georeferencing_model_las)
        env_paths = [context.superbuild_bin_path]

        # Just to make sure
        l2d_module_installed = True
        try:
            system.run('l2d_classify --help > /dev/null', env_paths)
        except Exception:
            log.ODM_WARNING('lidar2dems is not installed properly')
            l2d_module_installed = False

        log.ODM_INFO('Create DSM: ' + str(args.dsm))
        log.ODM_INFO('Create DTM: ' + str(args.dtm))
        log.ODM_INFO('DEM input file {0} found: {1}'.format(tree.odm_georeferencing_model_las, str(las_model_found)))

        # Do we need to process anything here?
        if (args.dsm or args.dtm) and las_model_found and l2d_module_installed:

            # define paths and create working directories
            odm_dem_root = tree.path('odm_dem')
            system.mkdir_p(odm_dem_root)

            dsm_output_filename = os.path.join(odm_dem_root, 'dsm.tif')
            dtm_output_filename = os.path.join(odm_dem_root, 'dtm.tif')

            # check if we rerun cell or not
            rerun_cell = (args.rerun is not None and
                          args.rerun == 'odm_dem') or \
                         (args.rerun_all) or \
                         (args.rerun_from is not None and
                          'odm_dem' in args.rerun_from)

            if (args.dtm and not io.file_exists(dtm_output_filename)) or \
                (args.dsm and not io.file_exists(dsm_output_filename)) or \
                rerun_cell:

                # Extract boundaries and srs of point cloud
                summary_file_path = os.path.join(odm_dem_root, 'odm_georeferenced_model.summary.json')
                boundary_file_path = os.path.join(odm_dem_root, 'odm_georeferenced_model.boundary.json')

                system.run('pdal info --summary {0} > {1}'.format(tree.odm_georeferencing_model_las, summary_file_path), env_paths)
                system.run('pdal info --boundary {0} > {1}'.format(tree.odm_georeferencing_model_las,  boundary_file_path), env_paths)

                pc_proj4 = ""
                pc_geojson_boundary_feature = None

                with open(summary_file_path, 'r') as f:
                    json_f = json.loads(f.read())
                    pc_proj4 = json_f['summary']['srs']['proj4']

                with open(boundary_file_path, 'r') as f:
                    json_f = json.loads(f.read())
                    pc_geojson_boundary_feature = json_f['boundary']['boundary_json']

                # Write bounds to GeoJSON
                bounds_geojson_path = os.path.join(odm_dem_root, 'odm_georeferenced_model.bounds.geojson')
                with open(bounds_geojson_path, "w") as f:
                    f.write(json.dumps({
                        "type": "FeatureCollection",
                        "features": [{
                            "type": "Feature",
                            "geometry": pc_geojson_boundary_feature
                        }]
                    }))

                bounds_shapefile_path = os.path.join(odm_dem_root, 'bounds.shp')

                # Convert bounds to Shapefile
                kwargs = {
                    'input': bounds_geojson_path,
                    'output': bounds_shapefile_path,
                    'proj4': pc_proj4
                }
                system.run('ogr2ogr -overwrite -a_srs "{proj4}" {output} {input}'.format(**kwargs))

                # Process with lidar2dems
                terrain_params_map = {
                    'flatnonforest': (1, 3), 
                    'flatforest': (1, 2), 
                    'complexnonforest': (5, 2), 
                    'complexforest': (10, 2)
                }
                terrain_params = terrain_params_map[args.dem_terrain_type.lower()]             

                kwargs = {
                    'verbose': '-v' if self.params.verbose else '',
                    'slope': terrain_params[0],
                    'cellsize': terrain_params[1],
                    'outdir': odm_dem_root,
                    'site': bounds_shapefile_path
                }

                l2d_params = '--slope {slope} --cellsize {cellsize} ' \
                             '{verbose} ' \
                             '-o -s {site} ' \
                             '--outdir {outdir}'.format(**kwargs)

                approximate = '--approximate' if args.dem_approximate else ''

                # Classify only if we need a DTM
                run_classification = args.dtm

                if run_classification:
                    system.run('l2d_classify {0} --decimation {1} '
                               '{2} --initialDistance {3} {4}'.format(
                        l2d_params, args.dem_decimation, approximate, 
                        args.dem_initial_distance, tree.odm_georeferencing), env_paths)
                else:
                    log.ODM_INFO("Will skip classification, only DSM is needed")
                    copyfile(tree.odm_georeferencing_model_las, os.path.join(odm_dem_root, 'bounds-0_l2d_s{slope}c{cellsize}.las'.format(**kwargs)))

                products = []
                if args.dsm: products.append('dsm') 
                if args.dtm: products.append('dtm')

                radius_steps = [args.dem_resolution]
                for _ in range(args.dem_gapfill_steps - 1):
                    radius_steps.append(radius_steps[-1] * 3) # 3 is arbitrary, maybe there's a better value?

                for product in products:
                    demargs = {
                        'product': product,
                        'indir': odm_dem_root,
                        'l2d_params': l2d_params,
                        'maxsd': args.dem_maxsd,
                        'maxangle': args.dem_maxangle,
                        'resolution': args.dem_resolution,
                        'radius_steps': ' '.join(map(str, radius_steps)),
                        'gapfill': '--gapfill' if args.dem_gapfill_steps > 0 else '',
                        
                        # If we didn't run a classification, we should pass the decimate parameter here
                        'decimation': '--decimation {0}'.format(args.dem_decimation) if not run_classification else ''
                    }

                    system.run('l2d_dems {product} {indir} {l2d_params} '
                               '--maxsd {maxsd} --maxangle {maxangle} '
                               '--resolution {resolution} --radius {radius_steps} '
                               '{decimation} '
                               '{gapfill} '.format(**demargs), env_paths)

                    # Rename final output
                    if product == 'dsm':
                        os.rename(os.path.join(odm_dem_root, 'bounds-0_dsm.idw.tif'), dsm_output_filename)
                    elif product == 'dtm':
                        os.rename(os.path.join(odm_dem_root, 'bounds-0_dtm.idw.tif'), dtm_output_filename)

            else:
                log.ODM_WARNING('Found existing outputs in: %s' % odm_dem_root)
        else:
            log.ODM_WARNING('DEM will not be generated')

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Dem')

        log.ODM_INFO('Running ODM DEM Cell - Finished')
        return ecto.OK if args.end_with != 'odm_dem' else ecto.QUIT
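
A quick sketch of the radius_steps progression above: with --dem-resolution 0.1 and --dem-gapfill-steps 3 (illustrative values), each gapfill pass triples the search radius.

radius_steps = [0.1]
for _ in range(3 - 1):
    radius_steps.append(radius_steps[-1] * 3)
# radius_steps -> [0.1, 0.3, 0.9] (up to float rounding)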
Example n. 54
0
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM Orthophoto Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree
        verbose = '-verbose' if self.params.verbose else ''

        # define paths and create working directories
        system.mkdir_p(tree.odm_orthophoto)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'odm_orthophoto') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'odm_orthophoto' in args.rerun_from)

        if not io.file_exists(tree.odm_orthophoto_file) or rerun_cell:

            # odm_orthophoto definitions
            kwargs = {
                'bin': context.odm_modules_path,
                'log': tree.odm_orthophoto_log,
                'ortho': tree.odm_orthophoto_file,
                'corners': tree.odm_orthophoto_corners,
                'res': self.params.resolution,
                'verbose': verbose
            }

            kwargs['model_geo'] = tree.odm_georeferencing_model_obj_geo \
                if io.file_exists(tree.odm_georeferencing_coords) \
                else tree.odm_textured_model_obj


            # run odm_orthophoto
            system.run('{bin}/odm_orthophoto -inputFile {model_geo} '
                       '-logFile {log} -outputFile {ortho} -resolution {res} {verbose} '
                       '-outputCornerFile {corners}'.format(**kwargs))

            if not io.file_exists(tree.odm_georeferencing_coords):
                log.ODM_WARNING('No coordinates file. A georeferenced raster '
                                'will not be created')
            else:
                # Create georeferenced GeoTiff
                geotiffcreated = False
                georef = types.ODM_GeoRef()
                # creates the coord refs # TODO I don't want to have to do this twice- after odm_georef
                georef.parse_coordinate_system(tree.odm_georeferencing_coords)

                if georef.epsg and georef.utm_east_offset and georef.utm_north_offset:
                    ulx = uly = lrx = lry = 0.0
                    with open(tree.odm_orthophoto_corners) as f:
                        for lineNumber, line in enumerate(f):
                            if lineNumber == 0:
                                tokens = line.split(' ')
                                if len(tokens) == 4:
                                    ulx = float(tokens[0]) + \
                                        float(georef.utm_east_offset)
                                    lry = float(tokens[1]) + \
                                        float(georef.utm_north_offset)
                                    lrx = float(tokens[2]) + \
                                        float(georef.utm_east_offset)
                                    uly = float(tokens[3]) + \
                                        float(georef.utm_north_offset)
                    log.ODM_INFO('Creating GeoTIFF')

                    kwargs = {
                        'ulx': ulx,
                        'uly': uly,
                        'lrx': lrx,
                        'lry': lry,
                        'tiled': '' if self.params.no_tiled else '-co TILED=yes ',
                        'compress': self.params.compress,
                        'predictor': '-co PREDICTOR=2 ' if self.params.compress in
                                                           ['LZW', 'DEFLATE'] else '',
                        'epsg': georef.epsg,
                        't_srs': self.params.t_srs or "EPSG:{0}".format(georef.epsg),
                        'bigtiff': self.params.bigtiff,
                        'png': tree.odm_orthophoto_file,
                        'tiff': tree.odm_orthophoto_tif,
                        'log': tree.odm_orthophoto_tif_log
                    }

                    system.run('gdal_translate -a_ullr {ulx} {uly} {lrx} {lry} '
                               '{tiled} '
                               '-co BIGTIFF={bigtiff} '
                               '-co COMPRESS={compress} '
                               '{predictor} '
                               '-co BLOCKXSIZE=512 '
                               '-co BLOCKYSIZE=512 '
                               '-co NUM_THREADS=ALL_CPUS '
                               '-a_srs \"EPSG:{epsg}\" '
                               '{png} {tiff} > {log}'.format(**kwargs))

                    if self.params.build_overviews:
                        log.ODM_DEBUG("Building Overviews")
                        kwargs = {
                            'orthophoto': tree.odm_orthophoto_tif,
                            'log': tree.odm_orthophoto_gdaladdo_log
                        }
                        # Run gdaladdo
                        system.run('gdaladdo -ro -r average '
                                   '--config BIGTIFF_OVERVIEW IF_SAFER '
                                   '--config COMPRESS_OVERVIEW JPEG '
                                   '{orthophoto} 2 4 8 16 > {log}'.format(**kwargs))

                    geotiffcreated = True
                if not geotiffcreated:
                    log.ODM_WARNING('No geo-referenced orthophoto created due '
                                    'to missing geo-referencing or corner coordinates.')

        else:
            log.ODM_WARNING('Found a valid orthophoto in: %s' % tree.odm_orthophoto_file)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Orthophoto')

        log.ODM_INFO('Running ODM Orthophoto Cell - Finished')
        return ecto.OK if args.end_with != 'odm_orthophoto' else ecto.QUIT
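
A worked sketch of the corner arithmetic above: the corners file stores local coordinates, which become UTM by adding the stored offsets. All numbers here are hypothetical.

line = '-250.0 -180.0 250.0 180.0'   # hypothetical corners line
utm_east_offset, utm_north_offset = 435000.0, 5613000.0
tokens = line.split(' ')
ulx = float(tokens[0]) + utm_east_offset   # 434750.0
lry = float(tokens[1]) + utm_north_offset  # 5612820.0
lrx = float(tokens[2]) + utm_east_offset   # 435250.0
uly = float(tokens[3]) + utm_north_offset  # 5613180.0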
Example n. 55
0
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM Georeferencing Cell')

        # get inputs
        args = inputs.args
        tree = inputs.tree
        reconstruction = inputs.reconstruction
        gcpfile = tree.odm_georeferencing_gcp
        doPointCloudGeo = True
        transformPointCloud = True
        verbose = '-verbose' if self.params.verbose else ''
        geo_ref = reconstruction.georef

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'odm_georeferencing') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'odm_georeferencing' in args.rerun_from)

        runs = [{
            'georeferencing_dir': tree.odm_georeferencing,
            'texturing_dir': tree.odm_texturing,
            'model': os.path.join(tree.odm_texturing, tree.odm_textured_model_obj)
        }]

        if args.skip_3dmodel:
            runs = []

        if not args.use_3dmesh:
            # Make sure 2.5D mesh is georeferenced before the 3D mesh
            # Because it will be used to calculate a transform
            # for the point cloud. If we use the 3D model transform,
            # DEMs and orthophoto might not align!
            runs.insert(0, {
                    'georeferencing_dir': tree.odm_25dgeoreferencing,
                    'texturing_dir': tree.odm_25dtexturing,
                    'model': os.path.join(tree.odm_25dtexturing, tree.odm_textured_model_obj)
                })

        for r in runs:
            odm_georeferencing_model_obj_geo = os.path.join(r['texturing_dir'], tree.odm_georeferencing_model_obj_geo)
            odm_georeferencing_log = os.path.join(r['georeferencing_dir'], tree.odm_georeferencing_log)
            odm_georeferencing_transform_file = os.path.join(r['georeferencing_dir'], tree.odm_georeferencing_transform_file)
            odm_georeferencing_model_txt_geo_file = os.path.join(r['georeferencing_dir'], tree.odm_georeferencing_model_txt_geo)

            if not io.file_exists(odm_georeferencing_model_obj_geo) or \
               not io.file_exists(tree.odm_georeferencing_model_laz) or rerun_cell:

                # odm_georeference definitions
                kwargs = {
                    'bin': context.odm_modules_path,
                    'input_pc_file': tree.filtered_point_cloud,
                    'bundle': tree.opensfm_bundle,
                    'imgs': tree.dataset_raw,
                    'imgs_list': tree.opensfm_bundle_list,
                    'model': r['model'],
                    'log': odm_georeferencing_log,
                    'input_trans_file': tree.opensfm_transformation,
                    'transform_file': odm_georeferencing_transform_file,
                    'coords': tree.odm_georeferencing_coords,
                    'output_pc_file': tree.odm_georeferencing_model_laz,
                    'geo_sys': odm_georeferencing_model_txt_geo_file,
                    'model_geo': odm_georeferencing_model_obj_geo,
                    'gcp': gcpfile,
                    'verbose': verbose
                }

                if transformPointCloud:
                    kwargs['pc_params'] = '-inputPointCloudFile {input_pc_file} -outputPointCloudFile {output_pc_file}'.format(**kwargs)

                    if geo_ref and geo_ref.projection and geo_ref.projection.srs:
                        kwargs['pc_params'] += ' -outputPointCloudSrs %s' % pipes.quote(geo_ref.projection.srs)
                    else:
                        log.ODM_WARNING('NO SRS: The output point cloud will not have a SRS.')
                else:
                    kwargs['pc_params'] = ''
 
                # Check to see if the GCP file exists

                if not self.params.use_exif and (self.params.gcp_file or tree.odm_georeferencing_gcp):
                    log.ODM_INFO('Found %s' % gcpfile)
                    try:
                        system.run('{bin}/odm_georef -bundleFile {bundle} -imagesPath {imgs} -imagesListPath {imgs_list} '
                                   '-inputFile {model} -outputFile {model_geo} '
                                   '{pc_params} {verbose} '
                                   '-logFile {log} -outputTransformFile {transform_file} -georefFileOutputPath {geo_sys} -gcpFile {gcp} '
                                   '-outputCoordFile {coords}'.format(**kwargs))
                    except Exception:
                        log.ODM_EXCEPTION('Georeferencing failed. ')
                        return ecto.QUIT
                elif io.file_exists(tree.opensfm_transformation) and io.file_exists(tree.odm_georeferencing_coords):
                    log.ODM_INFO('Running georeferencing with OpenSfM transformation matrix')
                    system.run('{bin}/odm_georef -bundleFile {bundle} -inputTransformFile {input_trans_file} -inputCoordFile {coords} '
                               '-inputFile {model} -outputFile {model_geo} '
                               '{pc_params} {verbose} '
                               '-logFile {log} -outputTransformFile {transform_file} -georefFileOutputPath {geo_sys}'.format(**kwargs))
                elif io.file_exists(tree.odm_georeferencing_coords):
                    log.ODM_INFO('Running georeferencing with generated coords file.')
                    system.run('{bin}/odm_georef -bundleFile {bundle} -inputCoordFile {coords} '
                               '-inputFile {model} -outputFile {model_geo} '
                               '{pc_params} {verbose} '
                               '-logFile {log} -outputTransformFile {transform_file} -georefFileOutputPath {geo_sys}'.format(**kwargs))
                else:
                    log.ODM_WARNING('Georeferencing failed. Make sure your '
                                    'photos have geotags in the EXIF or you have '
                                    'provided a GCP file. ')
                    doPointCloudGeo = False # skip the rest of the georeferencing

                if doPointCloudGeo:
                    # update images metadata
                    geo_ref.extract_offsets(odm_georeferencing_model_txt_geo_file)
                    reconstruction.georef = geo_ref

                    # XYZ point cloud output
                    if args.pc_csv:
                        log.ODM_INFO("Creating geo-referenced CSV file (XYZ format)")
                        
                        system.run("pdal translate -i \"{}\" "
                            "-o \"{}\" "
                            "--writers.text.format=csv "
                            "--writers.text.order=\"X,Y,Z\" "
                            "--writers.text.keep_unspecified=false ".format(
                                tree.odm_georeferencing_model_laz,
                                tree.odm_georeferencing_xyz_file))
                    
                    # LAS point cloud output
                    if args.pc_las:
                        log.ODM_INFO("Creating geo-referenced LAS file")
                        
                        system.run("pdal translate -i \"{}\" "
                            "-o \"{}\" ".format(
                                tree.odm_georeferencing_model_laz,
                                tree.odm_georeferencing_model_las))
                    
                    if args.crop > 0:
                        log.ODM_INFO("Calculating cropping area and generating bounds shapefile from point cloud")
                        cropper = Cropper(tree.odm_georeferencing, 'odm_georeferenced_model')
                        
                        decimation_step = 40 if args.fast_orthophoto or args.use_opensfm_dense else 90
                        
                        # More aggressive decimation for large datasets
                        if not args.fast_orthophoto:
                            decimation_step *= int(len(reconstruction.photos) / 1000) + 1
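                            # e.g. with 2500 photos the step is multiplied
                            # by int(2500 / 1000) + 1 = 3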

                        cropper.create_bounds_shapefile(tree.odm_georeferencing_model_laz, args.crop, 
                                                    decimation_step=decimation_step,
                                                    outlier_radius=20 if args.fast_orthophoto else 2)

                    # Do not execute a second time, since
                    # we might be doing georeferencing for
                    # multiple models (3D, 2.5D, ...)
                    doPointCloudGeo = False
                    transformPointCloud = False
            else:
                log.ODM_WARNING('Found a valid georeferenced model in: %s'
                                % tree.odm_georeferencing_model_laz)

        outputs.reconstruction = reconstruction

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Georeferencing')

        log.ODM_INFO('Running ODM Georeferencing Cell - Finished')
        return ecto.OK if args.end_with != 'odm_georeferencing' else ecto.QUIT
Example no. 56
0
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running MVS Texturing Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree

        # define paths and create working directories
        system.mkdir_p(tree.odm_texturing)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'mvs_texturing') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'mvs_texturing' in args.rerun_from)

        if not io.file_exists(tree.odm_textured_model_obj) or rerun_cell:
            log.ODM_DEBUG('Writing MVS Textured file in: %s'
                          % tree.odm_textured_model_obj)
            
            
            # Format arguments to fit Mvs-Texturing app
            skipGeometricVisibilityTest = ""
            skipGlobalSeamLeveling = ""
            skipLocalSeamLeveling = ""
            skipHoleFilling = ""
            keepUnseenFaces = ""
            
            if (self.params.skip_vis_test):
                skipGeometricVisibilityTest = "--skip_geometric_visibility_test"
            if (self.params.skip_glob_seam_leveling):
                skipGlobalSeamLeveling = "--skip_global_seam_leveling"
            if (self.params.skip_loc_seam_leveling):
                skipLocalSeamLeveling = "--skip_local_seam_leveling"
            if (self.params.skip_hole_fill):
                skipHoleFilling = "--skip_hole_filling"
            if (self.params.keep_unseen_faces):
                keepUnseenFaces = "--keep_unseen_faces"

            # mvstex definitions
            kwargs = {
                'bin': context.mvstex_path,
                'out_dir': io.join_paths(tree.odm_texturing, "odm_textured_model"),
                'pmvs_folder': tree.pmvs_rec_path,
                'nvm_file': io.join_paths(tree.pmvs_rec_path, "nvmCams.nvm"),
                'model': tree.odm_mesh,
                'dataTerm': self.params.data_term,
                'outlierRemovalType': self.params.outlier_rem_type,
                'skipGeometricVisibilityTest': skipGeometricVisibilityTest,
                'skipGlobalSeamLeveling': skipGlobalSeamLeveling,
                'skipLocalSeamLeveling': skipLocalSeamLeveling,
                'skipHoleFilling': skipHoleFilling,
                'keepUnseenFaces': keepUnseenFaces,
                'toneMapping': self.params.tone_mapping
            }

            if not args.use_pmvs:
                kwargs['nvm_file'] = io.join_paths(tree.opensfm,
                                                   "reconstruction.nvm")
            else:
                log.ODM_DEBUG('Generating .nvm file from pmvs output: %s'
                              % '{nvm_file}'.format(**kwargs))

                # Create .nvm camera file.
                pmvs2nvmcams.run('{pmvs_folder}'.format(**kwargs),
                                 '{nvm_file}'.format(**kwargs))

            # run texturing binary
            system.run('{bin} {nvm_file} {model} {out_dir} '
                       '-d {dataTerm} -o {outlierRemovalType} '
                       '-t {toneMapping} '
                       '{skipGeometricVisibilityTest} '
                       '{skipGlobalSeamLeveling} '
                       '{skipLocalSeamLeveling} '
                       '{skipHoleFilling} '
                       '{keepUnseenFaces}'.format(**kwargs))
        else:
            log.ODM_WARNING('Found a valid ODM Texture file in: %s'
                            % tree.odm_textured_model_obj)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Texturing')

        log.ODM_INFO('Running ODM Texturing Cell - Finished')
        return ecto.OK if args.end_with != 'odm_texturing' else ecto.QUIT
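
A note on the duplicated rerun logic: every cell in this listing rebuilds the same three-branch rerun check by hand. A minimal helper sketch (hypothetical, not part of the ODM codebase) that captures the pattern, assuming args exposes the same rerun, rerun_all and rerun_from attributes used above:

def should_rerun_cell(args, cell_name):
    # True when the user asked to rerun exactly this cell, everything,
    # or a range of stages that includes this one.
    return (args.rerun == cell_name or
            args.rerun_all or
            (args.rerun_from is not None and cell_name in args.rerun_from))

A cell would then call should_rerun_cell(args, 'mvs_texturing') in place of the repeated expression.
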
Example no. 57
0
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running MVS Texturing Cell')

        # get inputs
        args = inputs.args
        tree = inputs.tree
        reconstruction = inputs.reconstruction

        # define paths and create working directories
        system.mkdir_p(tree.odm_texturing)
        if not args.use_3dmesh: system.mkdir_p(tree.odm_25dtexturing)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'mvs_texturing') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'mvs_texturing' in args.rerun_from)

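        # Texture the full 3D mesh and, when --use_3dmesh is not set, also the
        # 2.5D mesh that is later used for the orthophoto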
        runs = [{
            'out_dir': tree.odm_texturing,
            'model': tree.odm_mesh,
            'nadir': False
        }]

        if args.skip_3dmodel:
            runs = []

        if not args.use_3dmesh:
            runs += [{
                    'out_dir': tree.odm_25dtexturing,
                    'model': tree.odm_25dmesh,
                    'nadir': True
                }]

        for r in runs:
            odm_textured_model_obj = os.path.join(r['out_dir'], tree.odm_textured_model_obj)

            if not io.file_exists(odm_textured_model_obj) or rerun_cell:
                log.ODM_DEBUG('Writing MVS Textured file in: %s'
                              % odm_textured_model_obj)

                # Format arguments to fit Mvs-Texturing app
                skipGeometricVisibilityTest = ""
                skipGlobalSeamLeveling = ""
                skipLocalSeamLeveling = ""
                skipHoleFilling = ""
                keepUnseenFaces = ""
                nadir = ""

                if (self.params.skip_vis_test):
                    skipGeometricVisibilityTest = "--skip_geometric_visibility_test"
                if (self.params.skip_glob_seam_leveling):
                    skipGlobalSeamLeveling = "--skip_global_seam_leveling"
                if (self.params.skip_loc_seam_leveling):
                    skipLocalSeamLeveling = "--skip_local_seam_leveling"
                if (self.params.skip_hole_fill):
                    skipHoleFilling = "--skip_hole_filling"
                if (self.params.keep_unseen_faces):
                    keepUnseenFaces = "--keep_unseen_faces"
                if (r['nadir']):
                    nadir = '--nadir_mode'

                # mvstex definitions
                kwargs = {
                    'bin': context.mvstex_path,
                    'out_dir': io.join_paths(r['out_dir'], "odm_textured_model"),
                    'model': r['model'],
                    'dataTerm': self.params.data_term,
                    'outlierRemovalType': self.params.outlier_rem_type,
                    'skipGeometricVisibilityTest': skipGeometricVisibilityTest,
                    'skipGlobalSeamLeveling': skipGlobalSeamLeveling,
                    'skipLocalSeamLeveling': skipLocalSeamLeveling,
                    'skipHoleFilling': skipHoleFilling,
                    'keepUnseenFaces': keepUnseenFaces,
                    'toneMapping': self.params.tone_mapping,
                    'nadirMode': nadir,
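                    # nadirWeight grows exponentially with the argument value,
                    # e.g. texturing_nadir_weight = 16 gives 2**16 - 1 = 65535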
                    'nadirWeight': 2 ** args.texturing_nadir_weight - 1,
                    'nvm_file': io.join_paths(tree.opensfm, "reconstruction.nvm")
                }

                # Make sure tmp directory is empty
                mvs_tmp_dir = os.path.join(r['out_dir'], 'tmp')
                if io.dir_exists(mvs_tmp_dir):
                    log.ODM_INFO("Removing old tmp directory {}".format(mvs_tmp_dir))
                    shutil.rmtree(mvs_tmp_dir)

                # run texturing binary
                system.run('{bin} {nvm_file} {model} {out_dir} '
                           '-d {dataTerm} -o {outlierRemovalType} '
                           '-t {toneMapping} '
                           '{skipGeometricVisibilityTest} '
                           '{skipGlobalSeamLeveling} '
                           '{skipLocalSeamLeveling} '
                           '{skipHoleFilling} '
                           '{keepUnseenFaces} '
                           '{nadirMode} '
                           '-n {nadirWeight}'.format(**kwargs))
            else:
                log.ODM_WARNING('Found a valid ODM Texture file in: %s'
                                % odm_textured_model_obj)

        outputs.reconstruction = reconstruction

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Texturing')

        log.ODM_INFO('Running ODM Texturing Cell - Finished')
        return ecto.OK if args.end_with != 'mvs_texturing' else ecto.QUIT
Example no. 58
0
    def process(self, inputs, outputs):

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM Orthophoto Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree
        reconstruction = inputs.reconstruction
        verbose = '-verbose' if self.params.verbose else ''

        # define paths and create working directories
        system.mkdir_p(tree.odm_orthophoto)

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'odm_orthophoto') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'odm_orthophoto' in args.rerun_from)

        if not io.file_exists(tree.odm_orthophoto_file) or rerun_cell:

            # odm_orthophoto definitions
            kwargs = {
                'bin': context.odm_modules_path,
                'log': tree.odm_orthophoto_log,
                'ortho': tree.odm_orthophoto_file,
                'corners': tree.odm_orthophoto_corners,
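                # cap_resolution appears to return cm/pixel: dividing by 100
                # gives m/pixel, so 'res' is expressed in pixels per meter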
                'res': 1.0 / (gsd.cap_resolution(self.params.resolution, tree.opensfm_reconstruction, ignore_gsd=args.ignore_gsd) / 100.0),
                'verbose': verbose
            }

            # Have geo coordinates?
            georef = reconstruction.georef

            # Check if the georef object is initialized
            # (during a --rerun this might not be)
            # TODO: we should move this to a more central
            # location (perhaps during the dataset initialization)
            if georef and not georef.utm_east_offset:
                georeferencing_dir = tree.odm_georeferencing if args.use_3dmesh and not args.skip_3dmodel else tree.odm_25dgeoreferencing
                odm_georeferencing_model_txt_geo_file = os.path.join(georeferencing_dir, tree.odm_georeferencing_model_txt_geo)

                if io.file_exists(odm_georeferencing_model_txt_geo_file):
                    georef.extract_offsets(odm_georeferencing_model_txt_geo_file)
                else:
                    log.ODM_WARNING('Cannot read UTM offset from {}. An orthophoto will not be generated.'.format(odm_georeferencing_model_txt_geo_file))


            if georef:
                if args.use_3dmesh:
                    kwargs['model_geo'] = os.path.join(tree.odm_texturing, tree.odm_georeferencing_model_obj_geo)
                else:
                    kwargs['model_geo'] = os.path.join(tree.odm_25dtexturing, tree.odm_georeferencing_model_obj_geo)
            else:
                if args.use_3dmesh:
                    kwargs['model_geo'] = os.path.join(tree.odm_texturing, tree.odm_textured_model_obj)
                else:
                    kwargs['model_geo'] = os.path.join(tree.odm_25dtexturing, tree.odm_textured_model_obj)

            # run odm_orthophoto
            system.run('{bin}/odm_orthophoto -inputFile {model_geo} '
                       '-logFile {log} -outputFile {ortho} -resolution {res} {verbose} '
                       '-outputCornerFile {corners}'.format(**kwargs))

            # Create georeferenced GeoTiff
            geotiffcreated = False

            if georef and georef.projection and georef.utm_east_offset and georef.utm_north_offset:
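                # The first line of the corners file holds the four local
                # extents; the stored UTM offsets are added back to recover
                # absolute easting/northing for the GeoTIFF bounds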
                ulx = uly = lrx = lry = 0.0
                with open(tree.odm_orthophoto_corners) as f:
                    for lineNumber, line in enumerate(f):
                        if lineNumber == 0:
                            tokens = line.split(' ')
                            if len(tokens) == 4:
                                ulx = float(tokens[0]) + \
                                    float(georef.utm_east_offset)
                                lry = float(tokens[1]) + \
                                    float(georef.utm_north_offset)
                                lrx = float(tokens[2]) + \
                                    float(georef.utm_east_offset)
                                uly = float(tokens[3]) + \
                                    float(georef.utm_north_offset)
                log.ODM_INFO('Creating GeoTIFF')

                kwargs = {
                    'ulx': ulx,
                    'uly': uly,
                    'lrx': lrx,
                    'lry': lry,
                    'tiled': '' if self.params.no_tiled else '-co TILED=yes ',
                    'compress': self.params.compress,
                    'predictor': '-co PREDICTOR=2 ' if self.params.compress in
                                                       ['LZW', 'DEFLATE'] else '',
                    'proj': georef.projection.srs,
                    'bigtiff': self.params.bigtiff,
                    'png': tree.odm_orthophoto_file,
                    'tiff': tree.odm_orthophoto_tif,
                    'log': tree.odm_orthophoto_tif_log,
                    'max_memory': get_max_memory(),
                    'threads': self.params.max_concurrency
                }

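                # gdal_translate stamps georeferencing onto the raw orthophoto:
                # -a_ullr assigns the corner bounds, -a_srs the projection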
                system.run('gdal_translate -a_ullr {ulx} {uly} {lrx} {lry} '
                           '{tiled} '
                           '-co BIGTIFF={bigtiff} '
                           '-co COMPRESS={compress} '
                           '{predictor} '
                           '-co BLOCKXSIZE=512 '
                           '-co BLOCKYSIZE=512 '
                           '-co NUM_THREADS={threads} '
                           '-a_srs \"{proj}\" '
                           '--config GDAL_CACHEMAX {max_memory}% '
                           '{png} {tiff} > {log}'.format(**kwargs))

                if args.crop > 0:
                    shapefile_path = os.path.join(tree.odm_georeferencing, 'odm_georeferenced_model.bounds.shp')
                    Cropper.crop(shapefile_path, tree.odm_orthophoto_tif, {
                            'TILED': 'NO' if self.params.no_tiled else 'YES',
                            'COMPRESS': self.params.compress,
                            'PREDICTOR': '2' if self.params.compress in ['LZW', 'DEFLATE'] else '1',
                            'BIGTIFF': self.params.bigtiff,
                            'BLOCKXSIZE': 512,
                            'BLOCKYSIZE': 512,
                            'NUM_THREADS': self.params.max_concurrency
                        })

                if self.params.build_overviews:
                    log.ODM_DEBUG("Building Overviews")
                    kwargs = {
                        'orthophoto': tree.odm_orthophoto_tif,
                        'log': tree.odm_orthophoto_gdaladdo_log
                    }
                    # Run gdaladdo
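                    # (levels 2 4 8 16 build 1/2 .. 1/16 resolution pyramids
                    # so viewers can render the orthophoto quickly)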
                    system.run('gdaladdo -ro -r average '
                               '--config BIGTIFF_OVERVIEW IF_SAFER '
                               '--config COMPRESS_OVERVIEW JPEG '
                               '{orthophoto} 2 4 8 16 > {log}'.format(**kwargs))

                geotiffcreated = True
            if not geotiffcreated:
                log.ODM_WARNING('No geo-referenced orthophoto created due '
                                'to missing geo-referencing or corner coordinates.')

        else:
            log.ODM_WARNING('Found a valid orthophoto in: %s' % tree.odm_orthophoto_file)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Orthophoto')

        log.ODM_INFO('Running ODM OrthoPhoto Cell - Finished')
        return ecto.OK if args.end_with != 'odm_orthophoto' else ecto.QUIT
Example no. 59
0
    def process(self, inputs, outputs):
        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running MVE Cell')

        # get inputs
        tree = inputs.tree
        args = inputs.args
        reconstruction = inputs.reconstruction
        photos = reconstruction.photos

        if not photos:
            log.ODM_ERROR('Not enough photos in photos array to start MVE')
            return ecto.QUIT

        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'mve') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'mve' in args.rerun_from)

        # check if reconstruction was done before
        if not io.file_exists(tree.mve_model) or rerun_cell:
            # cleanup if a rerun
            if io.dir_exists(tree.mve_path) and rerun_cell:
                shutil.rmtree(tree.mve_path)

            # make bundle directory
            if not io.file_exists(tree.mve_bundle):
                system.mkdir_p(tree.mve_path)
                system.mkdir_p(io.join_paths(tree.mve_path, 'bundle'))
                io.copy(tree.opensfm_image_list, tree.mve_image_list)
                io.copy(tree.opensfm_bundle, tree.mve_bundle)

            # mve makescene wants the output directory
            # to not exist before execution (otherwise it
            # will prompt the user for confirmation)
            if io.dir_exists(tree.mve):
                shutil.rmtree(tree.mve)

            # run mve makescene
            if not io.dir_exists(tree.mve_views):
                system.run('%s %s %s' % (context.makescene_path, tree.mve_path, tree.mve), env_vars={'OMP_NUM_THREADS': args.max_concurrency})

            # Compute mve output scale based on depthmap_resolution
            max_width = 0
            max_height = 0
            for photo in photos:
                max_width = max(photo.width, max_width)
                max_height = max(photo.height, max_height)

            max_pixels = args.depthmap_resolution * args.depthmap_resolution
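            # Each MVE scale step halves image width and height (a factor of
            # 4 in pixel count), hence the log base 4 below: e.g. 4000x3000
            # images against a 640x640 budget give ratio ~29.3, ceil(log4) = 3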
            if max_width * max_height <= max_pixels:
                mve_output_scale = 0
            else:
                ratio = float(max_width * max_height) / float(max_pixels)
                mve_output_scale = int(math.ceil(math.log(ratio) / math.log(4.0)))

            dmrecon_config = [
                "-s%s" % mve_output_scale,
	            "--progress=silent",
                "--local-neighbors=2",
                "--force",
            ]

            # Run MVE's dmrecon
            log.ODM_INFO('                                                                               ')
            log.ODM_INFO('                                    ,*/**                                      ')
            log.ODM_INFO('                                  ,*@%*/@%*                                    ')
            log.ODM_INFO('                                ,/@%******@&*.                                 ')
            log.ODM_INFO('                              ,*@&*********/@&*                                ')
            log.ODM_INFO('                            ,*@&**************@&*                              ')
            log.ODM_INFO('                          ,/@&******************@&*.                           ')
            log.ODM_INFO('                        ,*@&*********************/@&*                          ')
            log.ODM_INFO('                      ,*@&**************************@&*.                       ')
            log.ODM_INFO('                    ,/@&******************************&&*,                     ')
            log.ODM_INFO('                  ,*&&**********************************@&*.                   ')
            log.ODM_INFO('                ,*@&**************************************@&*.                 ')
            log.ODM_INFO('              ,*@&***************#@@@@@@@@@%****************&&*,               ')
            log.ODM_INFO('            .*&&***************&@@@@@@@@@@@@@@****************@@*.             ')
            log.ODM_INFO('          .*@&***************&@@@@@@@@@@@@@@@@@%****(@@%********@@*.           ')
            log.ODM_INFO('        .*@@***************%@@@@@@@@@@@@@@@@@@@@@#****&@@@@%******&@*,         ')
            log.ODM_INFO('      .*&@****************@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@/*****@@*.       ')
            log.ODM_INFO('    .*@@****************@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@%*************@@*.     ')
            log.ODM_INFO('  .*@@****/***********@@@@@&**(@@@@@@@@@@@@@@@@@@@@@@@#*****************%@*,   ')
            log.ODM_INFO(' */@*******@*******#@@@@%*******/@@@@@@@@@@@@@@@@@@@@********************/@(,  ')
            log.ODM_INFO(' ,*@(********&@@@@@@#**************/@@@@@@@#**(@@&/**********************@&*   ')
            log.ODM_INFO('   *#@/*******************************@@@@@***&@&**********************&@*,    ')
            log.ODM_INFO('     *#@#******************************&@@@***@#*********************&@*,      ')
            log.ODM_INFO('       */@#*****************************@@@************************@@*.        ')
            log.ODM_INFO('         *#@/***************************/@@/*********************%@*,          ')
            log.ODM_INFO('           *#@#**************************#@@%******************%@*,            ')
            log.ODM_INFO('             */@#*************************(@@@@@@@&%/********&@*.              ')
            log.ODM_INFO('               *(@(*********************************/%@@%**%@*,                ')
            log.ODM_INFO('                 *(@%************************************%@**                  ')
            log.ODM_INFO('                   **@%********************************&@*,                    ')
            log.ODM_INFO('                     *(@(****************************%@/*                      ')
            log.ODM_INFO('                       ,(@%************************#@/*                        ')
            log.ODM_INFO('                         ,*@%********************&@/,                          ')
            log.ODM_INFO('                           */@#****************#@/*                            ')
            log.ODM_INFO('                             ,/@&************#@/*                              ')
            log.ODM_INFO('                               ,*@&********%@/,                                ')
            log.ODM_INFO('                                 */@#****(@/*                                  ')
            log.ODM_INFO('                                   ,/@@@@(*                                    ')
            log.ODM_INFO('                                     .**,                                      ')
            log.ODM_INFO('')
            log.ODM_INFO("Running dense reconstruction. This might take a while. Please be patient, the process is not dead or hung.")
            log.ODM_INFO("                              Process is running")
            system.run('%s %s %s' % (context.dmrecon_path, ' '.join(dmrecon_config), tree.mve), env_vars={'OMP_NUM_THREADS': args.max_concurrency})

            scene2pset_config = [
                "-F%s" % mve_output_scale
            ]

            # run scene2pset
            system.run('%s %s "%s" "%s"' % (context.scene2pset_path, ' '.join(scene2pset_config), tree.mve, tree.mve_model), env_vars={'OMP_NUM_THREADS': args.max_concurrency})
        else:
            log.ODM_WARNING('Found a valid MVE reconstruction file in: %s' %
                            tree.mve_model)

        outputs.reconstruction = reconstruction

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'MVE')

        log.ODM_INFO('Running ODM MVE Cell - Finished')
        return ecto.OK if args.end_with != 'mve' else ecto.QUIT
Example no. 60
0
    def process(self, inputs, outputs):

        # find a file in the root directory
        def find(file, dir):
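            # note: os.walk is abandoned after its first iteration, so only
            # the top level of dir is searched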
            for root, dirs, files in os.walk(dir):
                return '/'.join((root, file)) if file in files else None

        # Benchmarking
        start_time = system.now_raw()

        log.ODM_INFO('Running ODM Georeferencing Cell')

        # get inputs
        args = self.inputs.args
        tree = self.inputs.tree
        gcpfile = io.join_paths(tree.root_path, self.params.gcp_file) \
            if self.params.gcp_file else find('gcp_list.txt', tree.root_path)
        geocreated = True
        verbose = '-verbose' if self.params.verbose else ''

        # define paths and create working directories
        system.mkdir_p(tree.odm_georeferencing)

        # In case a GCP file is not provided, try to generate a coordinates
        # file from the image EXIF metadata (via the odm_extract_utm module).
        log.ODM_DEBUG(self.params.gcp_file)
        if not self.params.gcp_file: # and \
        #   not io.file_exists(tree.odm_georeferencing_coords):
            
            log.ODM_WARNING('No coordinates file. '
                            'Generating coordinates file: %s'
                            % tree.odm_georeferencing_coords)

            # odm_georeference definitions
            kwargs = {
                'bin': context.odm_modules_path,
                'imgs': tree.dataset_resize,
                'imgs_list': tree.opensfm_bundle_list,
                'coords': tree.odm_georeferencing_coords,
                'log': tree.odm_georeferencing_utm_log,
                'verbose': verbose
            }

            # run UTM extraction binary
            extract_utm = system.run_and_return('{bin}/odm_extract_utm -imagesPath {imgs}/ '
                       '-imageListFile {imgs_list} -outputCoordFile {coords} {verbose} '
                       '-logFile {log}'.format(**kwargs))

            if extract_utm != '':
                log.ODM_WARNING('Could not generate coordinates file. '
                                'Ignore if there is a GCP file. Error: %s'
                                % extract_utm)


        # check if we rerun cell or not
        rerun_cell = (args.rerun is not None and
                      args.rerun == 'odm_georeferencing') or \
                     (args.rerun_all) or \
                     (args.rerun_from is not None and
                      'odm_georeferencing' in args.rerun_from)

        if not io.file_exists(tree.odm_georeferencing_model_obj_geo) or \
           not io.file_exists(tree.odm_georeferencing_model_ply_geo) or rerun_cell:

            # odm_georeference definitions
            kwargs = {
                'bin': context.odm_modules_path,
                'bundle': tree.opensfm_bundle,
                'imgs': tree.dataset_resize,
                'imgs_list': tree.opensfm_bundle_list,
                'model': tree.odm_textured_model_obj,
                'log': tree.odm_georeferencing_log,
                'coords': tree.odm_georeferencing_coords,
                'pc_geo': tree.odm_georeferencing_model_ply_geo,
                'geo_sys': tree.odm_georeferencing_model_txt_geo,
                'model_geo': tree.odm_georeferencing_model_obj_geo,
                'size': self.params.img_size,
                'gcp': gcpfile,
                'verbose': verbose

            }
            if args.use_opensfm_pointcloud:
                kwargs['pc'] = tree.opensfm_model
            else:
                kwargs['pc'] = tree.pmvs_model

            # Check to see if the GCP file exists

            if not self.params.use_exif and (self.params.gcp_file or find('gcp_list.txt', tree.root_path)):
                log.ODM_INFO('Found %s' % gcpfile)
                try:
                    system.run('{bin}/odm_georef -bundleFile {bundle} -imagesPath {imgs} -imagesListPath {imgs_list} '
                               '-bundleResizedTo {size} -inputFile {model} -outputFile {model_geo} '
                               '-inputPointCloudFile {pc} -outputPointCloudFile {pc_geo} {verbose} '
                               '-logFile {log} -georefFileOutputPath {geo_sys} -gcpFile {gcp} '
                               '-outputCoordFile {coords}'.format(**kwargs))
                except Exception:
                    log.ODM_EXCEPTION('Georeferencing failed. ')
                    return ecto.QUIT
            elif io.file_exists(tree.odm_georeferencing_coords):
                log.ODM_INFO('Running georeferencing with generated coords file.')
                system.run('{bin}/odm_georef -bundleFile {bundle} -inputCoordFile {coords} '
                           '-inputFile {model} -outputFile {model_geo} '
                           '-inputPointCloudFile {pc} -outputPointCloudFile {pc_geo} {verbose} '
                           '-logFile {log} -georefFileOutputPath {geo_sys}'.format(**kwargs))
            else:
                log.ODM_WARNING('Georeferencing failed. Make sure your '
                                'photos have geotags in the EXIF or you have '
                                'provided a GCP file. ')
                geocreated = False # skip the rest of the georeferencing

            if geocreated:
                # update images metadata
                geo_ref = types.ODM_GeoRef()
                geo_ref.parse_coordinate_system(tree.odm_georeferencing_coords)

                for idx, photo in enumerate(self.inputs.photos):
                    geo_ref.utm_to_latlon(tree.odm_georeferencing_latlon, photo, idx)

                # convert ply model to LAS reference system
                geo_ref.convert_to_las(tree.odm_georeferencing_model_ply_geo,
                                       tree.odm_georeferencing_pdal)

                # XYZ point cloud output
                log.ODM_INFO("Creating geo-referenced CSV file (XYZ format, can be used with GRASS to create DEM)")
                with open(tree.odm_georeferencing_xyz_file, "wb") as csvfile:
                    csvfile_writer = csv.writer(csvfile, delimiter=",")
                    reachedpoints = False
                    with open(tree.odm_georeferencing_model_ply_geo) as f:
                        for lineNumber, line in enumerate(f):
                            if reachedpoints:
                                tokens = line.split(" ")
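                                # vertices are stored relative to the UTM
                                # offsets; add them back for absolute coordinates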
                                csv_line = [float(tokens[0])+geo_ref.utm_east_offset,
                                            float(tokens[1])+geo_ref.utm_north_offset,
                                            tokens[2]]
                                csvfile_writer.writerow(csv_line)
                            if line.startswith("end_header"):
                                reachedpoints = True

        else:
            log.ODM_WARNING('Found a valid georeferenced model in: %s'
                            % tree.odm_georeferencing_model_ply_geo)

        if args.time:
            system.benchmark(start_time, tree.benchmarking, 'Georeferencing')

        log.ODM_INFO('Running ODM Georeferencing Cell - Finished')
        return ecto.OK if args.end_with != 'odm_georeferencing' else ecto.QUIT