Example #1
def run_pdaltranslate_smrf(fin, fout, scalar, slope, threshold, window, verbose=False):
    """ Run PDAL translate  """
    cmd = [
        'pdal',
        'translate',
        '-i %s' % fin,
        '-o %s' % fout,
        'smrf',
        '--filters.smrf.scalar=%s' % scalar,
        '--filters.smrf.slope=%s' % slope,
        '--filters.smrf.threshold=%s' % threshold,
        '--filters.smrf.window=%s' % window,
    ]

    if verbose:
        log.ODM_INFO(' '.join(cmd))

    system.run(' '.join(cmd))
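A minimal call sketch follows; the file names and SMRF parameter values are illustrative, not taken from the original code (they mirror the shape of ODM's --smrf-* options):

# Hypothetical usage: classify ground points in a point cloud with SMRF.
# Paths and parameter values below are illustrative only.
run_pdaltranslate_smrf(
    'point_cloud.laz',   # fin: input point cloud
    'classified.laz',    # fout: output with ground classification applied
    1.25,                # scalar: elevation scaling factor
    0.15,                # slope: expected terrain slope
    0.5,                 # threshold: elevation threshold (meters)
    18.0,                # window: maximum window radius (meters)
    verbose=True)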
Example #2
    def process(self, local, done):
        def handle_result(error = None, partial=False):
            done(self, local, error, partial)

        log.ODM_INFO("LRE: About to process %s %s" % (self, 'locally' if local else 'remotely'))
        
        if local:
            self._process_local(handle_result) # Block until complete
        else:
            now = datetime.datetime.now()
            if self.wait_until > now:
                wait_for = (self.wait_until - now).seconds + 1
                log.ODM_DEBUG("LRE: Waiting %s seconds before processing %s" % (wait_for, self))
                time.sleep(wait_for)

            # TODO: we could consider uploading multiple tasks
            # in parallel. But since we are using the same node
            # perhaps this wouldn't be a big speedup.
            self._process_remote(handle_result) # Block until upload is complete
Example #3
def parse_srs_header(header):
    """
    Parse a header coming from GCP or coordinate file
    :param header (str) line
    :return Proj object
    """
    log.ODM_INFO('Parsing SRS header: %s' % header)
    header = header.strip()
    ref = header.split(' ')
    try:
        if ref[0] == 'WGS84' and ref[1] == 'UTM':
            datum = ref[0]
            utm_pole = (ref[2][len(ref[2]) - 1]).upper()
            utm_zone = int(ref[2][:len(ref[2]) - 1])
            
            proj_args = {
                'zone': utm_zone, 
                'datum': datum
            }

            proj4 = '+proj=utm +zone={zone} +datum={datum} +units=m +no_defs=True'
            if utm_pole == 'S':
                proj4 += ' +south=True'

            srs = CRS.from_proj4(proj4.format(**proj_args))
        elif '+proj' in header:
            srs = CRS.from_proj4(header.strip('\''))
        elif header.lower().startswith("epsg:"):
            srs = CRS.from_epsg(header.lower()[5:])
        else:
            raise RuntimeError('Could not parse coordinates. Bad SRS supplied: %s' % header)
    except RuntimeError as e:
        log.ODM_ERROR('Uh oh! There seems to be a problem with your coordinates/GCP file.\n\n'
                            'The line: %s\n\n'
                            'Is not valid. Projections that are valid include:\n'
                            ' - EPSG:*****\n'
                            ' - WGS84 UTM **(N|S)\n'
                            ' - Any valid proj4 string (for example, +proj=utm +zone=32 +north +ellps=WGS84 +datum=WGS84 +units=m +no_defs)\n\n'
                            'Modify your input and try again.' % header)
        raise RuntimeError(e)
    
    return srs
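The three header formats the parser accepts can be exercised as below (a sketch; the CRS class used by the function is pyproj's, as the from_proj4/from_epsg calls suggest, and the values are illustrative):

# Each call returns a CRS object (values illustrative):
parse_srs_header('WGS84 UTM 17N')   # datum + UTM zone and hemisphere
parse_srs_header('EPSG:32617')      # EPSG code
parse_srs_header('+proj=utm +zone=17 +datum=WGS84 +units=m +no_defs')  # proj4 string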
Example #4
def compute_euclidean_map(geotiff_path, output_path, overwrite=False):
    if not os.path.exists(geotiff_path):
        log.ODM_WARNING("Cannot compute euclidean map (file does not exist: %s)" % geotiff_path)
        return

    nodata = -9999
    with rasterio.open(geotiff_path) as f:
        # Prefer the raster's own nodata value; keep -9999 as a fallback
        # when the band does not define one.
        if f.nodatavals[0] is not None:
            nodata = f.nodatavals[0]

    if not os.path.exists(output_path) or overwrite:
        log.ODM_INFO("Computing euclidean distance: %s" % output_path)
        run('gdal_proximity.py "%s" "%s" -values %s' % (geotiff_path, output_path, nodata))

        if os.path.exists(output_path):
            return output_path
        else:
            log.ODM_WARNING("Cannot compute euclidean distance file: %s" % output_path)
    else:
        log.ODM_WARNING("Already found a euclidean distance map: %s" % output_path)
        return output_path
Example #5
File: mesh.py Project: silky/ODM
def dem_to_mesh_gridded(inGeotiff,
                        outPointCloud,
                        maxVertexCount,
                        verbose=False):
    log.ODM_INFO('Creating mesh from DSM: %s' % inGeotiff)

    kwargs = {
        'bin': context.dem2mesh_path,
        'outfile': outPointCloud,
        'infile': inGeotiff,
        'maxVertexCount': maxVertexCount,
        'verbose': '-verbose' if verbose else ''
    }

    system.run('{bin} -inputFile {infile} '
               '-outputFile {outfile} '
               '-maxVertexCount {maxVertexCount} '
               ' {verbose} '.format(**kwargs))

    return outPointCloud
Example #6
    def georeference_with_gps(self,
                              images_path,
                              output_coords_file,
                              rerun=False):
        try:
            if not io.file_exists(output_coords_file) or rerun:
                location.extract_utm_coords(self.photos, images_path,
                                            output_coords_file)
            else:
                log.ODM_INFO("Coordinates file already exist: %s" %
                             output_coords_file)

            self.georef = ODM_GeoRef.FromCoordsFile(output_coords_file)
        except Exception:
            log.ODM_WARNING(
                'Could not generate coordinates file. The orthophoto will not be georeferenced.'
            )

        self.gcp = GCPFile(None)
        return self.georef
Example #7
    def convert_and_undistort(self,
                              rerun=False,
                              imageFilter=None,
                              image_list=None,
                              runId="nominal"):
        log.ODM_INFO("Undistorting %s ..." % self.opensfm_project_path)
        done_flag_file = self.path("undistorted", "%s_done.txt" % runId)

        if not io.file_exists(done_flag_file) or rerun:
            ds = DataSet(self.opensfm_project_path)

            if image_list is not None:
                ds._set_image_list(image_list)

            undistort.run_dataset(ds, "reconstruction.json", 0, None,
                                  "undistorted", imageFilter)

            self.touch(done_flag_file)
        else:
            log.ODM_WARNING("Already undistorted (%s)" % runId)
Example #8
def load_images_database(database_file):
    # Empty is used to create types.ODM_Photo class
    # instances without calling __init__
    class Empty:
        pass

    result = []

    log.ODM_INFO("Loading images database: %s" % database_file)

    with open(database_file, 'r') as f:
        photos_json = json.load(f)
        for photo_json in photos_json:
            p = Empty()
            for k in photo_json:
                setattr(p, k, photo_json[k])
            p.__class__ = types.ODM_Photo
            result.append(p)

    return result
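The Empty/__class__ reassignment above rebuilds instances without running a potentially expensive __init__. The same pattern in isolation (a self-contained sketch, not ODM code):

import json

class Photo:
    def __init__(self, path):
        # Normally expensive: opens the file, reads EXIF, etc.
        raise IOError("requires a real image file")

class _Empty:
    pass

record = json.loads('{"filename": "IMG_0001.JPG", "width": 4000}')
p = _Empty()
for k, v in record.items():
    setattr(p, k, v)
p.__class__ = Photo  # p is now a Photo, but __init__ was never called
print(p.filename, p.width)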
Example #9
def run(cmd, env_paths=[context.superbuild_bin_path], env_vars={}):
    """Run a system command"""
    global running_subprocesses

    log.ODM_INFO('running %s' % cmd)

    env = os.environ.copy()
    if len(env_paths) > 0:
        env["PATH"] = env["PATH"] + ":" + ":".join(env_paths)

    for k in env_vars:
        env[k] = str(env_vars[k])

    p = subprocess.Popen(cmd, shell=True, env=env, preexec_fn=os.setsid)
    running_subprocesses.append(p)
    retcode = p.wait()
    running_subprocesses.remove(p)
    if retcode < 0:
        raise Exception("Child was terminated by signal {}".format(-retcode))
    elif retcode > 0:
        raise Exception("Child returned {}".format(retcode))
Example #10
def run_pipeline(json, verbose=False):
    """ Run PDAL Pipeline with provided JSON """
    if verbose:
        json_print(json)

    # write to temp file
    f, jsonfile = tempfile.mkstemp(suffix='.json')
    if verbose:
        log.ODM_INFO('Pipeline file: %s' % jsonfile)
    os.write(f, jsonlib.dumps(json).encode('utf8'))
    os.close(f)

    cmd = [
        'pdal',
        'pipeline',
        '-i %s' % jsonfile
    ]
    if verbose:
        system.run(' '.join(cmd))
    else:
        system.run(' '.join(cmd) + ' > /dev/null 2>&1')
    os.remove(jsonfile)
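A caller sketch using PDAL's standard pipeline JSON layout (file names illustrative):

# Hypothetical pipeline: keep only ground-classified points.
pipeline = {
    "pipeline": [
        "input.laz",                                                  # reader inferred from extension
        {"type": "filters.range", "limits": "Classification[2:2]"},  # ground points only
        "ground_only.laz"                                             # writer inferred from extension
    ]
}
run_pipeline(pipeline, verbose=True)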
Example #11
    def process_remote(self, done):
        octx = OSFMContext(self.path("opensfm"))
        if not octx.is_feature_matching_done() or not octx.is_reconstruction_done() or self.params['rerun']:
            self.execute_remote_task(
                done,
                seed_files=[
                    "opensfm/exif", "opensfm/camera_models.json",
                    "opensfm/reference_lla.json"
                ],
                seed_touch_files=[
                    "opensfm/split_merge_stop_at_reconstruction.txt"
                ],
                outputs=[
                    "opensfm/matches", "opensfm/features",
                    "opensfm/reconstruction.json", "opensfm/tracks.csv"
                ])
        else:
            log.ODM_INFO(
                "Already processed feature matching and reconstruction for %s"
                % octx.name())
            done()
Example #12
def run(cmd, env_paths=[context.superbuild_bin_path], env_vars={}, packages_paths=context.python_packages_paths):
    """Run a system command"""
    global running_subprocesses

    log.ODM_INFO('running %s' % cmd)
    env = os.environ.copy()

    sep = ":"
    if sys.platform == 'win32':
        sep = ";"

    if len(env_paths) > 0:
        env["PATH"] = env["PATH"] + sep + sep.join(env_paths)
    
    if len(packages_paths) > 0:
        env["PYTHONPATH"] = env.get("PYTHONPATH", "") + sep + sep.join(packages_paths) 
    
    for k in env_vars:
        env[k] = str(env_vars[k])

    p = subprocess.Popen(cmd, shell=True, env=env, start_new_session=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    running_subprocesses.append(p)
    lines = deque()
    for line in io.TextIOWrapper(p.stdout):
        print(line, end="")

        lines.append(line.strip())
        if len(lines) == 11:
            lines.popleft()  # keep only the last 10 lines for the process log

    retcode = p.wait()

    log.logger.log_json_process(cmd, retcode, list(lines))

    running_subprocesses.remove(p)
    if retcode < 0:
        raise SubprocessException("Child was terminated by signal {}".format(-retcode), -retcode)
    elif retcode > 0:
        raise SubprocessException("Child returned {}".format(retcode), retcode)
Example #13
def add_pseudo_georeferencing(geotiff, scale=1.0):
    if not io.file_exists(geotiff):
        log.ODM_WARNING("Cannot add pseudo georeferencing, %s does not exist" %
                        geotiff)
        return

    try:
        log.ODM_INFO(
            "Adding pseudo georeferencing (raster should show up at the equator) to %s"
            % geotiff)

        dst_ds = gdal.Open(geotiff, GA_Update)
        srs = osr.SpatialReference()
        srs.ImportFromProj4(
            '+proj=utm +zone=30 +ellps=WGS84 +datum=WGS84 +units=m +no_defs')
        dst_ds.SetProjection(srs.ExportToWkt())
        dst_ds.SetGeoTransform([0.0, scale, 0.0, 0.0, 0.0, -scale])
        dst_ds = None

    except Exception as e:
        log.ODM_WARNING(
            "Cannot add psuedo georeferencing to %s (%s), skipping..." %
            (geotiff, str(e)))
Example #14
    def save_absolute_image_list_to(self, file):
        """
        Writes a copy of the image_list.txt file and makes sure that all paths
        written in it are absolute paths and not relative paths.
        """
        image_list_file = self.path("image_list.txt")

        if io.file_exists(image_list_file):
            with open(image_list_file, 'r') as f:
                content = f.read()
            
            lines = []
            for line in map(str.strip, content.split('\n')):
                if line and not line.startswith("/"):
                    line = os.path.abspath(os.path.join(self.opensfm_project_path, line))
                lines.append(line)

            with open(file, 'w') as f:
                f.write("\n".join(lines))

            log.ODM_INFO("Wrote %s with absolute paths" % file)
        else:
            log.ODM_WARNING("No %s found, cannot create %s" % (image_list_file, file))
Example #15
def mesh_3d(odm_mesh_folder, odm_mesh_ply, filter_point_cloud_path,
            max_concurrency):
    if not io.file_exists(odm_mesh_ply):
        log.ODM_INFO('Writing ODM Mesh file in: %s' % odm_mesh_ply)
        oct_tree = 10
        samples = 1.0
        max_vertex = 200000
        point_weight = 4
        verbose = False
        mesh.screened_poisson_reconstruction(
            filter_point_cloud_path,
            odm_mesh_ply,
            depth=oct_tree,
            samples=samples,
            maxVertexCount=max_vertex,
            pointWeight=point_weight,
            threads=max(
                1, max_concurrency - 1
            ),  # poissonrecon can get stuck on some machines if --threads == all cores
            verbose=verbose)

    else:
        log.ODM_WARNING('Found a valid ODM Mesh file in: %s' % odm_mesh_ply)
Example #16
def get_rolling_shutter_readout(make, model, override_value=0):
    global warn_db_missing
    global info_db_found

    if override_value > 0:
        return override_value

    key = make_model_key(make, model)
    if key in RS_DATABASE:
        if key not in info_db_found:
            log.ODM_INFO(
                "Rolling shutter profile for \"%s %s\" selected, using %sms as --rolling-shutter-readout."
                % (make, model, RS_DATABASE[key]))
            info_db_found[key] = True
        return float(RS_DATABASE[key])
    else:
        # Warn once
        if key not in warn_db_missing:
            log.ODM_WARNING(
                "Rolling shutter readout time for \"%s %s\" is not in our database, using default of %sms which might be incorrect. Use --rolling-shutter-readout to set an actual value (see https://github.com/OpenDroneMap/RSCalibration for instructions on how to calculate this value)"
                % (make, model, DEFAULT_RS_READOUT))
            warn_db_missing[key] = True
        return float(DEFAULT_RS_READOUT)
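RS_DATABASE maps normalized make/model keys to sensor readout times in milliseconds. A sketch of the assumed shape; the key normalization and the values shown are illustrative, not the real database:

# Hypothetical shapes, for illustration only:
def make_model_key(make, model):
    return ("%s %s" % (make.strip(), model.strip())).lower()

RS_DATABASE = {
    "dji fc330": 33,            # illustrative readout time in ms
    "gopro hero4 black": 30,    # illustrative
}
DEFAULT_RS_READOUT = 30         # fallback, also in ms (illustrative)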
Example #17
        def remote_worker():
            while True:
                had_semaphore = bool(nonloc.semaphore)

                # If we've found an estimate of the limit on the maximum number of tasks
                # a node can process, we block until some tasks have completed
                if nonloc.semaphore: nonloc.semaphore.acquire()

                # Block until a new queue item is available
                task = q.get()

                if task is None or nonloc.error is not None:
                    q.task_done()
                    if nonloc.semaphore: nonloc.semaphore.release()
                    break
                
                # Special case in which we've just created a semaphore
                if not had_semaphore and nonloc.semaphore:
                    log.ODM_INFO("LRE: Just found semaphore, sending %s back to the queue" % task)
                    q.put(task)
                    q.task_done()
                    continue

                # Yield to local processing
                if not nonloc.local_processing:
                    log.ODM_DEBUG("LRE: Yielding to local processing, sending %s back to the queue" % task)
                    q.put(task)
                    q.task_done()
                    if nonloc.semaphore: nonloc.semaphore.release()
                    time.sleep(0.05)
                    continue

                # Process remote
                try:
                    task.process(False, handle_result)
                except Exception as e:
                    handle_result(task, False, e)
Example #18
def build_pointcloud(input_pointcloud,
                     output_path,
                     max_concurrency,
                     rerun=False):
    if not os.path.isfile(input_pointcloud):
        log.ODM_WARNING("No input point cloud file to process")
        return

    if rerun and io.dir_exists(output_path):
        log.ODM_WARNING("Removing previous 3D tiles directory: %s" %
                        output_path)
        shutil.rmtree(output_path)

    log.ODM_INFO("Generating OGC 3D Tiles point cloud")

    try:
        if not os.path.isdir(output_path):
            os.mkdir(output_path)

        tmpdir = os.path.join(output_path, "tmp")
        entwine_output = os.path.join(output_path, "entwine")

        build_entwine([input_pointcloud], tmpdir, entwine_output,
                      max_concurrency, "EPSG:4978")

        kwargs = {
            'input': entwine_output,
            'output': output_path,
        }
        system.run(
            'entwine convert -i "{input}" -o "{output}"'.format(**kwargs))

        for d in [tmpdir, entwine_output]:
            if os.path.isdir(d):
                shutil.rmtree(d)
    except Exception as e:
        log.ODM_WARNING("Cannot build 3D tiles point cloud: %s" % str(e))
Example #19
def split(input_point_cloud, outdir, filename_template, capacity, dims=None):
    log.ODM_INFO("Splitting point cloud filtering in chunks of {} vertices".format(capacity))

    if not os.path.exists(input_point_cloud):
        log.ODM_ERROR("{} does not exist, cannot split point cloud. The program will now exit.".format(input_point_cloud))
        sys.exit(1)

    if not os.path.exists(outdir):
        system.mkdir_p(outdir)

    if len(os.listdir(outdir)) != 0:
        log.ODM_ERROR("%s already contains some files. The program will now exit.".format(outdir))
        sys.exit(1)

    cmd = 'pdal split -i "%s" -o "%s" --capacity %s ' % (input_point_cloud, os.path.join(outdir, filename_template), capacity)
    
    if filename_template.endswith(".ply"):
        cmd += ("--writers.ply.sized_types=false "
                "--writers.ply.storage_mode='little endian' ")
    if dims is not None:
        cmd += '--writers.ply.dims="%s"' % dims
    system.run(cmd)

    return [os.path.join(outdir, f) for f in os.listdir(outdir)]
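A usage sketch (paths and capacity illustrative):

# Split a merged cloud into ~1M-point PLY chunks (values illustrative):
chunks = split('merged.laz', 'submodels_pc', 'part.ply', capacity=1000000)
for chunk in chunks:
    print(chunk)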
Example #20
def feather_raster(input_raster, output_raster, blend_distance=20):
    if not os.path.exists(input_raster):
        log.ODM_WARNING("Cannot feather raster, %s does not exist" % input_raster)
        return

    log.ODM_INFO("Computing feather raster: %s" % output_raster)
    
    with rasterio.open(input_raster, 'r') as rast:
        out_image = rast.read()
        if blend_distance > 0:
            if out_image.shape[0] >= 4:
                alpha_band = out_image[-1]
                dist_t = edt(alpha_band, black_border=True, parallel=0)
                dist_t[dist_t <= blend_distance] /= blend_distance
                dist_t[dist_t > blend_distance] = 1
                np.multiply(alpha_band, dist_t, out=alpha_band, casting="unsafe")
            else:
                log.ODM_WARNING("%s does not have an alpha band, cannot feather raster!" % input_raster)

        with rasterio.open(output_raster, 'w', BIGTIFF="IF_SAFER", **rast.profile) as dst:
            dst.colorinterp = rast.colorinterp
            dst.write(out_image)

        return output_raster
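The feathering ramp is easiest to see on a toy alpha band: pixels within blend_distance of a transparent edge get their alpha scaled by distance/blend_distance, everything farther keeps full opacity. A self-contained sketch (scipy's distance transform stands in for the edt package used above):

import numpy as np
from scipy.ndimage import distance_transform_edt  # stand-in for edt()

alpha = np.ones((1, 9))
alpha[0, 0] = 0                          # one transparent edge pixel
dist = distance_transform_edt(alpha)     # distance to the nearest zero pixel
blend_distance = 4.0
ramp = np.where(dist <= blend_distance, dist / blend_distance, 1.0)
print(ramp)  # rises linearly from the edge, then plateaus at 1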
Example #21
    def reconstruct(self, rolling_shutter_correct=False, rerun=False):
        reconstruction_file = os.path.join(self.opensfm_project_path,
                                           'reconstruction.json')
        if not io.file_exists(reconstruction_file) or rerun:
            self.run('reconstruct')
            self.check_merge_partial_reconstructions()
        else:
            log.ODM_WARNING(
                'Found a valid OpenSfM reconstruction file in: %s' %
                reconstruction_file)

        # Check that a reconstruction file has been created
        if not self.reconstructed():
            raise system.ExitException(
                "The program could not process this dataset using the current settings. "
                "Check that the images have enough overlap, "
                "that there are enough recognizable features "
                "and that the images are in focus. "
                "You could also try to increase the --min-num-features parameter."
                "The program will now exit.")

        if rolling_shutter_correct:
            rs_file = self.path('rs_done.txt')

            if not io.file_exists(rs_file) or rerun:
                self.run('rs_correct')

                log.ODM_INFO("Re-running the reconstruction pipeline")

                self.match_features(True)
                self.create_tracks(True)
                self.reconstruct(rolling_shutter_correct=False, rerun=True)

                self.touch(rs_file)
            else:
                log.ODM_WARNING("Rolling shutter correction already applied")
Example #22
def gap_fill(filenames, fout, interpolation='nearest'):
    """ Gap fill from higher radius DTMs, then fill remainder with interpolation """
    start = datetime.now()
    from scipy.interpolate import griddata
    if len(filenames) == 0:
        raise Exception('No filenames provided!')

    filenames = sorted(filenames)

    imgs = gippy.GeoImages(filenames)
    nodata = imgs[0][0].NoDataValue()
    arr = imgs[0][0].Read()

    for i in range(1, imgs.size()):
        locs = numpy.where(arr == nodata)
        arr[locs] = imgs[i][0].Read()[locs]

    # interpolation at bad points
    goodlocs = numpy.where(arr != nodata)
    badlocs = numpy.where(arr == nodata)
    arr[badlocs] = griddata(goodlocs,
                            arr[goodlocs],
                            badlocs,
                            method=interpolation)

    # write output
    imgout = gippy.GeoImage(fout, imgs[0])
    imgout.SetNoData(nodata)
    imgout[0].Write(arr)
    fout = imgout.Filename()
    imgout = None

    log.ODM_INFO('Completed gap-filling to create %s in %s' %
                 (os.path.relpath(fout), datetime.now() - start))

    return fout
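The final interpolation step relies on scipy's griddata accepting numpy.where-style index tuples both as sample points and as query locations. A toy demonstration of that exact pattern:

import numpy as np
from scipy.interpolate import griddata

nodata = -9999.0
arr = np.array([[1.0, nodata, 3.0],
                [4.0, 5.0, nodata]])
goodlocs = np.where(arr != nodata)   # indices of valid cells
badlocs = np.where(arr == nodata)    # indices of holes
arr[badlocs] = griddata(goodlocs, arr[goodlocs], badlocs, method='nearest')
print(arr)  # holes filled from the nearest valid cells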
Example #23
def has_gpu(args):
    if gpu_disabled_by_user_env():
        log.ODM_INFO("Disabling GPU features (ODM_NO_GPU is set)")
        return False
    if args.no_gpu:
        log.ODM_INFO("Disabling GPU features (--no-gpu is set)")
        return False

    if sys.platform == 'win32':
        nvcuda_path = os.path.join(os.environ.get('SYSTEMROOT'), 'system32',
                                   'nvcuda.dll')
        if os.path.isfile(nvcuda_path):
            log.ODM_INFO("CUDA drivers detected")
            return True
        else:
            log.ODM_INFO("No CUDA drivers detected, using CPU")
            return False
    else:
        if shutil.which('nvidia-smi') is not None:
            log.ODM_INFO("nvidia-smi detected")
            return True
        else:
            log.ODM_INFO("nvidia-smi not found in PATH, using CPU")
            return False
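gpu_disabled_by_user_env is not shown in this example; presumably it just checks the ODM_NO_GPU environment variable mentioned in the log message. A guess at its shape:

import os

def gpu_disabled_by_user_env():
    # Assumed behavior: any non-empty ODM_NO_GPU value disables GPU features
    return bool(os.environ.get('ODM_NO_GPU'))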
Example #24
        def handle_result(task, local, error=None, partial=False):
            def cleanup_remote():
                if not partial and task.remote_task:
                    log.ODM_INFO(
                        "LRE: Cleaning up remote task (%s)... %s" %
                        (task.remote_task.uuid,
                         'OK' if remove_task_safe(task.remote_task) else 'NO'))
                    self.params['tasks'].remove(task.remote_task)
                    task.remote_task = None

            if error:
                log.ODM_WARNING("LRE: %s failed with: %s" % (task, str(error)))

                # Special case in which the error is caused by a SIGTERM signal
                # this means a local processing was terminated either by CTRL+C or
                # by canceling the task.
                if str(error) == "Child was terminated by signal 15":
                    system.exit_gracefully()

                task_limit_reached = isinstance(error,
                                                NodeTaskLimitReachedException)
                if task_limit_reached:
                    # Estimate the maximum number of tasks based on how many tasks
                    # are currently running
                    with calculate_task_limit_lock:
                        if nonloc.max_remote_tasks is None:
                            node_task_limit = 0
                            for t in self.params['tasks']:
                                try:
                                    info = t.info(with_output=-3)
                                    if info.status == TaskStatus.RUNNING and info.processing_time >= 0 and len(
                                            info.output) >= 3:
                                        node_task_limit += 1
                                except exceptions.OdmError:
                                    pass

                            nonloc.max_remote_tasks = max(1, node_task_limit)
                            log.ODM_INFO(
                                "LRE: Node task limit reached. Setting max remote tasks to %s"
                                % node_task_limit)

                # Retry, but only if the error is not related to a task failure
                if task.retries < task.max_retries and not isinstance(
                        error, exceptions.TaskFailedError):
                    # Put task back in queue
                    # Don't increment the retry counter if this task simply reached the task
                    # limit count.
                    if not task_limit_reached:
                        task.retries += 1
                    task.wait_until = datetime.datetime.now() + datetime.timedelta(
                        seconds=task.retries * task.retry_timeout)
                    cleanup_remote()
                    q.task_done()

                    log.ODM_INFO("LRE: Re-queueing %s (retries: %s)" %
                                 (task, task.retries))
                    q.put(task)
                    if not local: remote_running_tasks.increment(-1)
                    return
                else:
                    nonloc.error = error
                    finished_tasks.increment()
                    if not local: remote_running_tasks.increment(-1)
            else:
                if not partial:
                    log.ODM_INFO("LRE: %s finished successfully" % task)
                    finished_tasks.increment()
                    if not local: remote_running_tasks.increment(-1)

            cleanup_remote()
            if not partial: q.task_done()
Example #25
    def process(self, args, outputs):
        tree = outputs['tree']
        reconstruction = outputs['reconstruction']

        max_dim = find_largest_photo_dim(reconstruction.photos)
        max_texture_size = 8 * 1024 # default

        if max_dim > 8000:
            log.ODM_INFO("Large input images (%s pixels), increasing maximum texture size." % max_dim)
            max_texture_size *= 3

        class nonloc:
            runs = []

        def add_run(nvm_file, primary=True, band=None):
            subdir = ""
            if not primary and band is not None:
                subdir = band
            
            if not args.skip_3dmodel and (primary or args.use_3dmesh):
                nonloc.runs += [{
                    'out_dir': os.path.join(tree.odm_texturing, subdir),
                    'model': tree.odm_mesh,
                    'nadir': False,
                    'primary': primary,
                    'nvm_file': nvm_file,
                    'labeling_file': os.path.join(tree.odm_texturing, "odm_textured_model_geo_labeling.vec") if subdir else None
                }]

            if not args.use_3dmesh:
                nonloc.runs += [{
                    'out_dir': os.path.join(tree.odm_25dtexturing, subdir),
                    'model': tree.odm_25dmesh,
                    'nadir': True,
                    'primary': primary,
                    'nvm_file': nvm_file,
                    'labeling_file': os.path.join(tree.odm_25dtexturing, "odm_textured_model_geo_labeling.vec") if subdir else None
                }]

        if reconstruction.multi_camera:

            for band in reconstruction.multi_camera:
                primary = band['name'] == get_primary_band_name(reconstruction.multi_camera, args.primary_band)
                nvm_file = os.path.join(tree.opensfm, "undistorted", "reconstruction_%s.nvm" % band['name'].lower())
                add_run(nvm_file, primary, band['name'].lower())
            
            # Sort to make sure primary band is processed first
            nonloc.runs.sort(key=lambda r: r['primary'], reverse=True)
        else:
            add_run(tree.opensfm_reconstruction_nvm)
        
        progress_per_run = 100.0 / len(nonloc.runs)
        progress = 0.0

        for r in nonloc.runs:
            if not io.dir_exists(r['out_dir']):
                system.mkdir_p(r['out_dir'])

            odm_textured_model_obj = os.path.join(r['out_dir'], tree.odm_textured_model_obj)

            if not io.file_exists(odm_textured_model_obj) or self.rerun():
                log.ODM_INFO('Writing MVS Textured file in: %s'
                              % odm_textured_model_obj)

                # Format arguments to fit Mvs-Texturing app
                skipGlobalSeamLeveling = ""
                skipLocalSeamLeveling = ""
                keepUnseenFaces = ""
                nadir = ""

                if args.texturing_skip_global_seam_leveling:
                    skipGlobalSeamLeveling = "--skip_global_seam_leveling"
                if args.texturing_skip_local_seam_leveling:
                    skipLocalSeamLeveling = "--skip_local_seam_leveling"
                if args.texturing_keep_unseen_faces:
                    keepUnseenFaces = "--keep_unseen_faces"
                if r['nadir']:
                    nadir = '--nadir_mode'

                # mvstex definitions
                kwargs = {
                    'bin': context.mvstex_path,
                    'out_dir': os.path.join(r['out_dir'], "odm_textured_model_geo"),
                    'model': r['model'],
                    'dataTerm': args.texturing_data_term,
                    'outlierRemovalType': args.texturing_outlier_removal_type,
                    'skipGlobalSeamLeveling': skipGlobalSeamLeveling,
                    'skipLocalSeamLeveling': skipLocalSeamLeveling,
                    'keepUnseenFaces': keepUnseenFaces,
                    'toneMapping': args.texturing_tone_mapping,
                    'nadirMode': nadir,
                    'maxTextureSize': '--max_texture_size=%s' % max_texture_size,
                    'nvm_file': r['nvm_file'],
                    'intermediate': '--no_intermediate_results' if (r['labeling_file'] or not reconstruction.multi_camera) else '',
                    'labelingFile': '-L "%s"' % r['labeling_file'] if r['labeling_file'] else ''
                }

                mvs_tmp_dir = os.path.join(r['out_dir'], 'tmp')

                # Make sure tmp directory is empty
                if io.dir_exists(mvs_tmp_dir):
                    log.ODM_INFO("Removing old tmp directory {}".format(mvs_tmp_dir))
                    shutil.rmtree(mvs_tmp_dir)

                # run texturing binary
                system.run('"{bin}" "{nvm_file}" "{model}" "{out_dir}" '
                        '-d {dataTerm} -o {outlierRemovalType} '
                        '-t {toneMapping} '
                        '{intermediate} '
                        '{skipGlobalSeamLeveling} '
                        '{skipLocalSeamLeveling} '
                        '{keepUnseenFaces} '
                        '{nadirMode} '
                        '{labelingFile} '
                        '{maxTextureSize} '.format(**kwargs))
                
                # Backward compatibility: copy odm_textured_model_geo.mtl to odm_textured_model.mtl
                # for certain older WebODM clients which expect a odm_textured_model.mtl
                # to be present for visualization
                # We should remove this at some point in the future
                geo_mtl = os.path.join(r['out_dir'], 'odm_textured_model_geo.mtl')
                if io.file_exists(geo_mtl):
                    nongeo_mtl = os.path.join(r['out_dir'], 'odm_textured_model.mtl')
                    shutil.copy(geo_mtl, nongeo_mtl)
                
                progress += progress_per_run
                self.update_progress(progress)
            else:
                log.ODM_WARNING('Found a valid ODM Texture file in: %s'
                                % odm_textured_model_obj)
        
        if args.optimize_disk_space:
            for r in nonloc.runs:
                if io.file_exists(r['model']):
                    os.remove(r['model'])
            
            undistorted_images_path = os.path.join(tree.opensfm, "undistorted", "images")
            if io.dir_exists(undistorted_images_path):
                shutil.rmtree(undistorted_images_path)
Example #26
    def run(self, taskClass):
        if not self.project_paths:
            return

        # Shared variables across threads
        class nonloc:
            error = None
            local_processing = False
            max_remote_tasks = None

        calculate_task_limit_lock = threading.Lock()
        finished_tasks = AtomicCounter(0)
        remote_running_tasks = AtomicCounter(0)

        # Create queue
        q = queue.Queue()
        for pp in self.project_paths:
            log.ODM_INFO("LRE: Adding to queue %s" % pp)
            q.put(taskClass(pp, self.node, self.params))

        def remove_task_safe(task):
            try:
                removed = task.remove()
            except exceptions.OdmError:
                removed = False
            return removed

        def cleanup_remote_tasks():
            if self.params['tasks']:
                log.ODM_WARNING("LRE: Attempting to cleanup remote tasks")
            else:
                log.ODM_INFO("LRE: No remote tasks left to cleanup")

            for task in self.params['tasks']:
                log.ODM_INFO(
                    "LRE: Removing remote task %s... %s" %
                    (task.uuid, 'OK' if remove_task_safe(task) else 'NO'))

        def handle_result(task, local, error=None, partial=False):
            def cleanup_remote():
                if not partial and task.remote_task:
                    log.ODM_INFO(
                        "LRE: Cleaning up remote task (%s)... %s" %
                        (task.remote_task.uuid,
                         'OK' if remove_task_safe(task.remote_task) else 'NO'))
                    self.params['tasks'].remove(task.remote_task)
                    task.remote_task = None

            if error:
                log.ODM_WARNING("LRE: %s failed with: %s" % (task, str(error)))

                # Special case in which the error is caused by a SIGTERM signal
                # this means a local processing was terminated either by CTRL+C or
                # by canceling the task.
                if str(error) == "Child was terminated by signal 15":
                    system.exit_gracefully()

                task_limit_reached = isinstance(error,
                                                NodeTaskLimitReachedException)
                if task_limit_reached:
                    # Estimate the maximum number of tasks based on how many tasks
                    # are currently running
                    with calculate_task_limit_lock:
                        if nonloc.max_remote_tasks is None:
                            node_task_limit = 0
                            for t in self.params['tasks']:
                                try:
                                    info = t.info(with_output=-3)
                                    if info.status == TaskStatus.RUNNING and info.processing_time >= 0 and len(
                                            info.output) >= 3:
                                        node_task_limit += 1
                                except exceptions.OdmError:
                                    pass

                            nonloc.max_remote_tasks = max(1, node_task_limit)
                            log.ODM_INFO(
                                "LRE: Node task limit reached. Setting max remote tasks to %s"
                                % node_task_limit)

                # Retry, but only if the error is not related to a task failure
                if task.retries < task.max_retries and not isinstance(
                        error, exceptions.TaskFailedError):
                    # Put task back in queue
                    # Don't increment the retry counter if this task simply reached the task
                    # limit count.
                    if not task_limit_reached:
                        task.retries += 1
                    task.wait_until = datetime.datetime.now() + datetime.timedelta(
                        seconds=task.retries * task.retry_timeout)
                    cleanup_remote()
                    q.task_done()

                    log.ODM_INFO("LRE: Re-queueing %s (retries: %s)" %
                                 (task, task.retries))
                    q.put(task)
                    if not local: remote_running_tasks.increment(-1)
                    return
                else:
                    nonloc.error = error
                    finished_tasks.increment()
                    if not local: remote_running_tasks.increment(-1)
            else:
                if not partial:
                    log.ODM_INFO("LRE: %s finished successfully" % task)
                    finished_tasks.increment()
                    if not local: remote_running_tasks.increment(-1)

            cleanup_remote()
            if not partial: q.task_done()

        def local_worker():
            while True:
                # Block until a new queue item is available
                task = q.get()

                if task is None or nonloc.error is not None:
                    q.task_done()
                    break

                # Process local
                try:
                    nonloc.local_processing = True
                    task.process(True, handle_result)
                except Exception as e:
                    handle_result(task, True, e)
                finally:
                    nonloc.local_processing = False

        def remote_worker():
            while True:
                # Block until a new queue item is available
                task = q.get()

                if task is None or nonloc.error is not None:
                    q.task_done()
                    break

                # Yield to local processing
                if not nonloc.local_processing:
                    log.ODM_INFO(
                        "LRE: Yielding to local processing, sending %s back to the queue"
                        % task)
                    q.put(task)
                    q.task_done()
                    time.sleep(0.05)
                    continue

                # If we've found an estimate of the limit on the maximum number of tasks
                # a node can process, we block until some tasks have completed
                if nonloc.max_remote_tasks is not None and remote_running_tasks.value >= nonloc.max_remote_tasks:
                    q.put(task)
                    q.task_done()
                    time.sleep(2)
                    continue

                # Process remote
                try:
                    remote_running_tasks.increment()
                    task.process(False, handle_result)
                except Exception as e:
                    handle_result(task, False, e)

        # Create queue thread
        local_thread = threading.Thread(target=local_worker)
        if self.node_online:
            remote_thread = threading.Thread(target=remote_worker)

        system.add_cleanup_callback(cleanup_remote_tasks)

        # Start workers
        local_thread.start()
        if self.node_online:
            remote_thread.start()

        # block until all tasks are done (or CTRL+C)
        try:
            while finished_tasks.value < len(
                    self.project_paths) and nonloc.error is None:
                time.sleep(0.5)
        except KeyboardInterrupt:
            log.ODM_WARNING("LRE: CTRL+C")
            system.exit_gracefully()

        # stop workers
        q.put(None)
        if self.node_online:
            q.put(None)

        # Wait for queue thread
        local_thread.join()
        if self.node_online:
            remote_thread.join()

        # Wait for all remaining threads
        for thrds in self.params['threads']:
            thrds.join()

        system.remove_cleanup_callback(cleanup_remote_tasks)
        cleanup_remote_tasks()

        if nonloc.error is not None:
            # Try not to leak access token
            if isinstance(nonloc.error, exceptions.NodeConnectionError):
                raise exceptions.NodeConnectionError(
                    "A connection error happened. Check the connection to the processing node and try again."
                )
            else:
                raise nonloc.error
Example #27
    def process(self, args, outputs):
        tree = outputs['tree']
        las_model_found = io.file_exists(tree.odm_georeferencing_model_laz)

        log.ODM_INFO('Classify: ' + str(args.pc_classify))
        log.ODM_INFO('Create DSM: ' + str(args.dsm))
        log.ODM_INFO('Create DTM: ' + str(args.dtm))
        log.ODM_INFO('DEM input file {0} found: {1}'.format(tree.odm_georeferencing_model_laz, str(las_model_found)))

        # define paths and create working directories
        odm_dem_root = tree.path('odm_dem')
        if not io.dir_exists(odm_dem_root):
            system.mkdir_p(odm_dem_root)

        if args.pc_classify and las_model_found:
            pc_classify_marker = os.path.join(odm_dem_root, 'pc_classify_done.txt')

            if not io.file_exists(pc_classify_marker) or self.rerun():
                log.ODM_INFO("Classifying {} using Simple Morphological Filter".format(tree.odm_georeferencing_model_laz))
                commands.classify(tree.odm_georeferencing_model_laz,
                                  args.smrf_scalar, 
                                  args.smrf_slope, 
                                  args.smrf_threshold, 
                                  args.smrf_window,
                                  verbose=args.verbose
                                )

                with open(pc_classify_marker, 'w') as f:
                    f.write('Classify: smrf\n')
                    f.write('Scalar: {}\n'.format(args.smrf_scalar))
                    f.write('Slope: {}\n'.format(args.smrf_slope))
                    f.write('Threshold: {}\n'.format(args.smrf_threshold))
                    f.write('Window: {}\n'.format(args.smrf_window))
            
        progress = 20
        self.update_progress(progress)

        # Do we need to process anything here?
        if (args.dsm or args.dtm) and las_model_found:
            dsm_output_filename = os.path.join(odm_dem_root, 'dsm.tif')
            dtm_output_filename = os.path.join(odm_dem_root, 'dtm.tif')

            if (args.dtm and not io.file_exists(dtm_output_filename)) or \
                (args.dsm and not io.file_exists(dsm_output_filename)) or \
                self.rerun():

                products = []
                if args.dsm: products.append('dsm')
                if args.dtm: products.append('dtm')
                
                resolution = gsd.cap_resolution(args.dem_resolution, tree.opensfm_reconstruction, gsd_error_estimate=-3, ignore_gsd=args.ignore_gsd)
                radius_steps = [(resolution / 100.0) / 2.0]
                for _ in range(args.dem_gapfill_steps - 1):
                    radius_steps.append(radius_steps[-1] * 2) # 2 is arbitrary, maybe there's a better value?

                for product in products:
                    commands.create_dem(
                            tree.odm_georeferencing_model_laz,
                            product,
                            output_type='idw' if product == 'dtm' else 'max',
                            radiuses=map(str, radius_steps),
                            gapfill=args.dem_gapfill_steps > 0,
                            outdir=odm_dem_root,
                            resolution=resolution / 100.0,
                            decimation=args.dem_decimation,
                            verbose=args.verbose,
                            max_workers=args.max_concurrency,
                            keep_unfilled_copy=args.dem_euclidean_map
                        )

                    dem_geotiff_path = os.path.join(odm_dem_root, "{}.tif".format(product))
                    bounds_file_path = os.path.join(tree.odm_georeferencing, 'odm_georeferenced_model.bounds.gpkg')

                    if args.crop > 0:
                        # Crop DEM
                        Cropper.crop(bounds_file_path, dem_geotiff_path, utils.get_dem_vars(args))

                    if args.dem_euclidean_map:
                        unfilled_dem_path = io.related_file_path(dem_geotiff_path, postfix=".unfilled")
                        
                        if args.crop > 0:
                            # Crop unfilled DEM
                            Cropper.crop(bounds_file_path, unfilled_dem_path, utils.get_dem_vars(args))

                        commands.compute_euclidean_map(unfilled_dem_path, 
                                            io.related_file_path(dem_geotiff_path, postfix=".euclideand"), 
                                            overwrite=True)
                    
                    progress += 30
                    self.update_progress(progress)
            else:
                log.ODM_WARNING('Found existing outputs in: %s' % odm_dem_root)
        else:
            log.ODM_WARNING('DEM will not be generated')
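As a quick check of the radius arithmetic above: with a capped resolution of 5 (cm) and three gap-fill steps, the search radii double at each step (values illustrative):

resolution = 5.0                          # illustrative, in cm as above
radius_steps = [(resolution / 100.0) / 2.0]
for _ in range(3 - 1):                    # dem_gapfill_steps = 3 (illustrative)
    radius_steps.append(radius_steps[-1] * 2)
print(radius_steps)                       # [0.025, 0.05, 0.1] (meters)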
Example #28
        def print_progress(percentage):
            if (time.time() - nonloc.last_update >= 2) or int(percentage) == 100:
                log.ODM_INFO("LRE: Download of %s at [%s%%]" %
                             (self, int(percentage)))
                nonloc.last_update = time.time()
Example #29
    def process(self, args, outputs):
        outputs['start_time'] = system.now_raw()
        tree = types.ODM_Tree(args.project_path, args.gcp, args.geo)
        outputs['tree'] = tree

        if args.time and io.file_exists(tree.benchmarking):
            # Delete the previously made file
            os.remove(tree.benchmarking)
            with open(tree.benchmarking, 'a') as b:
                b.write(
                    'ODM Benchmarking file created %s\nNumber of Cores: %s\n\n'
                    % (system.now(), context.num_cores))

        # check if the image filename is supported
        def valid_image_filename(filename):
            (pathfn, ext) = os.path.splitext(filename)
            return ext.lower() in context.supported_extensions and pathfn[-5:] != "_mask"

        # Get supported images from dir
        def get_images(in_dir):
            log.ODM_DEBUG(in_dir)
            entries = os.listdir(in_dir)
            valid, rejects = [], []
            for f in entries:
                if valid_image_filename(f):
                    valid.append(f)
                else:
                    rejects.append(f)
            return valid, rejects

        def find_mask(photo_path, masks):
            (pathfn, ext) = os.path.splitext(os.path.basename(photo_path))
            k = "{}_mask".format(pathfn)

            mask = masks.get(k)
            if mask:
                # Spaces are not supported due to OpenSfM's mask_list.txt format reqs
                if not " " in mask:
                    return mask
                else:
                    log.ODM_WARNING(
                        "Image mask {} has a space. Spaces are currently not supported for image masks."
                        .format(mask))

        # get images directory
        images_dir = tree.dataset_raw

        # define paths and create working directories
        system.mkdir_p(tree.odm_georeferencing)

        log.ODM_INFO('Loading dataset from: %s' % images_dir)

        # check if we rerun cell or not
        images_database_file = os.path.join(tree.root_path, 'images.json')
        if not io.file_exists(images_database_file) or self.rerun():
            if not os.path.exists(images_dir):
                log.ODM_ERROR(
                    "There are no images in %s! Make sure that your project path and dataset name is correct. The current is set to: %s"
                    % (images_dir, args.project_path))
                exit(1)

            files, rejects = get_images(images_dir)
            if files:
                # create ODMPhoto list
                path_files = [os.path.join(images_dir, f) for f in files]

                # Lookup table for masks
                masks = {}
                for r in rejects:
                    (p, ext) = os.path.splitext(r)
                    if p[-5:] == "_mask" and ext.lower(
                    ) in context.supported_extensions:
                        masks[p] = r

                photos = []
                with open(tree.dataset_list, 'w') as dataset_list:
                    log.ODM_INFO("Loading %s images" % len(path_files))
                    for f in path_files:
                        p = types.ODM_Photo(f)
                        p.set_mask(find_mask(f, masks))
                        photos += [p]
                        dataset_list.write(photos[-1].filename + '\n')

                # Check if a geo file is available
                if tree.odm_geo_file is not None and os.path.exists(
                        tree.odm_geo_file):
                    log.ODM_INFO("Found image geolocation file")
                    gf = GeoFile(tree.odm_geo_file)
                    updated = 0
                    for p in photos:
                        entry = gf.get_entry(p.filename)
                        if entry:
                            p.update_with_geo_entry(entry)
                            updated += 1
                    log.ODM_INFO("Updated %s image positions" % updated)

                # Save image database for faster restart
                save_images_database(photos, images_database_file)
            else:
                log.ODM_ERROR('Not enough supported images in %s' % images_dir)
                exit(1)
        else:
            # We have an images database, just load it
            photos = load_images_database(images_database_file)

        log.ODM_INFO('Found %s usable images' % len(photos))

        # Create reconstruction object
        reconstruction = types.ODM_Reconstruction(photos)

        if tree.odm_georeferencing_gcp and not args.use_exif:
            reconstruction.georeference_with_gcp(
                tree.odm_georeferencing_gcp,
                tree.odm_georeferencing_coords,
                tree.odm_georeferencing_gcp_utm,
                tree.odm_georeferencing_model_txt_geo,
                rerun=self.rerun())
        else:
            reconstruction.georeference_with_gps(
                tree.dataset_raw,
                tree.odm_georeferencing_coords,
                tree.odm_georeferencing_model_txt_geo,
                rerun=self.rerun())

        reconstruction.save_proj_srs(
            os.path.join(tree.odm_georeferencing,
                         tree.odm_georeferencing_proj))
        outputs['reconstruction'] = reconstruction
Example #30
def save_images_database(photos, database_file):
    with open(database_file, 'w') as f:
        f.write(json.dumps([p.__dict__ for p in photos]))

    log.ODM_INFO("Wrote images database: %s" % database_file)