Example no. 1
    def __init__(self):

        # Configuration
        args = config.config()
        self.datapath = 'datasets'
        dataset = ODMLoadDatasetStage(self.datapath, args,
                                      verbose=args.verbose)
        # run the dataset layer
        dataset.run()

        # Now we have photo metadata such as GPS and camera info

        # Need the configuration of the clients' IP addresses
        # https://stackoverflow.com/questions/45071567/how-to-send-custom-header-metadata-with-python-grpc

        class Servicer(chunk_pb2_grpc.FileServerServicer):
            def __init__(self):
                # Temporary location where uploaded chunks are reassembled
                self.tmp_file_name = './temp/IMG_2359.JPG'

            def upload(self, request_iterator, context):
                # Reassemble the streamed chunks into a file and reply with its size
                save_chunks_to_file(request_iterator, self.tmp_file_name)
                return chunk_pb2.Reply(length=os.path.getsize(self.tmp_file_name))

            def download(self, request, context):
                # Stream the requested file back to the client as chunks
                if request.name:
                    return get_file_chunks(self.tmp_file_name)

        self.server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))
        chunk_pb2_grpc.add_FileServerServicer_to_server(Servicer(), self.server)
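The server above is created and the servicer is registered, but it is never bound to a port or started, and the note about client IP addresses is left open. Below is a minimal sketch of both missing pieces, assuming a port of 50051, a host named server-1, and a 'client-ip' metadata key (all hypothetical); chunk_pb2_grpc.FileServerStub and get_file_chunks come from the same chunked-transfer module used above.

import grpc

# Server side (continuing __init__ above): bind and start the gRPC server (assumed port)
self.server.add_insecure_port('[::]:50051')
self.server.start()

# Client side (separate process): attach custom header metadata to the upload call,
# as described in the Stack Overflow link above; address, image path and the
# 'client-ip' key are illustrative assumptions
channel = grpc.insecure_channel('server-1:50051')
stub = chunk_pb2_grpc.FileServerStub(channel)
reply = stub.upload(get_file_chunks('images/IMG_2359.JPG'),
                    metadata=[('client-ip', '10.0.0.2')])
print(reply.length)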
Example no. 2
def configuration():
    args = config.config()
    args_dict = vars(args)
    args.split = 5
    args.split_overlap = 10
    args.rerun_all = True

    for k in sorted(args_dict.keys()):
        # Skip _is_set keys
        if k.endswith("_is_set"):
            continue

        # Don't leak token
        if k == 'sm_cluster' and args_dict[k] is not None:
            log.ODM_INFO('%s: True' % k)
        else:
            log.ODM_INFO('%s: %s' % (k, args_dict[k]))

    args.project_path = io.join_paths(args.project_path, args.name)
    print(args.project_path)
    # Override with a hard-coded image directory
    args.project_path = '/home/j/ODM-master/dataset/images'
    if not io.dir_exists(args.project_path):
        log.ODM_WARNING('Directory %s does not exist. Creating it now.' %
                        args.name)
        system.mkdir_p(os.path.abspath(args.project_path))

    dataset = ODMLoadDatasetStage('dataset',
                                  args,
                                  progress=5.0,
                                  verbose=args.verbose)

    dataset.run()

    # Upload images to server 2 (see the sketch after this function)

    # Blocking call
    # Run distance measurements

    # Exchange the images required by server 2 and the images required by server 1

    # Run OpenSfM in map-reduce mode

    opensfm = ODMOpenSfMStage('opensfm', args, progress=25.0)
    opensfm.run()
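As a rough illustration of the "upload images to server 2" step noted above, the sketch below reuses the gRPC FileServer client from Example no. 1; the server address and the images folder layout are assumptions, and args is the configuration object built in this function.

import glob
import os

import grpc

channel = grpc.insecure_channel('server-2:50051')  # hypothetical address of server 2
stub = chunk_pb2_grpc.FileServerStub(channel)

for image_path in glob.glob(os.path.join(args.project_path, 'images', '*.JPG')):
    # Blocking call: stream each image to the remote server chunk by chunk
    reply = stub.upload(get_file_chunks(image_path))
    log.ODM_INFO('Uploaded %s (%s bytes)' % (image_path, reply.length))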
Example no. 3
    def test_get_submodel_argv(self):
        # Base
        args = config.config(["--project-path", "/datasets"])
        
        self.assertEqual(get_submodel_argv(args)[1:], 
            ['--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel'])
        self.assertEqual(get_submodel_argv(args, "/submodels", "submodel_0000")[1:], 
            ['--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel', '--project-path', '/submodels', 'submodel_0000'])        

        # Base + project name
        args = config.config(["--project-path", "/datasets", "brighton"])
        self.assertEqual(get_submodel_argv(args)[1:], 
            ['--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel'])
        self.assertEqual(get_submodel_argv(args, "/submodels", "submodel_0000")[1:], 
            ['--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel', '--project-path', '/submodels', 'submodel_0000'])        

        # Project name + base
        args = config.config(["brighton", "--project-path", "/datasets"])
        self.assertEqual(get_submodel_argv(args)[1:], 
            ['--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel'])
        self.assertEqual(get_submodel_argv(args, "/submodels", "submodel_0000")[1:], 
            ['--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel', '--project-path', '/submodels', 'submodel_0000'])        

        # Crop
        args = config.config(["brighton", "--project-path", "/datasets", "--crop", "0"])
        self.assertEqual(get_submodel_argv(args)[1:], 
            ['--crop', '0.015625', '--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel'])
        self.assertEqual(get_submodel_argv(args, "/submodels", "submodel_0000")[1:], 
            ['--crop', '0.015625', '--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel', '--project-path', '/submodels', 'submodel_0000'])        

        # With sm-cluster, pc-csv and others
        args = config.config(["--project-path", "/datasets", "--split", "200", "--pc-csv"])
        self.assertEqual(get_submodel_argv(args)[1:], 
            ['--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel'])
        self.assertEqual(get_submodel_argv(args, "/submodels", "submodel_0000")[1:], 
            ['--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel', '--project-path', '/submodels', 'submodel_0000'])        

        # Cameras JSON
        args = config.config(["--project-path", "/datasets", "--cameras", os.path.join(os.path.dirname(os.path.realpath(__file__)), "assets", "sample.json")])
        self.assertEqual(get_submodel_argv(args)[1:], 
            ['--cameras', '{"test": "1"}', '--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel'])
        
        # Camera JSON string
        args = config.config(["--project-path", "/datasets", "--cameras", '{"test": "1"}'])
        self.assertEqual(get_submodel_argv(args)[1:], 
            ['--cameras', '{"test": "1"}', '--orthophoto-cutline', '--dem-euclidean-map', '--skip-3dmodel'])
Example no. 4
    def process_local(self):
        completed_file = self.path("toolchain_completed.txt")
        submodel_name = os.path.basename(self.project_path)
        
        if not os.path.exists(completed_file) or self.params['rerun']:
            log.ODM_INFO("=============================")
            log.ODM_INFO("Local Toolchain %s" % self)
            log.ODM_INFO("=============================")

            submodels_path = os.path.abspath(self.path(".."))
            argv = get_submodel_argv(config.config(), submodels_path, submodel_name)

            # Re-run the ODM toolchain on the submodel
            system.run(" ".join(map(double_quote, map(str, argv))), env_vars=os.environ.copy())

            # This will only get executed if the command above succeeds
            self.touch(completed_file)
        else:
            log.ODM_INFO("Already processed toolchain for %s" % submodel_name)
Example no. 5
def perform_work():
    """Prepares for and runs the ODM code
    """
    print("[worker] Starting")

    arg_file = os.environ.get('ODM_SETTINGS')
    project_path = os.environ.get('ODM_PROJECT')

    if not project_path:
        print("[worker] raising missing environment variable ODM_PROJECT")
        raise ValueError("Missing project path environment variable")

    print("[worker] settings file: " + str(arg_file))

    new_settings = None
    print("[worker] loading settings")
    if arg_file:
        with open(arg_file, encoding='utf-8') as in_f:
            new_settings = yaml.safe_load(in_f)

    print("[worker] getting config using our settings: %s" %
          context.settings_path)
    args = config.config()

    print("[worker] merging config")
    if new_settings:
        for name in new_settings:
            if name not in NO_OVERRIDE_SETTINGS:
                setattr(args, name, new_settings[name])
    print("[worker] config %s" % str(args))

    print("[worker] setting project path")
    args.project_path = project_path

    os.chdir(project_path)

    print("[worker] Starting")
    app = ODMApp(args=args)
    app.execute()

    print("[worker] finishing")
Example no. 6
            self.logger.exception("Could not stitch image.")
        finally:
            # Restore any settings that might have changed
            self.opendrone_args = original_settings

            try:
                # Clean up the working environment by removing links and created folders
                self.logger.debug("[Cleanup] remove computing folder: %s" %
                                  self.opendrone_args.project_path)
                for path in paths:
                    inputfile = os.path.basename(path)
                    odmfile = os.path.join("/tmp", inputfile + ".jpg")
                    if os.path.isfile(odmfile):
                        self.logger.debug("[Cleanup] remove odm .jpg: %s" %
                                          odmfile)
                        os.remove(odmfile)
                shutil.rmtree(self.opendrone_args.project_path)
            except OSError:
                pass
            finally:
                self.opendrone_args.project_path = original_project_path


if __name__ == "__main__":
    args = config.config()
    args.project_path = tempfile.mkdtemp()

    extractor = OpenDroneMapStitch()
    extractor.dosetup(args)
    extractor.start()
Example no. 7
    def execute_remote_task(
        self,
        done,
        seed_files=[],
        seed_touch_files=[],
        outputs=[],
    ):
        """
        Run a task by creating a seed file with all files in seed_files, optionally
        creating empty files (for flag checks) specified in seed_touch_files
        and returning the results specified in outputs. Yeah it's pretty cool!
        """
        seed_file = self.create_seed_payload(seed_files,
                                             touch_files=seed_touch_files)

        # Find all images
        images = glob.glob(self.path("images/**"))

        # Add GCP (optional)
        if os.path.exists(self.path("gcp_list.txt")):
            images.append(self.path("gcp_list.txt"))

        # Add seed file
        images.append(seed_file)

        class nonloc:
            last_update = 0

        def print_progress(percentage):
            if (time.time() - nonloc.last_update >=
                    2) or int(percentage) == 100:
                log.ODM_INFO("LRE: Upload of %s at [%s%%]" %
                             (self, int(percentage)))
                nonloc.last_update = time.time()

        # Upload task
        task = self.node.create_task(images,
                                     get_submodel_args_dict(config.config()),
                                     progress_callback=print_progress,
                                     skip_post_processing=True,
                                     outputs=outputs)
        self.remote_task = task

        # Cleanup seed file
        os.remove(seed_file)

        # Keep track of tasks for cleanup
        self.params['tasks'].append(task)

        # Check status
        info = task.info()
        if info.status in [TaskStatus.RUNNING, TaskStatus.COMPLETED]:

            def monitor():
                class nonloc:
                    status_callback_calls = 0
                    last_update = 0

                def status_callback(info):
                    # If a task switches from RUNNING to QUEUED, then we need to
                    # stop the process and re-add the task to the queue.
                    if info.status == TaskStatus.QUEUED:
                        log.ODM_WARNING(
                            "LRE: %s (%s) turned from RUNNING to QUEUED. Re-adding to back of the queue."
                            % (self, task.uuid))
                        raise NodeTaskLimitReachedException(
                            "Delayed task limit reached")
                    elif info.status == TaskStatus.RUNNING:
                        # Print a status message once in a while
                        nonloc.status_callback_calls += 1
                        if nonloc.status_callback_calls > 30:
                            log.ODM_INFO("LRE: %s (%s) is still running" %
                                         (self, task.uuid))
                            nonloc.status_callback_calls = 0

                try:

                    def print_progress(percentage):
                        if (time.time() - nonloc.last_update >=
                                2) or int(percentage) == 100:
                            log.ODM_INFO("LRE: Download of %s at [%s%%]" %
                                         (self, int(percentage)))
                            nonloc.last_update = time.time()

                    task.wait_for_completion(status_callback=status_callback)
                    log.ODM_INFO("LRE: Downloading assets for %s" % self)
                    task.download_assets(self.project_path,
                                         progress_callback=print_progress)
                    log.ODM_INFO(
                        "LRE: Downloaded and extracted assets for %s" % self)
                    done()
                except exceptions.TaskFailedError as e:
                    # Try to get output
                    try:
                        output_lines = task.output()

                        # Save to file
                        error_log_path = self.path("error.log")
                        with open(error_log_path, 'w') as f:
                            f.write('\n'.join(output_lines) + '\n')

                        msg = "(%s) failed with task output: %s\nFull log saved at %s" % (
                            task.uuid, "\n".join(
                                output_lines[-10:]), error_log_path)
                        done(exceptions.TaskFailedError(msg))
                    except:
                        log.ODM_WARNING(
                            "LRE: Could not retrieve task output for %s (%s)" %
                            (self, task.uuid))
                        done(e)
                except Exception as e:
                    done(e)

            # Launch monitor thread and return
            t = threading.Thread(target=monitor)
            self.params['threads'].append(t)
            t.start()
        elif info.status == TaskStatus.QUEUED:
            raise NodeTaskLimitReachedException("Task limit reached")
        else:
            raise Exception("Could not send task to node, task status is %s" %
                            str(info.status))
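For context, a minimal standalone sketch of the same create/monitor/download flow, assuming the node used above is a pyodm Node pointing at a running NodeODM instance; host, port, image paths and options are illustrative.

from pyodm import Node

node = Node('localhost', 3000)                       # hypothetical NodeODM instance
task = node.create_task(
    ['images/IMG_0001.JPG', 'images/IMG_0002.JPG'],  # hypothetical image list
    {'fast-orthophoto': True},                       # example processing options
    skip_post_processing=True)
task.wait_for_completion()                           # blocking; raises on failure
task.download_assets('./results')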
Example no. 8
#!/usr/bin/python

from opendm import log
from opendm import config
from opendm import system
from opendm import io

import ecto
import os

from scripts.odm_app import ODMApp

if __name__ == '__main__':

    args = config.config()

    log.ODM_INFO('Initializing OpenDroneMap app - %s' % system.now())

    # Add the project dir if it doesn't exist
    args.project_path = io.join_paths(args.project_path, args.name)
    if not io.dir_exists(args.project_path):
        log.ODM_WARNING('Directory %s does not exist. Creating it now.' % args.name)
        system.mkdir_p(os.path.abspath(args.project_path))

    # If user asks to rerun everything, delete all of the existing progress directories.
    # TODO: Move this somewhere it's not hard-coded
    if args.rerun_all:
        os.system("rm -rf "
                  + args.project_path + "images_resize/ "
                  + args.project_path + "odm_georeferencing/ "
                  + args.project_path + "odm_meshing/ "
Example no. 9
import unittest
import os
import shutil

import ecto
from opendm import config
from opendm import context
from scripts.odm_app import ODMApp
from ecto.opts import scheduler_options, run_plasm

parser = config.parser
scheduler_options(parser)
options = config.config()


def appSetup(options):
    app = ODMApp(args=options)
    plasm = ecto.Plasm()
    plasm.insert(app)
    return app, plasm


def setup_module():
    # Run tests
    print('%s' % options)
    options.project_path = context.tests_data_path
    # options.rerun_all = True
    app, plasm = appSetup(options)
    print('Run Setup: Initial Run')
    run_plasm(options, plasm)
    # options.rerun_all = False
Example no. 10
    def test_get_submodel_argv_dict(self):
        # Base
        args = config.config(["--project-path", "/datasets"])

        self.assertEqual(get_submodel_args_dict(args),
            {'orthophoto-cutline': True, 'skip-3dmodel': True, 'dem-euclidean-map': True})