def test_get_bit_depth(self):
    self.assertEqual(utils.get_bit_depth("Byte"), "u08")
    self.assertEqual(utils.get_bit_depth("UInt16"), "u16")
    self.assertEqual(utils.get_bit_depth("Float32"), "f32")
    self.assertEqual(utils.get_bit_depth("Uint16"), None)  # function logs error, and returns None
Example 2
def main():

    #### Set Up Arguments
    parent_parser, pos_arg_keys = ortho_functions.buildParentArgumentParser()
    parser = argparse.ArgumentParser(
        parents=[parent_parser],
        description="Run/submit batch image ortho and conversion tasks")

    parser.add_argument("--pbs",
                        action='store_true',
                        default=False,
                        help="submit tasks to PBS")
    parser.add_argument("--slurm",
                        action='store_true',
                        default=False,
                        help="submit tasks to SLURM")
    parser.add_argument(
        "--parallel-processes",
        type=int,
        default=1,
        help="number of parallel processes to spawn (default 1)")
    parser.add_argument(
        "--qsubscript",
        help=
        "submission script to use in PBS/SLURM submission (PBS default is qsub_ortho.sh, SLURM "
        "default is slurm_ortho.py, in script root folder)")
    parser.add_argument(
        "-l", help="PBS resources requested (mimicks qsub syntax, PBS only)")
    parser.add_argument("--dryrun",
                        action='store_true',
                        default=False,
                        help='print actions without executing')

    #### Parse Arguments
    args = parser.parse_args()
    scriptpath = os.path.abspath(sys.argv[0])
    src = os.path.abspath(args.src)
    dstdir = os.path.abspath(args.dst)

    #### Validate Required Arguments
    if os.path.isdir(src):
        srctype = 'dir'
    elif os.path.isfile(src) and os.path.splitext(src)[1].lower() == '.txt':
        srctype = 'textfile'
    elif os.path.isfile(src) and os.path.splitext(
            src)[1].lower() in ortho_functions.exts:
        srctype = 'image'
    elif os.path.isfile(src.replace('msi', 'blu')) and os.path.splitext(
            src)[1].lower() in ortho_functions.exts:
        srctype = 'image'
    else:
        parser.error(
            "Error arg1 is not a recognized file path or file type: {}".format(
                src))

    if not os.path.isdir(dstdir):
        parser.error("Error arg2 is not a valid file path: {}".format(dstdir))

    ## Verify qsubscript
    if args.pbs or args.slurm:
        if args.qsubscript is None:
            if args.pbs:
                qsubpath = os.path.join(os.path.dirname(scriptpath),
                                        'qsub_ortho.sh')
            if args.slurm:
                qsubpath = os.path.join(os.path.dirname(scriptpath),
                                        'slurm_ortho.sh')
        else:
            qsubpath = os.path.abspath(args.qsubscript)
        if not os.path.isfile(qsubpath):
            parser.error("qsub script path is not valid: {}".format(qsubpath))

    ## Verify processing options do not conflict
    if args.pbs and args.slurm:
        parser.error("Options --pbs and --slurm are mutually exclusive")
    if (args.pbs or args.slurm) and args.parallel_processes > 1:
        parser.error(
            "HPC Options (--pbs or --slurm) and --parallel-processes > 1 are mutually exclusive"
        )

    #### Verify EPSG
    try:
        spatial_ref = utils.SpatialRef(args.epsg)
    except RuntimeError as e:
        parser.error(e)

    #### Verify that dem and ortho_height are not both specified
    if args.dem is not None and args.ortho_height is not None:
        parser.error(
            "--dem and --ortho_height options are mutually exclusive.  Please choose only one."
        )

    #### Test if DEM exists
    if args.dem:
        if not os.path.isfile(args.dem):
            parser.error("DEM does not exist: {}".format(args.dem))

    #### Set up console logging handler
    lso = logging.StreamHandler()
    lso.setLevel(logging.INFO)
    formatter = logging.Formatter('%(asctime)s %(levelname)s- %(message)s',
                                  '%m-%d-%Y %H:%M:%S')
    lso.setFormatter(formatter)
    logger.addHandler(lso)

    #### Get args ready to pass to task handler
    arg_keys_to_remove = ('l', 'qsubscript', 'dryrun', 'pbs', 'slurm',
                          'parallel_processes')
    arg_str_base = taskhandler.convert_optional_args_to_string(
        args, pos_arg_keys, arg_keys_to_remove)

    ## Identify source images
    if srctype == 'dir':
        image_list1 = utils.find_images(src, False, ortho_functions.exts)
    elif srctype == 'textfile':
        image_list1 = utils.find_images(src, True, ortho_functions.exts)
    else:
        image_list1 = [src]

    ## Group Ikonos
    image_list2 = []
    for srcfp in image_list1:
        srcdir, srcfn = os.path.split(srcfp)
        if "IK01" in srcfn and sum(
            [b in srcfn for b in ortho_functions.ikMsiBands]) > 0:
            for b in ortho_functions.ikMsiBands:
                if b in srcfn:
                    newname = os.path.join(srcdir, srcfn.replace(b, "msi"))
                    break
            image_list2.append(newname)

        else:
            image_list2.append(srcfp)

    image_list = list(set(image_list2))
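    # Illustrative note (hypothetical filename): an Ikonos scene delivered as separate
    # band files, e.g. "IK01_20050101_blu_0001.ntf", is renamed above to the composite
    # name "IK01_20050101_msi_0001.ntf", and the set() call collapses the individual
    # band entries into a single multispectral source entry.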
    logger.info('Number of src images: %i', len(image_list))

    ## Build task queue
    i = 0
    task_queue = []
    for srcfp in image_list:
        srcdir, srcfn = os.path.split(srcfp)
        dstfp = os.path.join(
            dstdir, "{}_{}{}{}{}".format(
                os.path.splitext(srcfn)[0], utils.get_bit_depth(args.outtype),
                args.stretch, spatial_ref.epsg,
                ortho_functions.formats[args.format]))

        done = os.path.isfile(dstfp)
        if done is False:
            i += 1
            task = taskhandler.Task(
                srcfn, 'Or{:04g}'.format(i), 'python',
                '{} {} {} {}'.format(scriptpath, arg_str_base, srcfp, dstdir),
                ortho_functions.process_image, [srcfp, dstfp, args])
            task_queue.append(task)

    logger.info('Number of incomplete tasks: %i', i)

    ## Run tasks
    if len(task_queue) > 0:
        logger.info("Submitting Tasks")
        if args.pbs:
            l = "-l {}".format(args.l) if args.l else ""
            try:
                task_handler = taskhandler.PBSTaskHandler(qsubpath, l)
            except RuntimeError as e:
                logger.error(e)
            else:
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        elif args.slurm:
            try:
                task_handler = taskhandler.SLURMTaskHandler(qsubpath)
            except RuntimeError as e:
                logger.error(e)
            else:
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        elif args.parallel_processes > 1:
            try:
                task_handler = taskhandler.ParallelTaskHandler(
                    args.parallel_processes)
            except RuntimeError as e:
                logger.error(e)
            else:
                logger.info("Number of child processes to spawn: %i",
                            task_handler.num_processes)
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        else:

            results = {}
            for task in task_queue:

                src, dstfp, task_arg_obj = task.method_arg_list

                #### Set up processing log handler
                logfile = os.path.splitext(dstfp)[0] + ".log"
                lfh = logging.FileHandler(logfile)
                lfh.setLevel(logging.DEBUG)
                formatter = logging.Formatter(
                    '%(asctime)s %(levelname)s- %(message)s',
                    '%m-%d-%Y %H:%M:%S')
                lfh.setFormatter(formatter)
                logger.addHandler(lfh)

                if not args.dryrun:
                    results[task.name] = task.method(src, dstfp, task_arg_obj)

                #### remove existing file handler
                logger.removeHandler(lfh)

            #### Print Images with Errors
            for k, v in results.items():
                if v != 0:
                    logger.warning("Failed Image: %s", k)

        logger.info("Done")

    else:
        logger.info("No images found to process")
Example 3
def main():
    #### Set Up Arguments
    parent_parser, pos_arg_keys = ortho_functions.buildParentArgumentParser()
    parser = argparse.ArgumentParser(
        parents=[parent_parser],
        description="Run/submit batch image ortho and conversion tasks")

    parser.add_argument("--pbs",
                        action='store_true',
                        default=False,
                        help="submit tasks to PBS")
    parser.add_argument("--slurm",
                        action='store_true',
                        default=False,
                        help="submit tasks to SLURM")
    parser.add_argument(
        "--tasks-per-job",
        type=int,
        help=
        "Number of tasks to bundle into a single job. (requires --pbs or --slurm option) (Warning:"
        " a higher number of tasks per job may require modification of default wallclock limit.)"
    )
    parser.add_argument(
        '--scratch',
        default=ARGDEF_SCRATCH,
        help="Scratch space to build task bundle text files. (default={})".
        format(ARGDEF_SCRATCH))
    parser.add_argument(
        "--parallel-processes",
        type=int,
        default=1,
        help="number of parallel processes to spawn (default 1)")
    parser.add_argument(
        "--qsubscript",
        help=
        "submission script to use in PBS/SLURM submission (PBS default is qsub_ortho.sh, SLURM "
        "default is slurm_ortho.py, in script root folder)")
    parser.add_argument(
        "-l", help="PBS resources requested (mimicks qsub syntax, PBS only)")
    parser.add_argument("--dryrun",
                        action='store_true',
                        default=False,
                        help='print actions without executing')

    #### Parse Arguments
    args = parser.parse_args()
    scriptpath = os.path.abspath(sys.argv[0])
    src = os.path.abspath(args.src)
    dstdir = os.path.abspath(args.dst)
    scratch = os.path.abspath(args.scratch)
    bittype = utils.get_bit_depth(args.outtype)

    #### Validate Required Arguments
    if os.path.isdir(src):
        srctype = 'dir'
    elif os.path.isfile(src) and os.path.splitext(src)[1].lower() == '.txt':
        srctype = 'textfile'
    elif os.path.isfile(src) and os.path.splitext(
            src)[1].lower() in ortho_functions.exts:
        srctype = 'image'
    elif os.path.isfile(src.replace('msi', 'blu')) and os.path.splitext(
            src)[1].lower() in ortho_functions.exts:
        srctype = 'image'
    else:
        parser.error(
            "Error arg1 is not a recognized file path or file type: {}".format(
                src))

    if not os.path.isdir(dstdir):
        parser.error("Error arg2 is not a valid file path: {}".format(dstdir))

    ## Verify qsubscript
    if args.pbs or args.slurm:
        if args.qsubscript is None:
            if args.pbs:
                qsubpath = os.path.join(os.path.dirname(scriptpath),
                                        'qsub_ortho.sh')
            if args.slurm:
                qsubpath = os.path.join(os.path.dirname(scriptpath),
                                        'slurm_ortho.sh')
        else:
            qsubpath = os.path.abspath(args.qsubscript)
        if not os.path.isfile(qsubpath):
            parser.error("qsub script path is not valid: {}".format(qsubpath))

    ## Verify processing options do not conflict
    requested_threads = ortho_functions.ARGDEF_CPUS_AVAIL if args.threads == "ALL_CPUS" else args.threads
    if args.pbs and args.slurm:
        parser.error("Options --pbs and --slurm are mutually exclusive")
    if (args.pbs or args.slurm) and args.parallel_processes > 1:
        parser.error(
            "HPC Options (--pbs or --slurm) and --parallel-processes > 1 are mutually exclusive"
        )
    if (args.pbs or args.slurm) and requested_threads > 1:
        parser.error(
            "HPC Options (--pbs or --slurm) and --threads > 1 are mutually exclusive"
        )
    if requested_threads < 1:
        parser.error(
            "--threads count must be positive, nonzero integer or ALL_CPUS")
    if args.parallel_processes > 1:
        total_proc_count = requested_threads * args.parallel_processes
        if total_proc_count > ortho_functions.ARGDEF_CPUS_AVAIL:
            parser.error(
                "the (threads * number of processes requested) ({0}) exceeds number of available threads "
                "({1}); reduce --threads and/or --parallel-processes count".
                format(total_proc_count, ortho_functions.ARGDEF_CPUS_AVAIL))
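    # Illustrative check: with --threads 4 and --parallel-processes 3 on a system where
    # ortho_functions.ARGDEF_CPUS_AVAIL is 8, total_proc_count is 12 > 8, so
    # parser.error() fires and one of the two values must be reduced.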

    if args.tasks_per_job:
        if not (args.pbs or args.slurm):
            parser.error(
                "--tasks-per-job option requires the (--pbs or --slurm) option"
            )
        if not os.path.isdir(args.scratch):
            print("Creating --scratch directory: {}".format(args.scratch))
            os.makedirs(args.scratch)

    #### Verify EPSG
    try:
        spatial_ref = utils.SpatialRef(args.epsg)
    except RuntimeError as e:
        parser.error(e)

    #### Verify that dem and ortho_height are not both specified
    if args.dem is not None and args.ortho_height is not None:
        parser.error(
            "--dem and --ortho_height options are mutually exclusive.  Please choose only one."
        )

    #### Test if DEM exists
    if args.dem:
        if not os.path.isfile(args.dem):
            parser.error("DEM does not exist: {}".format(args.dem))
        if args.l is None:
            if args.dem.endswith('.vrt'):
                total_dem_filesz_gb = 0.0
                tree = ET.parse(args.dem)
                root = tree.getroot()
                for sourceFilename in root.iter('SourceFilename'):
                    dem_filename = sourceFilename.text
                    if not os.path.isfile(dem_filename):
                        parser.error(
                            "VRT DEM component raster does not exist: {}".
                            format(dem_filename))
                    dem_filesz_gb = os.path.getsize(
                        dem_filename) / 1024.0 / 1024 / 1024
                    total_dem_filesz_gb += dem_filesz_gb
                dem_filesz_gb = total_dem_filesz_gb
            else:
                dem_filesz_gb = os.path.getsize(
                    args.dem) / 1024.0 / 1024 / 1024

            pbs_req_mem_gb = int(max(math.ceil(dem_filesz_gb) + 2, 4))
            args.l = 'mem={}gb'.format(pbs_req_mem_gb)
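            # Illustrative arithmetic: a 10.3 GB DEM gives max(ceil(10.3) + 2, 4) = 13,
            # so args.l becomes 'mem=13gb'; a 1.2 GB DEM gives max(ceil(1.2) + 2, 4) = 4,
            # i.e. 'mem=4gb' is requested.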

    #### Set up console logging handler
    lso = logging.StreamHandler()
    lso.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s %(levelname)s- %(message)s',
                                  '%m-%d-%Y %H:%M:%S')
    lso.setFormatter(formatter)
    logger.addHandler(lso)

    #### Handle thread count that exceeds system limits
    if requested_threads > ortho_functions.ARGDEF_CPUS_AVAIL:
        logger.info(
            "threads requested ({0}) exceeds number available on system ({1}), setting thread count to "
            "'ALL_CPUS'".format(requested_threads,
                                ortho_functions.ARGDEF_CPUS_AVAIL))
        args.threads = 'ALL_CPUS'

    #### Get args ready to pass to task handler
    arg_keys_to_remove = ('l', 'qsubscript', 'dryrun', 'pbs', 'slurm',
                          'parallel_processes', 'tasks_per_job')
    arg_str_base = taskhandler.convert_optional_args_to_string(
        args, pos_arg_keys, arg_keys_to_remove)

    ## Identify source images
    if srctype == 'dir':
        image_list1 = utils.find_images(src, False, ortho_functions.exts)
    elif srctype == 'textfile':
        image_list1 = utils.find_images(src, True, ortho_functions.exts)
    else:
        image_list1 = [src]

    ## Group Ikonos
    image_list2 = []
    for srcfp in image_list1:
        srcdir, srcfn = os.path.split(srcfp)
        if "IK01" in srcfn and sum(
            [b in srcfn for b in ortho_functions.ikMsiBands]) > 0:
            for b in ortho_functions.ikMsiBands:
                if b in srcfn:
                    newname = os.path.join(srcdir, srcfn.replace(b, "msi"))
                    break
            image_list2.append(newname)

        else:
            image_list2.append(srcfp)

    image_list = list(set(image_list2))
    logger.info('Number of src images: %i', len(image_list))

    ## Build task queue
    i = 0
    images_to_process = []
    for srcfp in image_list:
        srcdir, srcfn = os.path.split(srcfp)
        dstfp = os.path.join(
            dstdir, "{}_{}{}{}{}".format(
                os.path.splitext(srcfn)[0], bittype, args.stretch,
                spatial_ref.epsg, ortho_functions.formats[args.format]))

        done = os.path.isfile(dstfp)
        if done is False:
            i += 1
            images_to_process.append(srcfp)

    logger.info('Number of incomplete tasks: %i', i)

    if len(images_to_process) == 0:
        logger.info("No images found to process")
        sys.exit(0)

    task_queue = []

    if args.tasks_per_job and args.tasks_per_job > 1:
        task_srcfp_list = utils.write_task_bundles(images_to_process,
                                                   args.tasks_per_job, scratch,
                                                   'Or_src')
    else:
        task_srcfp_list = images_to_process
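    # When bundling is used, each entry returned by write_task_bundles is expected to be
    # a text file in the scratch directory listing up to --tasks-per-job source images;
    # in that case dstfp is left as None below and the bundled job resolves its own
    # output names.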

    for job_count, srcfp in enumerate(task_srcfp_list, 1):

        srcdir, srcfn = os.path.split(srcfp)

        if task_srcfp_list is images_to_process:
            dstfp = os.path.join(
                dstdir, "{}_{}{}{}{}".format(
                    os.path.splitext(srcfn)[0], bittype, args.stretch,
                    spatial_ref.epsg, ortho_functions.formats[args.format]))
        else:
            dstfp = None

        task = taskhandler.Task(
            srcfn, 'Or{:04g}'.format(job_count), 'python',
            '{} {} {} {}'.format(argval2str(scriptpath), arg_str_base,
                                 argval2str(srcfp), argval2str(dstdir)),
            ortho_functions.process_image, [srcfp, dstfp, args])
        task_queue.append(task)

    ## Run tasks
    if len(task_queue) > 0:
        logger.info("Submitting Tasks")
        if args.pbs:
            l = "-l {}".format(args.l) if args.l else ""
            try:
                task_handler = taskhandler.PBSTaskHandler(qsubpath, l)
            except RuntimeError as e:
                logger.error(e)
            else:
                if not args.dryrun:
                    task_handler.run_tasks(task_queue, dryrun=args.dryrun)

        elif args.slurm:
            try:
                task_handler = taskhandler.SLURMTaskHandler(qsubpath)
            except RuntimeError as e:
                logger.error(e)
            else:
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        elif args.parallel_processes > 1:
            try:
                task_handler = taskhandler.ParallelTaskHandler(
                    args.parallel_processes)
            except RuntimeError as e:
                logger.error(e)
            else:
                logger.info("Number of child processes to spawn: %i",
                            task_handler.num_processes)
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        else:

            results = {}
            for task in task_queue:

                src, dstfp, task_arg_obj = task.method_arg_list

                #### Set up processing log handler
                logfile = os.path.splitext(dstfp)[0] + ".log"
                lfh = logging.FileHandler(logfile)
                lfh.setLevel(logging.DEBUG)
                formatter = logging.Formatter(
                    '%(asctime)s %(levelname)s- %(message)s',
                    '%m-%d-%Y %H:%M:%S')
                lfh.setFormatter(formatter)
                logger.addHandler(lfh)

                if not args.dryrun:
                    results[task.name] = task.method(src, dstfp, task_arg_obj)

                #### remove existing file handler
                logger.removeHandler(lfh)

            #### Print Images with Errors
            for k, v in results.items():
                if v != 0:
                    logger.warning("Failed Image: %s", k)

        logger.info("Done")

    else:
        logger.info("No images found to process")
Example 4
            image_list2.append(newname)

        else:
            image_list2.append(srcfp)

    image_list = list(set(image_list2))
    logger.info('Number of src images: {}'.format(len(image_list)))

    ## Build task queue
    i = 0
    task_queue = []
    for srcfp in image_list:
        srcdir, srcfn = os.path.split(srcfp)
        dstfp = os.path.join(
            dstdir, "%s_%s%s%d%s" %
            (os.path.splitext(srcfn)[0], utils.get_bit_depth(
                args.outtype), args.stretch, spatial_ref.epsg,
             ortho_functions.formats[args.format]))

        done = os.path.isfile(dstfp)
        if done is False:
            i += 1
            task = taskhandler.Task(
                srcfn, 'Or{:04g}'.format(i), 'python',
                '{} {} {} {}'.format(scriptpath, arg_str_base, srcfp, dstdir),
                ortho_functions.process_image, [srcfp, dstfp, args])
            task_queue.append(task)

    logger.info('Number of incomplete tasks: {}'.format(i))

    ## Run tasks
    if len(task_queue) > 0:
Example 5
            image_pair = ImagePair(srcfp, spatial_ref)
        except RuntimeError as e:
            logger.error(e)
        else:
            logger.info("Image: {}, Sensor: {}".format(image_pair.mul_srcfn,
                                                       image_pair.sensor))
            pair_list.append(image_pair)

    logger.info('Number of src image pairs: {}'.format(len(pair_list)))

    ## Build task queue
    i = 0
    task_queue = []
    for image_pair in pair_list:

        bittype = utils.get_bit_depth(args.outtype)
        pansh_dstfp = os.path.join(
            dstdir, "{}_{}{}{}_pansh.tif".format(
                os.path.splitext(image_pair.mul_srcfn)[0], bittype,
                args.stretch, args.epsg))

        if not os.path.isfile(pansh_dstfp):
            i += 1
            task = taskhandler.Task(
                image_pair.mul_srcfn, 'Psh{:04g}'.format(i), 'python',
                '{} {} {} {}'.format(scriptpath, arg_str_base,
                                     image_pair.mul_srcfp, dstdir),
                exec_pansharpen, [image_pair, pansh_dstfp, args])
            task_queue.append(task)

    logger.info('Number of incomplete tasks: {}'.format(i))
Example 6
def exec_pansharpen(image_pair, pansh_dstfp, args):

    dstdir = os.path.dirname(pansh_dstfp)

    #### Get working dir
    if args.wd is not None:
        wd = args.wd
    else:
        wd = dstdir
    if not os.path.isdir(wd):
        try:
            os.makedirs(wd)
        except OSError:
            pass
    logger.info("Working Dir: %s" % wd)

    ####  Identify name pattern
    print "Multispectral image: %s" % image_pair.mul_srcfp
    print "Panchromatic image: %s" % image_pair.pan_srcfp

    if args.dem is not None:
        dem_arg = '-d "%s" ' % args.dem
    else:
        dem_arg = ""

    bittype = utils.get_bit_depth(args.outtype)
    pan_basename = os.path.splitext(image_pair.pan_srcfn)[0]
    mul_basename = os.path.splitext(image_pair.mul_srcfn)[0]
    pan_local_dstfp = os.path.join(
        wd, "{}_{}{}{}.tif".format(pan_basename, bittype, args.stretch,
                                   args.epsg))
    mul_local_dstfp = os.path.join(
        wd, "{}_{}{}{}.tif".format(mul_basename, bittype, args.stretch,
                                   args.epsg))
    pan_dstfp = os.path.join(
        dstdir, "{}_{}{}{}.tif".format(pan_basename, bittype, args.stretch,
                                       args.epsg))
    mul_dstfp = os.path.join(
        dstdir, "{}_{}{}{}.tif".format(mul_basename, bittype, args.stretch,
                                       args.epsg))
    pansh_tempfp = os.path.join(
        wd, "{}_{}{}{}_pansh_temp.tif".format(mul_basename, bittype,
                                              args.stretch, args.epsg))
    pansh_local_dstfp = os.path.join(
        wd, "{}_{}{}{}_pansh.tif".format(mul_basename, bittype, args.stretch,
                                         args.epsg))
    pansh_xmlfp = os.path.join(
        dstdir, "{}_{}{}{}_pansh.xml".format(mul_basename, bittype,
                                             args.stretch, args.epsg))
    mul_xmlfp = os.path.join(
        dstdir, "{}_{}{}{}.xml".format(mul_basename, bittype, args.stretch,
                                       args.epsg))

    if not os.path.isdir(wd):
        os.makedirs(wd)

    ####  Ortho pan
    logger.info("Orthorectifying panchromatic image")
    if not os.path.isfile(pan_dstfp) and not os.path.isfile(pan_local_dstfp):
        ortho_functions.process_image(image_pair.pan_srcfp, pan_dstfp, args,
                                      image_pair.intersection_geom)

    if not os.path.isfile(pan_local_dstfp) and os.path.isfile(pan_dstfp):
        shutil.copy2(pan_dstfp, pan_local_dstfp)

    logger.info("Orthorectifying multispectral image")
    ####  Ortho multi
    if not os.path.isfile(mul_dstfp) and not os.path.isfile(mul_local_dstfp):
        ## If resolution is specified in the command line, assume it's intended for the pansharpened image
        ##    and multiply the multi by 4
        if args.resolution:
            args.resolution = args.resolution * 4.0
        ortho_functions.process_image(image_pair.mul_srcfp, mul_dstfp, args,
                                      image_pair.intersection_geom)

    if not os.path.isfile(mul_local_dstfp) and os.path.isfile(mul_dstfp):
        shutil.copy2(mul_dstfp, mul_local_dstfp)

    ####  Pansharpen
    logger.info("Pansharpening multispectral image")
    if os.path.isfile(pan_local_dstfp) and os.path.isfile(mul_local_dstfp):
        if not os.path.isfile(pansh_local_dstfp):
            cmd = 'gdal_pansharpen.py -co BIGTIFF=IF_SAFER -co COMPRESS=LZW -co TILED=YES "{}" "{}" "{}"'.format(
                pan_local_dstfp, mul_local_dstfp, pansh_local_dstfp)
            taskhandler.exec_cmd(cmd)
    else:
        print "Pan or Multi warped image does not exist\n\t%s\n\t%s" % (
            pan_local_dstfp, mul_local_dstfp)

    #### Make pyramids
    if os.path.isfile(pansh_local_dstfp):
        cmd = 'gdaladdo "%s" 2 4 8 16' % (pansh_local_dstfp)
        taskhandler.exec_cmd(cmd)
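        # gdaladdo builds overview (pyramid) levels at 1/2, 1/4, 1/8, and 1/16 of the
        # full resolution, which speeds up display of the pansharpened output.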

    ## Copy warped multispectral xml to pansharpened output
    shutil.copy2(mul_xmlfp, pansh_xmlfp)

    #### Copy pansharpened output
    if wd != dstdir:
        for local_path, dst_path in [(pansh_local_dstfp, pansh_dstfp),
                                     (pan_local_dstfp, pan_dstfp),
                                     (mul_local_dstfp, mul_dstfp)]:
            if os.path.isfile(local_path) and not os.path.isfile(dst_path):
                shutil.copy2(local_path, dst_path)

    #### Delete Temp Files
    wd_files = [pansh_local_dstfp, pan_local_dstfp, mul_local_dstfp]

    if not args.save_temps:
        if wd != dstdir:
            for f in wd_files:
                try:
                    os.remove(f)
                except Exception as e:
                    logger.warning('Could not remove %s: %s' %
                                   (os.path.basename(f), e))

Example 7

def exec_pansharpen(image_pair, pansh_dstfp, args):

    dstdir = os.path.dirname(pansh_dstfp)

    #### Get working dir
    if args.wd is not None:
        wd = args.wd
    else:
        wd = dstdir
    if not os.path.isdir(wd):
        try:
            os.makedirs(wd)
        except OSError:
            pass
    logger.info("Working Dir: %s", wd)

    ####  Identify name pattern
    print("Multispectral image: {}".format(image_pair.mul_srcfp))
    print("Panchromatic image: {}".format(image_pair.pan_srcfp))

    if args.dem is not None:
        dem_arg = '-d "{}" '.format(args.dem)
    else:
        dem_arg = ""

    bittype = utils.get_bit_depth(args.outtype)
    pan_basename = os.path.splitext(image_pair.pan_srcfn)[0]
    mul_basename = os.path.splitext(image_pair.mul_srcfn)[0]
    pan_local_dstfp = os.path.join(wd, "{}_{}{}{}.tif".format(pan_basename, bittype, args.stretch, args.epsg))
    mul_local_dstfp = os.path.join(wd, "{}_{}{}{}.tif".format(mul_basename, bittype, args.stretch, args.epsg))
    pan_dstfp = os.path.join(dstdir, "{}_{}{}{}.tif".format(pan_basename, bittype, args.stretch, args.epsg))
    mul_dstfp = os.path.join(dstdir, "{}_{}{}{}.tif".format(mul_basename, bittype, args.stretch, args.epsg))
    pansh_tempfp = os.path.join(wd, "{}_{}{}{}_pansh_temp.tif".format(mul_basename, bittype, args.stretch, args.epsg))
    pansh_local_dstfp = os.path.join(wd, "{}_{}{}{}_pansh.tif".format(mul_basename, bittype, args.stretch, args.epsg))
    pansh_xmlfp = os.path.join(dstdir, "{}_{}{}{}_pansh.xml".format(mul_basename, bittype, args.stretch, args.epsg))
    mul_xmlfp = os.path.join(dstdir, "{}_{}{}{}.xml".format(mul_basename, bittype, args.stretch, args.epsg))
    
    if not os.path.isdir(wd):
        os.makedirs(wd)

    ####  Ortho pan
    logger.info("Orthorectifying panchromatic image")
    if not os.path.isfile(pan_dstfp) and not os.path.isfile(pan_local_dstfp):
        ortho_functions.process_image(image_pair.pan_srcfp, pan_dstfp, args, image_pair.intersection_geom)

    if not os.path.isfile(pan_local_dstfp) and os.path.isfile(pan_dstfp):
        shutil.copy2(pan_dstfp, pan_local_dstfp)

    logger.info("Orthorectifying multispectral image")
    ####  Ortho multi
    if not os.path.isfile(mul_dstfp) and not os.path.isfile(mul_local_dstfp):
        ## If resolution is specified in the command line, assume it's intended for the pansharpened image
        ##    and multiply the multi by 4
        if args.resolution:
            args.resolution = args.resolution * 4.0
        ortho_functions.process_image(image_pair.mul_srcfp, mul_dstfp, args, image_pair.intersection_geom)

    if not os.path.isfile(mul_local_dstfp) and os.path.isfile(mul_dstfp):
        shutil.copy2(mul_dstfp, mul_local_dstfp)

    ####  Pansharpen
    ## get system info for program extension
    if platform.system() == 'Windows':
        py_ext = ''
    else:
        py_ext = '.py'
    
    logger.info("Pansharpening multispectral image")
    if os.path.isfile(pan_local_dstfp) and os.path.isfile(mul_local_dstfp):
        if not os.path.isfile(pansh_local_dstfp):
            cmd = 'gdal_pansharpen{} -co BIGTIFF=IF_SAFER -co COMPRESS=LZW -co TILED=YES "{}" "{}" "{}"'.\
                format(py_ext, pan_local_dstfp, mul_local_dstfp, pansh_local_dstfp)
            taskhandler.exec_cmd(cmd)
    else:
        print("Pan or Multi warped image does not exist\n\t{}\n\t{}").format(pan_local_dstfp, mul_local_dstfp)

    #### Make pyramids
    if os.path.isfile(pansh_local_dstfp):
        cmd = 'gdaladdo -r {} "{}" 2 4 8 16'.format(args.pyramid_type, pansh_local_dstfp)
        taskhandler.exec_cmd(cmd)
       
    ## Copy warped multispectral xml to pansharpened output
    shutil.copy2(mul_xmlfp, pansh_xmlfp)

    #### Copy pansharpened output
    if wd != dstdir:
        for local_path, dst_path in [(pansh_local_dstfp, pansh_dstfp), (pan_local_dstfp, pan_dstfp),
                                     (mul_local_dstfp, mul_dstfp)]:
            if os.path.isfile(local_path) and not os.path.isfile(dst_path):
                shutil.copy2(local_path, dst_path)

    #### Delete Temp Files
    wd_files = [
        pansh_local_dstfp,
        pan_local_dstfp,
        mul_local_dstfp
    ]

    if not args.save_temps:
        if wd != dstdir:
            for f in wd_files:
                try:
                    os.remove(f)
                except Exception as e:
                    logger.warning('Could not remove %s: %s', os.path.basename(f), e)
    
    if os.path.isfile(pansh_dstfp):
        return 0
    else:
        return 1  # non-zero so the caller can report the failed image
Example 8
def main():

    #### Set Up Arguments
    parent_parser, pos_arg_keys = ortho_functions.buildParentArgumentParser()
    parser = argparse.ArgumentParser(
        parents=[parent_parser],
        description="Run/Submit batch pansharpening in parallel")

    parser.add_argument("--pbs",
                        action='store_true',
                        default=False,
                        help="submit tasks to PBS")
    parser.add_argument("--slurm",
                        action='store_true',
                        default=False,
                        help="submit tasks to SLURM")
    parser.add_argument(
        "--parallel-processes",
        type=int,
        default=1,
        help="number of parallel processes to spawn (default 1)")
    parser.add_argument(
        "--qsubscript",
        help=
        "submission script to use in PBS/SLURM submission (PBS default is qsub_pansharpen.sh, "
        "SLURM default is slurm_pansharpen.py, in script root folder)")
    parser.add_argument(
        "-l", help="PBS resources requested (mimicks qsub syntax, PBS only)")
    parser.add_argument("--dryrun",
                        action="store_true",
                        default=False,
                        help="print actions without executing")

    #### Parse Arguments
    args = parser.parse_args()
    scriptpath = os.path.abspath(sys.argv[0])
    src = os.path.abspath(args.src)
    dstdir = os.path.abspath(args.dst)

    #### Validate Required Arguments
    if os.path.isdir(src):
        srctype = 'dir'
    elif os.path.isfile(src) and os.path.splitext(src)[1].lower() == '.txt':
        srctype = 'textfile'
    elif os.path.isfile(src) and os.path.splitext(
            src)[1].lower() in ortho_functions.exts:
        srctype = 'image'
    elif os.path.isfile(src.replace('msi', 'blu')) and os.path.splitext(
            src)[1].lower() in ortho_functions.exts:
        srctype = 'image'
    else:
        parser.error(
            "Error arg1 is not a recognized file path or file type: {}".format(
                src))

    if not os.path.isdir(dstdir):
        parser.error("Error arg2 is not a valid file path: {}".format(dstdir))

    # Verify qsubscript
    if args.pbs or args.slurm:
        if args.qsubscript is None:
            if args.pbs:
                qsubpath = os.path.join(os.path.dirname(scriptpath),
                                        'qsub_pansharpen.sh')
            if args.slurm:
                qsubpath = os.path.join(os.path.dirname(scriptpath),
                                        'slurm_pansharpen.sh')
        else:
            qsubpath = os.path.abspath(args.qsubscript)
        if not os.path.isfile(qsubpath):
            parser.error("qsub script path is not valid: {}".format(qsubpath))

    ### Verify processing options do not conflict
    if args.pbs and args.slurm:
        parser.error("Options --pbs and --slurm are mutually exclusive")
    if (args.pbs or args.slurm) and args.parallel_processes > 1:
        parser.error(
            "HPC Options (--pbs or --slurm) and --parallel-processes > 1 are mutually exclusive"
        )

    #### Verify EPSG
    try:
        spatial_ref = utils.SpatialRef(args.epsg)
    except RuntimeError as e:
        parser.error(e)

    ## Check GDAL version (2.1.0 minimum)
    gdal_version = gdal.VersionInfo()
    try:
        if int(gdal_version) < 2010000:
            parser.error(
                "gdal_pansharpen requires GDAL version 2.1.0 or higher")
    except ValueError:
        parser.error("Cannot parse GDAL version: {}".format(gdal_version))

    #### Set up console logging handler
    lso = logging.StreamHandler()
    lso.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s %(levelname)s- %(message)s',
                                  '%m-%d-%Y %H:%M:%S')
    lso.setFormatter(formatter)
    logger.addHandler(lso)

    #### Get args ready to pass to task handler
    arg_keys_to_remove = ('l', 'qsubscript', 'dryrun', 'pbs', 'slurm',
                          'parallel_processes')
    arg_str_base = taskhandler.convert_optional_args_to_string(
        args, pos_arg_keys, arg_keys_to_remove)

    ## Identify source images
    if srctype == 'dir':
        image_list1 = utils.find_images(src, False, ortho_functions.exts)
    elif srctype == 'textfile':
        image_list1 = utils.find_images(src, True, ortho_functions.exts)
    else:
        image_list1 = [src]

    pair_list = []
    for srcfp in image_list1:
        #print(srcfp)
        try:
            image_pair = ImagePair(srcfp, spatial_ref)
        except RuntimeError as e:
            logger.error(e)
        else:
            logger.info("Image: %s, Sensor: %s", image_pair.mul_srcfn,
                        image_pair.sensor)
            pair_list.append(image_pair)

    logger.info('Number of src image pairs: %i', len(pair_list))

    ## Build task queue
    i = 0
    task_queue = []
    for image_pair in pair_list:

        bittype = utils.get_bit_depth(args.outtype)
        pansh_dstfp = os.path.join(
            dstdir, "{}_{}{}{}_pansh.tif".format(
                os.path.splitext(image_pair.mul_srcfn)[0], bittype,
                args.stretch, args.epsg))

        if not os.path.isfile(pansh_dstfp):
            i += 1
            task = taskhandler.Task(
                image_pair.mul_srcfn, 'Psh{:04g}'.format(i), 'python',
                '{} {} {} {}'.format(scriptpath, arg_str_base,
                                     image_pair.mul_srcfp, dstdir),
                exec_pansharpen, [image_pair, pansh_dstfp, args])
            task_queue.append(task)

    logger.info('Number of incomplete tasks: %i', i)

    ## Run tasks
    if len(task_queue) > 0:
        logger.info("Submitting Tasks")
        if args.pbs:
            l = "-l {}".format(args.l) if args.l else ""
            try:
                task_handler = taskhandler.PBSTaskHandler(qsubpath, l)
            except RuntimeError as e:
                logger.error(e)
            else:
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        elif args.slurm:
            try:
                task_handler = taskhandler.SLURMTaskHandler(qsubpath)
            except RuntimeError as e:
                logger.error(e)
            else:
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        elif args.parallel_processes > 1:
            try:
                task_handler = taskhandler.ParallelTaskHandler(
                    args.parallel_processes)
            except RuntimeError as e:
                logger.error(e)
            else:
                logger.info("Number of child processes to spawn: %i",
                            task_handler.num_processes)
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        else:
            results = {}
            lfh = None
            for task in task_queue:

                src, dstfp, task_arg_obj = task.method_arg_list

                #### Set up processing log handler
                logfile = os.path.splitext(dstfp)[0] + ".log"
                lfh = logging.FileHandler(logfile)
                lfh.setLevel(logging.DEBUG)
                formatter = logging.Formatter(
                    '%(asctime)s %(levelname)s- %(message)s',
                    '%m-%d-%Y %H:%M:%S')
                lfh.setFormatter(formatter)
                logger.addHandler(lfh)

                if not args.dryrun:
                    results[task.name] = task.method(src, dstfp, task_arg_obj)

                #### remove existing file handler
                logger.removeHandler(lfh)

            #### Print Images with Errors
            for k, v in results.items():
                if v != 0:
                    logger.warning("Failed Image: %s", k)

        logger.info("Done")

    else:
        logger.info("No images found to process")

Example 9

def main():

    #### Set Up Arguments
    parent_parser, pos_arg_keys = ortho_functions.buildParentArgumentParser()
    parser = argparse.ArgumentParser(
        parents=[parent_parser],
        description="Run/Submit batch pansharpening in parallel"
    )

    parser.add_argument("--pbs", action='store_true', default=False,
                        help="submit tasks to PBS")
    parser.add_argument("--slurm", action='store_true', default=False,
                        help="submit tasks to SLURM")
    parser.add_argument("--parallel-processes", type=int, default=1,
                        help="number of parallel processes to spawn (default 1)")
    parser.add_argument("--qsubscript",
                        help="submission script to use in PBS/SLURM submission (PBS default is qsub_pansharpen.sh, "
                             "SLURM default is slurm_pansharpen.py, in script root folder)")
    parser.add_argument("-l",
                        help="PBS resources requested (mimicks qsub syntax, PBS only)")
    parser.add_argument("--dryrun", action="store_true", default=False,
                        help="print actions without executing")

    #### Parse Arguments
    args = parser.parse_args()
    scriptpath = os.path.abspath(sys.argv[0])
    src = os.path.abspath(args.src)
    dstdir = os.path.abspath(args.dst)

    #### Validate Required Arguments
    if os.path.isdir(src):
        srctype = 'dir'
    elif os.path.isfile(src) and os.path.splitext(src)[1].lower() == '.txt':
        srctype = 'textfile'
    elif os.path.isfile(src) and os.path.splitext(src)[1].lower() in ortho_functions.exts:
        srctype = 'image'
    elif os.path.isfile(src.replace('msi', 'blu')) and os.path.splitext(src)[1].lower() in ortho_functions.exts:
        srctype = 'image'
    else:
        parser.error("Error arg1 is not a recognized file path or file type: {}".format(src))

    if not os.path.isdir(dstdir):
        parser.error("Error arg2 is not a valid file path: {}".format(dstdir))

    # Verify qsubscript
    if args.pbs or args.slurm:
        if args.qsubscript is None:
            if args.pbs:
                qsubpath = os.path.join(os.path.dirname(scriptpath), 'qsub_pansharpen.sh')
            if args.slurm:
                qsubpath = os.path.join(os.path.dirname(scriptpath), 'slurm_pansharpen.sh')
        else:
            qsubpath = os.path.abspath(args.qsubscript)
        if not os.path.isfile(qsubpath):
            parser.error("qsub script path is not valid: {}".format(qsubpath))

    ### Verify processing options do not conflict
    if args.pbs and args.slurm:
        parser.error("Options --pbs and --slurm are mutually exclusive")
    if (args.pbs or args.slurm) and args.parallel_processes > 1:
        parser.error("HPC Options (--pbs or --slurm) and --parallel-processes > 1 are mutually exclusive")

    #### Verify EPSG
    try:
        spatial_ref = utils.SpatialRef(args.epsg)
    except RuntimeError as e:
        parser.error(e)
        
    ## Check GDAL version (2.1.0 minimum)
    gdal_version = gdal.VersionInfo()
    try:
        if int(gdal_version) < 2010000:
            parser.error("gdal_pansharpen requires GDAL version 2.1.0 or higher")
    except ValueError:
        parser.error("Cannot parse GDAL version: {}".format(gdal_version))

    #### Set up console logging handler
    lso = logging.StreamHandler()
    lso.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s %(levelname)s- %(message)s', '%m-%d-%Y %H:%M:%S')
    lso.setFormatter(formatter)
    logger.addHandler(lso)
    
    #### Get args ready to pass to task handler
    arg_keys_to_remove = ('l', 'qsubscript', 'dryrun', 'pbs', 'slurm', 'parallel_processes')
    arg_str_base = taskhandler.convert_optional_args_to_string(args, pos_arg_keys, arg_keys_to_remove)
    
    ## Identify source images
    if srctype == 'dir':
        image_list1 = utils.find_images(src, False, ortho_functions.exts)
    elif srctype == 'textfile':
        image_list1 = utils.find_images(src, True, ortho_functions.exts)
    else:
        image_list1 = [src]

    pair_list = []
    for srcfp in image_list1:
        #print(srcfp)
        try:
            image_pair = ImagePair(srcfp, spatial_ref)
        except RuntimeError as e:
            logger.error(e)
        else:
            logger.info("Image: %s, Sensor: %s", image_pair.mul_srcfn, image_pair.sensor)
            pair_list.append(image_pair)
                
    logger.info('Number of src image pairs: %i', len(pair_list))
    
    ## Build task queue
    i = 0
    task_queue = []
    for image_pair in pair_list:
        
        bittype = utils.get_bit_depth(args.outtype)
        pansh_dstfp = os.path.join(dstdir, "{}_{}{}{}_pansh.tif".format(os.path.splitext(image_pair.mul_srcfn)[0],
                                                                        bittype, args.stretch, args.epsg))
        
        if not os.path.isfile(pansh_dstfp):
            i += 1
            task = taskhandler.Task(
                image_pair.mul_srcfn,
                'Psh{:04g}'.format(i),
                'python',
                '{} {} {} {}'.format(scriptpath, arg_str_base, image_pair.mul_srcfp, dstdir),
                exec_pansharpen,
                [image_pair, pansh_dstfp, args]
            )
            task_queue.append(task)
            
    logger.info('Number of incomplete tasks: %i', i)

    ## Run tasks
    if len(task_queue) > 0:
        logger.info("Submitting Tasks")
        if args.pbs:
            l = "-l {}".format(args.l) if args.l else ""
            try:
                task_handler = taskhandler.PBSTaskHandler(qsubpath, l)
            except RuntimeError as e:
                logger.error(e)
            else:
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)
                
        elif args.slurm:
            try:
                task_handler = taskhandler.SLURMTaskHandler(qsubpath)
            except RuntimeError as e:
                logger.error(e)
            else:
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)
            
        elif args.parallel_processes > 1:
            try:
                task_handler = taskhandler.ParallelTaskHandler(args.parallel_processes)
            except RuntimeError as e:
                logger.error(e)
            else:
                logger.info("Number of child processes to spawn: %i", task_handler.num_processes)
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)
    
        else:
            results = {}
            lfh = None
            for task in task_queue:
                           
                src, dstfp, task_arg_obj = task.method_arg_list
                
                #### Set up processing log handler
                logfile = os.path.splitext(dstfp)[0] + ".log"
                lfh = logging.FileHandler(logfile)
                lfh.setLevel(logging.DEBUG)
                formatter = logging.Formatter('%(asctime)s %(levelname)s- %(message)s', '%m-%d-%Y %H:%M:%S')
                lfh.setFormatter(formatter)
                logger.addHandler(lfh)
                
                if not args.dryrun:
                    results[task.name] = task.method(src, dstfp, task_arg_obj)
                    
                #### remove existing file handler
                logger.removeHandler(lfh)
                
            #### Print Images with Errors    
            for k, v in results.items():
                if v != 0:
                    logger.warning("Failed Image: %s", k)
        
        logger.info("Done")
        
    else:
        logger.info("No images found to process")
Example 10
            image_list2.append(newname)

        else:
            image_list2.append(srcfp)

    image_list = list(set(image_list2))
    logger.info('Number of src images: {}'.format(len(image_list)))
    
    ## Build task queue
    i = 0
    task_queue = []
    for srcfp in image_list:
        srcdir, srcfn = os.path.split(srcfp)
        dstfp = os.path.join(dstdir,"%s_%s%s%d%s" %(
            os.path.splitext(srcfn)[0],
            utils.get_bit_depth(args.outtype),
            args.stretch,
            spatial_ref.epsg,
            ortho_functions.formats[args.format]
        ))
        
        done = os.path.isfile(dstfp)
        if done is False:
            i += 1
            task = utils.Task(
                srcfn,
                'Ortho{:04g}'.format(i),
                'python',
                '{} {} {} {}'.format(scriptpath, arg_str_base, srcfp, dstdir),
                ortho_functions.process_image,
                [srcfp, dstfp, args]
            )
            task_queue.append(task)
Example 11
def exec_pansharpen(image_pair, pansh_dstfp, args):

    dstdir = os.path.dirname(pansh_dstfp)

    #### Get working dir
    if args.wd is not None:
        wd = args.wd
    else:
        wd = dstdir
    if not os.path.isdir(wd):
        try:
            os.makedirs(wd)
        except OSError:
            pass
    logger.info("Working Dir: %s", wd)

    ####  Identify name pattern
    print("Multispectral image: {}".format(image_pair.mul_srcfp))
    print("Panchromatic image: {}".format(image_pair.pan_srcfp))

    if args.dem is not None:
        dem_arg = '-d "{}" '.format(args.dem)
    else:
        dem_arg = ""

    bittype = utils.get_bit_depth(args.outtype)
    pan_basename = os.path.splitext(image_pair.pan_srcfn)[0]
    mul_basename = os.path.splitext(image_pair.mul_srcfn)[0]
    pan_local_dstfp = os.path.join(
        wd, "{}_{}{}{}.tif".format(pan_basename, bittype, args.stretch,
                                   args.epsg))
    mul_local_dstfp = os.path.join(
        wd, "{}_{}{}{}.tif".format(mul_basename, bittype, args.stretch,
                                   args.epsg))
    pan_dstfp = os.path.join(
        dstdir, "{}_{}{}{}.tif".format(pan_basename, bittype, args.stretch,
                                       args.epsg))
    mul_dstfp = os.path.join(
        dstdir, "{}_{}{}{}.tif".format(mul_basename, bittype, args.stretch,
                                       args.epsg))
    pansh_tempfp = os.path.join(
        wd, "{}_{}{}{}_pansh_temp.tif".format(mul_basename, bittype,
                                              args.stretch, args.epsg))
    pansh_local_dstfp = os.path.join(
        wd, "{}_{}{}{}_pansh.tif".format(mul_basename, bittype, args.stretch,
                                         args.epsg))
    pansh_xmlfp = os.path.join(
        dstdir, "{}_{}{}{}_pansh.xml".format(mul_basename, bittype,
                                             args.stretch, args.epsg))
    mul_xmlfp = os.path.join(
        dstdir, "{}_{}{}{}.xml".format(mul_basename, bittype, args.stretch,
                                       args.epsg))

    if not os.path.isdir(wd):
        os.makedirs(wd)

    ####  Ortho pan
    logger.info("Orthorectifying panchromatic image")
    if not os.path.isfile(pan_dstfp) and not os.path.isfile(pan_local_dstfp):
        ortho_functions.process_image(image_pair.pan_srcfp, pan_dstfp, args,
                                      image_pair.intersection_geom)

    if not os.path.isfile(pan_local_dstfp) and os.path.isfile(pan_dstfp):
        shutil.copy2(pan_dstfp, pan_local_dstfp)

    logger.info("Orthorectifying multispectral image")
    ####  Ortho multi
    if not os.path.isfile(mul_dstfp) and not os.path.isfile(mul_local_dstfp):
        ## If resolution is specified in the command line, assume it's intended for the pansharpened image
        ##    and multiply the multi by 4
        if args.resolution:
            args.resolution = args.resolution * 4.0
        ortho_functions.process_image(image_pair.mul_srcfp, mul_dstfp, args,
                                      image_pair.intersection_geom)

    if not os.path.isfile(mul_local_dstfp) and os.path.isfile(mul_dstfp):
        shutil.copy2(mul_dstfp, mul_local_dstfp)

    ####  Pansharpen
    ## get system info for program extension
    if platform.system() == 'Windows':
        py_ext = ''
    else:
        py_ext = '.py'

    pan_threading = ''
    if hasattr(args, 'threads'):
        if args.threads != 1:
            pan_threading = '-threads {}'.format(args.threads)

    logger.info("Pansharpening multispectral image")
    if os.path.isfile(pan_local_dstfp) and os.path.isfile(mul_local_dstfp):
        if not os.path.isfile(pansh_local_dstfp):
            cmd = 'gdal_pansharpen{} -co BIGTIFF=IF_SAFER -co COMPRESS=LZW -co TILED=YES {} "{}" "{}" "{}"'.\
                format(py_ext, pan_threading, pan_local_dstfp, mul_local_dstfp, pansh_local_dstfp)
            taskhandler.exec_cmd(cmd)
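            # On Linux, for example, the assembled command looks roughly like:
            #   gdal_pansharpen.py -co BIGTIFF=IF_SAFER -co COMPRESS=LZW -co TILED=YES \
            #       -threads 4 "<pan>.tif" "<mul>.tif" "<mul>_pansh.tif"
            # (py_ext is empty on Windows; -threads is only added when args.threads != 1).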
    else:
        logger.warning(
            "Pan or Multi warped image does not exist\n\t{}\n\t{}".format(
                pan_local_dstfp, mul_local_dstfp))

    #### Make pyramids
    if os.path.isfile(pansh_local_dstfp):
        cmd = 'gdaladdo -r {} "{}" 2 4 8 16'.format(args.pyramid_type,
                                                    pansh_local_dstfp)
        taskhandler.exec_cmd(cmd)

    ## Copy warped multispectral xml to pansharpened output
    shutil.copy2(mul_xmlfp, pansh_xmlfp)

    #### Copy pansharpened output
    if wd != dstdir:
        for local_path, dst_path in [(pansh_local_dstfp, pansh_dstfp),
                                     (pan_local_dstfp, pan_dstfp),
                                     (mul_local_dstfp, mul_dstfp)]:
            if os.path.isfile(local_path) and not os.path.isfile(dst_path):
                shutil.copy2(local_path, dst_path)

    #### Delete Temp Files
    wd_files = [pansh_local_dstfp, pan_local_dstfp, mul_local_dstfp]

    if not args.save_temps:
        if wd != dstdir:
            for f in wd_files:
                try:
                    os.remove(f)
                except Exception as e:
                    logger.warning('Could not remove %s: %s',
                                   os.path.basename(f), e)

    if os.path.isfile(pansh_dstfp):
        return 0
    else:
        return 1  # non-zero so the caller can report the failed image
def main():
    #### Set Up Arguments
    parent_parser, pos_arg_keys = ortho_functions.buildParentArgumentParser()
    parser = argparse.ArgumentParser(
        parents=[parent_parser],
        description="Run/submit batch image ortho and conversion tasks"
    )

    parser.add_argument("--pbs", action='store_true', default=False,
                        help="submit tasks to PBS")
    parser.add_argument("--slurm", action='store_true', default=False,
                        help="submit tasks to SLURM")
    parser.add_argument("--tasks-per-job", type=int,
                        help="Number of tasks to bundle into a single job. (requires --pbs or --slurm option) (Warning:"
                             " a higher number of tasks per job may require modification of default wallclock limit.)")
    parser.add_argument('--scratch', default=ARGDEF_SCRATCH,
                        help="Scratch space to build task bundle text files. (default={})".format(ARGDEF_SCRATCH))
    parser.add_argument("--parallel-processes", type=int, default=1,
                        help="number of parallel processes to spawn (default 1)")
    parser.add_argument("--qsubscript",
                        help="submission script to use in PBS/SLURM submission (PBS default is qsub_ortho.sh, SLURM "
                             "default is slurm_ortho.py, in script root folder)")
    parser.add_argument("-l",
                        help="PBS resources requested (mimicks qsub syntax, PBS only)")
    parser.add_argument("--dryrun", action='store_true', default=False,
                        help='print actions without executing')

    #### Parse Arguments
    args = parser.parse_args()
    scriptpath = os.path.abspath(sys.argv[0])
    src = os.path.abspath(args.src)
    dstdir = os.path.abspath(args.dst)
    scratch = os.path.abspath(args.scratch)

    #### Validate Required Arguments
    if os.path.isdir(src):
        srctype = 'dir'
    elif os.path.isfile(src) and os.path.splitext(src)[1].lower() == '.txt':
        srctype = 'textfile'
    elif os.path.isfile(src) and os.path.splitext(src)[1].lower() in ortho_functions.exts:
        srctype = 'image'
    elif os.path.isfile(src.replace('msi', 'blu')) and os.path.splitext(src)[1].lower() in ortho_functions.exts:
        srctype = 'image'
    else:
        parser.error("Error arg1 is not a recognized file path or file type: {}".format(src))

    if not os.path.isdir(dstdir):
        parser.error("Error arg2 is not a valid file path: {}".format(dstdir))

    ## Verify qsubscript
    if args.pbs or args.slurm:
        if args.qsubscript is None:
            if args.pbs:
                qsubpath = os.path.join(os.path.dirname(scriptpath), 'qsub_ortho.sh')
            if args.slurm:
                qsubpath = os.path.join(os.path.dirname(scriptpath), 'slurm_ortho.sh')
        else:
            qsubpath = os.path.abspath(args.qsubscript)
        if not os.path.isfile(qsubpath):
            parser.error("qsub script path is not valid: {}".format(qsubpath))

    ## Verify processing options do not conflict
    if args.pbs and args.slurm:
        parser.error("Options --pbs and --slurm are mutually exclusive")
    if (args.pbs or args.slurm) and args.parallel_processes > 1:
        parser.error("HPC Options (--pbs or --slurm) and --parallel-processes > 1 are mutually exclusive")
    if args.tasks_per_job:
        if not (args.pbs or args.slurm):
            parser.error("--jobs-per-task option requires the (--pbs or --slurm) option")
        if not os.path.isdir(args.scratch):
            print("Creating --scratch directory: {}".format(args.scratch))
            os.makedirs(args.scratch)

    #### Verify EPSG
    try:
        spatial_ref = utils.SpatialRef(args.epsg)
    except RuntimeError as e:
        parser.error(e)

    #### Verify that dem and ortho_height are not both specified
    if args.dem is not None and args.ortho_height is not None:
        parser.error("--dem and --ortho_height options are mutually exclusive.  Please choose only one.")

    #### Test if DEM exists
    if args.dem:
        if not os.path.isfile(args.dem):
            parser.error("DEM does not exist: {}".format(args.dem))

    #### Set up console logging handler
    lso = logging.StreamHandler()
    lso.setLevel(logging.INFO)
    formatter = logging.Formatter('%(asctime)s %(levelname)s- %(message)s', '%m-%d-%Y %H:%M:%S')
    lso.setFormatter(formatter)
    logger.addHandler(lso)

    #### Get args ready to pass to task handler
    arg_keys_to_remove = ('l', 'qsubscript', 'dryrun', 'pbs', 'slurm', 'parallel_processes', 'tasks_per_job')
    arg_str_base = taskhandler.convert_optional_args_to_string(args, pos_arg_keys, arg_keys_to_remove)
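    ## arg_str_base re-serializes the remaining optional args so they can be appended to each
    ## task's command line; options handled here (scheduler flags, parallelism, dryrun, task
    ## bundling) are stripped so they are not passed on to the per-job invocation of this script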

    ## Identify source images
    if srctype == 'dir':
        image_list1 = utils.find_images(src, False, ortho_functions.exts)
    elif srctype == 'textfile':
        image_list1 = utils.find_images(src, True, ortho_functions.exts)
    else:
        image_list1 = [src]

    ## Group Ikonos
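    ## IKONOS multispectral bands arrive as separate per-band files; map each one to a single
    ## "msi" name so the scene is queued once, then deduplicate with set() below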
    image_list2 = []
    for srcfp in image_list1:
        srcdir, srcfn = os.path.split(srcfp)
        if "IK01" in srcfn and sum([b in srcfn for b in ortho_functions.ikMsiBands]) > 0:
            for b in ortho_functions.ikMsiBands:
                if b in srcfn:
                    newname = os.path.join(srcdir, srcfn.replace(b, "msi"))
                    break
            image_list2.append(newname)

        else:
            image_list2.append(srcfp)

    image_list = list(set(image_list2))
    logger.info('Number of src images: %i', len(image_list))

    ## Build task queue
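    ## A task is queued only for images whose expected output file does not already exist;
    ## the output name encodes bit depth, stretch, EPSG code, and output format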
    i = 0
    images_to_process = []
    for srcfp in image_list:
        srcdir, srcfn = os.path.split(srcfp)
        dstfp = os.path.join(dstdir, "{}_{}{}{}{}".format(
            os.path.splitext(srcfn)[0],
            utils.get_bit_depth(args.outtype),
            args.stretch,
            spatial_ref.epsg,
            ortho_functions.formats[args.format]
        ))

        if not os.path.isfile(dstfp):
            i += 1
            images_to_process.append(srcfp)

    logger.info('Number of incomplete tasks: %i', i)

    if len(images_to_process) == 0:
        logger.info("No images found to process")
        sys.exit(0)

    tm = datetime.now()
    job_count = 0
    image_count = 0
    scenes_in_job_count = 0
    task_queue = []

    for srcfp in images_to_process:
        image_count += 1
        srcdir, srcfn = os.path.split(srcfp)
        dstfp = os.path.join(dstdir, "{}_{}{}{}{}".format(
            os.path.splitext(srcfn)[0],
            utils.get_bit_depth(args.outtype),
            args.stretch,
            spatial_ref.epsg,
            ortho_functions.formats[args.format]
        ))

        if args.tasks_per_job:
            # bundle tasks into text files in the scratch dir and pass each text file in as src
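            # Bundling flow: count scenes into the current bundle, remove any leftover bundle
            # file from a previous run when a new bundle starts, and queue one job per full
            # bundle (or when the final image is reached), passing the bundle text file to
            # this script as its src argument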
            scenes_in_job_count += 1
            src_txt = os.path.join(scratch, 'Or_src_{}_{}.txt'.format(tm.strftime("%Y%m%d%H%M%S"), job_count + 1))

            if scenes_in_job_count == 1:
                # remove the bundle text file if it already exists from a previous run
                try:
                    os.remove(src_txt)
                except OSError:
                    pass

            if scenes_in_job_count <= args.tasks_per_job:
                # add to txt file
                with open(src_txt, 'a') as fh:
                    fh.write("{}\n".format(srcfp))

            if scenes_in_job_count == args.tasks_per_job or image_count == len(images_to_process):
                scenes_in_job_count = 0
                job_count += 1
                task = taskhandler.Task(
                    srcfn,
                    'Or{:04g}'.format(job_count),
                    'python',
                    '{} {} {} {}'.format(scriptpath, arg_str_base, src_txt, dstdir),
                    ortho_functions.process_image,
                    [srcfp, dstfp, args]
                )
                task_queue.append(task)

        else:
            job_count += 1
            task = taskhandler.Task(
                srcfn,
                'Or{:04g}'.format(job_count),
                'python',
                '{} {} {} {}'.format(scriptpath, arg_str_base, srcfp, dstdir),
                ortho_functions.process_image,
                [srcfp, dstfp, args]
            )
            task_queue.append(task)

    ## Run tasks
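    ## Four execution paths: submit to PBS, submit to SLURM, run locally with multiple
    ## parallel processes, or run serially in-process with a per-image log file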
    if len(task_queue) > 0:
        logger.info("Submitting Tasks")
        if args.pbs:
            l = "-l {}".format(args.l) if args.l else ""
            try:
                task_handler = taskhandler.PBSTaskHandler(qsubpath, l)
            except RuntimeError as e:
                logger.error(e)
            else:
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        elif args.slurm:
            try:
                task_handler = taskhandler.SLURMTaskHandler(qsubpath)
            except RuntimeError as e:
                logger.error(e)
            else:
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        elif args.parallel_processes > 1:
            try:
                task_handler = taskhandler.ParallelTaskHandler(args.parallel_processes)
            except RuntimeError as e:
                logger.error(e)
            else:
                logger.info("Number of child processes to spawn: %i", task_handler.num_processes)
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        else:

            results = {}
            for task in task_queue:

                src, dstfp, task_arg_obj = task.method_arg_list

                #### Set up processing log handler
                logfile = os.path.splitext(dstfp)[0] + ".log"
                lfh = logging.FileHandler(logfile)
                lfh.setLevel(logging.DEBUG)
                formatter = logging.Formatter('%(asctime)s %(levelname)s- %(message)s', '%m-%d-%Y %H:%M:%S')
                lfh.setFormatter(formatter)
                logger.addHandler(lfh)

                if not args.dryrun:
                    results[task.name] = task.method(src, dstfp, task_arg_obj)

                #### remove existing file handler
                logger.removeHandler(lfh)

            #### Print Images with Errors
            for k, v in results.items():
                if v != 0:
                    logger.warning("Failed Image: %s", k)

        logger.info("Done")

    else:
        logger.info("No images found to process")